diff --git a/app/build.gradle b/app/build.gradle
index 4426dbeeb86..d908cbb5518 100644
--- a/app/build.gradle
+++ b/app/build.gradle
@@ -369,6 +369,10 @@ dependencies {
     testImplementation("com.squareup.okhttp3:mockwebserver:$okhttpVersion")
     testImplementation("com.google.dagger:hilt-android-testing:2.59.1")
     testImplementation("org.robolectric:robolectric:4.16.1")
+
+    // Computer Vision - for background effects during video calls
+    implementation("com.google.mediapipe:tasks-vision:0.10.26")
+    implementation("io.github.crow-misia.libyuv:libyuv-android:0.43.2")
 }

 tasks.register('installGitHooks', Copy) {
diff --git a/app/src/main/assets/selfie_segmenter.tflite b/app/src/main/assets/selfie_segmenter.tflite
new file mode 100644
index 00000000000..a4ebd4777ed
Binary files /dev/null and b/app/src/main/assets/selfie_segmenter.tflite differ
diff --git a/app/src/main/java/com/nextcloud/talk/activities/CallActivity.kt b/app/src/main/java/com/nextcloud/talk/activities/CallActivity.kt
index 753c72b08ca..ef919f9d02a 100644
--- a/app/src/main/java/com/nextcloud/talk/activities/CallActivity.kt
+++ b/app/src/main/java/com/nextcloud/talk/activities/CallActivity.kt
@@ -73,6 +73,9 @@ import com.nextcloud.talk.call.ReactionAnimator
 import com.nextcloud.talk.call.components.ParticipantGrid
 import com.nextcloud.talk.call.components.SelfVideoView
 import com.nextcloud.talk.call.components.screenshare.ScreenShareComponent
+import com.nextcloud.talk.camera.BackgroundBlurFrameProcessor
+import com.nextcloud.talk.camera.BlurBackgroundViewModel
+import com.nextcloud.talk.camera.BlurBackgroundViewModel.BackgroundBlurOn
 import com.nextcloud.talk.chat.ChatActivity
 import com.nextcloud.talk.data.user.model.User
 import com.nextcloud.talk.databinding.CallActivityBinding
@@ -185,7 +188,6 @@ import java.util.Objects
 import java.util.concurrent.TimeUnit
 import java.util.concurrent.atomic.AtomicInteger
 import javax.inject.Inject
-import kotlin.String
 import kotlin.math.abs

 @AutoInjector(NextcloudTalkApplication::class)
@@ -214,6 +216,7 @@ class CallActivity : CallBaseActivity() {
     var audioManager: WebRtcAudioManager? = null
     var callRecordingViewModel: CallRecordingViewModel? = null
     var raiseHandViewModel: RaiseHandViewModel? = null
+    val blurBackgroundViewModel: BlurBackgroundViewModel = BlurBackgroundViewModel()
     private var mReceiver: BroadcastReceiver? = null
     private var peerConnectionFactory: PeerConnectionFactory? = null
     private var screenSharePeerConnectionFactory: PeerConnectionFactory? = null
@@ -539,6 +542,20 @@ class CallActivity : CallBaseActivity() {
         }
     }

+    private fun initBackgroundBlurViewModel(surfaceTextureHelper: SurfaceTextureHelper) {
+        blurBackgroundViewModel.viewState.observe(this) { state ->
+            val isOn = state == BackgroundBlurOn
+
+            val processor = if (isOn) {
+                BackgroundBlurFrameProcessor(context, surfaceTextureHelper)
+            } else {
+                null
+            }
+
+            videoSource?.setVideoProcessor(processor)
+        }
+    }
+
     private fun processExtras(extras: Bundle) {
         roomId = extras.getString(KEY_ROOM_ID, "")
         roomToken = extras.getString(KEY_ROOM_TOKEN, "")
@@ -1116,6 +1133,7 @@ class CallActivity : CallBaseActivity() {
             videoSource = peerConnectionFactory!!.createVideoSource(false)
             videoCapturer!!.initialize(surfaceTextureHelper, applicationContext, videoSource!!.capturerObserver)
+            initBackgroundBlurViewModel(surfaceTextureHelper)
         }
         localVideoTrack = peerConnectionFactory!!.createVideoTrack("NCv0", videoSource)
         localStream!!.addTrack(localVideoTrack)
@@ -1250,6 +1268,7 @@ class CallActivity : CallBaseActivity() {
                 binding!!.cameraButton.setImageResource(R.drawable.ic_videocam_white_24px)
             } else {
                 binding!!.cameraButton.setImageResource(R.drawable.ic_videocam_off_white_24px)
+                blurBackgroundViewModel.turnOffBlur()
             }
             toggleMedia(videoOn, true)
         } else if (shouldShowRequestPermissionRationale(Manifest.permission.CAMERA)) {
@@ -1326,6 +1345,10 @@ class CallActivity : CallBaseActivity() {
         raiseHandViewModel!!.clickHandButton()
     }

+    fun toggleBackgroundBlur() {
+        blurBackgroundViewModel.toggleBackgroundBlur()
+    }
+
     public override fun onDestroy() {
         if (signalingMessageReceiver != null) {
             signalingMessageReceiver!!.removeListener(localParticipantMessageListener)
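
For context on the hook used above: org.webrtc's VideoProcessor sits between the camera capturer and the VideoSource, so whatever the processor forwards to its sink is what gets encoded and sent to peers, and passing null to setVideoProcessor() restores the raw feed. A minimal pass-through sketch of that contract (illustrative only, not part of this change):

    import org.webrtc.VideoFrame
    import org.webrtc.VideoProcessor
    import org.webrtc.VideoSink

    // Minimal VideoProcessor: forwards every captured frame untouched.
    // BackgroundBlurFrameProcessor below implements the same contract, but
    // segments and blurs each frame before handing it on.
    class PassThroughVideoProcessor : VideoProcessor {
        private var sink: VideoSink? = null

        override fun onCapturerStarted(success: Boolean) {
            // Allocate per-capture-session resources here
        }

        override fun onCapturerStopped() {
            // Release them here
        }

        override fun onFrameCaptured(videoFrame: VideoFrame) {
            sink?.onFrame(videoFrame) // unmodified pass-through
        }

        override fun setSink(sink: VideoSink?) {
            this.sink = sink // WebRTC wires the VideoSource's sink in here
        }
    }

Note that CallActivity recreates the processor each time blur is toggled on and clears it with null when toggled off.
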
diff --git a/app/src/main/java/com/nextcloud/talk/camera/BackgroundBlurFrameProcessor.kt b/app/src/main/java/com/nextcloud/talk/camera/BackgroundBlurFrameProcessor.kt
new file mode 100644
index 00000000000..69a68b6c903
--- /dev/null
+++ b/app/src/main/java/com/nextcloud/talk/camera/BackgroundBlurFrameProcessor.kt
@@ -0,0 +1,205 @@
+/*
+ * Nextcloud Talk - Android Client
+ *
+ * SPDX-FileCopyrightText: 2025 Julius Linus
+ * SPDX-License-Identifier: GPL-3.0-or-later
+ */
+
+package com.nextcloud.talk.camera
+
+import android.content.Context
+import android.os.Handler
+import android.os.HandlerThread
+import android.util.Log
+import io.github.crow_misia.libyuv.AbgrBuffer
+import io.github.crow_misia.libyuv.I420Buffer
+import io.github.crow_misia.libyuv.PlanePrimitive
+import org.webrtc.JavaI420Buffer
+import org.webrtc.SurfaceTextureHelper
+import org.webrtc.VideoFrame
+import org.webrtc.VideoProcessor
+import org.webrtc.VideoSink
+import org.webrtc.YuvHelper
+import java.nio.ByteBuffer
+import java.util.concurrent.ConcurrentHashMap
+
+class BackgroundBlurFrameProcessor(val context: Context, val surfaceTextureHelper: SurfaceTextureHelper) :
+    VideoProcessor,
+    ImageSegmenterHelper.SegmenterListener {
+
+    companion object {
+        // Inside a companion object, this::class resolves to the Companion class and
+        // would produce the tag "Companion" - name the enclosing class explicitly
+        val TAG: String = BackgroundBlurFrameProcessor::class.java.simpleName
+        const val GPU_THREAD: String = "BackgroundBlur"
+    }
+
+    private var sink: VideoSink? = null
+    private var segmenterHelper: ImageSegmenterHelper? = null
+    private var backgroundBlurGPUProcessor: BackgroundBlurGPUProcessor? = null
+
+    // Holds per-frame metadata shared between the MediaPipe and GPU render threads, in a thread-safe way
+    private val rotationMap = ConcurrentHashMap<Long, Float>()
+    private val frameBufferMap = ConcurrentHashMap<Long, ByteBuffer>()
+
+    // Dedicated thread for OpenGL operations
+    private var glThread: HandlerThread? = null
+    private var glHandler: Handler? = null
+
+    // SegmentationListener Interface
+
+    override fun onError(error: String, errorCode: Int) {
+        Log.e(TAG, "Error $errorCode: $error")
+    }
+
+    override fun onResults(resultBundle: ImageSegmenterHelper.ResultBundle) {
+        val rotation = rotationMap[resultBundle.inferenceTime] ?: 0f
+        val frameBuffer = frameBufferMap[resultBundle.inferenceTime]
+
+        // Remove entries once used to prevent memory leaks
+        rotationMap.remove(resultBundle.inferenceTime)
+        frameBufferMap.remove(resultBundle.inferenceTime)
+
+        if (frameBuffer == null) {
+            Log.e(TAG, "Critical Error in onResults: FrameBufferMap[${resultBundle.inferenceTime}] was null")
+            return
+        }
+
+        glHandler?.post {
+            // This block runs on the dedicated GL thread
+            backgroundBlurGPUProcessor?.let { scaler ->
+                try {
+                    val drawArray = scaler.process(
+                        resultBundle.mask,
+                        frameBuffer,
+                        resultBundle.width,
+                        resultBundle.height,
+                        rotation
+                    )
+
+                    val webRTCBuffer = drawArray.convertToWebRTCBuffer(resultBundle.width, resultBundle.height)
+                    val videoFrame = VideoFrame(webRTCBuffer, 0, resultBundle.inferenceTime)
+
+                    // This should run on the CaptureThread
+                    surfaceTextureHelper.handler.post {
+                        Log.d(TAG, "Sent VideoFrame to sink on ${Thread.currentThread().name}")
+                        sink?.onFrame(videoFrame)
+
+                        // The JavaI420Buffer was wrapped without a release callback, so release()
+                        // only drops the frame's reference count; the direct buffers behind it are
+                        // reclaimed by the garbage collector.
+                        videoFrame.release()
+                    }
+                } catch (e: Exception) {
+                    Log.e(TAG, "Error processing frame on GL Thread", e)
+                }
+            }
+        }
+    }
+
+    // Video Processor Interface
+
+    override fun onCapturerStarted(success: Boolean) {
+        segmenterHelper = ImageSegmenterHelper(context = context, imageSegmenterListener = this)
+
+        glThread = HandlerThread(GPU_THREAD).apply { start() }
+        glHandler = Handler(glThread!!.looper)
+        glHandler?.post {
+            backgroundBlurGPUProcessor = BackgroundBlurGPUProcessor(context)
+            backgroundBlurGPUProcessor?.init()
+        }
+    }
+
+    override fun onCapturerStopped() {
+        segmenterHelper?.destroyImageSegmenter()
+        glHandler?.post {
+            backgroundBlurGPUProcessor?.release()
+            backgroundBlurGPUProcessor = null
+
+            // Quit thread after cleanup
+            glThread?.quitSafely()
+            glThread = null
+            glHandler = null
+        }
+    }
+
+    override fun onFrameCaptured(videoFrame: VideoFrame) {
+        val i420WebRTCBuffer = videoFrame.buffer.toI420()
+        val width = videoFrame.buffer.width
+        val height = videoFrame.buffer.height
+        val rotation = 180.0f - videoFrame.rotation
+        val videoFrameBuffer = i420WebRTCBuffer?.convertToABGR()
+
+        i420WebRTCBuffer?.release()
+
+        videoFrameBuffer?.let {
+            rotationMap[videoFrame.timestampNs] = rotation
+            frameBufferMap[videoFrame.timestampNs] = it
+            segmenterHelper?.segmentFrame(it, width, height, videoFrame.timestampNs)
+        } ?: run {
+            // run { } is required here - a bare lambda after ?: would never be invoked
+            Log.e(TAG, "onFrameCaptured:: Video Frame was null!")
+            sink?.onFrame(videoFrame)
+        }
+    }
+
+    override fun setSink(sink: VideoSink?) {
+        this.sink = sink
+    }
+
+    fun VideoFrame.I420Buffer.convertToABGR(): ByteBuffer {
+        val dataYSize = dataY.limit() - dataY.position()
+        val dataUSize = dataU.limit() - dataU.position()
+        val dataVSize = dataV.limit() - dataV.position()
+
+        val planeY = PlanePrimitive.create(strideY, dataY, dataYSize)
+        val planeU = PlanePrimitive.create(strideU, dataU, dataUSize)
+        val planeV = PlanePrimitive.create(strideV, dataV, dataVSize)
+
+        val libYuvI420Buffer = I420Buffer.wrap(planeY, planeU, planeV, width, height)
+        val libYuvABGRBuffer = AbgrBuffer.allocate(width, height)
+        libYuvI420Buffer.convertTo(libYuvABGRBuffer)
+
+        return libYuvABGRBuffer.asBuffer()
+    }
+
+    fun ByteArray.convertToWebRTCBuffer(width: Int, height: Int): JavaI420Buffer {
+        val src = ByteBuffer.allocateDirect(this.size)
+        src.put(this)
+
+        val srcStride = width * 4
+        val yPlaneSize = width * height
+        val uvPlaneSize = (width / 2) * (height / 2)
+
+        val dstYStride = width
+        val dstUStride = width / 2
+        val dstVStride = width / 2
+
+        val dstYBuffer = ByteBuffer.allocateDirect(yPlaneSize)
+        val dstUBuffer = ByteBuffer.allocateDirect(uvPlaneSize)
+        val dstVBuffer = ByteBuffer.allocateDirect(uvPlaneSize)
+
+        YuvHelper.ABGRToI420(
+            src,
+            srcStride,
+            dstYBuffer,
+            dstYStride,
+            dstUBuffer,
+            dstUStride,
+            dstVBuffer,
+            dstVStride,
+            width,
+            height
+        )
+
+        return JavaI420Buffer.wrap(
+            width,
+            height,
+            dstYBuffer,
+            dstYStride,
+            dstUBuffer,
+            dstUStride,
+            dstVBuffer,
+            dstVStride,
+            null
+        )
+    }
+}
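
A standalone sanity check of the plane arithmetic in convertToWebRTCBuffer() (a sketch, not part of the change): I420 carries a full-resolution luma plane plus two quarter-size chroma planes, so the divisions by 2 implicitly assume even frame dimensions - an odd width or height would truncate.

    // Plane sizes/strides for a 640x480 ABGR -> I420 conversion, mirroring
    // the arithmetic used by convertToWebRTCBuffer().
    fun main() {
        val width = 640
        val height = 480

        val srcStride = width * 4                    // 2560 bytes per ABGR row
        val yPlaneSize = width * height              // 307_200 bytes
        val uvPlaneSize = (width / 2) * (height / 2) // 76_800 bytes each for U and V

        println("srcStride=$srcStride y=$yPlaneSize uv=$uvPlaneSize")

        // I420 totals 1.5 bytes per pixel, versus 4 bytes per pixel for ABGR
        check(yPlaneSize + 2 * uvPlaneSize == width * height * 3 / 2)
    }
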
diff --git a/app/src/main/java/com/nextcloud/talk/camera/BackgroundBlurGPUProcessor.kt b/app/src/main/java/com/nextcloud/talk/camera/BackgroundBlurGPUProcessor.kt
new file mode 100644
index 00000000000..19fb3bcb8be
--- /dev/null
+++ b/app/src/main/java/com/nextcloud/talk/camera/BackgroundBlurGPUProcessor.kt
@@ -0,0 +1,593 @@
+/*
+ * Nextcloud Talk - Android Client
+ *
+ * SPDX-FileCopyrightText: 2026 Julius Linus
+ * SPDX-License-Identifier: GPL-3.0-or-later
+ */
+
+package com.nextcloud.talk.camera
+
+import android.content.Context
+import android.opengl.GLES20
+import com.nextcloud.talk.R
+import org.webrtc.EglBase
+import java.nio.ByteBuffer
+import java.nio.ByteOrder
+import java.nio.FloatBuffer
+import kotlin.math.cos
+import kotlin.math.sin
+
+/**
+ * OpenGL involves a lot of boilerplate because you have to deal with two different memory spaces, CPU and GPU.
+ * Under the hood it operates much like a state machine, hence we need to explicitly define variables, bind
+ * those variables to memory on the GPU, activate/deactivate that memory, send/read data to and from the GPU,
+ * and release memory after we're done using it. In addition, a lot of these functions are poorly documented,
+ * with vague naming and unclear parameters. I would recommend just copying the function name
+ * and Ctrl + F [here](https://developer.android.com/reference/android/opengl/GLES20.html)
+ */
+class BackgroundBlurGPUProcessor(val context: Context) {
+
+    companion object {
+        // Quad Coordinates (Full Screen)
+        private val QUAD_COORDS = floatArrayOf(
+            -1.0f, 1.0f, // Top Left
+            -1.0f, -1.0f, // Bottom Left
+            1.0f, 1.0f, // Top Right
+            1.0f, -1.0f // Bottom Right
+        )
+
+        // Texture Coordinates
+        private val TEX_COORDS = floatArrayOf(
+            0.0f, 0.0f, // Top Left
+            0.0f, 1.0f, // Bottom Left
+            1.0f, 0.0f, // Top Right
+            1.0f, 1.0f // Bottom Right
+        )
+    }
+
+    private var eglBase: EglBase? = null
+
+    // Programs (GPU objects) for running the Gaussian blur
+    private var gaussianBlurProgramId: Int = 0
+    private var maskingOperationProgramId: Int = 0
+
+    // Identifiers linking the mask and the original video frame to their texture objects on the GPU
+    private var maskInputTextureId: Int = 0
+    private var frameInputTextureId: Int = 0
+
+    // FBOs (framebuffer objects) hold the temporary state of the blurring and masking between operations.
+    // Each FBO id refers to an object in GPU memory that is rendered into; its contents are read back
+    // through the texture (another GPU object, essentially a 2D array) attached to it.
+    private var blurFBOAId: Int = 0
+    private var blurFBOTextureAId: Int = 0
+
+    private var blurFBOBId: Int = 0
+    private var blurFBOTextureBId: Int = 0
+
+    private var blurFBOCId: Int = 0
+    private var blurFBOTextureCId: Int = 0
+
+    // These are linked to variables in the shader code
+    private var positionHandle: Int = 0
+    private var texCoordHandle: Int = 0
+    private var maskTextureUniformHandle: Int = 0
+    private var frameTextureUniformHandle: Int = 0
+    private var blurredTextureUniformHandle: Int = 0
+    private var blurDirectionUniformHandle: Int = 0
+
+    // Shader identifiers, referencing their location in GPU memory
+    private var gaussianBlurShaderId: Int = 0
+    private var maskingOpShaderId: Int = 0
+
+    // Geometry buffers; tell the program how to orient the textures
+    private lateinit var vertexBuffer: FloatBuffer
+    private lateinit var texCoordBuffer: FloatBuffer
+
+    // Output buffer; this is what process() returns
+    private var cachedWidth = -1
+    private var cachedHeight = -1
+    private var pixelBuffer: ByteBuffer? = null
+
+    fun init() {
+        // OpenGL needs some surface to draw onto, even if it's just a 1x1 pixel
+        eglBase = EglBase.create()
+        try {
+            eglBase?.createDummyPbufferSurface()
+        } catch (_: Exception) {
+            eglBase?.createPbufferSurface(1, 1)
+        }
+        eglBase?.makeCurrent()
+
+        setupVertexAndTextureBuffer()
+
+        // Compile Shaders and Link Program
+        val vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, R.raw.background_blur_vertex)
+        gaussianBlurShaderId = loadShader(GLES20.GL_FRAGMENT_SHADER, R.raw.gaussian_blur_frag_shader)
+        maskingOpShaderId = loadShader(GLES20.GL_FRAGMENT_SHADER, R.raw.seg_mask_frag_shader)
+
+        setupGaussianBlurProgram(vertexShader)
+
+        setupSegmentationMaskProgram(vertexShader)
+
+        // Initialize handle variables to point to GPU memory
+        positionHandle = GLES20.glGetAttribLocation(gaussianBlurProgramId, "a_Position")
+        texCoordHandle = GLES20.glGetAttribLocation(gaussianBlurProgramId, "a_TexCoord")
+
+        // Initialize the locations of the shader code uniform variables
+        maskTextureUniformHandle = GLES20.glGetUniformLocation(maskingOperationProgramId, "u_MaskTexture")
+        frameTextureUniformHandle = GLES20.glGetUniformLocation(maskingOperationProgramId, "u_FrameTexture")
+        blurredTextureUniformHandle = GLES20.glGetUniformLocation(gaussianBlurProgramId, "u_BlurredTexture")
+        blurDirectionUniformHandle = GLES20.glGetUniformLocation(gaussianBlurProgramId, "u_Direction")
+
+        setupMaskInputTexture()
+
+        setupFrameInputTexture()
+    }
+
+    private fun setupVertexAndTextureBuffer() {
+        vertexBuffer = ByteBuffer
+            .allocateDirect(QUAD_COORDS.size * 4)
+            .order(ByteOrder.nativeOrder())
+            .asFloatBuffer()
+
+        vertexBuffer
+            .put(QUAD_COORDS)
+            .position(0)
+
+        // Setup Texture Coordinate Buffer
+        texCoordBuffer = ByteBuffer
+            .allocateDirect(TEX_COORDS.size * 4)
+            .order(ByteOrder.nativeOrder())
+            .asFloatBuffer()
+
+        texCoordBuffer
+            .put(TEX_COORDS)
+            .position(0)
+    }
+
+    // An OpenGL Program is like a compiled executable (a .exe file)
+    // glAttachShader is like adding a .c source file to a project
+    // glLinkProgram is the compiler that turns those files into the executable
+    // glUseProgram loads the executable
+    // glDrawArrays runs the executable
+
+    private fun setupGaussianBlurProgram(vertexShader: Int) {
+        gaussianBlurProgramId = GLES20.glCreateProgram()
+        GLES20.glAttachShader(gaussianBlurProgramId, vertexShader)
+        GLES20.glAttachShader(gaussianBlurProgramId, gaussianBlurShaderId)
+        GLES20.glLinkProgram(gaussianBlurProgramId)
+    }
+
+    private fun setupSegmentationMaskProgram(vertexShader: Int) {
+        maskingOperationProgramId = GLES20.glCreateProgram()
+        GLES20.glAttachShader(maskingOperationProgramId, vertexShader)
+        GLES20.glAttachShader(maskingOperationProgramId, maskingOpShaderId)
+        GLES20.glLinkProgram(maskingOperationProgramId)
+    }
+
+    private fun setupFrameInputTexture() {
+        // Generate Frame Input Texture Holder
+        val frameTextures = IntArray(1)
+        GLES20.glGenTextures(1, frameTextures, 0)
+        frameInputTextureId = frameTextures[0]
+
+        // Configure Input Texture parameters
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, frameInputTextureId)
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR)
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR)
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE)
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE)
+    }
+
+    private fun setupMaskInputTexture() {
+        // Generate Mask Input Texture Holder
+        val textures = IntArray(1)
+        GLES20.glGenTextures(1, textures, 0)
+        maskInputTextureId = textures[0]
+
+        // Configure Input Texture parameters
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, maskInputTextureId)
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST)
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST)
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE)
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE)
+    }
+
+    /**
+     * Takes the selfie segmentation mask (one byte per pixel, 0 or 1) and the matching ABGR video frame,
+     * and runs them through the GPU pipeline to composite a frame according to the shader code.
+     *
+     * NOTE: The output byte array is 4x the size of the mask, as each pixel is represented by 4 RGBA bytes
+     *
+     * @param maskData The raw 0/1 byte array from MediaPipe
+     * @param frameBuffer The ABGR video frame the mask was computed from
+     * @param width Width of the mask
+     * @param height Height of the mask
+     * @param rotation Rotation to compensate for, in degrees
+     *
+     * @return RGBA ByteArray holding the composited frame: a blurred background with the person left sharp
+     */
+    fun process(maskData: ByteArray, frameBuffer: ByteBuffer, width: Int, height: Int, rotation: Float): ByteArray {
+        // Setup Framebuffer if dimensions changed
+        prepareFramebuffers(width, height)
+
+        uploadMaskToTexture(maskData, width, height)
+
+        uploadVideoFrameToTexture(frameBuffer, width, height)
+
+        setUpVertexAndTextureCoordBuffers(rotation)
+
+        GLES20.glViewport(0, 0, width, height)
+
+        GLES20.glUseProgram(gaussianBlurProgramId)
+
+        horizontalPass()
+
+        verticalPass()
+
+        maskingOperation(rotation)
+
+        // We rewind the buffer to ensure we read from the start
+        pixelBuffer!!.rewind()
+
+        // Read pixels back to the CPU from FBO C's texture (the most recently bound framebuffer).
+        // This is a blocking operation, as it waits for drawing to be completed before reading;
+        // that shouldn't be a problem, as this code lives on the dedicated GPU processing thread
+        GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuffer)

+        cleanUpVertexAndTextureBuffers()
+
+        return pixelBuffer!!.array()
+    }
+
+    private fun horizontalPass() {
+        // glBindFramebuffer sets dst to blurFBOA, glBindTexture sets src to the original video frame
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, blurFBOAId)
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, frameInputTextureId)
+        GLES20.glUniform2f(blurDirectionUniformHandle, 1.0f, 0.0f)
+        GLES20.glUniform1i(blurredTextureUniformHandle, 0)
+
+        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4) // render horizontal pass -> fbo a texture
+    }
+
+    private fun verticalPass() {
+        // glBindFramebuffer sets dst to blurFBOB, glBindTexture sets src to blurFBOTextureA
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, blurFBOBId)
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, blurFBOTextureAId)
+        GLES20.glUniform2f(blurDirectionUniformHandle, 0.0f, 1.0f)
+        GLES20.glUniform1i(blurredTextureUniformHandle, 0)
+
+        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4) // render vertical pass -> fbo b texture
+    }
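
The horizontal and vertical passes above exploit the separability of the Gaussian kernel: two 1D passes produce the same result as one full 2D convolution while sampling 2n texels per pixel instead of n². An illustrative CPU equivalent of a single pass (a sketch with assumed names, not part of the change), with index clamping mirroring GL_CLAMP_TO_EDGE:

    // Illustrative CPU version of one separable-blur pass on a grayscale image.
    // weights is a normalized 1D Gaussian kernel; radius = weights.size / 2.
    fun blur1D(src: FloatArray, w: Int, h: Int, weights: FloatArray, horizontal: Boolean): FloatArray {
        val dst = FloatArray(src.size)
        val r = weights.size / 2
        for (y in 0 until h) for (x in 0 until w) {
            var acc = 0f
            for (k in weights.indices) {
                // Clamp to the image edge, like GL_CLAMP_TO_EDGE does for textures
                val xx = if (horizontal) (x + k - r).coerceIn(0, w - 1) else x
                val yy = if (horizontal) y else (y + k - r).coerceIn(0, h - 1)
                acc += src[yy * w + xx] * weights[k]
            }
            dst[y * w + x] = acc
        }
        return dst
    }

    // Two 1D passes approximate one 2D Gaussian convolution:
    // val blurred = blur1D(blur1D(img, w, h, kernel, horizontal = true), w, h, kernel, horizontal = false)
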
+
+    private fun maskingOperation(rotation: Float) {
+        GLES20.glUseProgram(maskingOperationProgramId)
+        blurredTextureUniformHandle = GLES20.glGetUniformLocation(maskingOperationProgramId, "u_BlurredTexture")
+
+        positionHandle = GLES20.glGetAttribLocation(maskingOperationProgramId, "a_Position")
+        texCoordHandle = GLES20.glGetAttribLocation(maskingOperationProgramId, "a_TexCoord")
+
+        setUpVertexAndTextureCoordBuffers(rotation)
+
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, maskInputTextureId)
+        GLES20.glUniform1i(maskTextureUniformHandle, 0) // Mask to Unit 0
+
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE1)
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, frameInputTextureId)
+        GLES20.glUniform1i(frameTextureUniformHandle, 1) // Video Frame to Unit 1
+
+        // glBindFramebuffer sets dst to blurFBOC, glBindTexture sets src to blurFBOTextureB
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE2)
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, blurFBOCId)
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, blurFBOTextureBId)
+        GLES20.glUniform1i(blurredTextureUniformHandle, 2)
+
+        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4) // render final image -> fbo c texture
+    }
+
+    private fun cleanUpVertexAndTextureBuffers() {
+        GLES20.glDisableVertexAttribArray(positionHandle)
+        GLES20.glDisableVertexAttribArray(texCoordHandle)
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0) // Unbind FBO to return to default display
+    }
+
+    private fun setUpVertexAndTextureCoordBuffers(rotation: Float) {
+        GLES20.glEnableVertexAttribArray(positionHandle)
+        GLES20.glVertexAttribPointer(positionHandle, 2, GLES20.GL_FLOAT, false, 0, vertexBuffer)
+
+        texCoordBuffer = getRotatedTexCoords(rotation)
+
+        GLES20.glEnableVertexAttribArray(texCoordHandle)
+        GLES20.glVertexAttribPointer(texCoordHandle, 2, GLES20.GL_FLOAT, false, 0, texCoordBuffer)
+    }
+
+    // First translate the coordinates to the center,
+    // multiply x by the aspect ratio so the rotation happens at the correct scale,
+    // apply the rotation matrix,
+    // divide x by the ratio to return to the original scale,
+    // then translate the coordinates back to the origin
+    private fun getRotatedTexCoords(angleDegrees: Float): FloatBuffer {
+        val radians = Math.toRadians(angleDegrees.toDouble())
+        val cosA = cos(radians).toFloat()
+        val sinA = sin(radians).toFloat()
+        val ratio = 4 / 3.0f // aspect ratio of the capture format (assumes 4:3)
+
+        // Texture center points
+        val cx = 0.5f
+        val cy = 0.5f
+
+        val rotatedCoords = FloatArray(TEX_COORDS.size)
+
+        for (i in TEX_COORDS.indices step 2) {
+            val u = TEX_COORDS[i]
+            val v = TEX_COORDS[i + 1]
+
+            var translatedU = u - cx
+            val translatedV = v - cy
+
+            translatedU *= ratio
+            // 2D matrix rotation
+            var rotatedU = translatedU * cosA - translatedV * sinA // [ cos(A) -sin(A) ] [ x ]   [ x*cos(A) - y*sin(A) ]
+            val rotatedV = translatedU * sinA + translatedV * cosA // [ sin(A)  cos(A) ] [ y ] = [ x*sin(A) + y*cos(A) ]
+
+            rotatedU /= ratio
+
+            rotatedCoords[i] = rotatedU + cx
+            rotatedCoords[i + 1] = rotatedV + cy
+        }
+
+        return ByteBuffer.allocateDirect(rotatedCoords.size * 4)
+            .order(ByteOrder.nativeOrder())
+            .asFloatBuffer()
+            .apply {
+                put(rotatedCoords)
+                position(0)
+            }
+    }
+
+    private fun prepareFramebuffers(width: Int, height: Int) {
+        if (cachedWidth == width && cachedHeight == height && blurFBOAId != 0) {
+            return // Size hasn't changed, reuse FBO
+        }
+
+        cachedWidth = width
+        cachedHeight = height
+
+        // Cleanup old FBO/Texture if they exist
+        cleanUpOldState()
+
+        // Create Frame Buffer Object A and Texture A - to hold the horizontal Gaussian blur pass
+        setUpFboTextureA(width, height)
+        setUpFboA()
+
+        // Create Frame Buffer Object B and Texture B - to hold the combined vertical + horizontal pass
+        setUpFboTextureB(width, height)
+        setUpFboB()
+
+        // Create Frame Buffer Object C and Texture C - to hold the final output image after the masking operation
+        setUpFboTextureC(width, height)
+        setUpFboC()
+
+        // Check status
+        val status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER)
+        if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) {
+            throw RuntimeException("Framebuffer invalid status: $status")
+        }
+
+        // Allocate CPU buffer for reading result (4 bytes per pixel for RGBA)
+        pixelBuffer = ByteBuffer.allocate(width * height * 4)
+    }
+
+    private fun uploadMaskToTexture(maskData: ByteArray, width: Int, height: Int) {
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, maskInputTextureId)
+
+        val buffer = ByteBuffer.wrap(maskData)
+        // Note: Using GL_LUMINANCE for single channel byte input
+        GLES20.glTexImage2D(
+            GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, width, height,
+            0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, buffer
+        )
+
+        GLES20.glUniform1i(maskTextureUniformHandle, 0)
+    }
+
+    private fun uploadVideoFrameToTexture(frameBuffer: ByteBuffer, width: Int, height: Int) {
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE1)
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, frameInputTextureId)
+
+        GLES20.glTexImage2D(
+            GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height,
+            0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, frameBuffer
+        )
+
+        GLES20.glUniform1i(frameTextureUniformHandle, 1)
+    }
+
+    private fun setUpFboA() {
+        val fbos = IntArray(1)
+        GLES20.glGenFramebuffers(1, fbos, 0)
+        blurFBOAId = fbos[0]
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, blurFBOAId)
+
+        // Attach texture to FBO
+        GLES20.glFramebufferTexture2D(
+            GLES20.GL_FRAMEBUFFER,
+            GLES20.GL_COLOR_ATTACHMENT0,
+            GLES20.GL_TEXTURE_2D,
+            blurFBOTextureAId,
+            0
+        )
+    }
+
+    private fun setUpFboTextureA(width: Int, height: Int) {
+        val texs = IntArray(1)
+        GLES20.glGenTextures(1, texs, 0)
+        blurFBOTextureAId = texs[0]
+
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE2)
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, blurFBOTextureAId)
+        GLES20.glTexImage2D(
+            GLES20.GL_TEXTURE_2D,
+            0,
+            GLES20.GL_RGBA,
+            width,
+            height,
+            0,
+            GLES20.GL_RGBA,
+            GLES20.GL_UNSIGNED_BYTE,
+            null
+        )
+
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR)
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR)
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE)
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE)
+    }
+
+    private fun setUpFboB() {
+        val fbos = IntArray(1)
+        GLES20.glGenFramebuffers(1, fbos, 0)
+        blurFBOBId = fbos[0]
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, blurFBOBId)
+
+        // Attach texture to FBO
+        GLES20.glFramebufferTexture2D(
+            GLES20.GL_FRAMEBUFFER,
+            GLES20.GL_COLOR_ATTACHMENT0,
+            GLES20.GL_TEXTURE_2D,
+            blurFBOTextureBId,
+            0
+        )
+    }
+
+    private fun setUpFboTextureB(width: Int, height: Int) {
+        val texs = IntArray(1)
+        GLES20.glGenTextures(1, texs, 0)
+        blurFBOTextureBId = texs[0]
+
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE3)
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, blurFBOTextureBId)
+        GLES20.glTexImage2D(
+            GLES20.GL_TEXTURE_2D,
+            0,
+            GLES20.GL_RGBA,
+            width,
+            height,
+            0,
+            GLES20.GL_RGBA,
+            GLES20.GL_UNSIGNED_BYTE,
+            null
+        )
+
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR)
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR)
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE)
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE)
+    }
+
+    private fun setUpFboC() {
+        val fbos = IntArray(1)
+        GLES20.glGenFramebuffers(1, fbos, 0)
+        blurFBOCId = fbos[0]
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, blurFBOCId)
+
+        // Attach texture to FBO
+        GLES20.glFramebufferTexture2D(
+            GLES20.GL_FRAMEBUFFER,
+            GLES20.GL_COLOR_ATTACHMENT0,
+            GLES20.GL_TEXTURE_2D,
+            blurFBOTextureCId,
+            0
+        )
+    }
+
+    private fun setUpFboTextureC(width: Int, height: Int) {
+        val texs = IntArray(1)
+        GLES20.glGenTextures(1, texs, 0)
+        blurFBOTextureCId = texs[0]
+
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, blurFBOTextureCId)
+        GLES20.glTexImage2D(
+            GLES20.GL_TEXTURE_2D,
+            0,
+            GLES20.GL_RGBA,
+            width,
+            height,
+            0,
+            GLES20.GL_RGBA,
+            GLES20.GL_UNSIGNED_BYTE,
+            null
+        )
+
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR)
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR)
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE)
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE)
+    }
+
+    private fun cleanUpOldState() {
+        if (blurFBOAId != 0) {
+            val fbos = intArrayOf(blurFBOAId)
+            val texs = intArrayOf(blurFBOTextureAId)
+            GLES20.glDeleteFramebuffers(1, fbos, 0)
+            GLES20.glDeleteTextures(1, texs, 0)
+        }
+
+        if (blurFBOBId != 0) {
+            val fbos = intArrayOf(blurFBOBId)
+            val texs = intArrayOf(blurFBOTextureBId)
+            GLES20.glDeleteFramebuffers(1, fbos, 0)
+            GLES20.glDeleteTextures(1, texs, 0)
+        }
+
+        if (blurFBOCId != 0) {
+            val fbos = intArrayOf(blurFBOCId)
+            val texs = intArrayOf(blurFBOTextureCId)
+            GLES20.glDeleteFramebuffers(1, fbos, 0)
+            GLES20.glDeleteTextures(1, texs, 0)
+        }
+    }
+
+    private fun loadShader(type: Int, id: Int): Int {
+        val input = context.resources.openRawResource(id)
+        val name = context.resources.getResourceName(id)
+        val shaderCode = input.bufferedReader().use { it.readText() } // defaults to UTF-8
+
+        val shader = GLES20.glCreateShader(type)
+        GLES20.glShaderSource(shader, shaderCode)
+        GLES20.glCompileShader(shader)
+
+        // Compilation check - fail fast with the driver's info log instead of rendering garbage later
+        val compileStatus = IntArray(1)
+        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0)
+        if (compileStatus[0] == 0) {
+            val error = GLES20.glGetShaderInfoLog(shader)
+            GLES20.glDeleteShader(shader)
+            throw RuntimeException("Shader Compilation Error($name): $error")
+        }
+
+        return shader
+    }
+
+    fun release() {
+        if (eglBase != null) {
+            // Note: glDeleteProgram does not free attached shaders by itself; they (and anything
+            // else left over) are reclaimed when the EGL context is released below
+            GLES20.glDeleteProgram(gaussianBlurProgramId)
+            GLES20.glDeleteProgram(maskingOperationProgramId)
+            val textures = intArrayOf(
+                maskInputTextureId,
+                frameInputTextureId,
+                blurFBOTextureAId,
+                blurFBOTextureBId,
+                blurFBOTextureCId
+            )
+            GLES20.glDeleteTextures(textures.size, textures, 0)
+            val fbos = intArrayOf(blurFBOAId, blurFBOBId, blurFBOCId)
+            GLES20.glDeleteFramebuffers(fbos.size, fbos, 0)
+
+            eglBase?.release()
+            eglBase = null
+        }
+    }
+}
diff --git a/app/src/main/java/com/nextcloud/talk/camera/BlurBackgroundViewModel.kt b/app/src/main/java/com/nextcloud/talk/camera/BlurBackgroundViewModel.kt
new file mode 100644
index 00000000000..0d05225d04e
--- /dev/null
+++ b/app/src/main/java/com/nextcloud/talk/camera/BlurBackgroundViewModel.kt
@@ -0,0 +1,38 @@
+/*
+ * Nextcloud Talk - Android Client
+ *
+ * SPDX-FileCopyrightText: 2025 Julius Linus
+ * SPDX-License-Identifier: GPL-3.0-or-later
+ */
+
+package com.nextcloud.talk.camera
+
+import androidx.lifecycle.LiveData
+import androidx.lifecycle.MutableLiveData
+import androidx.lifecycle.ViewModel
+
+class BlurBackgroundViewModel : ViewModel() {
+
+    sealed interface ViewState
+
+    object BackgroundBlurOn : ViewState
+    object BackgroundBlurOff : ViewState
+
+    private val _viewState: MutableLiveData<ViewState> = MutableLiveData(BackgroundBlurOff)
+    val viewState: LiveData<ViewState>
+        get() = _viewState
+
+    fun toggleBackgroundBlur() {
+        val isOn = _viewState.value == BackgroundBlurOn
+
+        if (isOn) {
+            _viewState.value = BackgroundBlurOff
+        } else {
+            _viewState.value = BackgroundBlurOn
+        }
+    }
+
+    fun turnOffBlur() {
+        _viewState.value = BackgroundBlurOff
+    }
+}
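
Because BlurBackgroundViewModel is plain LiveData state, it is cheap to unit-test. A sketch assuming JUnit 4 and androidx.arch.core:core-testing for the main-thread rule (not part of this change):

    import androidx.arch.core.executor.testing.InstantTaskExecutorRule
    import org.junit.Assert.assertEquals
    import org.junit.Rule
    import org.junit.Test

    class BlurBackgroundViewModelTest {
        @get:Rule
        val instantTaskExecutorRule = InstantTaskExecutorRule() // makes LiveData setValue synchronous

        @Test
        fun `toggle flips the state and turnOffBlur always lands on Off`() {
            val viewModel = BlurBackgroundViewModel()
            assertEquals(BlurBackgroundViewModel.BackgroundBlurOff, viewModel.viewState.value)

            viewModel.toggleBackgroundBlur()
            assertEquals(BlurBackgroundViewModel.BackgroundBlurOn, viewModel.viewState.value)

            viewModel.turnOffBlur()
            assertEquals(BlurBackgroundViewModel.BackgroundBlurOff, viewModel.viewState.value)
        }
    }
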
diff --git a/app/src/main/java/com/nextcloud/talk/camera/ImageSegmenterHelper.kt b/app/src/main/java/com/nextcloud/talk/camera/ImageSegmenterHelper.kt
new file mode 100644
index 00000000000..66401d54efb
--- /dev/null
+++ b/app/src/main/java/com/nextcloud/talk/camera/ImageSegmenterHelper.kt
@@ -0,0 +1,161 @@
+/*
+ * Nextcloud Talk - Android Client
+ *
+ * SPDX-FileCopyrightText: 2025 Julius Linus
+ * SPDX-License-Identifier: GPL-3.0-or-later
+ */
+
+package com.nextcloud.talk.camera
+
+import android.content.Context
+import android.util.Log
+import com.google.mediapipe.framework.image.ByteBufferExtractor
+import com.google.mediapipe.framework.image.ByteBufferImageBuilder
+import com.google.mediapipe.framework.image.MPImage
+import com.google.mediapipe.tasks.core.BaseOptions
+import com.google.mediapipe.tasks.core.Delegate
+import com.google.mediapipe.tasks.vision.core.RunningMode
+import com.google.mediapipe.tasks.vision.imagesegmenter.ImageSegmenter
+import com.google.mediapipe.tasks.vision.imagesegmenter.ImageSegmenterResult
+import java.nio.ByteBuffer
+
+class ImageSegmenterHelper(val context: Context, var imageSegmenterListener: SegmenterListener? = null) {
+
+    private var imageSegmenter: ImageSegmenter? = null
+
+    init {
+        setupImageSegmenter()
+    }
+
+    // The current segmenter must be closed before creating a new one, so that results
+    // are not delivered to a listener that no longer exists
+    fun destroyImageSegmenter() {
+        imageSegmenter?.close()
+        imageSegmenter = null
+    }
+
+    /**
+     * Initialize the image segmenter using the current settings, on the
+     * thread that will use it. The CPU delegate can be used with segmenters
+     * that are created on the main thread and used on a background thread, but
+     * the GPU delegate needs to be used on the thread that initialized the
+     * segmenter
+     *
+     * @throws IllegalStateException
+     */
+    fun setupImageSegmenter() {
+        val baseOptionsBuilder = BaseOptions.builder().apply {
+            setDelegate(Delegate.CPU)
+            setModelAssetPath(MODEL_SELFIE_SEGMENTER_PATH)
+        }
+
+        if (imageSegmenterListener == null) {
+            throw IllegalStateException("ImageSegmenterListener must be set.")
+        }
+
+        runCatching {
+            val baseOptions = baseOptionsBuilder.build()
+            val optionsBuilder = ImageSegmenter.ImageSegmenterOptions.builder()
+                .setRunningMode(RunningMode.LIVE_STREAM)
+                .setBaseOptions(baseOptions)
+                .setOutputCategoryMask(true)
+                .setOutputConfidenceMasks(false)
+                .setResultListener(this::returnSegmentationResult)
+                .setErrorListener(this::returnSegmentationHelperError)
+
+            imageSegmenter = ImageSegmenter.createFromOptions(context, optionsBuilder.build())
+        }.getOrElse { e ->
+            when (e) {
+                is IllegalStateException -> {
+                    imageSegmenterListener?.onError(
+                        "Image segmenter failed to initialize. See error logs for details"
+                    )
+                    Log.e(TAG, "Image segmenter failed to load model with error: ${e.message}")
+                }
+
+                is RuntimeException -> {
+                    // This can occur if the model in use does not support the selected delegate (e.g. GPU)
+                    imageSegmenterListener?.onError(
+                        "Image segmenter failed to initialize. See error logs for details",
+                        GPU_ERROR
+                    )
+                    Log.e(TAG, "Image segmenter failed to load model with error: ${e.message}")
+                }
+            }
+        }
+    }
+
+    /**
+     * Runs image segmentation on live camera frames, frame by frame, and
+     * returns the results asynchronously to the given `imageSegmenterListener`
+     *
+     * @throws IllegalArgumentException
+     */
+    fun segmentFrame(byteBuffer: ByteBuffer, width: Int, height: Int, videoFrameTimeStamp: Long) {
+        val mpImage = ByteBufferImageBuilder(byteBuffer, width, height, MPImage.IMAGE_FORMAT_RGBA).build()
+
+        imageSegmenter?.segmentAsync(mpImage, videoFrameTimeStamp)
+    }
+
+    // The MPImage parameter isn't needed here, but the listener signature requires it
+    private fun returnSegmentationResult(result: ImageSegmenterResult, image: MPImage) {
+        // Only a single mask is produced, because the options request the category mask
+        // (setOutputCategoryMask(true)) and no confidence masks
+        val mpImage = result.categoryMask().get()
+
+        val mask = ByteBufferExtractor.extract(mpImage)
+
+        val data = ByteArray(mask.capacity())
+
+        (mask.duplicate().rewind() as ByteBuffer).get(data)
+
+        imageSegmenterListener?.onResults(
+            ResultBundle(
+                data,
+                result.timestampMs(), // echoes videoFrameTimeStamp, used to correlate results with cached frames
+                mpImage.width,
+                mpImage.height
+            )
+        )
+    }
+
+    // Return errors thrown during segmentation to this ImageSegmenterHelper's caller
+    private fun returnSegmentationHelperError(error: RuntimeException) {
+        imageSegmenterListener?.onError(
+            error.message ?: "An unknown error has occurred"
+        )
+    }
+
+    // Wraps the segmentation mask, its dimensions, and the source frame's timestamp
+    // (named inferenceTime here), which keys the per-frame caches in BackgroundBlurFrameProcessor
+    data class ResultBundle(val mask: ByteArray, val inferenceTime: Long, val width: Int, val height: Int) {
+        override fun equals(other: Any?): Boolean {
+            if (this === other) return true
+            if (javaClass != other?.javaClass) return false
+
+            other as ResultBundle
+
+            if (inferenceTime != other.inferenceTime) return false
+            if (!mask.contentEquals(other.mask)) return false
+
+            return true
+        }
+
+        override fun hashCode(): Int {
+            var result = inferenceTime.hashCode()
+            result = 31 * result + mask.contentHashCode()
+            return result
+        }
+    }
+
+    companion object {
+        const val OTHER_ERROR = 0
+        const val GPU_ERROR = 1
+        const val MODEL_SELFIE_SEGMENTER_PATH = "selfie_segmenter.tflite"
+        const val RGB_MAX = 255.0
+        private const val TAG = "ImageSegmenterHelper"
+    }
+
+    interface SegmenterListener {
+        fun onError(error: String, errorCode: Int = OTHER_ERROR)
+        fun onResults(resultBundle: ResultBundle)
+    }
+}
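
One subtlety of LIVE_STREAM mode worth spelling out: segmentAsync() requires monotonically increasing timestamps, and the result's timestampMs() echoes the value that was passed in. That echo is what lets BackgroundBlurFrameProcessor key its rotation and frame-buffer maps by timestamp. A self-contained sketch of the pattern (simplified; ConcurrentHashMap.remove() also claims the entry atomically, instead of a separate get-then-remove):

    import java.util.concurrent.ConcurrentHashMap

    fun main() {
        val rotationMap = ConcurrentHashMap<Long, Float>()

        // Producer (capture thread): cache per-frame metadata under the frame's
        // timestamp, then submit the frame for async segmentation with that timestamp.
        val timestampNs = 123_456_789L
        rotationMap[timestampNs] = 90f

        // Consumer (result callback, possibly another thread): the echoed timestamp
        // claims the cached entry; remove() returns the value and deletes it in one step.
        val rotation = rotationMap.remove(timestampNs) ?: 0f

        check(rotation == 90f)
        check(rotationMap.isEmpty()) // no stale entries left behind
    }
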
diff --git a/app/src/main/java/com/nextcloud/talk/ui/dialog/MoreCallActionsDialog.kt b/app/src/main/java/com/nextcloud/talk/ui/dialog/MoreCallActionsDialog.kt
index 74e1d9306c0..4eea53d4712 100644
--- a/app/src/main/java/com/nextcloud/talk/ui/dialog/MoreCallActionsDialog.kt
+++ b/app/src/main/java/com/nextcloud/talk/ui/dialog/MoreCallActionsDialog.kt
@@ -18,6 +18,7 @@ import com.google.android.material.bottomsheet.BottomSheetDialog
 import com.nextcloud.talk.R
 import com.nextcloud.talk.activities.CallActivity
 import com.nextcloud.talk.application.NextcloudTalkApplication
+import com.nextcloud.talk.camera.BlurBackgroundViewModel
 import com.nextcloud.talk.databinding.DialogMoreCallActionsBinding
 import com.nextcloud.talk.raisehand.viewmodel.RaiseHandViewModel
 import com.nextcloud.talk.ui.theme.ViewThemeUtils
@@ -86,6 +87,10 @@ class MoreCallActionsDialog(private val callActivity: CallActivity) : BottomShee
         binding.raiseHand.setOnClickListener {
             callActivity.clickRaiseOrLowerHandButton()
         }
+
+        binding.backgroundBlur.setOnClickListener {
+            callActivity.toggleBackgroundBlur()
+        }
     }

     private fun initEmojiBar() {
@@ -181,6 +186,18 @@ class MoreCallActionsDialog(private val callActivity: CallActivity) : BottomShee
                 else -> {}
             }
         }
+
+        callActivity.blurBackgroundViewModel.viewState.observe(this) { state ->
+            when (state) {
+                BlurBackgroundViewModel.BackgroundBlurOff -> {
+                    binding.backgroundBlurText.text = context.getText(R.string.turn_on_background_blur)
+                }
+
+                BlurBackgroundViewModel.BackgroundBlurOn -> {
+                    binding.backgroundBlurText.text = context.getText(R.string.turn_off_background_blur)
+                }
+            }
+        }
     }

     companion object {
diff --git a/app/src/main/res/drawable/background_replace_24px.xml b/app/src/main/res/drawable/background_replace_24px.xml
new file mode 100644
index 00000000000..fa19ae2398d
--- /dev/null
+++ b/app/src/main/res/drawable/background_replace_24px.xml
@@ -0,0 +1,16 @@
+[16-line vector drawable for the background-replace icon; the XML markup was stripped during extraction]
\ No newline at end of file
diff --git a/app/src/main/res/layout/dialog_more_call_actions.xml b/app/src/main/res/layout/dialog_more_call_actions.xml
index 45240742c0f..6e4fc4ec11d 100644
--- a/app/src/main/res/layout/dialog_more_call_actions.xml
+++ b/app/src/main/res/layout/dialog_more_call_actions.xml
@@ -117,4 +117,38 @@
+[34 added lines: a "backgroundBlur" action row with the background_replace_24px icon and the "backgroundBlurText" label bound in MoreCallActionsDialog; the XML markup was stripped during extraction]
diff --git a/app/src/main/res/raw/background_blur_vertex.glsl b/app/src/main/res/raw/background_blur_vertex.glsl
new file mode 100644
index 00000000000..a678d7fcf37
--- /dev/null
+++ b/app/src/main/res/raw/background_blur_vertex.glsl
@@ -0,0 +1,17 @@
+#version 300 es
+/*
+ * Nextcloud Talk - Android Client
+ *
+ * SPDX-FileCopyrightText: 2026 Julius Linus
+ * SPDX-License-Identifier: GPL-3.0-or-later
+ */
+
+in vec4 a_Position;
+in vec2 a_TexCoord;
+
+out vec2 v_TexCoord;
+
+void main() {
+    gl_Position = a_Position;
+    v_TexCoord = a_TexCoord;
+}
\ No newline at end of file
diff --git a/app/src/main/res/raw/gaussian_blur_frag_shader.glsl b/app/src/main/res/raw/gaussian_blur_frag_shader.glsl
new file mode 100644
index 00000000000..62401aad6f1
--- /dev/null
+++ b/app/src/main/res/raw/gaussian_blur_frag_shader.glsl
@@ -0,0 +1,89 @@
+#version 300 es
+/*
+ * Nextcloud Talk - Android Client
+ *
+ * SPDX-FileCopyrightText: 2026 Julius Linus
+ * SPDX-License-Identifier: GPL-3.0-or-later
+ */
+
+precision mediump float;
+
+uniform sampler2D u_BlurredTexture;
+uniform vec2 u_Direction;
+
+in vec2 v_TexCoord;
+out vec4 FragColor;
+
+// Hard coded Gaussian blur weights and offsets, obtained from
+// https://lisyarus.github.io/blog/posts/blur-coefficients-generator.html
+const int SAMPLE_COUNT = 19;
+
+const float OFFSETS[19] = float[19](
+-17.381076274832935,
+-15.394230424364446,
+-13.407541291721701,
+-11.420990335232771,
+-9.434557915309014,
+-7.448223675960468,
+-5.461967313484028,
+-3.475769408144678,
+-1.489609431487625,
+0.4965349085037341,
+2.4826862657413393,
+4.468862236297167,
+6.4550869992703435,
+8.441379804986935,
+10.427760570612987,
+12.414249746021257,
+14.400867399097198,
+16.387632648451888,
+18.0
+);
+
+const float WEIGHTS[19] = float[19](
+0.001960284463396841,
+0.004861807629662949,
+0.01080079750703472,
+0.02149275164743333,
+0.03830956291913094,
+0.06116524317539527,
+0.08747606946505149,
+0.1120632810827934,
+0.12859526767421955,
+0.13218368064800906,
+0.12170845606525565,
+0.10038126557565077,
+0.0741606178432157,
+0.04907754925768548,
+0.029092234724842715,
+0.015447267260520894,
+0.007346906112284092,
+0.0031299390474930173,
+0.0007470179009241477
+);
+
+// blurDirection is:
+// vec2(1,0) for horizontal pass
+// vec2(0,1) for vertical pass
+// The sourceTexture is what's being blurred
+// pixelCoord is in [0..1]
+//
+// You can read more about Gaussian Blurring here:
+// https://www.intel.com/content/www/us/en/developer/articles/technical/an-investigation-of-fast-real-time-gpu-based-image-blur-algorithms.html
+vec4 blur(in sampler2D sourceTexture, vec2 blurDirection, vec2 pixelCoord)
+{
+    vec4 result = vec4(0.0);
+    ivec2 isize = textureSize(sourceTexture, 0);
+    vec2 size = vec2(isize);
+    for (int i = 0; i < SAMPLE_COUNT; ++i)
+    {
+        vec2 offset = blurDirection * OFFSETS[i] / size;
+        float weight = WEIGHTS[i];
+        result += texture(sourceTexture, pixelCoord + offset) * weight;
+    }
+    return result;
+}
+
+void main() {
+    FragColor = blur(u_BlurredTexture, u_Direction, v_TexCoord);
+}
\ No newline at end of file
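
A property of the kernel above worth re-checking whenever the coefficients are regenerated: the weights must sum to ~1.0, or each blur pass would brighten or darken the frame. A standalone check (not part of the change):

    import kotlin.math.abs

    // The 19 WEIGHTS from gaussian_blur_frag_shader.glsl; a normalized kernel sums to ~1.0.
    fun main() {
        val weights = doubleArrayOf(
            0.001960284463396841, 0.004861807629662949, 0.01080079750703472,
            0.02149275164743333, 0.03830956291913094, 0.06116524317539527,
            0.08747606946505149, 0.1120632810827934, 0.12859526767421955,
            0.13218368064800906, 0.12170845606525565, 0.10038126557565077,
            0.0741606178432157, 0.04907754925768548, 0.029092234724842715,
            0.015447267260520894, 0.007346906112284092, 0.0031299390474930173,
            0.0007470179009241477
        )
        val sum = weights.sum()
        println("kernel sum = $sum")
        check(abs(sum - 1.0) < 1e-3) { "blur kernel is not normalized" }
    }
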
diff --git a/app/src/main/res/raw/seg_mask_frag_shader.glsl b/app/src/main/res/raw/seg_mask_frag_shader.glsl
new file mode 100644
index 00000000000..b174408f7f6
--- /dev/null
+++ b/app/src/main/res/raw/seg_mask_frag_shader.glsl
@@ -0,0 +1,43 @@
+#version 300 es
+/*
+ * Nextcloud Talk - Android Client
+ *
+ * SPDX-FileCopyrightText: 2026 Julius Linus
+ * SPDX-License-Identifier: GPL-3.0-or-later
+ */
+
+precision mediump float;
+
+uniform sampler2D u_FrameTexture;
+uniform sampler2D u_BlurredTexture;
+uniform sampler2D u_MaskTexture;
+
+in vec2 v_TexCoord;
+out vec4 FragColor;
+
+bool checkNotInBounds(vec2 coords) {
+    return coords.x < 0.0 || coords.x > 1.0 || coords.y < 0.0 || coords.y > 1.0;
+}
+
+void main() {
+    if (checkNotInBounds(v_TexCoord)) {
+        FragColor = vec4(0.0, 0.0, 0.0, 1.0);
+        return;
+    }
+
+    // The mask was uploaded as single GL_LUMINANCE bytes, which OpenGL normalizes to [0, 1]:
+    // byte 0 -> 0.0 (black), byte 1 -> 1/255 (~0.0039).
+    // Multiplying by 255.0 maps the mask back to 0.0 or ~1.0 (white)
+    float maskValue = texture(u_MaskTexture, v_TexCoord).r * 255.0;
+
+    vec4 blurredValue = texture(u_BlurredTexture, v_TexCoord);
+
+    vec4 originalValue = texture(u_FrameTexture, v_TexCoord);
+
+    // Only blur the white pixels
+    if (maskValue > 0.0) {
+        FragColor = blurredValue;
+    } else {
+        FragColor = originalValue;
+    }
+}
\ No newline at end of file
diff --git a/app/src/main/res/raw/testing_frag_shader.glsl b/app/src/main/res/raw/testing_frag_shader.glsl
new file mode 100644
index 00000000000..21227f636d1
--- /dev/null
+++ b/app/src/main/res/raw/testing_frag_shader.glsl
@@ -0,0 +1,18 @@
+#version 300 es
+/*
+ * Nextcloud Talk - Android Client
+ *
+ * SPDX-FileCopyrightText: 2026 Julius Linus
+ * SPDX-License-Identifier: GPL-3.0-or-later
+ */
+
+precision mediump float;
+
+uniform sampler2D u_FrameTexture;
+
+in vec2 v_TexCoord;
+out vec4 FragColor;
+
+void main() {
+    FragColor = texture(u_FrameTexture, v_TexCoord);
+}
\ No newline at end of file
diff --git a/app/src/main/res/values/strings.xml b/app/src/main/res/values/strings.xml
index dc9685787aa..1a729b8ef68 100644
--- a/app/src/main/res/values/strings.xml
+++ b/app/src/main/res/values/strings.xml
@@ -941,4 +941,6 @@ How to translate with transifex:
 [context lines: the existing "No connection to server - Scheduled messages could not be loaded", "Show app switcher" and "Nextcloud app suggestions in account chooser dialog" string resources; their XML tags were stripped during extraction]
+    <string name="turn_on_background_blur">Turn on background blur</string>
+    <string name="turn_off_background_blur">Turn off background blur</string>
diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml
index 406c7eab203..f2e0a44a8a9 100644
--- a/gradle/verification-metadata.xml
+++ b/gradle/verification-metadata.xml
@@ -14,6 +14,13 @@
+[7 added lines of verification entries for the new dependencies; the XML elements were stripped during extraction]
@@ -21486,6 +21493,14 @@
+[8 added lines of checksum entries for com.google.mediapipe:tasks-vision:0.10.26 and io.github.crow-misia.libyuv:libyuv-android:0.43.2; the XML elements were stripped during extraction]
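
One note on gradle/verification-metadata.xml: these entries are normally generated rather than hand-edited. Running `./gradlew --write-verification-metadata sha256 help` (assuming SHA-256 is the checksum kind this project pins) refreshes the metadata for the two new artifacts.
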