Add dual concurrent camera for devices that support it (#258)
* Refactor camera coroutine into separate session types for concurrent camera
Also enables concurrent camera mode if the device supports it
(see the support-check sketch after this list).
Refactors some camera utility code into its own file, CameraExt.
* Replace LLB toggle with concurrent camera mode toggle
The LLB toggle currently isn't hooked up, so use its slot for the
concurrent camera toggle.
* Apply concurrent camera mode constraints
Disable UI components such as the HDR and capture mode toggles when
concurrent camera mode is enabled.
Show the image/video toggle when concurrent camera mode is enabled.
* Only enable dual camera mode when device supports it
* Disable concurrent camera in image-only capture mode
* Fix VIDEO_CAPTURE_EXTERNAL_UNSUPPORTED_TAG String
* Ensure debug info only shows for primary camera in concurrent camera mode
* Fix screen flash events
Replaced an unnecessary coroutine scope with a buffered channel.
This has equivalent functionality to the old implementation but
doesn't need an extra coroutine scope to send events.
Using a Channel rather than a SharedFlow is also more
representative of how these events should be handled, since
only a single consumer should handle each event.
Also ensured we check the lens facing direction when deciding whether
to initially enable the front flash. A sketch of the channel-based
event delivery follows this list.
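
To make the screen-flash change concrete, here is a minimal, self-contained sketch of the buffered-channel pattern, in the spirit of the new `getScreenFlashEvents(): ReceiveChannel<ScreenFlashEvent>` API. The `ScreenFlashEventSource` class and the string event type are illustrative stand-ins, not the app's actual types: `trySend` on an unlimited channel works from non-suspending CameraX callbacks without a dedicated coroutine scope, and channel iteration hands each event to exactly one consumer.

```kotlin
import kotlinx.coroutines.channels.Channel
import kotlinx.coroutines.channels.ReceiveChannel
import kotlinx.coroutines.delay
import kotlinx.coroutines.launch
import kotlinx.coroutines.runBlocking

// Illustrative event type; the real app uses CameraUseCase.ScreenFlashEvent.
data class ScreenFlashEvent(val type: String, val onComplete: () -> Unit)

class ScreenFlashEventSource {
    // A buffered channel: trySend succeeds from non-suspending callbacks
    // without an extra coroutine scope, and each event is delivered to
    // exactly one receiver.
    private val events = Channel<ScreenFlashEvent>(capacity = Channel.UNLIMITED)

    fun getScreenFlashEvents(): ReceiveChannel<ScreenFlashEvent> = events

    // Called from a CameraX callback where no suspending context is available.
    fun onApplyUi(onComplete: () -> Unit) {
        events.trySend(ScreenFlashEvent("APPLY_UI", onComplete))
    }
}

fun main() = runBlocking {
    val source = ScreenFlashEventSource()
    val consumer = launch {
        for (event in source.getScreenFlashEvents()) {
            println("Handling ${event.type}")
            event.onComplete()
        }
    }
    source.onApplyUi { println("UI applied") }
    delay(100) // let the single consumer drain the event (illustration only)
    consumer.cancel()
}
```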
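The dual-camera support check mentioned in the first and fourth bullets amounts to inspecting the concurrent-camera combinations reported by CameraX. Below is a minimal sketch, assuming a `ProcessCameraProvider` has already been obtained; the standalone function name is illustrative, while the actual change folds this check into `SystemConstraints.concurrentCamerasSupported`.

```kotlin
import androidx.camera.core.CameraSelector
import androidx.camera.lifecycle.ProcessCameraProvider

// Dual concurrent camera is treated as supported when at least one advertised
// concurrent-camera combination contains both a front- and a back-facing camera.
fun isDualConcurrentCameraSupported(cameraProvider: ProcessCameraProvider): Boolean =
    cameraProvider.availableConcurrentCameraInfos.any { combination ->
        combination.map { cameraInfo -> cameraInfo.lensFacing }.toSet() ==
            setOf(CameraSelector.LENS_FACING_FRONT, CameraSelector.LENS_FACING_BACK)
    }
```

In the change itself, this constraint gates the concurrent camera toggle, and dual mode is additionally disabled in image-only capture mode.
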
diff --git a/core/camera/build.gradle.kts b/core/camera/build.gradle.kts
index 50d0b3b..cc471c3 100644
--- a/core/camera/build.gradle.kts
+++ b/core/camera/build.gradle.kts
@@ -84,6 +84,10 @@
kotlin {
jvmToolchain(17)
}
+
+ kotlinOptions {
+ freeCompilerArgs += "-Xcontext-receivers"
+ }
}
dependencies {
diff --git a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraExt.kt b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraExt.kt
new file mode 100644
index 0000000..df24af5
--- /dev/null
+++ b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraExt.kt
@@ -0,0 +1,134 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.jetpackcamera.core.camera
+
+import android.annotation.SuppressLint
+import android.hardware.camera2.CameraCharacteristics
+import androidx.annotation.OptIn
+import androidx.camera.camera2.interop.Camera2CameraInfo
+import androidx.camera.camera2.interop.ExperimentalCamera2Interop
+import androidx.camera.core.CameraInfo
+import androidx.camera.core.CameraSelector
+import androidx.camera.core.DynamicRange as CXDynamicRange
+import androidx.camera.core.ExperimentalImageCaptureOutputFormat
+import androidx.camera.core.ImageCapture
+import androidx.camera.core.Preview
+import androidx.camera.core.UseCase
+import androidx.camera.core.UseCaseGroup
+import androidx.camera.video.Recorder
+import androidx.camera.video.VideoCapture
+import com.google.jetpackcamera.settings.model.DynamicRange
+import com.google.jetpackcamera.settings.model.ImageOutputFormat
+import com.google.jetpackcamera.settings.model.LensFacing
+
+val CameraInfo.appLensFacing: LensFacing
+ get() = when (this.lensFacing) {
+ CameraSelector.LENS_FACING_FRONT -> LensFacing.FRONT
+ CameraSelector.LENS_FACING_BACK -> LensFacing.BACK
+ else -> throw IllegalArgumentException(
+ "Unknown CameraSelector.LensFacing -> LensFacing mapping. " +
+ "[CameraSelector.LensFacing: ${this.lensFacing}]"
+ )
+ }
+
+fun CXDynamicRange.toSupportedAppDynamicRange(): DynamicRange? {
+ return when (this) {
+ CXDynamicRange.SDR -> DynamicRange.SDR
+ CXDynamicRange.HLG_10_BIT -> DynamicRange.HLG10
+ // All other dynamic ranges unsupported. Return null.
+ else -> null
+ }
+}
+
+fun DynamicRange.toCXDynamicRange(): CXDynamicRange {
+ return when (this) {
+ com.google.jetpackcamera.settings.model.DynamicRange.SDR -> CXDynamicRange.SDR
+ com.google.jetpackcamera.settings.model.DynamicRange.HLG10 -> CXDynamicRange.HLG_10_BIT
+ }
+}
+
+fun LensFacing.toCameraSelector(): CameraSelector = when (this) {
+ LensFacing.FRONT -> CameraSelector.DEFAULT_FRONT_CAMERA
+ LensFacing.BACK -> CameraSelector.DEFAULT_BACK_CAMERA
+}
+
+@SuppressLint("RestrictedApi")
+fun CameraSelector.toAppLensFacing(): LensFacing = when (this.lensFacing) {
+ CameraSelector.LENS_FACING_FRONT -> LensFacing.FRONT
+ CameraSelector.LENS_FACING_BACK -> LensFacing.BACK
+ else -> throw IllegalArgumentException(
+ "Unknown CameraSelector -> LensFacing mapping. [CameraSelector: $this]"
+ )
+}
+
+val CameraInfo.sensorLandscapeRatio: Float
+ @OptIn(ExperimentalCamera2Interop::class)
+ get() = Camera2CameraInfo.from(this)
+ .getCameraCharacteristic(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE)
+ ?.let { sensorRect ->
+ if (sensorRect.width() > sensorRect.height()) {
+ sensorRect.width().toFloat() / sensorRect.height()
+ } else {
+ sensorRect.height().toFloat() / sensorRect.width()
+ }
+ } ?: Float.NaN
+
+@OptIn(ExperimentalImageCaptureOutputFormat::class)
+fun Int.toAppImageFormat(): ImageOutputFormat? {
+ return when (this) {
+ ImageCapture.OUTPUT_FORMAT_JPEG -> ImageOutputFormat.JPEG
+ ImageCapture.OUTPUT_FORMAT_JPEG_ULTRA_HDR -> ImageOutputFormat.JPEG_ULTRA_HDR
+ // All other output formats unsupported. Return null.
+ else -> null
+ }
+}
+
+/**
+ * Checks if preview stabilization is supported by the device.
+ *
+ */
+val CameraInfo.isPreviewStabilizationSupported: Boolean
+ get() = Preview.getPreviewCapabilities(this).isStabilizationSupported
+
+/**
+ * Checks if video stabilization is supported by the device.
+ *
+ */
+val CameraInfo.isVideoStabilizationSupported: Boolean
+ get() = Recorder.getVideoCapabilities(this).isStabilizationSupported
+
+fun CameraInfo.filterSupportedFixedFrameRates(desired: Set<Int>): Set<Int> {
+ return buildSet {
+ this@filterSupportedFixedFrameRates.supportedFrameRateRanges.forEach { e ->
+ if (e.upper == e.lower && desired.contains(e.upper)) {
+ add(e.upper)
+ }
+ }
+ }
+}
+
+val CameraInfo.supportedImageFormats: Set<ImageOutputFormat>
+ @OptIn(ExperimentalImageCaptureOutputFormat::class)
+ get() = ImageCapture.getImageCaptureCapabilities(this).supportedOutputFormats
+ .mapNotNull(Int::toAppImageFormat)
+ .toSet()
+
+fun UseCaseGroup.getVideoCapture() = getUseCaseOrNull<VideoCapture<Recorder>>()
+fun UseCaseGroup.getImageCapture() = getUseCaseOrNull<ImageCapture>()
+
+private inline fun <reified T : UseCase> UseCaseGroup.getUseCaseOrNull(): T? {
+ return useCases.filterIsInstance<T>().singleOrNull()
+}
diff --git a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSession.kt b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSession.kt
new file mode 100644
index 0000000..fbed566
--- /dev/null
+++ b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSession.kt
@@ -0,0 +1,728 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.jetpackcamera.core.camera
+
+import android.Manifest
+import android.content.ContentValues
+import android.content.Context
+import android.content.pm.PackageManager
+import android.hardware.camera2.CameraCaptureSession
+import android.hardware.camera2.CaptureRequest
+import android.hardware.camera2.CaptureResult
+import android.hardware.camera2.TotalCaptureResult
+import android.net.Uri
+import android.os.Build
+import android.os.SystemClock
+import android.provider.MediaStore
+import android.util.Log
+import android.util.Range
+import androidx.annotation.OptIn
+import androidx.camera.camera2.interop.Camera2CameraInfo
+import androidx.camera.camera2.interop.Camera2Interop
+import androidx.camera.camera2.interop.ExperimentalCamera2Interop
+import androidx.camera.core.Camera
+import androidx.camera.core.CameraControl
+import androidx.camera.core.CameraEffect
+import androidx.camera.core.CameraInfo
+import androidx.camera.core.CameraSelector
+import androidx.camera.core.ExperimentalImageCaptureOutputFormat
+import androidx.camera.core.FocusMeteringAction
+import androidx.camera.core.ImageCapture
+import androidx.camera.core.Preview
+import androidx.camera.core.SurfaceOrientedMeteringPointFactory
+import androidx.camera.core.TorchState
+import androidx.camera.core.UseCaseGroup
+import androidx.camera.core.ViewPort
+import androidx.camera.core.resolutionselector.AspectRatioStrategy
+import androidx.camera.core.resolutionselector.ResolutionSelector
+import androidx.camera.video.FileOutputOptions
+import androidx.camera.video.MediaStoreOutputOptions
+import androidx.camera.video.Recorder
+import androidx.camera.video.Recording
+import androidx.camera.video.VideoCapture
+import androidx.camera.video.VideoRecordEvent
+import androidx.camera.video.VideoRecordEvent.Finalize.ERROR_NONE
+import androidx.concurrent.futures.await
+import androidx.core.content.ContextCompat
+import androidx.core.content.ContextCompat.checkSelfPermission
+import androidx.lifecycle.asFlow
+import com.google.jetpackcamera.core.camera.effects.SingleSurfaceForcingEffect
+import com.google.jetpackcamera.settings.model.AspectRatio
+import com.google.jetpackcamera.settings.model.CaptureMode
+import com.google.jetpackcamera.settings.model.DeviceRotation
+import com.google.jetpackcamera.settings.model.DynamicRange
+import com.google.jetpackcamera.settings.model.FlashMode
+import com.google.jetpackcamera.settings.model.ImageOutputFormat
+import com.google.jetpackcamera.settings.model.LensFacing
+import com.google.jetpackcamera.settings.model.Stabilization
+import java.io.File
+import java.util.Date
+import java.util.concurrent.Executor
+import kotlin.coroutines.ContinuationInterceptor
+import kotlin.math.abs
+import kotlinx.atomicfu.atomic
+import kotlinx.coroutines.CoroutineDispatcher
+import kotlinx.coroutines.CoroutineStart
+import kotlinx.coroutines.Job
+import kotlinx.coroutines.asExecutor
+import kotlinx.coroutines.coroutineScope
+import kotlinx.coroutines.currentCoroutineContext
+import kotlinx.coroutines.flow.StateFlow
+import kotlinx.coroutines.flow.collectLatest
+import kotlinx.coroutines.flow.filterNotNull
+import kotlinx.coroutines.flow.first
+import kotlinx.coroutines.flow.map
+import kotlinx.coroutines.flow.onCompletion
+import kotlinx.coroutines.flow.update
+import kotlinx.coroutines.launch
+
+private const val TAG = "CameraSession"
+
+context(CameraSessionContext)
+internal suspend fun runSingleCameraSession(
+ sessionSettings: PerpetualSessionSettings.SingleCamera,
+ useCaseMode: CameraUseCase.UseCaseMode,
+ // TODO(tm): ImageCapture should go through an event channel like VideoCapture
+ onImageCaptureCreated: (ImageCapture) -> Unit = {}
+) = coroutineScope {
+ val lensFacing = sessionSettings.cameraInfo.appLensFacing
+ Log.d(TAG, "Starting new single camera session for $lensFacing")
+
+ val initialTransientSettings = transientSettings
+ .filterNotNull()
+ .first()
+
+ val useCaseGroup = createUseCaseGroup(
+ cameraInfo = sessionSettings.cameraInfo,
+ initialTransientSettings = initialTransientSettings,
+ stabilizePreviewMode = sessionSettings.stabilizePreviewMode,
+ stabilizeVideoMode = sessionSettings.stabilizeVideoMode,
+ aspectRatio = sessionSettings.aspectRatio,
+ targetFrameRate = sessionSettings.targetFrameRate,
+ dynamicRange = sessionSettings.dynamicRange,
+ imageFormat = sessionSettings.imageFormat,
+ useCaseMode = useCaseMode,
+ effect = when (sessionSettings.captureMode) {
+ CaptureMode.SINGLE_STREAM -> SingleSurfaceForcingEffect(this@coroutineScope)
+ CaptureMode.MULTI_STREAM -> null
+ }
+ ).apply {
+ getImageCapture()?.let(onImageCaptureCreated)
+ }
+
+ cameraProvider.runWith(sessionSettings.cameraInfo.cameraSelector, useCaseGroup) { camera ->
+ Log.d(TAG, "Camera session started")
+
+ launch {
+ processFocusMeteringEvents(camera.cameraControl)
+ }
+
+ launch {
+ processVideoControlEvents(
+ camera,
+ useCaseGroup.getVideoCapture(),
+ captureTypeSuffix = when (sessionSettings.captureMode) {
+ CaptureMode.MULTI_STREAM -> "MultiStream"
+ CaptureMode.SINGLE_STREAM -> "SingleStream"
+ }
+ )
+ }
+
+ launch {
+ camera.cameraInfo.torchState.asFlow().collectLatest { torchState ->
+ currentCameraState.update { old ->
+ old.copy(torchEnabled = torchState == TorchState.ON)
+ }
+ }
+ }
+
+ applyDeviceRotation(initialTransientSettings.deviceRotation, useCaseGroup)
+ processTransientSettingEvents(
+ camera,
+ useCaseGroup,
+ initialTransientSettings,
+ transientSettings
+ )
+ }
+}
+
+context(CameraSessionContext)
+internal suspend fun processTransientSettingEvents(
+ camera: Camera,
+ useCaseGroup: UseCaseGroup,
+ initialTransientSettings: TransientSessionSettings,
+ transientSettings: StateFlow<TransientSessionSettings?>
+) {
+ var prevTransientSettings = initialTransientSettings
+ transientSettings.filterNotNull().collectLatest { newTransientSettings ->
+ // Apply camera control settings
+ if (prevTransientSettings.zoomScale != newTransientSettings.zoomScale) {
+ camera.cameraInfo.zoomState.value?.let { zoomState ->
+ val finalScale =
+ (zoomState.zoomRatio * newTransientSettings.zoomScale).coerceIn(
+ zoomState.minZoomRatio,
+ zoomState.maxZoomRatio
+ )
+ camera.cameraControl.setZoomRatio(finalScale)
+ currentCameraState.update { old ->
+ old.copy(zoomScale = finalScale)
+ }
+ }
+ }
+
+ useCaseGroup.getImageCapture()?.let { imageCapture ->
+ if (prevTransientSettings.flashMode != newTransientSettings.flashMode) {
+ setFlashModeInternal(
+ imageCapture = imageCapture,
+ flashMode = newTransientSettings.flashMode,
+ isFrontFacing = camera.cameraInfo.appLensFacing == LensFacing.FRONT
+ )
+ }
+ }
+
+ if (prevTransientSettings.deviceRotation
+ != newTransientSettings.deviceRotation
+ ) {
+ Log.d(
+ TAG,
+ "Updating device rotation from " +
+ "${prevTransientSettings.deviceRotation} -> " +
+ "${newTransientSettings.deviceRotation}"
+ )
+ applyDeviceRotation(newTransientSettings.deviceRotation, useCaseGroup)
+ }
+
+ prevTransientSettings = newTransientSettings
+ }
+}
+
+internal fun applyDeviceRotation(deviceRotation: DeviceRotation, useCaseGroup: UseCaseGroup) {
+ val targetRotation = deviceRotation.toUiSurfaceRotation()
+ useCaseGroup.useCases.forEach {
+ when (it) {
+ is Preview -> {
+ // Preview's target rotation should not be updated with device rotation.
+ // Instead, preview rotation should match the display rotation.
+ // When Preview is created, it is initialized with the display rotation.
+ // This will need to be updated separately if the display rotation is not
+ // locked. Currently the app is locked to portrait orientation.
+ }
+
+ is ImageCapture -> {
+ it.targetRotation = targetRotation
+ }
+
+ is VideoCapture<*> -> {
+ it.targetRotation = targetRotation
+ }
+ }
+ }
+}
+
+context(CameraSessionContext)
+internal fun createUseCaseGroup(
+ cameraInfo: CameraInfo,
+ initialTransientSettings: TransientSessionSettings,
+ stabilizePreviewMode: Stabilization,
+ stabilizeVideoMode: Stabilization,
+ aspectRatio: AspectRatio,
+ targetFrameRate: Int,
+ dynamicRange: DynamicRange,
+ imageFormat: ImageOutputFormat,
+ useCaseMode: CameraUseCase.UseCaseMode,
+ effect: CameraEffect? = null
+): UseCaseGroup {
+ val previewUseCase =
+ createPreviewUseCase(
+ cameraInfo,
+ aspectRatio,
+ stabilizePreviewMode
+ )
+ val imageCaptureUseCase = if (useCaseMode != CameraUseCase.UseCaseMode.VIDEO_ONLY) {
+ createImageUseCase(cameraInfo, aspectRatio, dynamicRange, imageFormat)
+ } else {
+ null
+ }
+ val videoCaptureUseCase = if (useCaseMode != CameraUseCase.UseCaseMode.IMAGE_ONLY) {
+ createVideoUseCase(
+ cameraInfo,
+ aspectRatio,
+ targetFrameRate,
+ stabilizeVideoMode,
+ dynamicRange,
+ backgroundDispatcher
+ )
+ } else {
+ null
+ }
+
+ imageCaptureUseCase?.let {
+ setFlashModeInternal(
+ imageCapture = imageCaptureUseCase,
+ flashMode = initialTransientSettings.flashMode,
+ isFrontFacing = cameraInfo.appLensFacing == LensFacing.FRONT
+ )
+ }
+
+ return UseCaseGroup.Builder().apply {
+ Log.d(
+ TAG,
+ "Setting initial device rotation to ${initialTransientSettings.deviceRotation}"
+ )
+ setViewPort(
+ ViewPort.Builder(
+ aspectRatio.ratio,
+ // Initialize rotation to Preview's rotation, which comes from Display rotation
+ previewUseCase.targetRotation
+ ).build()
+ )
+ addUseCase(previewUseCase)
+ imageCaptureUseCase?.let {
+ if (dynamicRange == DynamicRange.SDR ||
+ imageFormat == ImageOutputFormat.JPEG_ULTRA_HDR
+ ) {
+ addUseCase(imageCaptureUseCase)
+ }
+ }
+
+ // Do not bind VideoCapture when Ultra HDR is enabled, to keep the app design simple.
+ videoCaptureUseCase?.let {
+ if (imageFormat == ImageOutputFormat.JPEG) {
+ addUseCase(videoCaptureUseCase)
+ }
+ }
+
+ effect?.let { addEffect(it) }
+ }.build()
+}
+
+@OptIn(ExperimentalImageCaptureOutputFormat::class)
+private fun createImageUseCase(
+ cameraInfo: CameraInfo,
+ aspectRatio: AspectRatio,
+ dynamicRange: DynamicRange,
+ imageFormat: ImageOutputFormat
+): ImageCapture {
+ val builder = ImageCapture.Builder()
+ builder.setResolutionSelector(
+ getResolutionSelector(cameraInfo.sensorLandscapeRatio, aspectRatio)
+ )
+ if (dynamicRange != DynamicRange.SDR && imageFormat == ImageOutputFormat.JPEG_ULTRA_HDR
+ ) {
+ builder.setOutputFormat(ImageCapture.OUTPUT_FORMAT_JPEG_ULTRA_HDR)
+ }
+ return builder.build()
+}
+
+private fun createVideoUseCase(
+ cameraInfo: CameraInfo,
+ aspectRatio: AspectRatio,
+ targetFrameRate: Int,
+ stabilizeVideoMode: Stabilization,
+ dynamicRange: DynamicRange,
+ backgroundDispatcher: CoroutineDispatcher
+): VideoCapture<Recorder> {
+ val sensorLandscapeRatio = cameraInfo.sensorLandscapeRatio
+ val recorder = Recorder.Builder()
+ .setAspectRatio(
+ getAspectRatioForUseCase(sensorLandscapeRatio, aspectRatio)
+ )
+ .setExecutor(backgroundDispatcher.asExecutor()).build()
+ return VideoCapture.Builder(recorder).apply {
+ // set video stabilization
+ if (stabilizeVideoMode == Stabilization.ON) {
+ setVideoStabilizationEnabled(true)
+ }
+ // set target fps
+ if (targetFrameRate != TARGET_FPS_AUTO) {
+ setTargetFrameRate(Range(targetFrameRate, targetFrameRate))
+ }
+
+ setDynamicRange(dynamicRange.toCXDynamicRange())
+ }.build()
+}
+
+private fun getAspectRatioForUseCase(sensorLandscapeRatio: Float, aspectRatio: AspectRatio): Int {
+ return when (aspectRatio) {
+ AspectRatio.THREE_FOUR -> androidx.camera.core.AspectRatio.RATIO_4_3
+ AspectRatio.NINE_SIXTEEN -> androidx.camera.core.AspectRatio.RATIO_16_9
+ else -> {
+ // Choose the aspect ratio which maximizes FOV by being closest to the sensor ratio
+ if (
+ abs(sensorLandscapeRatio - AspectRatio.NINE_SIXTEEN.landscapeRatio.toFloat()) <
+ abs(sensorLandscapeRatio - AspectRatio.THREE_FOUR.landscapeRatio.toFloat())
+ ) {
+ androidx.camera.core.AspectRatio.RATIO_16_9
+ } else {
+ androidx.camera.core.AspectRatio.RATIO_4_3
+ }
+ }
+ }
+}
+
+context(CameraSessionContext)
+private fun createPreviewUseCase(
+ cameraInfo: CameraInfo,
+ aspectRatio: AspectRatio,
+ stabilizePreviewMode: Stabilization
+): Preview = Preview.Builder().apply {
+ updateCameraStateWithCaptureResults(targetCameraInfo = cameraInfo)
+
+ // set preview stabilization
+ if (stabilizePreviewMode == Stabilization.ON) {
+ setPreviewStabilizationEnabled(true)
+ }
+
+ setResolutionSelector(
+ getResolutionSelector(cameraInfo.sensorLandscapeRatio, aspectRatio)
+ )
+}.build()
+ .apply {
+ setSurfaceProvider { surfaceRequest ->
+ surfaceRequests.update { surfaceRequest }
+ }
+ }
+
+private fun getResolutionSelector(
+ sensorLandscapeRatio: Float,
+ aspectRatio: AspectRatio
+): ResolutionSelector {
+ val aspectRatioStrategy = when (aspectRatio) {
+ AspectRatio.THREE_FOUR -> AspectRatioStrategy.RATIO_4_3_FALLBACK_AUTO_STRATEGY
+ AspectRatio.NINE_SIXTEEN -> AspectRatioStrategy.RATIO_16_9_FALLBACK_AUTO_STRATEGY
+ else -> {
+ // Choose the resolution selector strategy which maximizes FOV by being closest
+ // to the sensor aspect ratio
+ if (
+ abs(sensorLandscapeRatio - AspectRatio.NINE_SIXTEEN.landscapeRatio.toFloat()) <
+ abs(sensorLandscapeRatio - AspectRatio.THREE_FOUR.landscapeRatio.toFloat())
+ ) {
+ AspectRatioStrategy.RATIO_16_9_FALLBACK_AUTO_STRATEGY
+ } else {
+ AspectRatioStrategy.RATIO_4_3_FALLBACK_AUTO_STRATEGY
+ }
+ }
+ }
+ return ResolutionSelector.Builder().setAspectRatioStrategy(aspectRatioStrategy).build()
+}
+
+context(CameraSessionContext)
+private fun setFlashModeInternal(
+ imageCapture: ImageCapture,
+ flashMode: FlashMode,
+ isFrontFacing: Boolean
+) {
+ val isScreenFlashRequired =
+ isFrontFacing && (flashMode == FlashMode.ON || flashMode == FlashMode.AUTO)
+
+ if (isScreenFlashRequired) {
+ imageCapture.screenFlash = object : ImageCapture.ScreenFlash {
+ override fun apply(
+ expirationTimeMillis: Long,
+ listener: ImageCapture.ScreenFlashListener
+ ) {
+ Log.d(TAG, "ImageCapture.ScreenFlash: apply")
+ screenFlashEvents.trySend(
+ CameraUseCase.ScreenFlashEvent(CameraUseCase.ScreenFlashEvent.Type.APPLY_UI) {
+ listener.onCompleted()
+ }
+ )
+ }
+
+ override fun clear() {
+ Log.d(TAG, "ImageCapture.ScreenFlash: clear")
+ screenFlashEvents.trySend(
+ CameraUseCase.ScreenFlashEvent(CameraUseCase.ScreenFlashEvent.Type.CLEAR_UI) {}
+ )
+ }
+ }
+ }
+
+ imageCapture.flashMode = when (flashMode) {
+ FlashMode.OFF -> ImageCapture.FLASH_MODE_OFF // 2
+
+ FlashMode.ON -> if (isScreenFlashRequired) {
+ ImageCapture.FLASH_MODE_SCREEN // 3
+ } else {
+ ImageCapture.FLASH_MODE_ON // 1
+ }
+
+ FlashMode.AUTO -> if (isScreenFlashRequired) {
+ ImageCapture.FLASH_MODE_SCREEN // 3
+ } else {
+ ImageCapture.FLASH_MODE_AUTO // 0
+ }
+ }
+ Log.d(TAG, "Set flash mode to: ${imageCapture.flashMode}")
+}
+
+private suspend fun startVideoRecordingInternal(
+ initialMuted: Boolean,
+ videoCaptureUseCase: VideoCapture<Recorder>,
+ captureTypeSuffix: String,
+ context: Context,
+ videoCaptureUri: Uri?,
+ shouldUseUri: Boolean,
+ onVideoRecord: (CameraUseCase.OnVideoRecordEvent) -> Unit
+): Recording {
+ Log.d(TAG, "recordVideo")
+ // todo(b/336886716): default setting to enable or disable audio when permission is granted
+
+ // ok. there is a difference between MUTING and ENABLING audio
+ // audio must be enabled in order to be muted
+ // if the video recording isn't started with audio enabled, you will not be able to unmute it
+ // the toggle should only affect whether or not the audio is muted.
+ // the permission will determine whether or not the audio is enabled.
+ val audioEnabled = checkSelfPermission(
+ context,
+ Manifest.permission.RECORD_AUDIO
+ ) == PackageManager.PERMISSION_GRANTED
+
+ val pendingRecord = if (shouldUseUri) {
+ val fileOutputOptions = FileOutputOptions.Builder(
+ File(videoCaptureUri!!.path!!)
+ ).build()
+ videoCaptureUseCase.output.prepareRecording(context, fileOutputOptions)
+ } else {
+ val name = "JCA-recording-${Date()}-$captureTypeSuffix.mp4"
+ val contentValues =
+ ContentValues().apply {
+ put(MediaStore.Video.Media.DISPLAY_NAME, name)
+ }
+ val mediaStoreOutput =
+ MediaStoreOutputOptions.Builder(
+ context.contentResolver,
+ MediaStore.Video.Media.EXTERNAL_CONTENT_URI
+ )
+ .setContentValues(contentValues)
+ .build()
+ videoCaptureUseCase.output.prepareRecording(context, mediaStoreOutput)
+ }
+ pendingRecord.apply {
+ if (audioEnabled) {
+ withAudioEnabled()
+ }
+ }
+ val callbackExecutor: Executor =
+ (
+ currentCoroutineContext()[ContinuationInterceptor] as?
+ CoroutineDispatcher
+ )?.asExecutor() ?: ContextCompat.getMainExecutor(context)
+ return pendingRecord.start(callbackExecutor) { onVideoRecordEvent ->
+ Log.d(TAG, onVideoRecordEvent.toString())
+ when (onVideoRecordEvent) {
+ is VideoRecordEvent.Finalize -> {
+ when (onVideoRecordEvent.error) {
+ ERROR_NONE ->
+ onVideoRecord(
+ CameraUseCase.OnVideoRecordEvent.OnVideoRecorded(
+ onVideoRecordEvent.outputResults.outputUri
+ )
+ )
+
+ else ->
+ onVideoRecord(
+ CameraUseCase.OnVideoRecordEvent.OnVideoRecordError(
+ onVideoRecordEvent.cause
+ )
+ )
+ }
+ }
+
+ is VideoRecordEvent.Status -> {
+ onVideoRecord(
+ CameraUseCase.OnVideoRecordEvent.OnVideoRecordStatus(
+ onVideoRecordEvent.recordingStats.audioStats
+ .audioAmplitude
+ )
+ )
+ }
+ }
+ }.apply {
+ mute(initialMuted)
+ }
+}
+
+private suspend fun runVideoRecording(
+ camera: Camera,
+ videoCapture: VideoCapture<Recorder>,
+ captureTypeSuffix: String,
+ context: Context,
+ transientSettings: StateFlow<TransientSessionSettings?>,
+ videoCaptureUri: Uri?,
+ shouldUseUri: Boolean,
+ onVideoRecord: (CameraUseCase.OnVideoRecordEvent) -> Unit
+) {
+ var currentSettings = transientSettings.filterNotNull().first()
+
+ startVideoRecordingInternal(
+ initialMuted = currentSettings.audioMuted,
+ videoCapture,
+ captureTypeSuffix,
+ context,
+ videoCaptureUri,
+ shouldUseUri,
+ onVideoRecord
+ ).use { recording ->
+
+ fun TransientSessionSettings.isFlashModeOn() = flashMode == FlashMode.ON
+ val isFrontCameraSelector =
+ camera.cameraInfo.cameraSelector == CameraSelector.DEFAULT_FRONT_CAMERA
+
+ if (currentSettings.isFlashModeOn()) {
+ if (!isFrontCameraSelector) {
+ camera.cameraControl.enableTorch(true).await()
+ } else {
+ Log.d(TAG, "Unable to enable torch for front camera.")
+ }
+ }
+
+ transientSettings.filterNotNull()
+ .onCompletion {
+ // Could do some fancier tracking of whether the torch was enabled before
+ // calling this.
+ camera.cameraControl.enableTorch(false)
+ }
+ .collectLatest { newTransientSettings ->
+ if (currentSettings.audioMuted != newTransientSettings.audioMuted) {
+ recording.mute(newTransientSettings.audioMuted)
+ }
+ if (currentSettings.isFlashModeOn() != newTransientSettings.isFlashModeOn()) {
+ if (!isFrontCameraSelector) {
+ camera.cameraControl.enableTorch(newTransientSettings.isFlashModeOn())
+ } else {
+ Log.d(TAG, "Unable to update torch for front camera.")
+ }
+ }
+ currentSettings = newTransientSettings
+ }
+ }
+}
+
+context(CameraSessionContext)
+internal suspend fun processFocusMeteringEvents(cameraControl: CameraControl) {
+ surfaceRequests.map { surfaceRequest ->
+ surfaceRequest?.resolution?.run {
+ Log.d(
+ TAG,
+ "Waiting to process focus points for surface with resolution: " +
+ "$width x $height"
+ )
+ SurfaceOrientedMeteringPointFactory(width.toFloat(), height.toFloat())
+ }
+ }.collectLatest { meteringPointFactory ->
+ for (event in focusMeteringEvents) {
+ meteringPointFactory?.apply {
+ Log.d(TAG, "tapToFocus, processing event: $event")
+ val meteringPoint = createPoint(event.x, event.y)
+ val action = FocusMeteringAction.Builder(meteringPoint).build()
+ cameraControl.startFocusAndMetering(action)
+ } ?: run {
+ Log.w(TAG, "Ignoring event due to no SurfaceRequest: $event")
+ }
+ }
+ }
+}
+
+context(CameraSessionContext)
+internal suspend fun processVideoControlEvents(
+ camera: Camera,
+ videoCapture: VideoCapture<Recorder>?,
+ captureTypeSuffix: String
+) = coroutineScope {
+ var recordingJob: Job? = null
+
+ for (event in videoCaptureControlEvents) {
+ when (event) {
+ is VideoCaptureControlEvent.StartRecordingEvent -> {
+ if (videoCapture == null) {
+ throw RuntimeException(
+ "Attempted video recording with null videoCapture"
+ )
+ }
+
+ recordingJob = launch(start = CoroutineStart.UNDISPATCHED) {
+ runVideoRecording(
+ camera,
+ videoCapture,
+ captureTypeSuffix,
+ context,
+ transientSettings,
+ event.videoCaptureUri,
+ event.shouldUseUri,
+ event.onVideoRecord
+ )
+ }
+ }
+
+ VideoCaptureControlEvent.StopRecordingEvent -> {
+ recordingJob?.cancel()
+ recordingJob = null
+ }
+ }
+ }
+}
+
+/**
+ * Applies a CaptureCallback to the provided image capture builder
+ */
+context(CameraSessionContext)
+@OptIn(ExperimentalCamera2Interop::class)
+private fun Preview.Builder.updateCameraStateWithCaptureResults(
+ targetCameraInfo: CameraInfo
+): Preview.Builder {
+ val isFirstFrameTimestampUpdated = atomic(false)
+ val targetCameraLogicalId = Camera2CameraInfo.from(targetCameraInfo).cameraId
+ Camera2Interop.Extender(this).setSessionCaptureCallback(
+ object : CameraCaptureSession.CaptureCallback() {
+ override fun onCaptureCompleted(
+ session: CameraCaptureSession,
+ request: CaptureRequest,
+ result: TotalCaptureResult
+ ) {
+ super.onCaptureCompleted(session, request, result)
+ val logicalCameraId = session.device.id
+ if (logicalCameraId != targetCameraLogicalId) return
+ try {
+ val physicalCameraId = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+ result.get(CaptureResult.LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID)
+ } else {
+ null
+ }
+ currentCameraState.update { old ->
+ if (old.debugInfo.logicalCameraId != logicalCameraId ||
+ old.debugInfo.physicalCameraId != physicalCameraId
+ ) {
+ old.copy(debugInfo = DebugInfo(logicalCameraId, physicalCameraId))
+ } else {
+ old
+ }
+ }
+ if (!isFirstFrameTimestampUpdated.value) {
+ currentCameraState.update { old ->
+ old.copy(
+ sessionFirstFrameTimestamp = SystemClock.elapsedRealtimeNanos()
+ )
+ }
+ isFirstFrameTimestampUpdated.value = true
+ }
+ } catch (_: Exception) {
+ }
+ }
+ }
+ )
+ return this
+}
diff --git a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSessionContext.kt b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSessionContext.kt
new file mode 100644
index 0000000..1425bbb
--- /dev/null
+++ b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSessionContext.kt
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.jetpackcamera.core.camera
+
+import android.content.Context
+import androidx.camera.core.SurfaceRequest
+import androidx.camera.lifecycle.ProcessCameraProvider
+import kotlinx.coroutines.CoroutineDispatcher
+import kotlinx.coroutines.channels.Channel
+import kotlinx.coroutines.channels.SendChannel
+import kotlinx.coroutines.flow.MutableStateFlow
+import kotlinx.coroutines.flow.StateFlow
+
+/**
+ * Context that can be shared by all functions in a camera session.
+ *
+ * Can be used to confer context (such as reactive state or session-wide parameters)
+ * on context receivers using [with] in a camera session.
+ */
+internal data class CameraSessionContext(
+ val context: Context,
+ val cameraProvider: ProcessCameraProvider,
+ val backgroundDispatcher: CoroutineDispatcher,
+ val screenFlashEvents: SendChannel<CameraUseCase.ScreenFlashEvent>,
+ val focusMeteringEvents: Channel<CameraEvent.FocusMeteringEvent>,
+ val videoCaptureControlEvents: Channel<VideoCaptureControlEvent>,
+ val currentCameraState: MutableStateFlow<CameraState>,
+ val surfaceRequests: MutableStateFlow<SurfaceRequest?>,
+ val transientSettings: StateFlow<TransientSessionSettings?>
+)
diff --git a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSessionSettings.kt b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSessionSettings.kt
new file mode 100644
index 0000000..b96c6a3
--- /dev/null
+++ b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSessionSettings.kt
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.jetpackcamera.core.camera
+
+import androidx.camera.core.CameraInfo
+import com.google.jetpackcamera.settings.model.AspectRatio
+import com.google.jetpackcamera.settings.model.CaptureMode
+import com.google.jetpackcamera.settings.model.DeviceRotation
+import com.google.jetpackcamera.settings.model.DynamicRange
+import com.google.jetpackcamera.settings.model.FlashMode
+import com.google.jetpackcamera.settings.model.ImageOutputFormat
+import com.google.jetpackcamera.settings.model.Stabilization
+
+/**
+ * Camera settings that persist as long as a camera is running.
+ *
+ * Any change in these settings will require calling [ProcessCameraProvider.runWith] with
+ * updated [CameraSelector] and/or [UseCaseGroup]
+ */
+internal sealed interface PerpetualSessionSettings {
+ val aspectRatio: AspectRatio
+
+ data class SingleCamera(
+ val cameraInfo: CameraInfo,
+ override val aspectRatio: AspectRatio,
+ val captureMode: CaptureMode,
+ val targetFrameRate: Int,
+ val stabilizePreviewMode: Stabilization,
+ val stabilizeVideoMode: Stabilization,
+ val dynamicRange: DynamicRange,
+ val imageFormat: ImageOutputFormat
+ ) : PerpetualSessionSettings
+
+ data class ConcurrentCamera(
+ val primaryCameraInfo: CameraInfo,
+ val secondaryCameraInfo: CameraInfo,
+ override val aspectRatio: AspectRatio
+ ) : PerpetualSessionSettings
+}
+
+/**
+ * Camera settings that can change while the camera is running.
+ *
+ * Any changes in these settings can be applied either directly to use cases via their
+ * setter methods or to [androidx.camera.core.CameraControl].
+ * The use cases typically will not need to be re-bound.
+ */
+internal data class TransientSessionSettings(
+ val audioMuted: Boolean,
+ val deviceRotation: DeviceRotation,
+ val flashMode: FlashMode,
+ val zoomScale: Float
+)
diff --git a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraUseCase.kt b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraUseCase.kt
index 26e72b8..10084bd 100644
--- a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraUseCase.kt
+++ b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraUseCase.kt
@@ -22,6 +22,7 @@
import com.google.jetpackcamera.settings.model.AspectRatio
import com.google.jetpackcamera.settings.model.CameraAppSettings
import com.google.jetpackcamera.settings.model.CaptureMode
+import com.google.jetpackcamera.settings.model.ConcurrentCameraMode
import com.google.jetpackcamera.settings.model.DeviceRotation
import com.google.jetpackcamera.settings.model.DynamicRange
import com.google.jetpackcamera.settings.model.FlashMode
@@ -29,7 +30,7 @@
import com.google.jetpackcamera.settings.model.LensFacing
import com.google.jetpackcamera.settings.model.LowLightBoost
import com.google.jetpackcamera.settings.model.Stabilization
-import kotlinx.coroutines.flow.SharedFlow
+import kotlinx.coroutines.channels.ReceiveChannel
import kotlinx.coroutines.flow.StateFlow
/**
@@ -81,7 +82,7 @@
fun getSurfaceRequest(): StateFlow<SurfaceRequest?>
- fun getScreenFlashEvents(): SharedFlow<ScreenFlashEvent>
+ fun getScreenFlashEvents(): ReceiveChannel<ScreenFlashEvent>
fun getCurrentSettings(): StateFlow<CameraAppSettings?>
@@ -101,6 +102,8 @@
fun setDeviceRotation(deviceRotation: DeviceRotation)
+ suspend fun setConcurrentCameraMode(concurrentCameraMode: ConcurrentCameraMode)
+
suspend fun setLowLightBoost(lowLightBoost: LowLightBoost)
suspend fun setImageFormat(imageFormat: ImageOutputFormat)
diff --git a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraXCameraUseCase.kt b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraXCameraUseCase.kt
index 3db618d..7d44497 100644
--- a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraXCameraUseCase.kt
+++ b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraXCameraUseCase.kt
@@ -15,71 +15,30 @@
*/
package com.google.jetpackcamera.core.camera
-import android.Manifest
import android.app.Application
import android.content.ContentResolver
import android.content.ContentValues
-import android.content.pm.PackageManager
-import android.hardware.camera2.CameraCaptureSession
-import android.hardware.camera2.CameraCharacteristics
-import android.hardware.camera2.CaptureRequest
-import android.hardware.camera2.CaptureResult
-import android.hardware.camera2.TotalCaptureResult
import android.net.Uri
-import android.os.Build
import android.os.Environment
-import android.os.SystemClock
import android.provider.MediaStore
import android.util.Log
-import android.util.Range
-import androidx.annotation.OptIn
-import androidx.camera.camera2.interop.Camera2CameraInfo
-import androidx.camera.camera2.interop.Camera2Interop
-import androidx.camera.camera2.interop.ExperimentalCamera2Interop
-import androidx.camera.core.AspectRatio.RATIO_16_9
-import androidx.camera.core.AspectRatio.RATIO_4_3
-import androidx.camera.core.Camera
-import androidx.camera.core.CameraControl
-import androidx.camera.core.CameraEffect
import androidx.camera.core.CameraInfo
import androidx.camera.core.CameraSelector
import androidx.camera.core.DynamicRange as CXDynamicRange
-import androidx.camera.core.ExperimentalImageCaptureOutputFormat
-import androidx.camera.core.FocusMeteringAction
import androidx.camera.core.ImageCapture
import androidx.camera.core.ImageCapture.OutputFileOptions
-import androidx.camera.core.ImageCapture.ScreenFlash
import androidx.camera.core.ImageCaptureException
-import androidx.camera.core.Preview
-import androidx.camera.core.SurfaceOrientedMeteringPointFactory
import androidx.camera.core.SurfaceRequest
-import androidx.camera.core.TorchState
-import androidx.camera.core.UseCase
-import androidx.camera.core.UseCaseGroup
-import androidx.camera.core.ViewPort
-import androidx.camera.core.resolutionselector.AspectRatioStrategy
-import androidx.camera.core.resolutionselector.ResolutionSelector
import androidx.camera.core.takePicture
import androidx.camera.lifecycle.ProcessCameraProvider
import androidx.camera.lifecycle.awaitInstance
-import androidx.camera.video.FileOutputOptions
-import androidx.camera.video.MediaStoreOutputOptions
import androidx.camera.video.Recorder
-import androidx.camera.video.Recording
-import androidx.camera.video.VideoCapture
-import androidx.camera.video.VideoRecordEvent
-import androidx.camera.video.VideoRecordEvent.Finalize.ERROR_NONE
-import androidx.concurrent.futures.await
-import androidx.core.content.ContextCompat
-import androidx.core.content.ContextCompat.checkSelfPermission
-import androidx.lifecycle.asFlow
-import com.google.jetpackcamera.core.camera.CameraUseCase.ScreenFlashEvent.Type
-import com.google.jetpackcamera.core.camera.effects.SingleSurfaceForcingEffect
import com.google.jetpackcamera.settings.SettableConstraintsRepository
import com.google.jetpackcamera.settings.model.AspectRatio
import com.google.jetpackcamera.settings.model.CameraAppSettings
import com.google.jetpackcamera.settings.model.CameraConstraints
import com.google.jetpackcamera.settings.model.CaptureMode
+import com.google.jetpackcamera.settings.model.ConcurrentCameraMode
import com.google.jetpackcamera.settings.model.DeviceRotation
import com.google.jetpackcamera.settings.model.DynamicRange
import com.google.jetpackcamera.settings.model.FlashMode
@@ -90,40 +49,24 @@
import com.google.jetpackcamera.settings.model.SupportedStabilizationMode
import com.google.jetpackcamera.settings.model.SystemConstraints
import dagger.hilt.android.scopes.ViewModelScoped
-import java.io.File
import java.io.FileNotFoundException
import java.text.SimpleDateFormat
import java.util.Calendar
-import java.util.Date
import java.util.Locale
-import java.util.concurrent.Executor
import javax.inject.Inject
-import kotlin.coroutines.ContinuationInterceptor
-import kotlin.math.abs
import kotlin.properties.Delegates
-import kotlinx.atomicfu.atomic
import kotlinx.coroutines.CoroutineDispatcher
-import kotlinx.coroutines.CoroutineScope
-import kotlinx.coroutines.CoroutineStart
-import kotlinx.coroutines.Job
-import kotlinx.coroutines.asExecutor
import kotlinx.coroutines.channels.Channel
import kotlinx.coroutines.channels.trySendBlocking
import kotlinx.coroutines.coroutineScope
-import kotlinx.coroutines.currentCoroutineContext
-import kotlinx.coroutines.flow.MutableSharedFlow
import kotlinx.coroutines.flow.MutableStateFlow
import kotlinx.coroutines.flow.StateFlow
-import kotlinx.coroutines.flow.asSharedFlow
import kotlinx.coroutines.flow.asStateFlow
import kotlinx.coroutines.flow.collectLatest
import kotlinx.coroutines.flow.distinctUntilChanged
import kotlinx.coroutines.flow.filterNotNull
-import kotlinx.coroutines.flow.first
import kotlinx.coroutines.flow.map
-import kotlinx.coroutines.flow.onCompletion
import kotlinx.coroutines.flow.update
-import kotlinx.coroutines.launch
private const val TAG = "CameraXCameraUseCase"
const val TARGET_FPS_AUTO = 0
@@ -139,78 +82,31 @@
@Inject
constructor(
private val application: Application,
- private val coroutineScope: CoroutineScope,
private val defaultDispatcher: CoroutineDispatcher,
private val constraintsRepository: SettableConstraintsRepository
) : CameraUseCase {
private lateinit var cameraProvider: ProcessCameraProvider
- /**
- * Applies a CaptureCallback to the provided image capture builder
- */
- @OptIn(ExperimentalCamera2Interop::class)
- private fun setOnCaptureCompletedCallback(previewBuilder: Preview.Builder) {
- val isFirstFrameTimestampUpdated = atomic(false)
- val captureCallback = object : CameraCaptureSession.CaptureCallback() {
- private var physicalCameraId: String? = null
- private var logicalCameraId: String? = null
- override fun onCaptureCompleted(
- session: CameraCaptureSession,
- request: CaptureRequest,
- result: TotalCaptureResult
- ) {
- super.onCaptureCompleted(session, request, result)
- var physicalCameraId: String? = null
- if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
- physicalCameraId = result.get(
- CaptureResult.LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID
- )
- }
- val logicalCameraId = session.device.id
- if (!physicalCameraId.equals(this.physicalCameraId) ||
- logicalCameraId != this.logicalCameraId
- ) {
- _currentCameraState.update { old ->
- old.copy(
- debugInfo = DebugInfo(logicalCameraId, physicalCameraId)
- )
- }
- }
- try {
- if (!isFirstFrameTimestampUpdated.value) {
- _currentCameraState.update { old ->
- old.copy(
- sessionFirstFrameTimestamp = SystemClock.elapsedRealtimeNanos()
- )
- }
- isFirstFrameTimestampUpdated.value = true
- }
- } catch (_: Exception) {
- }
- }
- }
-
- // Create an Extender to attach Camera2 options
- val imageCaptureExtender = Camera2Interop.Extender(previewBuilder)
-
- // Attach the Camera2 CaptureCallback
- imageCaptureExtender.setSessionCaptureCallback(captureCallback)
- }
-
private var imageCaptureUseCase: ImageCapture? = null
- private lateinit var captureMode: CaptureMode
private lateinit var systemConstraints: SystemConstraints
private var useCaseMode by Delegates.notNull<CameraUseCase.UseCaseMode>()
- private val screenFlashEvents: MutableSharedFlow<CameraUseCase.ScreenFlashEvent> =
- MutableSharedFlow()
+ private val screenFlashEvents: Channel<CameraUseCase.ScreenFlashEvent> =
+ Channel(capacity = Channel.UNLIMITED)
private val focusMeteringEvents =
Channel<CameraEvent.FocusMeteringEvent>(capacity = Channel.CONFLATED)
private val videoCaptureControlEvents = Channel<VideoCaptureControlEvent>()
private val currentSettings = MutableStateFlow<CameraAppSettings?>(null)
+ // Could be improved by setting initial value only when camera is initialized
+ private val _currentCameraState = MutableStateFlow(CameraState())
+ override fun getCurrentCameraState(): StateFlow<CameraState> = _currentCameraState.asStateFlow()
+
+ private val _surfaceRequest = MutableStateFlow<SurfaceRequest?>(null)
+ override fun getSurfaceRequest(): StateFlow<SurfaceRequest?> = _surfaceRequest.asStateFlow()
+
override suspend fun initialize(
cameraAppSettings: CameraAppSettings,
useCaseMode: CameraUseCase.UseCaseMode
@@ -230,6 +126,10 @@
// Build and update the system constraints
systemConstraints = SystemConstraints(
availableLenses = availableCameraLenses,
+ concurrentCamerasSupported = cameraProvider.availableConcurrentCameraInfos.any {
+ it.map { cameraInfo -> cameraInfo.cameraSelector.toAppLensFacing() }
+ .toSet() == setOf(LensFacing.FRONT, LensFacing.BACK)
+ },
perLensConstraints = buildMap {
val availableCameraInfos = cameraProvider.availableCameraInfos
for (lensFacing in availableCameraLenses) {
@@ -241,17 +141,18 @@
.toSet()
val supportedStabilizationModes = buildSet {
- if (isPreviewStabilizationSupported(camInfo)) {
+ if (camInfo.isPreviewStabilizationSupported) {
add(SupportedStabilizationMode.ON)
}
- if (isVideoStabilizationSupported(camInfo)) {
+ if (camInfo.isVideoStabilizationSupported) {
add(SupportedStabilizationMode.HIGH_QUALITY)
}
}
- val supportedFixedFrameRates = getSupportedFrameRates(camInfo)
- val supportedImageFormats = getSupportedImageFormats(camInfo)
+ val supportedFixedFrameRates =
+ camInfo.filterSupportedFixedFrameRates(FIXED_FRAME_RATES)
+ val supportedImageFormats = camInfo.supportedImageFormats
val hasFlashUnit = camInfo.hasFlashUnit()
put(
@@ -284,39 +185,9 @@
.tryApplyImageFormatConstraints()
.tryApplyFrameRateConstraints()
.tryApplyStabilizationConstraints()
+ .tryApplyConcurrentCameraModeConstraints()
}
- /**
- * Camera settings that persist as long as a camera is running.
- *
- * Any change in these settings will require calling [ProcessCameraProvider.runWith] with
- * updates [CameraSelector] and/or [UseCaseGroup]
- */
- private data class PerpetualSessionSettings(
- val cameraSelector: CameraSelector,
- val aspectRatio: AspectRatio,
- val captureMode: CaptureMode,
- val targetFrameRate: Int,
- val stabilizePreviewMode: Stabilization,
- val stabilizeVideoMode: Stabilization,
- val dynamicRange: DynamicRange,
- val imageFormat: ImageOutputFormat
- )
-
- /**
- * Camera settings that can change while the camera is running.
- *
- * Any changes in these settings can be applied either directly to use cases via their
- * setter methods or to [androidx.camera.core.CameraControl].
- * The use cases typically will not need to be re-bound.
- */
- private data class TransientSessionSettings(
- val audioMuted: Boolean,
- val deviceRotation: DeviceRotation,
- val flashMode: FlashMode,
- val zoomScale: Float
- )
-
override suspend fun runCamera() = coroutineScope {
Log.d(TAG, "runCamera")
@@ -331,257 +202,90 @@
zoomScale = currentCameraSettings.zoomScale
)
- val cameraSelector = when (currentCameraSettings.cameraLensFacing) {
- LensFacing.FRONT -> CameraSelector.DEFAULT_FRONT_CAMERA
- LensFacing.BACK -> CameraSelector.DEFAULT_BACK_CAMERA
- }
+ when (currentCameraSettings.concurrentCameraMode) {
+ ConcurrentCameraMode.OFF -> {
+ val cameraSelector = when (currentCameraSettings.cameraLensFacing) {
+ LensFacing.FRONT -> CameraSelector.DEFAULT_FRONT_CAMERA
+ LensFacing.BACK -> CameraSelector.DEFAULT_BACK_CAMERA
+ }
- PerpetualSessionSettings(
- cameraSelector = cameraSelector,
- aspectRatio = currentCameraSettings.aspectRatio,
- captureMode = currentCameraSettings.captureMode,
- targetFrameRate = currentCameraSettings.targetFrameRate,
- stabilizePreviewMode = currentCameraSettings.previewStabilization,
- stabilizeVideoMode = currentCameraSettings.videoCaptureStabilization,
- dynamicRange = currentCameraSettings.dynamicRange,
- imageFormat = currentCameraSettings.imageFormat
- )
- }.distinctUntilChanged()
- .collectLatest { sessionSettings ->
- Log.d(TAG, "Starting new camera session")
- val cameraInfo = sessionSettings.cameraSelector.filter(
- cameraProvider.availableCameraInfos
- ).first()
-
- val lensFacing = sessionSettings.cameraSelector.toAppLensFacing()
- val cameraConstraints = checkNotNull(
- systemConstraints.perLensConstraints[lensFacing]
- ) {
- "Unable to retrieve CameraConstraints for $lensFacing. " +
- "Was the use case initialized?"
- }
-
- val initialTransientSettings = transientSettings
- .filterNotNull()
- .first()
-
- val useCaseGroup = createUseCaseGroup(
- cameraInfo,
- sessionSettings,
- initialTransientSettings,
- cameraConstraints.supportedStabilizationModes,
- effect = when (sessionSettings.captureMode) {
- CaptureMode.SINGLE_STREAM -> SingleSurfaceForcingEffect(coroutineScope)
- CaptureMode.MULTI_STREAM -> null
- }
- )
-
- var prevTransientSettings = initialTransientSettings
- cameraProvider.runWith(sessionSettings.cameraSelector, useCaseGroup) { camera ->
- Log.d(TAG, "Camera session started")
-
- launch {
- processFocusMeteringEvents(camera.cameraControl)
- }
-
- launch {
- processVideoControlEvents(
- camera,
- useCaseGroup.getVideoCapture(),
- sessionSettings,
- transientSettings
+ PerpetualSessionSettings.SingleCamera(
+ cameraInfo = cameraProvider.getCameraInfo(cameraSelector),
+ aspectRatio = currentCameraSettings.aspectRatio,
+ captureMode = currentCameraSettings.captureMode,
+ targetFrameRate = currentCameraSettings.targetFrameRate,
+ stabilizePreviewMode = currentCameraSettings.previewStabilization,
+ stabilizeVideoMode = currentCameraSettings.videoCaptureStabilization,
+ dynamicRange = currentCameraSettings.dynamicRange,
+ imageFormat = currentCameraSettings.imageFormat
)
}
-
- launch {
- cameraInfo.torchState.asFlow().collectLatest { torchState ->
- _currentCameraState.update { old ->
- old.copy(torchEnabled = torchState == TorchState.ON)
+ ConcurrentCameraMode.DUAL -> {
+ val primaryFacing = currentCameraSettings.cameraLensFacing
+ val secondaryFacing = primaryFacing.flip()
+ cameraProvider.availableConcurrentCameraInfos.firstNotNullOf {
+ var primaryCameraInfo: CameraInfo? = null
+ var secondaryCameraInfo: CameraInfo? = null
+ it.forEach { cameraInfo ->
+ if (cameraInfo.appLensFacing == primaryFacing) {
+ primaryCameraInfo = cameraInfo
+ } else if (cameraInfo.appLensFacing == secondaryFacing) {
+ secondaryCameraInfo = cameraInfo
+ }
}
- }
- }
- applyDeviceRotation(initialTransientSettings.deviceRotation, useCaseGroup)
- transientSettings.filterNotNull().collectLatest { newTransientSettings ->
- // Apply camera control settings
- if (prevTransientSettings.zoomScale != newTransientSettings.zoomScale) {
- cameraInfo.zoomState.value?.let { zoomState ->
- val finalScale =
- (zoomState.zoomRatio * newTransientSettings.zoomScale).coerceIn(
- zoomState.minZoomRatio,
- zoomState.maxZoomRatio
+ primaryCameraInfo?.let { nonNullPrimary ->
+ secondaryCameraInfo?.let { nonNullSecondary ->
+ PerpetualSessionSettings.ConcurrentCamera(
+ primaryCameraInfo = nonNullPrimary,
+ secondaryCameraInfo = nonNullSecondary,
+ aspectRatio = currentCameraSettings.aspectRatio
)
- camera.cameraControl.setZoomRatio(finalScale)
- _currentCameraState.update { old ->
- old.copy(zoomScale = finalScale)
}
}
}
-
- if (imageCaptureUseCase != null &&
- prevTransientSettings.flashMode != newTransientSettings.flashMode
- ) {
- setFlashModeInternal(
- flashMode = newTransientSettings.flashMode,
- isFrontFacing = sessionSettings.cameraSelector
- == CameraSelector.DEFAULT_FRONT_CAMERA
- )
- }
-
- if (prevTransientSettings.deviceRotation
- != newTransientSettings.deviceRotation
- ) {
- Log.d(
- TAG,
- "Updating device rotation from " +
- "${prevTransientSettings.deviceRotation} -> " +
- "${newTransientSettings.deviceRotation}"
- )
- applyDeviceRotation(newTransientSettings.deviceRotation, useCaseGroup)
- }
-
- prevTransientSettings = newTransientSettings
}
}
- }
- }
-
- private fun applyDeviceRotation(deviceRotation: DeviceRotation, useCaseGroup: UseCaseGroup) {
- val targetRotation = deviceRotation.toUiSurfaceRotation()
- useCaseGroup.useCases.forEach {
- when (it) {
- is Preview -> {
- // Preview's target rotation should not be updated with device rotation.
- // Instead, preview rotation should match the display rotation.
- // When Preview is created, it is initialized with the display rotation.
- // This will need to be updated separately if the display rotation is not
- // locked. Currently the app is locked to portrait orientation.
- }
-
- is ImageCapture -> {
- it.targetRotation = targetRotation
- }
-
- is VideoCapture<*> -> {
- it.targetRotation = targetRotation
- }
- }
- }
- }
-
- private suspend fun processFocusMeteringEvents(cameraControl: CameraControl) {
- getSurfaceRequest().map { surfaceRequest ->
- surfaceRequest?.resolution?.run {
- Log.d(
- TAG,
- "Waiting to process focus points for surface with resolution: " +
- "$width x $height"
- )
- SurfaceOrientedMeteringPointFactory(width.toFloat(), height.toFloat())
- }
- }.collectLatest { meteringPointFactory ->
- for (event in focusMeteringEvents) {
- meteringPointFactory?.apply {
- Log.d(TAG, "tapToFocus, processing event: $event")
- val meteringPoint = createPoint(event.x, event.y)
- val action = FocusMeteringAction.Builder(meteringPoint).build()
- cameraControl.startFocusAndMetering(action)
- } ?: run {
- Log.w(TAG, "Ignoring event due to no SurfaceRequest: $event")
- }
- }
- }
- }
-
- private suspend fun processVideoControlEvents(
- camera: Camera,
- videoCapture: VideoCapture<Recorder>?,
- sessionSettings: PerpetualSessionSettings,
- transientSettings: StateFlow<TransientSessionSettings?>
- ) = coroutineScope {
- var recordingJob: Job? = null
-
- for (event in videoCaptureControlEvents) {
- when (event) {
- is VideoCaptureControlEvent.StartRecordingEvent -> {
- if (videoCapture == null) {
- throw RuntimeException(
- "Attempted video recording with null videoCapture"
+ }.distinctUntilChanged()
+ .collectLatest { sessionSettings ->
+ coroutineScope {
+ with(
+ CameraSessionContext(
+ context = application,
+ cameraProvider = cameraProvider,
+ backgroundDispatcher = defaultDispatcher,
+ screenFlashEvents = screenFlashEvents,
+ focusMeteringEvents = focusMeteringEvents,
+ videoCaptureControlEvents = videoCaptureControlEvents,
+ currentCameraState = _currentCameraState,
+ surfaceRequests = _surfaceRequest,
+ transientSettings = transientSettings
)
- }
+ ) {
+ try {
+ when (sessionSettings) {
+ is PerpetualSessionSettings.SingleCamera -> runSingleCameraSession(
+ sessionSettings,
+ useCaseMode = useCaseMode
+ ) { imageCapture ->
+ imageCaptureUseCase = imageCapture
+ }
- recordingJob = launch(start = CoroutineStart.UNDISPATCHED) {
- runVideoRecording(
- camera,
- videoCapture,
- sessionSettings,
- transientSettings,
- event.videoCaptureUri,
- event.shouldUseUri,
- event.onVideoRecord
- )
- }
- }
-
- VideoCaptureControlEvent.StopRecordingEvent -> {
- recordingJob?.cancel()
- recordingJob = null
- }
- }
- }
- }
-
- private suspend fun runVideoRecording(
- camera: Camera,
- videoCapture: VideoCapture<Recorder>,
- sessionSettings: PerpetualSessionSettings,
- transientSettings: StateFlow<TransientSessionSettings?>,
- videoCaptureUri: Uri?,
- shouldUseUri: Boolean,
- onVideoRecord: (CameraUseCase.OnVideoRecordEvent) -> Unit
- ) {
- var currentSettings = transientSettings.filterNotNull().first()
-
- startVideoRecordingInternal(
- initialMuted = currentSettings.audioMuted,
- videoCaptureUri,
- shouldUseUri,
- videoCapture,
- onVideoRecord
- ).use { recording ->
-
- fun TransientSessionSettings.isFlashModeOn() = flashMode == FlashMode.ON
- val isFrontCameraSelector =
- sessionSettings.cameraSelector == CameraSelector.DEFAULT_FRONT_CAMERA
-
- if (currentSettings.isFlashModeOn()) {
- if (!isFrontCameraSelector) {
- camera.cameraControl.enableTorch(true).await()
- } else {
- Log.d(TAG, "Unable to enable torch for front camera.")
- }
- }
-
- transientSettings.filterNotNull()
- .onCompletion {
- // Could do some fancier tracking of whether the torch was enabled before
- // calling this.
- camera.cameraControl.enableTorch(false)
- }
- .collectLatest { newTransientSettings ->
- if (currentSettings.audioMuted != newTransientSettings.audioMuted) {
- recording.mute(newTransientSettings.audioMuted)
- }
- if (currentSettings.isFlashModeOn() != newTransientSettings.isFlashModeOn()) {
- if (!isFrontCameraSelector) {
- camera.cameraControl.enableTorch(newTransientSettings.isFlashModeOn())
- } else {
- Log.d(TAG, "Unable to update torch for front camera.")
+ is PerpetualSessionSettings.ConcurrentCamera ->
+ runConcurrentCameraSession(
+ sessionSettings,
+ useCaseMode = CameraUseCase.UseCaseMode.VIDEO_ONLY
+ )
+ }
+ } finally {
+ // TODO(tm): This shouldn't be necessary. Cancellation of the
+ // coroutineScope by collectLatest should cause this to
+ // occur naturally.
+ cameraProvider.unbindAll()
}
}
- currentSettings = newTransientSettings
}
- }
+ }
}
override suspend fun takePicture(onCaptureStarted: (() -> Unit)) {
@@ -701,112 +405,12 @@
videoCaptureControlEvents.trySendBlocking(VideoCaptureControlEvent.StopRecordingEvent)
}
- private suspend fun startVideoRecordingInternal(
- initialMuted: Boolean,
- videoCaptureUri: Uri?,
- shouldUseUri: Boolean,
- videoCaptureUseCase: VideoCapture<Recorder>,
- onVideoRecord: (CameraUseCase.OnVideoRecordEvent) -> Unit
- ): Recording {
- Log.d(TAG, "recordVideo")
- // todo(b/336886716): default setting to enable or disable audio when permission is granted
-
- // ok. there is a difference between MUTING and ENABLING audio
- // audio must be enabled in order to be muted
- // if the video recording isnt started with audio enabled, you will not be able to unmute it
- // the toggle should only affect whether or not the audio is muted.
- // the permission will determine whether or not the audio is enabled.
- val audioEnabled = (
- checkSelfPermission(
- this.application.baseContext,
- Manifest.permission.RECORD_AUDIO
- )
- == PackageManager.PERMISSION_GRANTED
- )
-
- val pendingRecord = if (shouldUseUri) {
- val fileOutputOptions = FileOutputOptions.Builder(
- File(videoCaptureUri!!.path!!)
- ).build()
- videoCaptureUseCase.output.prepareRecording(application, fileOutputOptions)
- } else {
- val captureTypeString =
- when (captureMode) {
- CaptureMode.MULTI_STREAM -> "MultiStream"
- CaptureMode.SINGLE_STREAM -> "SingleStream"
- }
- val name = "JCA-recording-${Date()}-$captureTypeString.mp4"
- val contentValues =
- ContentValues().apply {
- put(MediaStore.Video.Media.DISPLAY_NAME, name)
- }
- val mediaStoreOutput =
- MediaStoreOutputOptions.Builder(
- application.contentResolver,
- MediaStore.Video.Media.EXTERNAL_CONTENT_URI
- )
- .setContentValues(contentValues)
- .build()
- videoCaptureUseCase.output.prepareRecording(application, mediaStoreOutput)
- }
- pendingRecord.apply {
- if (audioEnabled) {
- withAudioEnabled()
- }
- }
- val callbackExecutor: Executor =
- (
- currentCoroutineContext()[ContinuationInterceptor] as?
- CoroutineDispatcher
- )?.asExecutor() ?: ContextCompat.getMainExecutor(application)
- return pendingRecord.start(callbackExecutor) { onVideoRecordEvent ->
- Log.d(TAG, onVideoRecordEvent.toString())
- when (onVideoRecordEvent) {
- is VideoRecordEvent.Finalize -> {
- when (onVideoRecordEvent.error) {
- ERROR_NONE ->
- onVideoRecord(
- CameraUseCase.OnVideoRecordEvent.OnVideoRecorded(
- onVideoRecordEvent.outputResults.outputUri
- )
- )
-
- else ->
- onVideoRecord(
- CameraUseCase.OnVideoRecordEvent.OnVideoRecordError(
- onVideoRecordEvent.cause
- )
- )
- }
- }
-
- is VideoRecordEvent.Status -> {
- onVideoRecord(
- CameraUseCase.OnVideoRecordEvent.OnVideoRecordStatus(
- onVideoRecordEvent.recordingStats.audioStats
- .audioAmplitude
- )
- )
- }
- }
- }.apply {
- mute(initialMuted)
- }
- }
-
override fun setZoomScale(scale: Float) {
currentSettings.update { old ->
old?.copy(zoomScale = scale)
}
}
- // Could be improved by setting initial value only when camera is initialized
- private val _currentCameraState = MutableStateFlow(CameraState())
- override fun getCurrentCameraState(): StateFlow<CameraState> = _currentCameraState.asStateFlow()
-
- private val _surfaceRequest = MutableStateFlow<SurfaceRequest?>(null)
- override fun getSurfaceRequest(): StateFlow<SurfaceRequest?> = _surfaceRequest.asStateFlow()
-
// Sets the camera to the designated lensFacing direction
override suspend fun setLensFacing(lensFacing: LensFacing) {
currentSettings.update { old ->
@@ -908,11 +512,28 @@
} ?: this
}
+ private fun CameraAppSettings.tryApplyConcurrentCameraModeConstraints(): CameraAppSettings =
+ when (concurrentCameraMode) {
+ ConcurrentCameraMode.OFF -> this
+ else ->
+ if (systemConstraints.concurrentCamerasSupported) {
+ copy(
+ targetFrameRate = TARGET_FPS_AUTO,
+ previewStabilization = Stabilization.OFF,
+ videoCaptureStabilization = Stabilization.OFF,
+ dynamicRange = DynamicRange.SDR,
+ captureMode = CaptureMode.MULTI_STREAM
+ )
+ } else {
+ copy(concurrentCameraMode = ConcurrentCameraMode.OFF)
+ }
+ }
+
override suspend fun tapToFocus(x: Float, y: Float) {
focusMeteringEvents.send(CameraEvent.FocusMeteringEvent(x, y))
}
- override fun getScreenFlashEvents() = screenFlashEvents.asSharedFlow()
+ override fun getScreenFlashEvents() = screenFlashEvents
override fun getCurrentSettings() = currentSettings.asStateFlow()
override fun setFlashMode(flashMode: FlashMode) {
@@ -921,58 +542,9 @@
}
}
- private fun setFlashModeInternal(flashMode: FlashMode, isFrontFacing: Boolean) {
- val isScreenFlashRequired =
- isFrontFacing && (flashMode == FlashMode.ON || flashMode == FlashMode.AUTO)
-
- if (isScreenFlashRequired) {
- imageCaptureUseCase!!.screenFlash = object : ScreenFlash {
- override fun apply(
- expirationTimeMillis: Long,
- listener: ImageCapture.ScreenFlashListener
- ) {
- Log.d(TAG, "ImageCapture.ScreenFlash: apply")
- coroutineScope.launch {
- screenFlashEvents.emit(
- CameraUseCase.ScreenFlashEvent(Type.APPLY_UI) {
- listener.onCompleted()
- }
- )
- }
- }
-
- override fun clear() {
- Log.d(TAG, "ImageCapture.ScreenFlash: clear")
- coroutineScope.launch {
- screenFlashEvents.emit(
- CameraUseCase.ScreenFlashEvent(Type.CLEAR_UI) {}
- )
- }
- }
- }
- }
-
- imageCaptureUseCase!!.flashMode = when (flashMode) {
- FlashMode.OFF -> ImageCapture.FLASH_MODE_OFF // 2
-
- FlashMode.ON -> if (isScreenFlashRequired) {
- ImageCapture.FLASH_MODE_SCREEN // 3
- } else {
- ImageCapture.FLASH_MODE_ON // 1
- }
-
- FlashMode.AUTO -> if (isScreenFlashRequired) {
- ImageCapture.FLASH_MODE_SCREEN // 3
- } else {
- ImageCapture.FLASH_MODE_AUTO // 0
- }
- }
- Log.d(TAG, "Set flash mode to: ${imageCaptureUseCase!!.flashMode}")
- }
-
- override fun isScreenFlashEnabled() = imageCaptureUseCase != null &&
- imageCaptureUseCase!!.flashMode == ImageCapture.FLASH_MODE_SCREEN &&
- imageCaptureUseCase!!.screenFlash != null
+ override fun isScreenFlashEnabled() =
+ imageCaptureUseCase?.flashMode == ImageCapture.FLASH_MODE_SCREEN &&
+ imageCaptureUseCase?.screenFlash != null
override suspend fun setAspectRatio(aspectRatio: AspectRatio) {
currentSettings.update { old ->
@@ -982,72 +554,16 @@
override suspend fun setCaptureMode(captureMode: CaptureMode) {
currentSettings.update { old ->
- old?.copy(captureMode = captureMode)?.tryApplyImageFormatConstraints()
+ old?.copy(captureMode = captureMode)
+ ?.tryApplyImageFormatConstraints()
+ ?.tryApplyConcurrentCameraModeConstraints()
}
}
- private fun createUseCaseGroup(
- cameraInfo: CameraInfo,
- sessionSettings: PerpetualSessionSettings,
- initialTransientSettings: TransientSessionSettings,
- supportedStabilizationModes: Set<SupportedStabilizationMode>,
- effect: CameraEffect? = null
- ): UseCaseGroup {
- val previewUseCase =
- createPreviewUseCase(cameraInfo, sessionSettings, supportedStabilizationModes)
- if (useCaseMode != CameraUseCase.UseCaseMode.VIDEO_ONLY) {
- imageCaptureUseCase = createImageUseCase(cameraInfo, sessionSettings)
- }
- var videoCaptureUseCase: VideoCapture<Recorder>? = null
- if (useCaseMode != CameraUseCase.UseCaseMode.IMAGE_ONLY) {
- videoCaptureUseCase =
- createVideoUseCase(cameraInfo, sessionSettings, supportedStabilizationModes)
- }
-
- if (imageCaptureUseCase != null) {
- setFlashModeInternal(
- initialTransientSettings.flashMode,
- sessionSettings.cameraSelector == CameraSelector.DEFAULT_FRONT_CAMERA
- )
- }
-
- return UseCaseGroup.Builder().apply {
- Log.d(
- TAG,
- "Setting initial device rotation to ${initialTransientSettings.deviceRotation}"
- )
- setViewPort(
- ViewPort.Builder(
- sessionSettings.aspectRatio.ratio,
- // Initialize rotation to Preview's rotation, which comes from Display rotation
- previewUseCase.targetRotation
- ).build()
- )
- addUseCase(previewUseCase)
- if (imageCaptureUseCase != null &&
- (
- sessionSettings.dynamicRange == DynamicRange.SDR ||
- sessionSettings.imageFormat == ImageOutputFormat.JPEG_ULTRA_HDR
- )
- ) {
- addUseCase(imageCaptureUseCase!!)
- }
- // Not to bind VideoCapture when Ultra HDR is enabled to keep the app design simple.
- if (videoCaptureUseCase != null &&
- sessionSettings.imageFormat == ImageOutputFormat.JPEG
- ) {
- addUseCase(videoCaptureUseCase)
- }
-
- effect?.let { addEffect(it) }
-
- captureMode = sessionSettings.captureMode
- }.build()
- }
-
override suspend fun setDynamicRange(dynamicRange: DynamicRange) {
currentSettings.update { old ->
old?.copy(dynamicRange = dynamicRange)
+ ?.tryApplyConcurrentCameraModeConstraints()
}
}
@@ -1057,6 +573,13 @@
}
}
+ override suspend fun setConcurrentCameraMode(concurrentCameraMode: ConcurrentCameraMode) {
+ currentSettings.update { old ->
+ old?.copy(concurrentCameraMode = concurrentCameraMode)
+ ?.tryApplyConcurrentCameraModeConstraints()
+ }
+ }
+
override suspend fun setImageFormat(imageFormat: ImageOutputFormat) {
currentSettings.update { old ->
old?.copy(imageFormat = imageFormat)
@@ -1068,6 +591,7 @@
old?.copy(
previewStabilization = previewStabilization
)?.tryApplyStabilizationConstraints()
+ ?.tryApplyConcurrentCameraModeConstraints()
}
}
@@ -1076,39 +600,17 @@
old?.copy(
videoCaptureStabilization = videoCaptureStabilization
)?.tryApplyStabilizationConstraints()
+ ?.tryApplyConcurrentCameraModeConstraints()
}
}
override suspend fun setTargetFrameRate(targetFrameRate: Int) {
currentSettings.update { old ->
old?.copy(targetFrameRate = targetFrameRate)?.tryApplyFrameRateConstraints()
+ ?.tryApplyConcurrentCameraModeConstraints()
}
}
- @OptIn(ExperimentalImageCaptureOutputFormat::class)
- private fun getSupportedImageFormats(cameraInfo: CameraInfo): Set<ImageOutputFormat> {
- return ImageCapture.getImageCaptureCapabilities(cameraInfo).supportedOutputFormats
- .mapNotNull(Int::toAppImageFormat)
- .toSet()
- }
-
- @OptIn(ExperimentalImageCaptureOutputFormat::class)
- private fun createImageUseCase(
- cameraInfo: CameraInfo,
- sessionSettings: PerpetualSessionSettings
- ): ImageCapture {
- val builder = ImageCapture.Builder()
- builder.setResolutionSelector(
- getResolutionSelector(cameraInfo.sensorLandscapeRatio, sessionSettings.aspectRatio)
- )
- if (sessionSettings.dynamicRange != DynamicRange.SDR &&
- sessionSettings.imageFormat == ImageOutputFormat.JPEG_ULTRA_HDR
- ) {
- builder.setOutputFormat(ImageCapture.OUTPUT_FORMAT_JPEG_ULTRA_HDR)
- }
- return builder.build()
- }
-
override suspend fun setLowLightBoost(lowLightBoost: LowLightBoost) {
currentSettings.update { old ->
old?.copy(lowLightBoost = lowLightBoost)
@@ -1121,213 +623,7 @@
}
}
- private fun createVideoUseCase(
- cameraInfo: CameraInfo,
- sessionSettings: PerpetualSessionSettings,
- supportedStabilizationMode: Set<SupportedStabilizationMode>
- ): VideoCapture<Recorder> {
- val sensorLandscapeRatio = cameraInfo.sensorLandscapeRatio
- val recorder = Recorder.Builder()
- .setAspectRatio(
- getAspectRatioForUseCase(sensorLandscapeRatio, sessionSettings.aspectRatio)
- )
- .setExecutor(defaultDispatcher.asExecutor()).build()
- return VideoCapture.Builder(recorder).apply {
- // set video stabilization
- if (shouldVideoBeStabilized(sessionSettings, supportedStabilizationMode)
- ) {
- setVideoStabilizationEnabled(true)
- }
- // set target fps
- if (sessionSettings.targetFrameRate != TARGET_FPS_AUTO) {
- setTargetFrameRate(
- Range(sessionSettings.targetFrameRate, sessionSettings.targetFrameRate)
- )
- }
-
- setDynamicRange(sessionSettings.dynamicRange.toCXDynamicRange())
- }.build()
- }
-
- private fun getAspectRatioForUseCase(
- sensorLandscapeRatio: Float,
- aspectRatio: AspectRatio
- ): Int {
- return when (aspectRatio) {
- AspectRatio.THREE_FOUR -> RATIO_4_3
- AspectRatio.NINE_SIXTEEN -> RATIO_16_9
- else -> {
- // Choose the aspect ratio which maximizes FOV by being closest to the sensor ratio
- if (
- abs(sensorLandscapeRatio - AspectRatio.NINE_SIXTEEN.landscapeRatio.toFloat()) <
- abs(sensorLandscapeRatio - AspectRatio.THREE_FOUR.landscapeRatio.toFloat())
- ) {
- RATIO_16_9
- } else {
- RATIO_4_3
- }
- }
- }
- }
-
- private fun shouldVideoBeStabilized(
- sessionSettings: PerpetualSessionSettings,
- supportedStabilizationModes: Set<SupportedStabilizationMode>
- ): Boolean {
- // video is on and target fps is not 60
- return (sessionSettings.targetFrameRate != TARGET_FPS_60) &&
- (supportedStabilizationModes.contains(SupportedStabilizationMode.HIGH_QUALITY)) &&
- // high quality (video only) selected
- (
- sessionSettings.stabilizeVideoMode == Stabilization.ON &&
- sessionSettings.stabilizePreviewMode == Stabilization.UNDEFINED
- )
- }
-
- private fun createPreviewUseCase(
- cameraInfo: CameraInfo,
- sessionSettings: PerpetualSessionSettings,
- supportedStabilizationModes: Set<SupportedStabilizationMode>
- ): Preview {
- val previewUseCaseBuilder = Preview.Builder()
-
- setOnCaptureCompletedCallback(previewUseCaseBuilder)
-
- // set preview stabilization
- if (shouldPreviewBeStabilized(sessionSettings, supportedStabilizationModes)) {
- previewUseCaseBuilder.setPreviewStabilizationEnabled(true)
- }
-
- previewUseCaseBuilder.setResolutionSelector(
- getResolutionSelector(cameraInfo.sensorLandscapeRatio, sessionSettings.aspectRatio)
- )
-
- return previewUseCaseBuilder.build().apply {
- setSurfaceProvider { surfaceRequest ->
- _surfaceRequest.value = surfaceRequest
- }
- }
- }
-
- private fun getResolutionSelector(
- sensorLandscapeRatio: Float,
- aspectRatio: AspectRatio
- ): ResolutionSelector {
- val aspectRatioStrategy = when (aspectRatio) {
- AspectRatio.THREE_FOUR -> AspectRatioStrategy.RATIO_4_3_FALLBACK_AUTO_STRATEGY
- AspectRatio.NINE_SIXTEEN -> AspectRatioStrategy.RATIO_16_9_FALLBACK_AUTO_STRATEGY
- else -> {
- // Choose the resolution selector strategy which maximizes FOV by being closest
- // to the sensor aspect ratio
- if (
- abs(sensorLandscapeRatio - AspectRatio.NINE_SIXTEEN.landscapeRatio.toFloat()) <
- abs(sensorLandscapeRatio - AspectRatio.THREE_FOUR.landscapeRatio.toFloat())
- ) {
- AspectRatioStrategy.RATIO_16_9_FALLBACK_AUTO_STRATEGY
- } else {
- AspectRatioStrategy.RATIO_4_3_FALLBACK_AUTO_STRATEGY
- }
- }
- }
- return ResolutionSelector.Builder().setAspectRatioStrategy(aspectRatioStrategy).build()
- }
-
- private fun shouldPreviewBeStabilized(
- sessionSettings: PerpetualSessionSettings,
- supportedStabilizationModes: Set<SupportedStabilizationMode>
- ): Boolean {
- // only supported if target fps is 30 or none
- return ((sessionSettings.targetFrameRate in setOf(TARGET_FPS_AUTO, TARGET_FPS_30))) &&
- (
- supportedStabilizationModes.contains(SupportedStabilizationMode.ON) &&
- sessionSettings.stabilizePreviewMode == Stabilization.ON
- )
- }
-
companion object {
private val FIXED_FRAME_RATES = setOf(TARGET_FPS_15, TARGET_FPS_30, TARGET_FPS_60)
-
- /**
- * Checks if preview stabilization is supported by the device.
- *
- */
- private fun isPreviewStabilizationSupported(cameraInfo: CameraInfo): Boolean {
- return Preview.getPreviewCapabilities(cameraInfo).isStabilizationSupported
- }
-
- /**
- * Checks if video stabilization is supported by the device.
- *
- */
- private fun isVideoStabilizationSupported(cameraInfo: CameraInfo): Boolean {
- return Recorder.getVideoCapabilities(cameraInfo).isStabilizationSupported
- }
-
- private fun getSupportedFrameRates(camInfo: CameraInfo): Set<Int> {
- return buildSet {
- camInfo.supportedFrameRateRanges.forEach { e ->
- if (e.upper == e.lower && FIXED_FRAME_RATES.contains(e.upper)) {
- add(e.upper)
- }
- }
- }
- }
}
}
-
-private fun CXDynamicRange.toSupportedAppDynamicRange(): DynamicRange? {
- return when (this) {
- CXDynamicRange.SDR -> DynamicRange.SDR
- CXDynamicRange.HLG_10_BIT -> DynamicRange.HLG10
- // All other dynamic ranges unsupported. Return null.
- else -> null
- }
-}
-
-private fun DynamicRange.toCXDynamicRange(): CXDynamicRange {
- return when (this) {
- DynamicRange.SDR -> CXDynamicRange.SDR
- DynamicRange.HLG10 -> CXDynamicRange.HLG_10_BIT
- }
-}
-
-private fun LensFacing.toCameraSelector(): CameraSelector = when (this) {
- LensFacing.FRONT -> CameraSelector.DEFAULT_FRONT_CAMERA
- LensFacing.BACK -> CameraSelector.DEFAULT_BACK_CAMERA
-}
-
-private fun CameraSelector.toAppLensFacing(): LensFacing = when (this) {
- CameraSelector.DEFAULT_FRONT_CAMERA -> LensFacing.FRONT
- CameraSelector.DEFAULT_BACK_CAMERA -> LensFacing.BACK
- else -> throw IllegalArgumentException(
- "Unknown CameraSelector -> LensFacing mapping. [CameraSelector: $this]"
- )
-}
-
-private val CameraInfo.sensorLandscapeRatio: Float
- @OptIn(ExperimentalCamera2Interop::class)
- get() = Camera2CameraInfo.from(this)
- .getCameraCharacteristic(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE)
- ?.let { sensorRect ->
- if (sensorRect.width() > sensorRect.height()) {
- sensorRect.width().toFloat() / sensorRect.height()
- } else {
- sensorRect.height().toFloat() / sensorRect.width()
- }
- } ?: Float.NaN
-
-@OptIn(ExperimentalImageCaptureOutputFormat::class)
-private fun Int.toAppImageFormat(): ImageOutputFormat? {
- return when (this) {
- ImageCapture.OUTPUT_FORMAT_JPEG -> ImageOutputFormat.JPEG
- ImageCapture.OUTPUT_FORMAT_JPEG_ULTRA_HDR -> ImageOutputFormat.JPEG_ULTRA_HDR
- // All other output formats unsupported. Return null.
- else -> null
- }
-}
-
-private fun UseCaseGroup.getVideoCapture() = getUseCaseOrNull<VideoCapture<Recorder>>()
-
-private inline fun <reified T : UseCase> UseCaseGroup.getUseCaseOrNull(): T? {
- return useCases.filterIsInstance<T>().singleOrNull()
-}
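
The `tryApplyConcurrentCameraModeConstraints` helper added above forces auto frame rate, disabled stabilization, SDR, and multi-stream capture whenever dual camera is requested, and falls back to `ConcurrentCameraMode.OFF` on devices that don't support concurrent cameras. A self-contained sketch of that fallback shape, using simplified stand-in types rather than the app's real models:

```kotlin
// Sketch of the fallback behaviour in tryApplyConcurrentCameraModeConstraints.
// Mode/Constraints/Settings are simplified stand-ins for
// ConcurrentCameraMode/SystemConstraints/CameraAppSettings.
enum class Mode { OFF, DUAL }

data class Constraints(val concurrentCamerasSupported: Boolean)

data class Settings(val mode: Mode = Mode.OFF, val hdr: Boolean = false)

fun Settings.applyConcurrentConstraints(constraints: Constraints): Settings = when {
    // Nothing to constrain when concurrent camera is off.
    mode == Mode.OFF -> this
    // Supported: keep DUAL, but drop settings that can't combine with it (HDR here).
    constraints.concurrentCamerasSupported -> copy(hdr = false)
    // Unsupported: quietly fall back to single-camera mode, leaving other settings alone.
    else -> copy(mode = Mode.OFF)
}

fun main() {
    val requested = Settings(mode = Mode.DUAL, hdr = true)
    println(requested.applyConcurrentConstraints(Constraints(concurrentCamerasSupported = true)))
    // Settings(mode=DUAL, hdr=false)
    println(requested.applyConcurrentConstraints(Constraints(concurrentCamerasSupported = false)))
    // Settings(mode=OFF, hdr=true)
}
```
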
diff --git a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/ConcurrentCameraSession.kt b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/ConcurrentCameraSession.kt
new file mode 100644
index 0000000..1ea84a1
--- /dev/null
+++ b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/ConcurrentCameraSession.kt
@@ -0,0 +1,118 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.jetpackcamera.core.camera
+
+import android.annotation.SuppressLint
+import android.util.Log
+import androidx.camera.core.CompositionSettings
+import androidx.camera.core.TorchState
+import androidx.lifecycle.asFlow
+import com.google.jetpackcamera.settings.model.DynamicRange
+import com.google.jetpackcamera.settings.model.ImageOutputFormat
+import com.google.jetpackcamera.settings.model.Stabilization
+import kotlinx.coroutines.coroutineScope
+import kotlinx.coroutines.flow.collectLatest
+import kotlinx.coroutines.flow.filterNotNull
+import kotlinx.coroutines.flow.first
+import kotlinx.coroutines.flow.update
+import kotlinx.coroutines.launch
+
+private const val TAG = "ConcurrentCameraSession"
+
+context(CameraSessionContext)
+@SuppressLint("RestrictedApi")
+internal suspend fun runConcurrentCameraSession(
+ sessionSettings: PerpetualSessionSettings.ConcurrentCamera,
+ useCaseMode: CameraUseCase.UseCaseMode
+) = coroutineScope {
+ val primaryLensFacing = sessionSettings.primaryCameraInfo.appLensFacing
+ val secondaryLensFacing = sessionSettings.secondaryCameraInfo.appLensFacing
+ Log.d(
+ TAG,
+ "Starting new concurrent camera session " +
+ "[primary: $primaryLensFacing, secondary: $secondaryLensFacing]"
+ )
+
+ val initialTransientSettings = transientSettings
+ .filterNotNull()
+ .first()
+
+ val useCaseGroup = createUseCaseGroup(
+ cameraInfo = sessionSettings.primaryCameraInfo,
+ initialTransientSettings = initialTransientSettings,
+ stabilizePreviewMode = Stabilization.OFF,
+ stabilizeVideoMode = Stabilization.OFF,
+ aspectRatio = sessionSettings.aspectRatio,
+ targetFrameRate = TARGET_FPS_AUTO,
+ dynamicRange = DynamicRange.SDR,
+ imageFormat = ImageOutputFormat.JPEG,
+ useCaseMode = useCaseMode
+ )
+
+ val cameraConfigs = listOf(
+ Pair(
+ sessionSettings.primaryCameraInfo.cameraSelector,
+ CompositionSettings.Builder()
+ .setAlpha(1.0f)
+ .setOffset(0.0f, 0.0f)
+ .setScale(1.0f, 1.0f)
+ .build()
+ ),
+ Pair(
+ sessionSettings.secondaryCameraInfo.cameraSelector,
+ CompositionSettings.Builder()
+ .setAlpha(1.0f)
+ .setOffset(2 / 3f - 0.1f, -2 / 3f + 0.1f)
+ .setScale(1 / 3f, 1 / 3f)
+ .build()
+ )
+ )
+
+ cameraProvider.runWithConcurrent(cameraConfigs, useCaseGroup) { concurrentCamera ->
+ Log.d(TAG, "Concurrent camera session started")
+ val primaryCamera = concurrentCamera.cameras.first {
+ it.cameraInfo.appLensFacing == sessionSettings.primaryCameraInfo.appLensFacing
+ }
+
+ launch {
+ processFocusMeteringEvents(primaryCamera.cameraControl)
+ }
+
+ launch {
+ processVideoControlEvents(
+ primaryCamera,
+ useCaseGroup.getVideoCapture(),
+ captureTypeSuffix = "DualCam"
+ )
+ }
+
+ launch {
+ sessionSettings.primaryCameraInfo.torchState.asFlow().collectLatest { torchState ->
+ currentCameraState.update { old ->
+ old.copy(torchEnabled = torchState == TorchState.ON)
+ }
+ }
+ }
+
+ applyDeviceRotation(initialTransientSettings.deviceRotation, useCaseGroup)
+ processTransientSettingEvents(
+ primaryCamera,
+ useCaseGroup,
+ initialTransientSettings,
+ transientSettings
+ )
+ }
+}
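
The two `CompositionSettings` above keep the primary feed full-frame and render the secondary feed as a picture-in-picture tile at one-third scale, offset toward a corner (the API is currently restricted, hence the `@SuppressLint("RestrictedApi")`). A hypothetical helper, not part of this change, that derives the same values from a scale and margin:

```kotlin
import androidx.camera.core.CompositionSettings

// Illustrative only: reproduce the layout used in runConcurrentCameraSession.
// The primary stream fills the frame; the secondary stream is shrunk to `scale`
// and nudged toward a corner by `margin` in normalized coordinates.
fun pipComposition(
    scale: Float = 1f / 3f,
    margin: Float = 0.1f
): Pair<CompositionSettings, CompositionSettings> {
    val primary = CompositionSettings.Builder()
        .setAlpha(1.0f)
        .setOffset(0.0f, 0.0f)
        .setScale(1.0f, 1.0f)
        .build()
    val secondary = CompositionSettings.Builder()
        .setAlpha(1.0f)
        // With scale = 1/3 and margin = 0.1 this is exactly the
        // (2/3 - 0.1, -2/3 + 0.1) offset used above.
        .setOffset(2 * scale - margin, -(2 * scale) + margin)
        .setScale(scale, scale)
        .build()
    return primary to secondary
}
```
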
diff --git a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CoroutineCameraProvider.kt b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CoroutineCameraProvider.kt
index 33bcbb9..a6a032f 100644
--- a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CoroutineCameraProvider.kt
+++ b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CoroutineCameraProvider.kt
@@ -15,8 +15,12 @@
*/
package com.google.jetpackcamera.core.camera
+import android.annotation.SuppressLint
import androidx.camera.core.Camera
import androidx.camera.core.CameraSelector
+import androidx.camera.core.CompositionSettings
+import androidx.camera.core.ConcurrentCamera
+import androidx.camera.core.ConcurrentCamera.SingleCameraConfig
import androidx.camera.core.UseCaseGroup
import androidx.camera.lifecycle.ProcessCameraProvider
import androidx.lifecycle.Lifecycle
@@ -43,6 +47,19 @@
block(this@runWith.bindToLifecycle(scopedLifecycle, cameraSelector, useCases))
}
+@SuppressLint("RestrictedApi")
+suspend fun <R> ProcessCameraProvider.runWithConcurrent(
+ cameraConfigs: List<Pair<CameraSelector, CompositionSettings>>,
+ useCaseGroup: UseCaseGroup,
+ block: suspend CoroutineScope.(ConcurrentCamera) -> R
+): R = coroutineScope {
+ val scopedLifecycle = CoroutineLifecycleOwner(coroutineContext)
+ val singleCameraConfigs = cameraConfigs.map {
+ SingleCameraConfig(it.first, useCaseGroup, it.second, scopedLifecycle)
+ }
+ block(this@runWithConcurrent.bindToLifecycle(singleCameraConfigs))
+}
+
/**
* A [LifecycleOwner] that follows the lifecycle of a coroutine.
*
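
A hedged sketch of how a caller might drive the new `runWithConcurrent` helper, assuming it sits alongside this file; the function and parameter names below are placeholders, not code from this change. Both cameras share one `UseCaseGroup`, and the coroutine-scoped lifecycle keeps them bound until the caller is cancelled:

```kotlin
import android.util.Log
import androidx.camera.core.CameraSelector
import androidx.camera.core.CompositionSettings
import androidx.camera.core.UseCaseGroup
import androidx.camera.lifecycle.ProcessCameraProvider
import kotlinx.coroutines.awaitCancellation

// Hypothetical call site for runWithConcurrent; all names here are placeholders.
suspend fun bindDualCameras(
    cameraProvider: ProcessCameraProvider,
    useCaseGroup: UseCaseGroup,
    primary: Pair<CameraSelector, CompositionSettings>,
    secondary: Pair<CameraSelector, CompositionSettings>
) {
    cameraProvider.runWithConcurrent(listOf(primary, secondary), useCaseGroup) { concurrentCamera ->
        concurrentCamera.cameras.forEach { camera ->
            Log.d("DualCamera", "Bound camera, lens facing: ${camera.cameraInfo.lensFacing}")
        }
        // Keep the session (and therefore the scoped lifecycle) alive until cancelled.
        awaitCancellation()
    }
}
```
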
diff --git a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/test/FakeCameraUseCase.kt b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/test/FakeCameraUseCase.kt
index e581352..5939b32 100644
--- a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/test/FakeCameraUseCase.kt
+++ b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/test/FakeCameraUseCase.kt
@@ -25,6 +25,7 @@
import com.google.jetpackcamera.settings.model.AspectRatio
import com.google.jetpackcamera.settings.model.CameraAppSettings
import com.google.jetpackcamera.settings.model.CaptureMode
+import com.google.jetpackcamera.settings.model.ConcurrentCameraMode
import com.google.jetpackcamera.settings.model.DeviceRotation
import com.google.jetpackcamera.settings.model.DynamicRange
import com.google.jetpackcamera.settings.model.FlashMode
@@ -32,21 +33,16 @@
import com.google.jetpackcamera.settings.model.LensFacing
import com.google.jetpackcamera.settings.model.LowLightBoost
import com.google.jetpackcamera.settings.model.Stabilization
-import kotlinx.coroutines.CoroutineScope
-import kotlinx.coroutines.Dispatchers
-import kotlinx.coroutines.SupervisorJob
-import kotlinx.coroutines.flow.MutableSharedFlow
+import kotlinx.coroutines.channels.Channel
+import kotlinx.coroutines.channels.Channel.Factory.UNLIMITED
import kotlinx.coroutines.flow.MutableStateFlow
import kotlinx.coroutines.flow.StateFlow
import kotlinx.coroutines.flow.asStateFlow
import kotlinx.coroutines.flow.collectLatest
import kotlinx.coroutines.flow.onCompletion
import kotlinx.coroutines.flow.update
-import kotlinx.coroutines.launch
class FakeCameraUseCase(
- private val coroutineScope: CoroutineScope =
- CoroutineScope(SupervisorJob() + Dispatchers.Default),
defaultCameraSettings: CameraAppSettings = CameraAppSettings()
) : CameraUseCase {
private val availableLenses = listOf(LensFacing.FRONT, LensFacing.BACK)
@@ -61,7 +57,7 @@
var isLensFacingFront = false
private var isScreenFlash = true
- private var screenFlashEvents = MutableSharedFlow<CameraUseCase.ScreenFlashEvent>()
+ private var screenFlashEvents = Channel<CameraUseCase.ScreenFlashEvent>(capacity = UNLIMITED)
private val currentSettings = MutableStateFlow(defaultCameraSettings)
@@ -107,14 +103,12 @@
throw IllegalStateException("Usecases not bound")
}
if (isScreenFlash) {
- coroutineScope.launch {
- screenFlashEvents.emit(
- CameraUseCase.ScreenFlashEvent(CameraUseCase.ScreenFlashEvent.Type.APPLY_UI) { }
- )
- screenFlashEvents.emit(
- CameraUseCase.ScreenFlashEvent(CameraUseCase.ScreenFlashEvent.Type.CLEAR_UI) { }
- )
- }
+ screenFlashEvents.trySend(
+ CameraUseCase.ScreenFlashEvent(CameraUseCase.ScreenFlashEvent.Type.APPLY_UI) { }
+ )
+ screenFlashEvents.trySend(
+ CameraUseCase.ScreenFlashEvent(CameraUseCase.ScreenFlashEvent.Type.CLEAR_UI) { }
+ )
}
numPicturesTaken += 1
}
@@ -131,9 +125,7 @@
}
fun emitScreenFlashEvent(event: CameraUseCase.ScreenFlashEvent) {
- coroutineScope.launch {
- screenFlashEvents.emit(event)
- }
+ screenFlashEvents.trySend(event)
}
override suspend fun startVideoRecording(
@@ -209,6 +201,12 @@
}
}
+ override suspend fun setConcurrentCameraMode(concurrentCameraMode: ConcurrentCameraMode) {
+ currentSettings.update { old ->
+ old.copy(concurrentCameraMode = concurrentCameraMode)
+ }
+ }
+
override suspend fun setImageFormat(imageFormat: ImageOutputFormat) {
currentSettings.update { old ->
old.copy(imageFormat = imageFormat)
diff --git a/data/settings/src/main/java/com/google/jetpackcamera/settings/model/CameraAppSettings.kt b/data/settings/src/main/java/com/google/jetpackcamera/settings/model/CameraAppSettings.kt
index 712a0cc..1daa078 100644
--- a/data/settings/src/main/java/com/google/jetpackcamera/settings/model/CameraAppSettings.kt
+++ b/data/settings/src/main/java/com/google/jetpackcamera/settings/model/CameraAppSettings.kt
@@ -35,7 +35,8 @@
val targetFrameRate: Int = TARGET_FPS_AUTO,
val imageFormat: ImageOutputFormat = ImageOutputFormat.JPEG,
val audioMuted: Boolean = false,
- val deviceRotation: DeviceRotation = DeviceRotation.Natural
+ val deviceRotation: DeviceRotation = DeviceRotation.Natural,
+ val concurrentCameraMode: ConcurrentCameraMode = ConcurrentCameraMode.OFF
)
fun SystemConstraints.forCurrentLens(cameraAppSettings: CameraAppSettings): CameraConstraints? {
diff --git a/data/settings/src/main/java/com/google/jetpackcamera/settings/model/ConcurrentCameraMode.kt b/data/settings/src/main/java/com/google/jetpackcamera/settings/model/ConcurrentCameraMode.kt
new file mode 100644
index 0000000..621296a
--- /dev/null
+++ b/data/settings/src/main/java/com/google/jetpackcamera/settings/model/ConcurrentCameraMode.kt
@@ -0,0 +1,21 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.jetpackcamera.settings.model
+
+enum class ConcurrentCameraMode {
+ OFF,
+ DUAL
+}
diff --git a/data/settings/src/main/java/com/google/jetpackcamera/settings/model/Constraints.kt b/data/settings/src/main/java/com/google/jetpackcamera/settings/model/Constraints.kt
index ae4aa31..8b75351 100644
--- a/data/settings/src/main/java/com/google/jetpackcamera/settings/model/Constraints.kt
+++ b/data/settings/src/main/java/com/google/jetpackcamera/settings/model/Constraints.kt
@@ -17,6 +17,7 @@
data class SystemConstraints(
val availableLenses: List<LensFacing>,
+ val concurrentCamerasSupported: Boolean,
val perLensConstraints: Map<LensFacing, CameraConstraints>
)
@@ -34,6 +35,7 @@
val TYPICAL_SYSTEM_CONSTRAINTS =
SystemConstraints(
availableLenses = listOf(LensFacing.FRONT, LensFacing.BACK),
+ concurrentCamerasSupported = false,
perLensConstraints = buildMap {
for (lensFacing in listOf(LensFacing.FRONT, LensFacing.BACK)) {
put(
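
The new `concurrentCamerasSupported` flag is consumed by the quick settings UI further down; how it gets populated is outside this hunk. One plausible way to detect support with CameraX, shown here only as an assumption:

```kotlin
import androidx.camera.lifecycle.ProcessCameraProvider

// Assumption, not code from this change: CameraX exposes the camera combinations
// that can run concurrently, so "supported" can be read as that list being non-empty.
fun detectConcurrentSupport(cameraProvider: ProcessCameraProvider): Boolean =
    cameraProvider.availableConcurrentCameraInfos.isNotEmpty()
```
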
diff --git a/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/CaptureModeToggleUiState.kt b/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/CaptureModeToggleUiState.kt
index 699f1be..04b7a5e 100644
--- a/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/CaptureModeToggleUiState.kt
+++ b/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/CaptureModeToggleUiState.kt
@@ -22,6 +22,7 @@
import com.google.jetpackcamera.feature.preview.ui.HDR_VIDEO_UNSUPPORTED_ON_DEVICE_TAG
import com.google.jetpackcamera.feature.preview.ui.HDR_VIDEO_UNSUPPORTED_ON_LENS_TAG
import com.google.jetpackcamera.feature.preview.ui.IMAGE_CAPTURE_EXTERNAL_UNSUPPORTED_TAG
+import com.google.jetpackcamera.feature.preview.ui.IMAGE_CAPTURE_UNSUPPORTED_CONCURRENT_CAMERA_TAG
import com.google.jetpackcamera.feature.preview.ui.VIDEO_CAPTURE_EXTERNAL_UNSUPPORTED_TAG
sealed interface CaptureModeToggleUiState {
@@ -47,6 +48,11 @@
IMAGE_CAPTURE_EXTERNAL_UNSUPPORTED(
IMAGE_CAPTURE_EXTERNAL_UNSUPPORTED_TAG,
R.string.toast_image_capture_external_unsupported
+
+ ),
+ IMAGE_CAPTURE_UNSUPPORTED_CONCURRENT_CAMERA(
+ IMAGE_CAPTURE_UNSUPPORTED_CONCURRENT_CAMERA_TAG,
+ R.string.toast_image_capture_unsupported_concurrent_camera
),
HDR_VIDEO_UNSUPPORTED_ON_DEVICE(
HDR_VIDEO_UNSUPPORTED_ON_DEVICE_TAG,
diff --git a/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/PreviewScreen.kt b/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/PreviewScreen.kt
index d95048c..55583a2 100644
--- a/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/PreviewScreen.kt
+++ b/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/PreviewScreen.kt
@@ -59,6 +59,7 @@
import com.google.jetpackcamera.feature.preview.ui.debouncedOrientationFlow
import com.google.jetpackcamera.settings.model.AspectRatio
import com.google.jetpackcamera.settings.model.CaptureMode
+import com.google.jetpackcamera.settings.model.ConcurrentCameraMode
import com.google.jetpackcamera.settings.model.DEFAULT_CAMERA_APP_SETTINGS
import com.google.jetpackcamera.settings.model.DynamicRange
import com.google.jetpackcamera.settings.model.FlashMode
@@ -141,6 +142,7 @@
onChangeAspectRatio = viewModel::setAspectRatio,
onChangeCaptureMode = viewModel::setCaptureMode,
onChangeDynamicRange = viewModel::setDynamicRange,
+ onChangeConcurrentCameraMode = viewModel::setConcurrentCameraMode,
onLowLightBoost = viewModel::setLowLightBoost,
onChangeImageFormat = viewModel::setImageFormat,
onToggleWhenDisabled = viewModel::showSnackBarForDisabledHdrToggle,
@@ -174,6 +176,7 @@
onChangeAspectRatio: (AspectRatio) -> Unit = {},
onChangeCaptureMode: (CaptureMode) -> Unit = {},
onChangeDynamicRange: (DynamicRange) -> Unit = {},
+ onChangeConcurrentCameraMode: (ConcurrentCameraMode) -> Unit = {},
onLowLightBoost: (LowLightBoost) -> Unit = {},
onChangeImageFormat: (ImageOutputFormat) -> Unit = {},
onToggleWhenDisabled: (CaptureModeToggleUiState.DisabledReason) -> Unit = {},
@@ -237,13 +240,13 @@
isOpen = previewUiState.quickSettingsIsOpen,
toggleIsOpen = onToggleQuickSettings,
currentCameraSettings = previewUiState.currentCameraSettings,
- systemConstraints = previewUiState.systemConstraints,
onLensFaceClick = onSetLensFacing,
onFlashModeClick = onChangeFlash,
onAspectRatioClick = onChangeAspectRatio,
onCaptureModeClick = onChangeCaptureMode,
onDynamicRangeClick = onChangeDynamicRange,
onImageOutputFormatClick = onChangeImageFormat,
+ onConcurrentCameraModeClick = onChangeConcurrentCameraMode,
onLowLightBoostClick = onLowLightBoost
)
// relative-grid style overlay on top of preview display
diff --git a/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/PreviewViewModel.kt b/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/PreviewViewModel.kt
index 70f96d6..b6c82c4 100644
--- a/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/PreviewViewModel.kt
+++ b/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/PreviewViewModel.kt
@@ -39,6 +39,7 @@
import com.google.jetpackcamera.settings.model.CameraAppSettings
import com.google.jetpackcamera.settings.model.CameraConstraints
import com.google.jetpackcamera.settings.model.CaptureMode
+import com.google.jetpackcamera.settings.model.ConcurrentCameraMode
import com.google.jetpackcamera.settings.model.DeviceRotation
import com.google.jetpackcamera.settings.model.DynamicRange
import com.google.jetpackcamera.settings.model.FlashMode
@@ -250,12 +251,17 @@
val isShown = previewMode is PreviewMode.ExternalImageCaptureMode ||
previewMode is PreviewMode.ExternalVideoCaptureMode ||
cameraAppSettings.imageFormat == ImageOutputFormat.JPEG_ULTRA_HDR ||
- cameraAppSettings.dynamicRange == DynamicRange.HLG10
+ cameraAppSettings.dynamicRange == DynamicRange.HLG10 ||
+ cameraAppSettings.concurrentCameraMode == ConcurrentCameraMode.DUAL
val enabled = previewMode !is PreviewMode.ExternalImageCaptureMode &&
previewMode !is PreviewMode.ExternalVideoCaptureMode &&
- hdrDynamicRangeSupported && hdrImageFormatSupported
+ hdrDynamicRangeSupported &&
+ hdrImageFormatSupported &&
+ cameraAppSettings.concurrentCameraMode == ConcurrentCameraMode.OFF
return if (isShown) {
- val currentMode = if (previewMode is PreviewMode.ExternalImageCaptureMode ||
+ val currentMode = if (
+ cameraAppSettings.concurrentCameraMode == ConcurrentCameraMode.OFF &&
+ previewMode is PreviewMode.ExternalImageCaptureMode ||
cameraAppSettings.imageFormat == ImageOutputFormat.JPEG_ULTRA_HDR
) {
CaptureModeToggleUiState.ToggleMode.CAPTURE_TOGGLE_IMAGE
@@ -268,11 +274,13 @@
CaptureModeToggleUiState.Disabled(
currentMode,
getCaptureToggleUiStateDisabledReason(
+ currentMode,
hdrDynamicRangeSupported,
hdrImageFormatSupported,
systemConstraints,
cameraAppSettings.cameraLensFacing,
- cameraAppSettings.captureMode
+ cameraAppSettings.captureMode,
+ cameraAppSettings.concurrentCameraMode
)
)
}
@@ -282,73 +290,96 @@
}
private fun getCaptureToggleUiStateDisabledReason(
+ captureModeToggleUiState: CaptureModeToggleUiState.ToggleMode,
hdrDynamicRangeSupported: Boolean,
hdrImageFormatSupported: Boolean,
systemConstraints: SystemConstraints,
currentLensFacing: LensFacing,
- currentCaptureMode: CaptureMode
+ currentCaptureMode: CaptureMode,
+ concurrentCameraMode: ConcurrentCameraMode
): CaptureModeToggleUiState.DisabledReason {
- if (previewMode is PreviewMode.ExternalImageCaptureMode) {
- return CaptureModeToggleUiState.DisabledReason.VIDEO_CAPTURE_EXTERNAL_UNSUPPORTED
- }
- if (previewMode is PreviewMode.ExternalVideoCaptureMode) {
- return CaptureModeToggleUiState.DisabledReason.IMAGE_CAPTURE_EXTERNAL_UNSUPPORTED
- }
- if (!hdrImageFormatSupported) {
- // First assume HDR image is only unsupported on this capture mode
- var disabledReason = when (currentCaptureMode) {
- CaptureMode.MULTI_STREAM ->
- CaptureModeToggleUiState.DisabledReason.HDR_IMAGE_UNSUPPORTED_ON_MULTI_STREAM
-
- CaptureMode.SINGLE_STREAM ->
- CaptureModeToggleUiState.DisabledReason.HDR_IMAGE_UNSUPPORTED_ON_SINGLE_STREAM
- }
- // Check if other capture modes supports HDR image on this lens
- systemConstraints
- .perLensConstraints[currentLensFacing]
- ?.supportedImageFormatsMap
- ?.filterKeys { it != currentCaptureMode }
- ?.values
- ?.forEach { supportedFormats ->
- if (supportedFormats.size > 1) {
- // Found another capture mode that supports HDR image,
- // return previously discovered disabledReason
- return disabledReason
- }
+ when (captureModeToggleUiState) {
+ CaptureModeToggleUiState.ToggleMode.CAPTURE_TOGGLE_VIDEO -> {
+ if (previewMode is PreviewMode.ExternalVideoCaptureMode) {
+ return CaptureModeToggleUiState.DisabledReason
+ .IMAGE_CAPTURE_EXTERNAL_UNSUPPORTED
}
- // HDR image is not supported by this lens
- disabledReason = CaptureModeToggleUiState.DisabledReason.HDR_IMAGE_UNSUPPORTED_ON_LENS
- // Check if any other lens supports HDR image
- systemConstraints
- .perLensConstraints
- .filterKeys { it != currentLensFacing }
- .values
- .forEach { constraints ->
- constraints.supportedImageFormatsMap.values.forEach { supportedFormats ->
- if (supportedFormats.size > 1) {
- // Found another lens that supports HDR image,
- // return previously discovered disabledReason
- return disabledReason
+
+ if (concurrentCameraMode == ConcurrentCameraMode.DUAL) {
+ return CaptureModeToggleUiState.DisabledReason
+ .IMAGE_CAPTURE_UNSUPPORTED_CONCURRENT_CAMERA
+ }
+
+ if (!hdrImageFormatSupported) {
+ // First check if Ultra HDR image is supported on other capture modes
+ if (systemConstraints
+ .perLensConstraints[currentLensFacing]
+ ?.supportedImageFormatsMap
+ ?.anySupportsUltraHdr { it != currentCaptureMode } == true
+ ) {
+ return when (currentCaptureMode) {
+ CaptureMode.MULTI_STREAM ->
+ CaptureModeToggleUiState.DisabledReason
+ .HDR_IMAGE_UNSUPPORTED_ON_MULTI_STREAM
+
+ CaptureMode.SINGLE_STREAM ->
+ CaptureModeToggleUiState.DisabledReason
+ .HDR_IMAGE_UNSUPPORTED_ON_SINGLE_STREAM
}
}
+
+ // Check if any other lens supports HDR image
+ if (systemConstraints.anySupportsUltraHdr { it != currentLensFacing }) {
+ return CaptureModeToggleUiState.DisabledReason.HDR_IMAGE_UNSUPPORTED_ON_LENS
+ }
+
+ // No lenses support HDR image on device
+ return CaptureModeToggleUiState.DisabledReason.HDR_IMAGE_UNSUPPORTED_ON_DEVICE
}
- // No lenses support HDR image on device
- return CaptureModeToggleUiState.DisabledReason.HDR_IMAGE_UNSUPPORTED_ON_DEVICE
- } else if (!hdrDynamicRangeSupported) {
- systemConstraints.perLensConstraints.forEach { entry ->
- if (entry.key != currentLensFacing) {
- val cameraConstraints = systemConstraints.perLensConstraints[entry.key]
- if (cameraConstraints?.let { it.supportedDynamicRanges.size > 1 } == true) {
+
+ throw RuntimeException("Unknown DisabledReason for video mode.")
+ }
+
+ CaptureModeToggleUiState.ToggleMode.CAPTURE_TOGGLE_IMAGE -> {
+ if (previewMode is PreviewMode.ExternalImageCaptureMode) {
+ return CaptureModeToggleUiState.DisabledReason
+ .VIDEO_CAPTURE_EXTERNAL_UNSUPPORTED
+ }
+
+ if (!hdrDynamicRangeSupported) {
+ if (systemConstraints.anySupportsHdrDynamicRange { it != currentLensFacing }) {
return CaptureModeToggleUiState.DisabledReason.HDR_VIDEO_UNSUPPORTED_ON_LENS
}
+ return CaptureModeToggleUiState.DisabledReason.HDR_VIDEO_UNSUPPORTED_ON_DEVICE
}
+
+ throw RuntimeException("Unknown DisabledReason for image mode.")
}
- return CaptureModeToggleUiState.DisabledReason.HDR_VIDEO_UNSUPPORTED_ON_DEVICE
- } else {
- throw RuntimeException("Unknown CaptureModeUnsupportedReason.")
}
}
+ private fun SystemConstraints.anySupportsHdrDynamicRange(
+ lensFilter: (LensFacing) -> Boolean
+ ): Boolean = perLensConstraints.asSequence().firstOrNull {
+ lensFilter(it.key) && it.value.supportedDynamicRanges.size > 1
+ } != null
+
+ private fun Map<CaptureMode, Set<ImageOutputFormat>>.anySupportsUltraHdr(
+ captureModeFilter: (CaptureMode) -> Boolean
+ ): Boolean = asSequence().firstOrNull {
+ captureModeFilter(it.key) && it.value.contains(ImageOutputFormat.JPEG_ULTRA_HDR)
+ } != null
+
+ private fun SystemConstraints.anySupportsUltraHdr(
+ captureModeFilter: (CaptureMode) -> Boolean = { true },
+ lensFilter: (LensFacing) -> Boolean
+ ): Boolean = perLensConstraints.asSequence().firstOrNull { lensConstraints ->
+ lensFilter(lensConstraints.key) &&
+ lensConstraints.value.supportedImageFormatsMap.anySupportsUltraHdr {
+ captureModeFilter(it)
+ }
+ } != null
+
fun startCamera() {
Log.d(TAG, "startCamera")
stopCamera()
@@ -677,6 +708,12 @@
}
}
+ fun setConcurrentCameraMode(concurrentCameraMode: ConcurrentCameraMode) {
+ viewModelScope.launch {
+ cameraUseCase.setConcurrentCameraMode(concurrentCameraMode)
+ }
+ }
+
fun setLowLightBoost(lowLightBoost: LowLightBoost) {
viewModelScope.launch {
cameraUseCase.setLowLightBoost(lowLightBoost)
diff --git a/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/ScreenFlash.kt b/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/ScreenFlash.kt
index 7cd1a4f..7bd075a 100644
--- a/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/ScreenFlash.kt
+++ b/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/ScreenFlash.kt
@@ -45,7 +45,7 @@
init {
scope.launch {
- cameraUseCase.getScreenFlashEvents().collect { event ->
+ for (event in cameraUseCase.getScreenFlashEvents()) {
_screenFlashUiState.emit(
when (event.type) {
CameraUseCase.ScreenFlashEvent.Type.APPLY_UI ->
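
Because `getScreenFlashEvents()` now returns a `Channel`, each event is delivered to exactly one consumer, and events sent with `trySend` before the collector starts are buffered rather than dropped. A minimal, generic illustration of that producer/consumer pattern (not tied to the camera types):

```kotlin
import kotlinx.coroutines.channels.Channel
import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.launch

// Buffered-Channel pattern used for screen flash events: trySend never suspends,
// and events queued before the consumer starts are not lost.
suspend fun channelDemo() = coroutineScope {
    val events = Channel<String>(capacity = Channel.UNLIMITED)

    // Producer side (analogous to ScreenFlash apply/clear callbacks): no extra scope needed.
    events.trySend("APPLY_UI")
    events.trySend("CLEAR_UI")

    // Single consumer (analogous to the for-loop in ScreenFlash.kt): each event handled once.
    launch {
        for (event in events) {
            println("handling $event")
        }
    }

    events.close() // lets the for-loop finish once the buffer drains
}
```
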
diff --git a/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/quicksettings/QuickSettingsEnums.kt b/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/quicksettings/QuickSettingsEnums.kt
index db13cd2..2ee1e78 100644
--- a/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/quicksettings/QuickSettingsEnums.kt
+++ b/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/quicksettings/QuickSettingsEnums.kt
@@ -26,6 +26,7 @@
import androidx.compose.material.icons.filled.HdrOff
import androidx.compose.material.icons.filled.HdrOn
import androidx.compose.material.icons.filled.Nightlight
+import androidx.compose.material.icons.filled.PictureInPicture
import androidx.compose.material.icons.outlined.Nightlight
import androidx.compose.runtime.Composable
import androidx.compose.ui.graphics.painter.Painter
@@ -167,3 +168,20 @@
R.string.quick_settings_lowlightboost_disabled_description
}
}
+
+enum class CameraConcurrentCameraMode : QuickSettingsEnum {
+ OFF {
+ override fun getDrawableResId() = R.drawable.picture_in_picture_off_icon
+ override fun getImageVector() = null
+ override fun getTextResId() = R.string.quick_settings_concurrent_camera_off
+ override fun getDescriptionResId() =
+ R.string.quick_settings_concurrent_camera_off_description
+ },
+ DUAL {
+ override fun getDrawableResId() = null
+ override fun getImageVector() = Icons.Filled.PictureInPicture
+ override fun getTextResId() = R.string.quick_settings_concurrent_camera_dual
+ override fun getDescriptionResId() =
+ R.string.quick_settings_concurrent_camera_dual_description
+ }
+}
diff --git a/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/quicksettings/QuickSettingsScreen.kt b/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/quicksettings/QuickSettingsScreen.kt
index 59e97bc..7dbb474 100644
--- a/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/quicksettings/QuickSettingsScreen.kt
+++ b/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/quicksettings/QuickSettingsScreen.kt
@@ -44,27 +44,28 @@
import com.google.jetpackcamera.feature.preview.R
import com.google.jetpackcamera.feature.preview.quicksettings.ui.ExpandedQuickSetRatio
import com.google.jetpackcamera.feature.preview.quicksettings.ui.QUICK_SETTINGS_CAPTURE_MODE_BUTTON
+import com.google.jetpackcamera.feature.preview.quicksettings.ui.QUICK_SETTINGS_CONCURRENT_CAMERA_MODE_BUTTON
import com.google.jetpackcamera.feature.preview.quicksettings.ui.QUICK_SETTINGS_FLASH_BUTTON
import com.google.jetpackcamera.feature.preview.quicksettings.ui.QUICK_SETTINGS_FLIP_CAMERA_BUTTON
import com.google.jetpackcamera.feature.preview.quicksettings.ui.QUICK_SETTINGS_HDR_BUTTON
-import com.google.jetpackcamera.feature.preview.quicksettings.ui.QUICK_SETTINGS_LOW_LIGHT_BOOST_BUTTON
import com.google.jetpackcamera.feature.preview.quicksettings.ui.QUICK_SETTINGS_RATIO_BUTTON
import com.google.jetpackcamera.feature.preview.quicksettings.ui.QuickFlipCamera
import com.google.jetpackcamera.feature.preview.quicksettings.ui.QuickSetCaptureMode
+import com.google.jetpackcamera.feature.preview.quicksettings.ui.QuickSetConcurrentCamera
import com.google.jetpackcamera.feature.preview.quicksettings.ui.QuickSetFlash
import com.google.jetpackcamera.feature.preview.quicksettings.ui.QuickSetHdr
-import com.google.jetpackcamera.feature.preview.quicksettings.ui.QuickSetLowLightBoost
import com.google.jetpackcamera.feature.preview.quicksettings.ui.QuickSetRatio
import com.google.jetpackcamera.feature.preview.quicksettings.ui.QuickSettingsGrid
import com.google.jetpackcamera.settings.model.AspectRatio
import com.google.jetpackcamera.settings.model.CameraAppSettings
+import com.google.jetpackcamera.settings.model.CameraConstraints
import com.google.jetpackcamera.settings.model.CaptureMode
+import com.google.jetpackcamera.settings.model.ConcurrentCameraMode
import com.google.jetpackcamera.settings.model.DynamicRange
import com.google.jetpackcamera.settings.model.FlashMode
import com.google.jetpackcamera.settings.model.ImageOutputFormat
import com.google.jetpackcamera.settings.model.LensFacing
import com.google.jetpackcamera.settings.model.LowLightBoost
-import com.google.jetpackcamera.settings.model.SystemConstraints
import com.google.jetpackcamera.settings.model.TYPICAL_SYSTEM_CONSTRAINTS
import com.google.jetpackcamera.settings.model.forCurrentLens
@@ -75,7 +76,6 @@
fun QuickSettingsScreenOverlay(
previewUiState: PreviewUiState.Ready,
currentCameraSettings: CameraAppSettings,
- systemConstraints: SystemConstraints,
toggleIsOpen: () -> Unit,
onLensFaceClick: (lensFace: LensFacing) -> Unit,
onFlashModeClick: (flashMode: FlashMode) -> Unit,
@@ -83,6 +83,7 @@
onCaptureModeClick: (captureMode: CaptureMode) -> Unit,
onDynamicRangeClick: (dynamicRange: DynamicRange) -> Unit,
onImageOutputFormatClick: (imageOutputFormat: ImageOutputFormat) -> Unit,
+ onConcurrentCameraModeClick: (concurrentCameraMode: ConcurrentCameraMode) -> Unit,
onLowLightBoostClick: (lowLightBoost: LowLightBoost) -> Unit,
modifier: Modifier = Modifier,
isOpen: Boolean = false
@@ -125,7 +126,6 @@
ExpandedQuickSettingsUi(
previewUiState = previewUiState,
currentCameraSettings = currentCameraSettings,
- systemConstraints = systemConstraints,
shouldShowQuickSetting = shouldShowQuickSetting,
setVisibleQuickSetting = { enum: IsExpandedQuickSetting ->
shouldShowQuickSetting = enum
@@ -136,6 +136,7 @@
onCaptureModeClick = onCaptureModeClick,
onDynamicRangeClick = onDynamicRangeClick,
onImageOutputFormatClick = onImageOutputFormatClick,
+ onConcurrentCameraModeClick = onConcurrentCameraModeClick,
onLowLightBoostClick = onLowLightBoostClick
)
}
@@ -157,7 +158,6 @@
private fun ExpandedQuickSettingsUi(
previewUiState: PreviewUiState.Ready,
currentCameraSettings: CameraAppSettings,
- systemConstraints: SystemConstraints,
onLensFaceClick: (newLensFace: LensFacing) -> Unit,
onFlashModeClick: (flashMode: FlashMode) -> Unit,
onAspectRatioClick: (aspectRation: AspectRatio) -> Unit,
@@ -166,6 +166,7 @@
setVisibleQuickSetting: (IsExpandedQuickSetting) -> Unit,
onDynamicRangeClick: (dynamicRange: DynamicRange) -> Unit,
onImageOutputFormatClick: (imageOutputFormat: ImageOutputFormat) -> Unit,
+ onConcurrentCameraModeClick: (concurrentCameraMode: ConcurrentCameraMode) -> Unit,
onLowLightBoostClick: (lowLightBoost: LowLightBoost) -> Unit
) {
Column(
@@ -216,7 +217,9 @@
QuickSetCaptureMode(
modifier = Modifier.testTag(QUICK_SETTINGS_CAPTURE_MODE_BUTTON),
setCaptureMode = { c: CaptureMode -> onCaptureModeClick(c) },
- currentCaptureMode = currentCameraSettings.captureMode
+ currentCaptureMode = currentCameraSettings.captureMode,
+ enabled = currentCameraSettings.concurrentCameraMode ==
+ ConcurrentCameraMode.OFF
)
}
@@ -224,6 +227,24 @@
currentCameraSettings
)
add {
+ fun CameraConstraints.hdrDynamicRangeSupported(): Boolean =
+ this.supportedDynamicRanges.size > 1
+
+ fun CameraConstraints.hdrImageFormatSupported(): Boolean =
+ supportedImageFormatsMap[currentCameraSettings.captureMode]
+ ?.let { it.size > 1 } ?: false
+
+ // TODO(tm): Move this to PreviewUiState
+ fun shouldEnable(): Boolean = when {
+ currentCameraSettings.concurrentCameraMode !=
+ ConcurrentCameraMode.OFF -> false
+ else -> (
+ cameraConstraints?.hdrDynamicRangeSupported() == true &&
+ previewUiState.previewMode is PreviewMode.StandardMode
+ ) ||
+ cameraConstraints?.hdrImageFormatSupported() == true
+ }
+
QuickSetHdr(
modifier = Modifier.testTag(QUICK_SETTINGS_HDR_BUTTON),
onClick = { d: DynamicRange, i: ImageOutputFormat ->
@@ -234,24 +255,26 @@
selectedImageOutputFormat = currentCameraSettings.imageFormat,
hdrDynamicRange = currentCameraSettings.defaultHdrDynamicRange,
hdrImageFormat = currentCameraSettings.defaultHdrImageOutputFormat,
- hdrDynamicRangeSupported = cameraConstraints?.let
- { it.supportedDynamicRanges.size > 1 } ?: false,
- hdrImageFormatSupported =
- cameraConstraints?.supportedImageFormatsMap?.get(
- currentCameraSettings.captureMode
- )?.let { it.size > 1 } ?: false,
- previewMode = previewUiState.previewMode
+ hdrDynamicRangeSupported =
+ cameraConstraints?.hdrDynamicRangeSupported() ?: false,
+ previewMode = previewUiState.previewMode,
+ enabled = shouldEnable()
)
}
add {
- QuickSetLowLightBoost(
- modifier = Modifier.testTag(QUICK_SETTINGS_LOW_LIGHT_BOOST_BUTTON),
- onClick = {
- l: LowLightBoost ->
- onLowLightBoostClick(l)
+ QuickSetConcurrentCamera(
+ modifier =
+ Modifier.testTag(QUICK_SETTINGS_CONCURRENT_CAMERA_MODE_BUTTON),
+ setConcurrentCameraMode = { c: ConcurrentCameraMode ->
+ onConcurrentCameraModeClick(c)
},
- selectedLowLightBoost = currentCameraSettings.lowLightBoost
+ currentConcurrentCameraMode =
+ currentCameraSettings.concurrentCameraMode,
+ enabled =
+ previewUiState.systemConstraints.concurrentCamerasSupported &&
+ previewUiState.previewMode
+ !is PreviewMode.ExternalImageCaptureMode
)
}
}
@@ -280,7 +303,6 @@
captureModeToggleUiState = CaptureModeToggleUiState.Invisible
),
currentCameraSettings = CameraAppSettings(),
- systemConstraints = TYPICAL_SYSTEM_CONSTRAINTS,
onLensFaceClick = { },
onFlashModeClick = { },
shouldShowQuickSetting = IsExpandedQuickSetting.NONE,
@@ -289,6 +311,7 @@
onCaptureModeClick = { },
onDynamicRangeClick = { },
onImageOutputFormatClick = { },
+ onConcurrentCameraModeClick = { },
onLowLightBoostClick = { }
)
}
@@ -306,7 +329,6 @@
captureModeToggleUiState = CaptureModeToggleUiState.Invisible
),
currentCameraSettings = CameraAppSettings(dynamicRange = DynamicRange.HLG10),
- systemConstraints = TYPICAL_SYSTEM_CONSTRAINTS_WITH_HDR,
onLensFaceClick = { },
onFlashModeClick = { },
shouldShowQuickSetting = IsExpandedQuickSetting.NONE,
@@ -315,6 +337,7 @@
onCaptureModeClick = { },
onDynamicRangeClick = { },
onImageOutputFormatClick = { },
+ onConcurrentCameraModeClick = { },
onLowLightBoostClick = { }
)
}
diff --git a/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/quicksettings/ui/QuickSettingsComponents.kt b/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/quicksettings/ui/QuickSettingsComponents.kt
index 0e6b85f..66e2bc7 100644
--- a/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/quicksettings/ui/QuickSettingsComponents.kt
+++ b/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/quicksettings/ui/QuickSettingsComponents.kt
@@ -49,6 +49,7 @@
import com.google.jetpackcamera.feature.preview.R
import com.google.jetpackcamera.feature.preview.quicksettings.CameraAspectRatio
import com.google.jetpackcamera.feature.preview.quicksettings.CameraCaptureMode
+import com.google.jetpackcamera.feature.preview.quicksettings.CameraConcurrentCameraMode
import com.google.jetpackcamera.feature.preview.quicksettings.CameraDynamicRange
import com.google.jetpackcamera.feature.preview.quicksettings.CameraFlashMode
import com.google.jetpackcamera.feature.preview.quicksettings.CameraLensFace
@@ -56,6 +57,7 @@
import com.google.jetpackcamera.feature.preview.quicksettings.QuickSettingsEnum
import com.google.jetpackcamera.settings.model.AspectRatio
import com.google.jetpackcamera.settings.model.CaptureMode
+import com.google.jetpackcamera.settings.model.ConcurrentCameraMode
import com.google.jetpackcamera.settings.model.DynamicRange
import com.google.jetpackcamera.settings.model.FlashMode
import com.google.jetpackcamera.settings.model.ImageOutputFormat
@@ -113,8 +115,8 @@
hdrDynamicRange: DynamicRange,
hdrImageFormat: ImageOutputFormat,
hdrDynamicRangeSupported: Boolean,
- hdrImageFormatSupported: Boolean,
- previewMode: PreviewMode
+ previewMode: PreviewMode,
+ enabled: Boolean
) {
val enum =
if (selectedDynamicRange == hdrDynamicRange ||
@@ -146,8 +148,7 @@
onClick(newDynamicRange, newImageOutputFormat)
},
isHighLighted = (selectedDynamicRange != DynamicRange.SDR),
- enabled = (hdrDynamicRangeSupported && previewMode is PreviewMode.StandardMode) ||
- hdrImageFormatSupported
+ enabled = enabled
)
}
@@ -250,7 +251,8 @@
fun QuickSetCaptureMode(
setCaptureMode: (CaptureMode) -> Unit,
currentCaptureMode: CaptureMode,
- modifier: Modifier = Modifier
+ modifier: Modifier = Modifier,
+ enabled: Boolean = true
) {
val enum: CameraCaptureMode =
when (currentCaptureMode) {
@@ -265,7 +267,33 @@
CaptureMode.MULTI_STREAM -> setCaptureMode(CaptureMode.SINGLE_STREAM)
CaptureMode.SINGLE_STREAM -> setCaptureMode(CaptureMode.MULTI_STREAM)
}
+ },
+ enabled = enabled
+ )
+}
+
+@Composable
+fun QuickSetConcurrentCamera(
+ setConcurrentCameraMode: (ConcurrentCameraMode) -> Unit,
+ currentConcurrentCameraMode: ConcurrentCameraMode,
+ modifier: Modifier = Modifier,
+ enabled: Boolean = true
+) {
+ val enum: CameraConcurrentCameraMode =
+ when (currentConcurrentCameraMode) {
+ ConcurrentCameraMode.OFF -> CameraConcurrentCameraMode.OFF
+ ConcurrentCameraMode.DUAL -> CameraConcurrentCameraMode.DUAL
}
+ QuickSettingUiItem(
+ modifier = modifier,
+ enum = enum,
+ onClick = {
+ when (currentConcurrentCameraMode) {
+ ConcurrentCameraMode.OFF -> setConcurrentCameraMode(ConcurrentCameraMode.DUAL)
+ ConcurrentCameraMode.DUAL -> setConcurrentCameraMode(ConcurrentCameraMode.OFF)
+ }
+ },
+ enabled = enabled
)
}
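
A hypothetical Compose preview, not included in this change, showing how `QuickSetConcurrentCamera` round-trips the mode through its callback (assumes it lives next to the composable above):

```kotlin
import androidx.compose.runtime.Composable
import androidx.compose.runtime.getValue
import androidx.compose.runtime.mutableStateOf
import androidx.compose.runtime.remember
import androidx.compose.runtime.setValue
import androidx.compose.ui.tooling.preview.Preview
import com.google.jetpackcamera.settings.model.ConcurrentCameraMode

// Illustrative preview only: tapping the quick setting flips between OFF and DUAL.
@Preview
@Composable
fun QuickSetConcurrentCameraPreview() {
    var mode by remember { mutableStateOf(ConcurrentCameraMode.OFF) }
    QuickSetConcurrentCamera(
        setConcurrentCameraMode = { mode = it },
        currentConcurrentCameraMode = mode,
        enabled = true
    )
}
```
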
diff --git a/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/quicksettings/ui/TestTags.kt b/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/quicksettings/ui/TestTags.kt
index 7334407..5a226e6 100644
--- a/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/quicksettings/ui/TestTags.kt
+++ b/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/quicksettings/ui/TestTags.kt
@@ -16,6 +16,7 @@
package com.google.jetpackcamera.feature.preview.quicksettings.ui
const val QUICK_SETTINGS_CAPTURE_MODE_BUTTON = "QuickSettingsCaptureModeButton"
+const val QUICK_SETTINGS_CONCURRENT_CAMERA_MODE_BUTTON = "QuickSettingsConcurrentCameraModeButton"
const val QUICK_SETTINGS_DROP_DOWN = "QuickSettingsDropDown"
const val QUICK_SETTINGS_HDR_BUTTON = "QuickSettingsHdrButton"
const val QUICK_SETTINGS_FLASH_BUTTON = "QuickSettingsFlashButton"
diff --git a/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/ui/TestTags.kt b/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/ui/TestTags.kt
index 9fc6800..077a971 100644
--- a/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/ui/TestTags.kt
+++ b/feature/preview/src/main/java/com/google/jetpackcamera/feature/preview/ui/TestTags.kt
@@ -19,10 +19,12 @@
const val FLIP_CAMERA_BUTTON = "FlipCameraButton"
const val IMAGE_CAPTURE_SUCCESS_TAG = "ImageCaptureSuccessTag"
const val IMAGE_CAPTURE_FAILURE_TAG = "ImageCaptureFailureTag"
+const val IMAGE_CAPTURE_EXTERNAL_UNSUPPORTED_TAG = "ImageCaptureExternalUnsupportedTag"
+const val IMAGE_CAPTURE_UNSUPPORTED_CONCURRENT_CAMERA_TAG =
+ "ImageCaptureUnsupportedConcurrentCameraTag"
+const val VIDEO_CAPTURE_EXTERNAL_UNSUPPORTED_TAG = "VideoCaptureExternalUnsupportedTag"
const val VIDEO_CAPTURE_SUCCESS_TAG = "VideoCaptureSuccessTag"
const val VIDEO_CAPTURE_FAILURE_TAG = "VideoCaptureFailureTag"
-const val VIDEO_CAPTURE_EXTERNAL_UNSUPPORTED_TAG = "VideoCaptureExternalUnsupportedTag"
-const val IMAGE_CAPTURE_EXTERNAL_UNSUPPORTED_TAG = "ImageCaptureExternalUnsupportedTag"
const val PREVIEW_DISPLAY = "PreviewDisplay"
const val SCREEN_FLASH_OVERLAY = "ScreenFlashOverlay"
const val SETTINGS_BUTTON = "SettingsButton"
diff --git a/feature/preview/src/main/res/drawable/picture_in_picture_off_icon.xml b/feature/preview/src/main/res/drawable/picture_in_picture_off_icon.xml
new file mode 100644
index 0000000..3c394b1
--- /dev/null
+++ b/feature/preview/src/main/res/drawable/picture_in_picture_off_icon.xml
@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ ~ Copyright (C) 2024 The Android Open Source Project
+ ~
+ ~ Licensed under the Apache License, Version 2.0 (the "License");
+ ~ you may not use this file except in compliance with the License.
+ ~ You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing, software
+ ~ distributed under the License is distributed on an "AS IS" BASIS,
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ~ See the License for the specific language governing permissions and
+ ~ limitations under the License.
+ -->
+<vector android:height="72dp" android:tint="#000000"
+ android:viewportHeight="960" android:viewportWidth="960"
+ android:width="72dp" xmlns:android="http://schemas.android.com/apk/res/android">
+ <path android:fillColor="@android:color/white" android:pathData="M700,520Q725,520 742.5,502.5Q760,485 760,460L760,340Q760,315 742.5,297.5Q725,280 700,280L480,280Q463,280 451.5,291.5Q440,303 440,320Q440,337 451.5,348.5Q463,360 480,360L680,360L680,360L680,440L640,440Q623,440 611.5,451.5Q600,463 600,480Q600,497 612,508.5Q624,520 641,520L700,520ZM840,720Q825,720 812.5,709.5Q800,699 800,679L800,240Q800,240 800,240Q800,240 800,240L361,240Q341,240 331,227.5Q321,215 321,200Q321,185 331,172.5Q341,160 361,160L800,160Q833,160 856.5,183.5Q880,207 880,240L880,680Q880,700 867.5,710Q855,720 840,720ZM577,463L577,463L577,463L577,463Q577,463 577,463Q577,463 577,463ZM383,497L383,497Q383,497 383,497Q383,497 383,497L383,497Q383,497 383,497Q383,497 383,497L383,497ZM790,903L686,800L160,800Q127,800 103.5,776.5Q80,753 80,720L80,240Q80,207 103.5,183.5Q127,160 160,160L160,160L240,240L160,240Q160,240 160,240Q160,240 160,240L160,720Q160,720 160,720Q160,720 160,720L606,720L54,168Q42,156 42,139.5Q42,123 54,111Q66,99 82.5,99Q99,99 111,111L847,847Q859,859 859,875Q859,891 847,903Q835,915 818.5,915Q802,915 790,903Z"/>
+</vector>
diff --git a/feature/preview/src/main/res/values/strings.xml b/feature/preview/src/main/res/values/strings.xml
index 4d14f01..77d80e0 100644
--- a/feature/preview/src/main/res/values/strings.xml
+++ b/feature/preview/src/main/res/values/strings.xml
@@ -32,6 +32,7 @@
<string name="toast_video_capture_failure">Video Capture Failure</string>
<string name="toast_video_capture_external_unsupported">Video not supported while app is in image-only capture mode</string>
<string name="toast_image_capture_external_unsupported">Image capture not supported while app is in video-only capture mode</string>
+ <string name="toast_image_capture_unsupported_concurrent_camera">Image capture not supported in dual camera mode</string>
<string name="stabilization_icon_description_preview_and_video">Preview is Stabilized</string>
<string name="stabilization_icon_description_video_only">Only Video is Stabilized</string>
<string name="toast_hdr_photo_unsupported_on_device">Ultra HDR photos not supported on this device</string>
@@ -79,4 +80,9 @@
<string name="quick_settings_lowlightboost_disabled">Low light boost off</string>
<string name="quick_settings_lowlightboost_enabled_description">Low light boost on</string>
<string name="quick_settings_lowlightboost_disabled_description">Low light boost off</string>
+
+ <string name="quick_settings_concurrent_camera_off">SINGLE</string>
+ <string name="quick_settings_concurrent_camera_dual">DUAL</string>
+ <string name="quick_settings_concurrent_camera_off_description">Concurrent cameras off</string>
+ <string name="quick_settings_concurrent_camera_dual_description">Concurrent dual camera on</string>
</resources>
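
For illustration, a minimal sketch of how the new concurrent-camera string resources could back the toggle's label and content description; the helper functions and the R class reference are assumptions, not part of this patch.

    // Sketch only: assumes the feature/preview module's generated R class is in scope.
    import androidx.compose.runtime.Composable
    import androidx.compose.ui.res.stringResource
    import com.google.jetpackcamera.settings.model.ConcurrentCameraMode

    @Composable
    fun concurrentCameraLabel(mode: ConcurrentCameraMode): String = when (mode) {
        ConcurrentCameraMode.OFF -> stringResource(R.string.quick_settings_concurrent_camera_off)
        ConcurrentCameraMode.DUAL -> stringResource(R.string.quick_settings_concurrent_camera_dual)
    }

    @Composable
    fun concurrentCameraDescription(mode: ConcurrentCameraMode): String = when (mode) {
        ConcurrentCameraMode.OFF ->
            stringResource(R.string.quick_settings_concurrent_camera_off_description)
        ConcurrentCameraMode.DUAL ->
            stringResource(R.string.quick_settings_concurrent_camera_dual_description)
    }
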
diff --git a/feature/preview/src/test/java/com/google/jetpackcamera/feature/preview/ScreenFlashTest.kt b/feature/preview/src/test/java/com/google/jetpackcamera/feature/preview/ScreenFlashTest.kt
index 8957fb6..536e90e 100644
--- a/feature/preview/src/test/java/com/google/jetpackcamera/feature/preview/ScreenFlashTest.kt
+++ b/feature/preview/src/test/java/com/google/jetpackcamera/feature/preview/ScreenFlashTest.kt
@@ -44,7 +44,7 @@
@get:Rule
val mainDispatcherRule = MainDispatcherRule(testDispatcher)
- private val cameraUseCase = FakeCameraUseCase(testScope)
+ private val cameraUseCase = FakeCameraUseCase()
private lateinit var screenFlash: ScreenFlash
@Before
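
The FakeCameraUseCase constructor above no longer needs a test scope; for context, a minimal sketch of a scope-free, single-consumer event pipe built on a buffered Channel. The class and member names are illustrative, not the app's actual API.

    // Sketch only: a Channel-backed pipe needs no CoroutineScope of its own,
    // and each event is delivered to exactly one consumer.
    import kotlinx.coroutines.channels.Channel
    import kotlinx.coroutines.flow.Flow
    import kotlinx.coroutines.flow.receiveAsFlow

    class EventPipe<T> {
        private val channel = Channel<T>(capacity = Channel.UNLIMITED)

        // Producer side: non-suspending, so it can be called from camera callbacks.
        fun emit(event: T) {
            channel.trySend(event)
        }

        // Consumer side: collect as a Flow; each event is received exactly once.
        val events: Flow<T> = channel.receiveAsFlow()
    }

In a test this can be driven from runTest and collected with events.first(), with no extra scope to construct or cancel.
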
diff --git a/settings.gradle.kts b/settings.gradle.kts
index e011fe0..7c7842f 100644
--- a/settings.gradle.kts
+++ b/settings.gradle.kts
@@ -26,7 +26,7 @@
repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS)
repositories {
maven {
- setUrl("https://androidx.dev/snapshots/builds/12143524/artifacts/repository")
+ setUrl("https://androidx.dev/snapshots/builds/12167802/artifacts/repository")
}
google()
mavenCentral()