From c8635473e26ae8d859b9073537a5b68302ef122c Mon Sep 17 00:00:00 2001 From: Rodrigo Lazo Paz Date: Thu, 16 Apr 2026 10:17:17 -0400 Subject: [PATCH 01/17] [AI] Add configurable model generation for AI On-Device Introduced `GenerationConfig`, `ModelConfig`, `ModelReleaseStage`, and `ModelPreference` to `firebase-ai-ondevice-interop` to allow for configurable model selection. Updated the internal `genaiPrompt` dependency to `1.0.0-beta2` to support the new configuration options. Deprecated the parameter-less `FirebaseAIOnDeviceGenerativeModelFactory.newGenerativeModel()` method in favor of a new overload that accepts a `GenerationConfig`. --- ai-logic/firebase-ai-ondevice-interop/api.txt | 23 ++++++ ...irebaseAIOnDeviceGenerativeModelFactory.kt | 14 ++++ .../ai/ondevice/interop/GenerationConfig.kt | 73 +++++++++++++++++++ .../firebase-ai-ondevice.gradle.kts | 2 +- .../google/firebase/ai/ondevice/Converters.kt | 34 +++++++++ .../ondevice/FirebaseAIOnDeviceComponent.kt | 15 +++- .../ai/ondevice/GenerativeModelImpl.kt | 3 +- gradle/libs.versions.toml | 2 +- 8 files changed, 161 insertions(+), 5 deletions(-) create mode 100644 ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/GenerationConfig.kt diff --git a/ai-logic/firebase-ai-ondevice-interop/api.txt b/ai-logic/firebase-ai-ondevice-interop/api.txt index c27deeb6795..e4d830ef4bc 100644 --- a/ai-logic/firebase-ai-ondevice-interop/api.txt +++ b/ai-logic/firebase-ai-ondevice-interop/api.txt @@ -37,6 +37,29 @@ package com.google.firebase.ai.ondevice.interop { public interface FirebaseAIOnDeviceGenerativeModelFactory { method public com.google.firebase.ai.ondevice.interop.GenerativeModel newGenerativeModel(); + method public com.google.firebase.ai.ondevice.interop.GenerativeModel newGenerativeModel(com.google.firebase.ai.ondevice.interop.ModelConfig modelConfig); + } + + public final class ModelConfig { + ctor public 
ModelConfig(com.google.firebase.ai.ondevice.interop.ModelReleaseStage releaseStage = com.google.firebase.ai.ondevice.interop.ModelReleaseStage.STABLE, com.google.firebase.ai.ondevice.interop.ModelPreference preference = com.google.firebase.ai.ondevice.interop.ModelPreference.FULL); + method public com.google.firebase.ai.ondevice.interop.ModelPreference getPreference(); + method public com.google.firebase.ai.ondevice.interop.ModelReleaseStage getReleaseStage(); + property public final com.google.firebase.ai.ondevice.interop.ModelPreference preference; + property public final com.google.firebase.ai.ondevice.interop.ModelReleaseStage releaseStage; + } + + public enum ModelPreference { + method public static com.google.firebase.ai.ondevice.interop.ModelPreference valueOf(String name) throws java.lang.IllegalArgumentException; + method public static com.google.firebase.ai.ondevice.interop.ModelPreference[] values(); + enum_constant public static final com.google.firebase.ai.ondevice.interop.ModelPreference FAST; + enum_constant public static final com.google.firebase.ai.ondevice.interop.ModelPreference FULL; + } + + public enum ModelReleaseStage { + method public static com.google.firebase.ai.ondevice.interop.ModelReleaseStage valueOf(String name) throws java.lang.IllegalArgumentException; + method public static com.google.firebase.ai.ondevice.interop.ModelReleaseStage[] values(); + enum_constant public static final com.google.firebase.ai.ondevice.interop.ModelReleaseStage PREVIEW; + enum_constant public static final com.google.firebase.ai.ondevice.interop.ModelReleaseStage STABLE; } public final class FirebaseAIOnDeviceInvalidRequestException extends com.google.firebase.ai.ondevice.interop.FirebaseAIOnDeviceException { diff --git a/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/FirebaseAIOnDeviceGenerativeModelFactory.kt 
b/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/FirebaseAIOnDeviceGenerativeModelFactory.kt index aa2361371d7..024d2a437c7 100644 --- a/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/FirebaseAIOnDeviceGenerativeModelFactory.kt +++ b/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/FirebaseAIOnDeviceGenerativeModelFactory.kt @@ -28,5 +28,19 @@ public interface FirebaseAIOnDeviceGenerativeModelFactory { * * @return A new [GenerativeModel] instance ready for use. */ + @Deprecated( + message = "Use newGenerativeModel(GenerationConfig) instead", + replaceWith = ReplaceWith("newGenerativeModel(GenerationConfig)") + ) public fun newGenerativeModel(): GenerativeModel + + /** + * Creates and returns a new instance of [GenerativeModel] optionally configured with + * [ModelConfig]. + * + * @param generationConfig The configuration for the model, `null` if the default configuration + * should be used. + * @return A new [GenerativeModel] instance ready for use. + */ + public fun newGenerativeModel(generationConfig: GenerationConfig?): GenerativeModel } diff --git a/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/GenerationConfig.kt b/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/GenerationConfig.kt new file mode 100644 index 00000000000..ed30ef2a977 --- /dev/null +++ b/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/GenerationConfig.kt @@ -0,0 +1,73 @@ +/* + * Copyright 2026 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.firebase.ai.ondevice.interop + +public class GenerationConfig(public val modelConfig: ModelConfig? = null) { + override fun toString(): String = "GenerationConfig(modelConfig=$modelConfig)" + + override fun equals(other: Any?): Boolean = + other is GenerationConfig && modelConfig == other.modelConfig + + override fun hashCode(): Int = modelConfig?.hashCode() ?: 0 +} + +/** + * Configuration parameters for model selection. + * + * @property releaseStage The release stage of the model to use. + * @property preference The performance preference for the model. + */ +public class ModelConfig( + public val releaseStage: ModelReleaseStage = ModelReleaseStage.STABLE, + public val preference: ModelPreference = ModelPreference.FULL, +) { + override fun equals(other: Any?): Boolean = + other is ModelConfig && releaseStage == other.releaseStage && preference == other.preference + + override fun hashCode(): Int { + var result = releaseStage.hashCode() + result = 31 * result + preference.hashCode() + return result + } + + override fun toString(): String { + return "ModelConfig(releaseStage=$releaseStage, preference=$preference)" + } +} +/** Defines the release stage of the model. */ +public enum class ModelReleaseStage { + /** + * Selects the latest model version that is fully tested and on consumer devices. This is the + * default setting. + */ + STABLE, + + /** + * Selects the latest model version in the preview stage. This stage lets you test beta features + * or newer model architectures before they are widely deployed. 
+ */ + PREVIEW, +} + +/** Defines the performance preference for the model. */ +public enum class ModelPreference { + /** Recommended when model accuracy and full capabilities are prioritized over speed. */ + FULL, + + /** Recommended for latency-sensitive apps that require minimal response times. */ + FAST, +} diff --git a/ai-logic/firebase-ai-ondevice/firebase-ai-ondevice.gradle.kts b/ai-logic/firebase-ai-ondevice/firebase-ai-ondevice.gradle.kts index ae21bd4f955..9cb9a75df6d 100644 --- a/ai-logic/firebase-ai-ondevice/firebase-ai-ondevice.gradle.kts +++ b/ai-logic/firebase-ai-ondevice/firebase-ai-ondevice.gradle.kts @@ -68,7 +68,7 @@ kotlin { dependencies { implementation(libs.genai.prompt) - implementation("com.google.firebase:firebase-ai-ondevice-interop:16.0.0-beta01") + implementation(project(":ai-logic:firebase-ai-ondevice-interop")) implementation(libs.firebase.common) implementation(libs.firebase.components) diff --git a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/Converters.kt b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/Converters.kt index ef615423857..e757189bd97 100644 --- a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/Converters.kt +++ b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/Converters.kt @@ -22,9 +22,15 @@ import com.google.firebase.ai.ondevice.interop.CountTokensResponse import com.google.firebase.ai.ondevice.interop.FinishReason import com.google.firebase.ai.ondevice.interop.FirebaseAIOnDeviceInvalidRequestException import com.google.firebase.ai.ondevice.interop.GenerateContentResponse +import com.google.firebase.ai.ondevice.interop.GenerationConfig +import com.google.firebase.ai.ondevice.interop.ModelConfig import com.google.mlkit.genai.prompt.GenerateContentRequest import com.google.mlkit.genai.prompt.ImagePart +import com.google.mlkit.genai.prompt.ModelPreference +import 
com.google.mlkit.genai.prompt.ModelReleaseStage import com.google.mlkit.genai.prompt.TextPart +import com.google.mlkit.genai.prompt.generationConfig +import com.google.mlkit.genai.prompt.modelConfig import kotlin.math.min // ==================================== @@ -75,6 +81,34 @@ internal fun com.google.firebase.ai.ondevice.interop.GenerateContentRequest.toMl internal fun com.google.mlkit.genai.prompt.GenerateContentResponse.toInterop(): GenerateContentResponse = GenerateContentResponse(candidates.map { it.toInterop() }) +// ================================================ +// `GenerationConfig` converter extension functions +// ================================================ +internal fun GenerationConfig.toMlKit(): com.google.mlkit.genai.prompt.GenerationConfig = + generationConfig { + modelConfig = this@toMlKit.modelConfig.toMlKit() + } + +// =========================================== +// `ModelConfig` converter extension functions +// =========================================== +internal fun ModelConfig.toMlKit(): com.google.mlkit.genai.prompt.ModelConfig = modelConfig { + releaseStage = this@toMlKit.releaseStage.toMlKit() + preference = this@toMlKit.preference.toMlKit() +} + +private fun com.google.firebase.ai.ondevice.interop.ModelReleaseStage.toMlKit(): Int = + when (this) { + com.google.firebase.ai.ondevice.interop.ModelReleaseStage.PREVIEW -> ModelReleaseStage.PREVIEW + com.google.firebase.ai.ondevice.interop.ModelReleaseStage.STABLE -> ModelReleaseStage.STABLE + } + +private fun com.google.firebase.ai.ondevice.interop.ModelPreference.toMlKit(): Int = + when (this) { + com.google.firebase.ai.ondevice.interop.ModelPreference.FULL -> ModelPreference.FULL + com.google.firebase.ai.ondevice.interop.ModelPreference.FAST -> ModelPreference.FAST + } + private fun generateContentRequest( text: com.google.firebase.ai.ondevice.interop.TextPart, image: com.google.firebase.ai.ondevice.interop.ImagePart? 
= null, diff --git a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDeviceComponent.kt b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDeviceComponent.kt index ab44fc1dc7c..810d1e7fa5b 100644 --- a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDeviceComponent.kt +++ b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDeviceComponent.kt @@ -17,7 +17,9 @@ package com.google.firebase.ai.ondevice import com.google.firebase.ai.ondevice.interop.FirebaseAIOnDeviceGenerativeModelFactory +import com.google.firebase.ai.ondevice.interop.GenerationConfig import com.google.firebase.ai.ondevice.interop.GenerativeModel +import com.google.mlkit.genai.prompt.Generation /** * Factory class for Firebase AI OnDevice. @@ -26,5 +28,16 @@ import com.google.firebase.ai.ondevice.interop.GenerativeModel */ internal class FirebaseAIOnDeviceComponent : FirebaseAIOnDeviceGenerativeModelFactory { - override fun newGenerativeModel(): GenerativeModel = GenerativeModelImpl() + @Deprecated( + "Use newGenerativeModel(ModelConfig) instead", + replaceWith = ReplaceWith("newGenerativeModel(ModelConfig)") + ) + override fun newGenerativeModel(): GenerativeModel = newGenerativeModel(null) + + override fun newGenerativeModel(generationConfig: GenerationConfig?): GenerativeModel = + if (generationConfig == null) { + GenerativeModelImpl(Generation.getClient()) + } else { + GenerativeModelImpl(Generation.getClient(generationConfig.toMlKit())) + } } diff --git a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/GenerativeModelImpl.kt b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/GenerativeModelImpl.kt index c2254b283a4..77a5354d667 100644 --- a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/GenerativeModelImpl.kt +++ 
b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/GenerativeModelImpl.kt @@ -27,14 +27,13 @@ import com.google.firebase.ai.ondevice.interop.GenerativeModel import com.google.mlkit.genai.common.FeatureStatus import com.google.mlkit.genai.common.GenAiException import com.google.mlkit.genai.common.GenAiException.ErrorCode -import com.google.mlkit.genai.prompt.Generation import kotlinx.coroutines.flow.Flow import kotlinx.coroutines.flow.catch import kotlinx.coroutines.flow.map /** Implementation of [GenerativeModel] backed by MLKit's genai prompt SDK. */ internal class GenerativeModelImpl( - internal val mlkitModel: com.google.mlkit.genai.prompt.GenerativeModel = Generation.getClient() + internal val mlkitModel: com.google.mlkit.genai.prompt.GenerativeModel ) : GenerativeModel { /** diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml index bf8ffbcf871..74a86cfa29c 100644 --- a/gradle/libs.versions.toml +++ b/gradle/libs.versions.toml @@ -28,7 +28,7 @@ firebaseAnnotations = "17.0.0" firebaseCommon = "22.0.1" firebaseComponents = "19.0.0" firebaseCrashlyticsGradle = "3.0.4" -genaiPrompt = "1.0.0-alpha1" +genaiPrompt = "1.0.0-beta2" glide = "5.0.5" googleApiClient = "2.8.1" googleServices = "4.3.15" From b507cc1a8b1b97182e687e3da99f7e27d1eb697d Mon Sep 17 00:00:00 2001 From: Rodrigo Lazo Paz Date: Thu, 16 Apr 2026 12:21:00 -0400 Subject: [PATCH 02/17] Fix compile issue --- .../main/kotlin/com/google/firebase/ai/ondevice/Converters.kt | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/Converters.kt b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/Converters.kt index e757189bd97..595959eb60d 100644 --- a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/Converters.kt +++ b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/Converters.kt @@ -86,7 +86,9 
@@ internal fun com.google.mlkit.genai.prompt.GenerateContentResponse.toInterop(): // ================================================ internal fun GenerationConfig.toMlKit(): com.google.mlkit.genai.prompt.GenerationConfig = generationConfig { - modelConfig = this@toMlKit.modelConfig.toMlKit() + this@toMlKit.modelConfig?.let { + modelConfig = it.toMlKit() + } } // =========================================== From 61c8e209d2048b1d53f8302f942847f20210a681 Mon Sep 17 00:00:00 2001 From: Rodrigo Lazo Paz Date: Thu, 16 Apr 2026 12:25:48 -0400 Subject: [PATCH 03/17] Update api.txt --- ai-logic/firebase-ai-ondevice-interop/api.txt | 50 ++++++++++--------- 1 file changed, 26 insertions(+), 24 deletions(-) diff --git a/ai-logic/firebase-ai-ondevice-interop/api.txt b/ai-logic/firebase-ai-ondevice-interop/api.txt index e4d830ef4bc..b160b11527c 100644 --- a/ai-logic/firebase-ai-ondevice-interop/api.txt +++ b/ai-logic/firebase-ai-ondevice-interop/api.txt @@ -36,30 +36,8 @@ package com.google.firebase.ai.ondevice.interop { } public interface FirebaseAIOnDeviceGenerativeModelFactory { - method public com.google.firebase.ai.ondevice.interop.GenerativeModel newGenerativeModel(); - method public com.google.firebase.ai.ondevice.interop.GenerativeModel newGenerativeModel(com.google.firebase.ai.ondevice.interop.ModelConfig modelConfig); - } - - public final class ModelConfig { - ctor public ModelConfig(com.google.firebase.ai.ondevice.interop.ModelReleaseStage releaseStage = com.google.firebase.ai.ondevice.interop.ModelReleaseStage.STABLE, com.google.firebase.ai.ondevice.interop.ModelPreference preference = com.google.firebase.ai.ondevice.interop.ModelPreference.FULL); - method public com.google.firebase.ai.ondevice.interop.ModelPreference getPreference(); - method public com.google.firebase.ai.ondevice.interop.ModelReleaseStage getReleaseStage(); - property public final com.google.firebase.ai.ondevice.interop.ModelPreference preference; - property public final 
com.google.firebase.ai.ondevice.interop.ModelReleaseStage releaseStage; - } - - public enum ModelPreference { - method public static com.google.firebase.ai.ondevice.interop.ModelPreference valueOf(String name) throws java.lang.IllegalArgumentException; - method public static com.google.firebase.ai.ondevice.interop.ModelPreference[] values(); - enum_constant public static final com.google.firebase.ai.ondevice.interop.ModelPreference FAST; - enum_constant public static final com.google.firebase.ai.ondevice.interop.ModelPreference FULL; - } - - public enum ModelReleaseStage { - method public static com.google.firebase.ai.ondevice.interop.ModelReleaseStage valueOf(String name) throws java.lang.IllegalArgumentException; - method public static com.google.firebase.ai.ondevice.interop.ModelReleaseStage[] values(); - enum_constant public static final com.google.firebase.ai.ondevice.interop.ModelReleaseStage PREVIEW; - enum_constant public static final com.google.firebase.ai.ondevice.interop.ModelReleaseStage STABLE; + method @Deprecated public com.google.firebase.ai.ondevice.interop.GenerativeModel newGenerativeModel(); + method public com.google.firebase.ai.ondevice.interop.GenerativeModel newGenerativeModel(com.google.firebase.ai.ondevice.interop.GenerationConfig? generationConfig); } public final class FirebaseAIOnDeviceInvalidRequestException extends com.google.firebase.ai.ondevice.interop.FirebaseAIOnDeviceException { @@ -98,6 +76,12 @@ package com.google.firebase.ai.ondevice.interop { property public final java.util.List candidates; } + public final class GenerationConfig { + ctor public GenerationConfig(com.google.firebase.ai.ondevice.interop.ModelConfig? modelConfig = null); + method public com.google.firebase.ai.ondevice.interop.ModelConfig? getModelConfig(); + property public final com.google.firebase.ai.ondevice.interop.ModelConfig? modelConfig; + } + public interface GenerativeModel { method public suspend Object? 
countTokens(com.google.firebase.ai.ondevice.interop.GenerateContentRequest request, kotlin.coroutines.Continuation); method public suspend Object? generateContent(com.google.firebase.ai.ondevice.interop.GenerateContentRequest request, kotlin.coroutines.Continuation); @@ -114,6 +98,24 @@ package com.google.firebase.ai.ondevice.interop { property public final android.graphics.Bitmap bitmap; } + public final class ModelConfig { + ctor public ModelConfig(com.google.firebase.ai.ondevice.interop.ModelReleaseStage releaseStage = com.google.firebase.ai.ondevice.interop.ModelReleaseStage.STABLE, com.google.firebase.ai.ondevice.interop.ModelPreference preference = com.google.firebase.ai.ondevice.interop.ModelPreference.FULL); + method public com.google.firebase.ai.ondevice.interop.ModelPreference getPreference(); + method public com.google.firebase.ai.ondevice.interop.ModelReleaseStage getReleaseStage(); + property public final com.google.firebase.ai.ondevice.interop.ModelPreference preference; + property public final com.google.firebase.ai.ondevice.interop.ModelReleaseStage releaseStage; + } + + public enum ModelPreference { + enum_constant public static final com.google.firebase.ai.ondevice.interop.ModelPreference FAST; + enum_constant public static final com.google.firebase.ai.ondevice.interop.ModelPreference FULL; + } + + public enum ModelReleaseStage { + enum_constant public static final com.google.firebase.ai.ondevice.interop.ModelReleaseStage PREVIEW; + enum_constant public static final com.google.firebase.ai.ondevice.interop.ModelReleaseStage STABLE; + } + public interface Part { } From 388058bf260f2e3ca0d715453fbf28e7443e0b74 Mon Sep 17 00:00:00 2001 From: Rodrigo Lazo Paz Date: Thu, 16 Apr 2026 12:27:57 -0400 Subject: [PATCH 04/17] Update deprecation message --- .../firebase/ai/ondevice/FirebaseAIOnDeviceComponent.kt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDeviceComponent.kt b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDeviceComponent.kt index 810d1e7fa5b..3275391ec10 100644 --- a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDeviceComponent.kt +++ b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDeviceComponent.kt @@ -29,8 +29,8 @@ import com.google.mlkit.genai.prompt.Generation internal class FirebaseAIOnDeviceComponent : FirebaseAIOnDeviceGenerativeModelFactory { @Deprecated( - "Use newGenerativeModel(ModelConfig) instead", - replaceWith = ReplaceWith("newGenerativeModel(ModelConfig)") + "Use newGenerativeModel(GenerationConfig) instead", + replaceWith = ReplaceWith("newGenerativeModel(GenerationConfig)") ) override fun newGenerativeModel(): GenerativeModel = newGenerativeModel(null) From 23a8abba21b9671ac09f61f6dba880145d5f7aa8 Mon Sep 17 00:00:00 2001 From: Rodrigo Lazo Date: Thu, 16 Apr 2026 23:51:31 -0400 Subject: [PATCH 05/17] Update ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/FirebaseAIOnDeviceGenerativeModelFactory.kt Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --- .../interop/FirebaseAIOnDeviceGenerativeModelFactory.kt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/FirebaseAIOnDeviceGenerativeModelFactory.kt b/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/FirebaseAIOnDeviceGenerativeModelFactory.kt index 024d2a437c7..670203af712 100644 --- a/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/FirebaseAIOnDeviceGenerativeModelFactory.kt +++ 
b/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/FirebaseAIOnDeviceGenerativeModelFactory.kt @@ -29,8 +29,8 @@ public interface FirebaseAIOnDeviceGenerativeModelFactory { * @return A new [GenerativeModel] instance ready for use. */ @Deprecated( - message = "Use newGenerativeModel(GenerationConfig) instead", - replaceWith = ReplaceWith("newGenerativeModel(GenerationConfig)") + message = "Use newGenerativeModel(GenerationConfig?) instead", + replaceWith = ReplaceWith("newGenerativeModel(null)") ) public fun newGenerativeModel(): GenerativeModel From ac6bb98041028673704b4038188dd4baeca6162f Mon Sep 17 00:00:00 2001 From: Rodrigo Lazo Date: Thu, 16 Apr 2026 23:51:48 -0400 Subject: [PATCH 06/17] Update ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDeviceComponent.kt Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --- .../firebase/ai/ondevice/FirebaseAIOnDeviceComponent.kt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDeviceComponent.kt b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDeviceComponent.kt index 3275391ec10..9a98e5cab35 100644 --- a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDeviceComponent.kt +++ b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDeviceComponent.kt @@ -29,8 +29,8 @@ import com.google.mlkit.genai.prompt.Generation internal class FirebaseAIOnDeviceComponent : FirebaseAIOnDeviceGenerativeModelFactory { @Deprecated( - "Use newGenerativeModel(GenerationConfig) instead", - replaceWith = ReplaceWith("newGenerativeModel(GenerationConfig)") + "Use newGenerativeModel(GenerationConfig?) 
instead", + replaceWith = ReplaceWith("newGenerativeModel(null)") ) override fun newGenerativeModel(): GenerativeModel = newGenerativeModel(null) From d906a02f70be035556d4bd1f04652149e2897059 Mon Sep 17 00:00:00 2001 From: Rodrigo Lazo Date: Thu, 16 Apr 2026 23:52:10 -0400 Subject: [PATCH 07/17] Update ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/FirebaseAIOnDeviceGenerativeModelFactory.kt Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --- .../interop/FirebaseAIOnDeviceGenerativeModelFactory.kt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/FirebaseAIOnDeviceGenerativeModelFactory.kt b/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/FirebaseAIOnDeviceGenerativeModelFactory.kt index 670203af712..8cc45e80e6f 100644 --- a/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/FirebaseAIOnDeviceGenerativeModelFactory.kt +++ b/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/FirebaseAIOnDeviceGenerativeModelFactory.kt @@ -36,7 +36,7 @@ public interface FirebaseAIOnDeviceGenerativeModelFactory { /** * Creates and returns a new instance of [GenerativeModel] optionally configured with - * [ModelConfig]. + * [GenerationConfig]. * * @param generationConfig The configuration for the model, `null` if the default configuration * should be used. 
From d415b857f040dff3619910b079586577dc55a11c Mon Sep 17 00:00:00 2001 From: Rodrigo Lazo Paz Date: Fri, 17 Apr 2026 00:08:53 -0400 Subject: [PATCH 08/17] Add changelog entries --- ai-logic/firebase-ai-ondevice-interop/CHANGELOG.md | 3 ++- ai-logic/firebase-ai-ondevice/CHANGELOG.md | 2 ++ .../main/kotlin/com/google/firebase/ai/ondevice/Converters.kt | 4 +--- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/ai-logic/firebase-ai-ondevice-interop/CHANGELOG.md b/ai-logic/firebase-ai-ondevice-interop/CHANGELOG.md index 7f045d58617..4134d65099b 100644 --- a/ai-logic/firebase-ai-ondevice-interop/CHANGELOG.md +++ b/ai-logic/firebase-ai-ondevice-interop/CHANGELOG.md @@ -1,6 +1,7 @@ # Unreleased +- [feature] Added support for model selection, required for nano-v4. (#8043) + # 16.0.0-beta01 - [feature] Initial release. - diff --git a/ai-logic/firebase-ai-ondevice/CHANGELOG.md b/ai-logic/firebase-ai-ondevice/CHANGELOG.md index 7f045d58617..0a19d412041 100644 --- a/ai-logic/firebase-ai-ondevice/CHANGELOG.md +++ b/ai-logic/firebase-ai-ondevice/CHANGELOG.md @@ -1,5 +1,7 @@ # Unreleased +- [feature] Added support for model selection, required for nano-v4. (#8043) + # 16.0.0-beta01 - [feature] Initial release. 
diff --git a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/Converters.kt b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/Converters.kt index 595959eb60d..85e1ae61794 100644 --- a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/Converters.kt +++ b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/Converters.kt @@ -86,9 +86,7 @@ internal fun com.google.mlkit.genai.prompt.GenerateContentResponse.toInterop(): // ================================================ internal fun GenerationConfig.toMlKit(): com.google.mlkit.genai.prompt.GenerationConfig = generationConfig { - this@toMlKit.modelConfig?.let { - modelConfig = it.toMlKit() - } + this@toMlKit.modelConfig?.let { modelConfig = it.toMlKit() } } // =========================================== From f2774539b64da6d2af24b72d54c1c5bb0ae70c91 Mon Sep 17 00:00:00 2001 From: Rodrigo Lazo Paz Date: Tue, 28 Apr 2026 15:52:02 -0400 Subject: [PATCH 09/17] Update Converter --- .../main/kotlin/com/google/firebase/ai/ondevice/Converters.kt | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/Converters.kt b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/Converters.kt index 8f37e02e776..f867848024a 100644 --- a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/Converters.kt +++ b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/Converters.kt @@ -16,7 +16,6 @@ package com.google.firebase.ai.ondevice -import android.graphics.Bitmap import com.google.firebase.ai.ondevice.interop.Candidate import com.google.firebase.ai.ondevice.interop.CountTokensResponse import com.google.firebase.ai.ondevice.interop.FinishReason @@ -31,7 +30,6 @@ import com.google.mlkit.genai.prompt.ModelReleaseStage import com.google.mlkit.genai.prompt.TextPart import 
com.google.mlkit.genai.prompt.generationConfig import com.google.mlkit.genai.prompt.modelConfig -import kotlin.math.min // ==================================== // `Part` converter extension functions @@ -39,7 +37,7 @@ import kotlin.math.min internal fun com.google.firebase.ai.ondevice.interop.TextPart.toMlKit(): TextPart = TextPart(text) internal fun com.google.firebase.ai.ondevice.interop.ImagePart.toMlKit(): ImagePart = - ImagePart(downsizeBitmapIfNeeded(bitmap)) + ImagePart(bitmap) // ============================================ // `CountTokens*` converter extension functions From e30842f10d8fd8119256ab478f9d7772942ef0e4 Mon Sep 17 00:00:00 2001 From: Rodrigo Lazo Paz Date: Tue, 28 Apr 2026 23:32:12 -0400 Subject: [PATCH 10/17] fix changelog --- ai-logic/firebase-ai-ondevice-interop/CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/ai-logic/firebase-ai-ondevice-interop/CHANGELOG.md b/ai-logic/firebase-ai-ondevice-interop/CHANGELOG.md index 4134d65099b..0a19d412041 100644 --- a/ai-logic/firebase-ai-ondevice-interop/CHANGELOG.md +++ b/ai-logic/firebase-ai-ondevice-interop/CHANGELOG.md @@ -5,3 +5,4 @@ # 16.0.0-beta01 - [feature] Initial release. 
+ From e8b8a25106b0dd8c2ff3ea6b9979bb18d2f1bbef Mon Sep 17 00:00:00 2001 From: Rodrigo Lazo Paz Date: Tue, 28 Apr 2026 23:32:22 -0400 Subject: [PATCH 11/17] update dependency --- ai-logic/firebase-ai/firebase-ai.gradle.kts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ai-logic/firebase-ai/firebase-ai.gradle.kts b/ai-logic/firebase-ai/firebase-ai.gradle.kts index cd0d36b2155..fd10c7b7632 100644 --- a/ai-logic/firebase-ai/firebase-ai.gradle.kts +++ b/ai-logic/firebase-ai/firebase-ai.gradle.kts @@ -97,7 +97,7 @@ dependencies { implementation("androidx.concurrent:concurrent-futures:1.2.0") implementation("androidx.concurrent:concurrent-futures-ktx:1.2.0") implementation("com.google.firebase:firebase-auth-interop:18.0.0") - implementation("com.google.firebase:firebase-ai-ondevice-interop:16.0.0-beta01") + implementation(project(":ai-logic:firebase-ai-ondevice-interop")) // Use different logging libraries depending on the variant releaseImplementation(libs.slf4j.nop) From 0ed1c4bf0e59f153de12431a2f50bb02e80a73bb Mon Sep 17 00:00:00 2001 From: Rodrigo Lazo Paz Date: Wed, 29 Apr 2026 00:04:10 -0400 Subject: [PATCH 12/17] Use simple interface to select model --- .../ai/ondevice/FirebaseAIOnDevice.kt | 50 +++++++++++++++++-- 1 file changed, 46 insertions(+), 4 deletions(-) diff --git a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDevice.kt b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDevice.kt index fff3eea9d0c..a091f0198e2 100644 --- a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDevice.kt +++ b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDevice.kt @@ -18,8 +18,13 @@ package com.google.firebase.ai.ondevice import com.google.firebase.ai.ondevice.interop.FirebaseAIOnDeviceException import com.google.firebase.ai.ondevice.interop.FirebaseAIOnDeviceUnknownException +import 
com.google.firebase.ai.ondevice.interop.GenerationConfig import com.google.mlkit.genai.common.FeatureStatus import com.google.mlkit.genai.prompt.Generation +import com.google.mlkit.genai.prompt.ModelPreference as MlKitModelPreference +import com.google.mlkit.genai.prompt.ModelReleaseStage as MlKitModelReleaseStage +import com.google.mlkit.genai.prompt.generationConfig +import com.google.mlkit.genai.prompt.modelConfig import kotlinx.coroutines.flow.Flow import kotlinx.coroutines.flow.map @@ -33,10 +38,13 @@ public object FirebaseAIOnDevice { /** * Checks the current status / availability of the on-device AI model. * + * @param option The configuration option for the model. * @return An [OnDeviceModelStatus] object indicating the current state of the model. */ - public suspend fun checkStatus(): OnDeviceModelStatus { - return OnDeviceModelStatus.fromFeatureStatus(Generation.getClient().checkStatus()) + public suspend fun checkStatus(option: OnDeviceModelOption): OnDeviceModelStatus { + return OnDeviceModelStatus.fromFeatureStatus( + Generation.getClient(option.toMlKit()).checkStatus() + ) } /** @@ -46,12 +54,46 @@ public object FirebaseAIOnDevice { * Consumers should collect the flow to start the download process, and optionally process any * updates on the download state, progress, and completion or failure. * + * @param option The configuration option for the model. * @return A [Flow] of [DownloadStatus] objects representing the download lifecycle. */ - public fun download(): Flow = - Generation.getClient().download().map { DownloadStatus.fromMlKit(it) } + public fun download(option: OnDeviceModelOption): Flow { + return Generation.getClient(option.toMlKit()).download().map { DownloadStatus.fromMlKit(it) } + } +} + +/** Options for configuring the on-device AI model. */ +public enum class OnDeviceModelOption { + /** Selects the latest stable model. */ + STABLE, + + /** Selects the latest preview model with full performance. 
*/ + PREVIEW, + + /** Selects the latest preview model optimized for speed. */ + PREVIEW_FAST } +internal fun OnDeviceModelOption.toMlKit(): com.google.mlkit.genai.prompt.GenerationConfig = + generationConfig { + modelConfig = modelConfig { + when (this@toMlKit) { + OnDeviceModelOption.STABLE -> { + releaseStage = MlKitModelReleaseStage.STABLE + preference = MlKitModelPreference.FULL + } + OnDeviceModelOption.PREVIEW -> { + releaseStage = MlKitModelReleaseStage.PREVIEW + preference = MlKitModelPreference.FULL + } + OnDeviceModelOption.PREVIEW_FAST -> { + releaseStage = MlKitModelReleaseStage.PREVIEW + preference = MlKitModelPreference.FAST + } + } + } + } + /** Represents the current status of the on-device AI model. */ public class OnDeviceModelStatus private constructor(private val status: Int) { public companion object { From 55f8c8164c62fd040f579f711771bcd76faab221 Mon Sep 17 00:00:00 2001 From: Rodrigo Lazo Paz Date: Wed, 29 Apr 2026 00:52:47 -0400 Subject: [PATCH 13/17] use non-enum pattern --- .../ai/ondevice/FirebaseAIOnDevice.kt | 18 +++-- .../com/google/firebase/ai/GenerativeModel.kt | 15 ++-- .../com/google/firebase/ai/OnDeviceConfig.kt | 72 ++++++++++++++++++- .../OnDeviceGenerativeModelProviderTests.kt | 37 ++++++++++ 4 files changed, 129 insertions(+), 13 deletions(-) diff --git a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDevice.kt b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDevice.kt index a091f0198e2..8f708b83d46 100644 --- a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDevice.kt +++ b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDevice.kt @@ -63,15 +63,19 @@ public object FirebaseAIOnDevice { } /** Options for configuring the on-device AI model. */ -public enum class OnDeviceModelOption { - /** Selects the latest stable model. 
*/ - STABLE, +public class OnDeviceModelOption private constructor(private val value: String) { + override fun toString(): String = value - /** Selects the latest preview model with full performance. */ - PREVIEW, + public companion object { + /** Selects the latest stable model. */ + @JvmField public val STABLE: OnDeviceModelOption = OnDeviceModelOption("stable") + + /** Selects the latest preview model with full performance. */ + @JvmField public val PREVIEW: OnDeviceModelOption = OnDeviceModelOption("preview") - /** Selects the latest preview model optimized for speed. */ - PREVIEW_FAST + /** Selects the latest preview model optimized for speed. */ + @JvmField public val PREVIEW_FAST: OnDeviceModelOption = OnDeviceModelOption("preview_fast") + } } internal fun OnDeviceModelOption.toMlKit(): com.google.mlkit.genai.prompt.GenerationConfig = diff --git a/ai-logic/firebase-ai/src/main/kotlin/com/google/firebase/ai/GenerativeModel.kt b/ai-logic/firebase-ai/src/main/kotlin/com/google/firebase/ai/GenerativeModel.kt index ef16c6c1abd..9f0cc5df18e 100644 --- a/ai-logic/firebase-ai/src/main/kotlin/com/google/firebase/ai/GenerativeModel.kt +++ b/ai-logic/firebase-ai/src/main/kotlin/com/google/firebase/ai/GenerativeModel.kt @@ -345,9 +345,14 @@ internal constructor( } @OptIn(PublicPreviewAPI::class) - internal fun buildOnDeviceModelProvider(): GenerativeModelProvider = + internal fun buildOnDeviceModelProvider( + generationConfig: OnDeviceGenerationConfig? 
+ ): GenerativeModelProvider = onDeviceFactoryProvider?.let { - OnDeviceGenerativeModelProvider(it.newGenerativeModel(), onDeviceConfig) + OnDeviceGenerativeModelProvider( + it.newGenerativeModel(generationConfig?.toInterop()), + onDeviceConfig + ) } ?: MissingOnDeviceGenerativeModelProvider() @@ -355,10 +360,10 @@ internal constructor( internal fun getModelProvider(): GenerativeModelProvider = when (onDeviceConfig.mode) { InferenceMode.ONLY_IN_CLOUD -> buildCloudModelProvider() - InferenceMode.ONLY_ON_DEVICE -> buildOnDeviceModelProvider() + InferenceMode.ONLY_ON_DEVICE -> buildOnDeviceModelProvider(onDeviceConfig.generationConfig) InferenceMode.PREFER_ON_DEVICE -> { FallbackGenerativeModelProvider( - defaultModel = buildOnDeviceModelProvider(), + defaultModel = buildOnDeviceModelProvider(onDeviceConfig.generationConfig), fallbackModel = buildCloudModelProvider(isHybrid = true), shouldFallbackInException = true ) @@ -366,7 +371,7 @@ internal constructor( InferenceMode.PREFER_IN_CLOUD -> FallbackGenerativeModelProvider( defaultModel = buildCloudModelProvider(isHybrid = true), - fallbackModel = buildOnDeviceModelProvider(), + fallbackModel = buildOnDeviceModelProvider(onDeviceConfig.generationConfig), precondition = NetworkStatusChecker( firebaseApp.applicationContext.getSystemService(Context.CONNECTIVITY_SERVICE) diff --git a/ai-logic/firebase-ai/src/main/kotlin/com/google/firebase/ai/OnDeviceConfig.kt b/ai-logic/firebase-ai/src/main/kotlin/com/google/firebase/ai/OnDeviceConfig.kt index 3355a726a5f..c9ff6d50a79 100644 --- a/ai-logic/firebase-ai/src/main/kotlin/com/google/firebase/ai/OnDeviceConfig.kt +++ b/ai-logic/firebase-ai/src/main/kotlin/com/google/firebase/ai/OnDeviceConfig.kt @@ -32,6 +32,7 @@ import com.google.firebase.ai.type.PublicPreviewAPI * for more detail. * @property candidateCount The number of generated responses to return. See [GenerationConfig] for * more detail. By default it's set to `1`. 
+ * @property generationConfig Configuration for the on-device model selection and performance. */ @PublicPreviewAPI public class OnDeviceConfig @@ -42,7 +43,8 @@ constructor( public val temperature: Float? = null, public val topK: Int? = null, public val seed: Int? = null, - public val candidateCount: Int = 1 + public val candidateCount: Int = 1, + public val generationConfig: OnDeviceGenerationConfig? = null ) { public companion object { @@ -97,3 +99,71 @@ public class InferenceSource private constructor(private val value: String) { @JvmField public val IN_CLOUD: InferenceSource = InferenceSource("In Cloud") } } + +@PublicPreviewAPI +public class ModelReleaseStage private constructor(private val value: String) { + override fun toString(): String = value + + internal fun toInterop(): com.google.firebase.ai.ondevice.interop.ModelReleaseStage = + when (this) { + PREVIEW -> com.google.firebase.ai.ondevice.interop.ModelReleaseStage.PREVIEW + else -> com.google.firebase.ai.ondevice.interop.ModelReleaseStage.STABLE + } + + public companion object { + @JvmField public val STABLE: ModelReleaseStage = ModelReleaseStage("stable") + @JvmField public val PREVIEW: ModelReleaseStage = ModelReleaseStage("preview") + } +} + +@PublicPreviewAPI +public class ModelPreference private constructor(private val value: String) { + override fun toString(): String = value + + internal fun toInterop(): com.google.firebase.ai.ondevice.interop.ModelPreference = + when (this) { + FAST -> com.google.firebase.ai.ondevice.interop.ModelPreference.FAST + else -> com.google.firebase.ai.ondevice.interop.ModelPreference.FULL + } + + public companion object { + @JvmField public val FULL: ModelPreference = ModelPreference("full") + @JvmField public val FAST: ModelPreference = ModelPreference("fast") + } +} + +@PublicPreviewAPI +public class ModelConfig( + public val releaseStage: ModelReleaseStage = ModelReleaseStage.STABLE, + public val preference: ModelPreference = ModelPreference.FULL, +) { + internal 
fun toInterop(): com.google.firebase.ai.ondevice.interop.ModelConfig = + com.google.firebase.ai.ondevice.interop.ModelConfig( + releaseStage = releaseStage.toInterop(), + preference = preference.toInterop() + ) + + override fun equals(other: Any?): Boolean = + other is ModelConfig && releaseStage == other.releaseStage && preference == other.preference + + override fun hashCode(): Int { + var result = releaseStage.hashCode() + result = 31 * result + preference.hashCode() + return result + } + + override fun toString(): String { + return "ModelConfig(releaseStage=$releaseStage, preference=$preference)" + } +} + +@PublicPreviewAPI +public class OnDeviceGenerationConfig(public val modelConfig: ModelConfig? = null) { + internal fun toInterop(): com.google.firebase.ai.ondevice.interop.GenerationConfig = + com.google.firebase.ai.ondevice.interop.GenerationConfig(modelConfig = modelConfig?.toInterop()) + + override fun toString(): String = "OnDeviceGenerationConfig(modelConfig=$modelConfig)" + override fun equals(other: Any?): Boolean = + other is OnDeviceGenerationConfig && modelConfig == other.modelConfig + override fun hashCode(): Int = modelConfig?.hashCode() ?: 0 +} diff --git a/ai-logic/firebase-ai/src/test/java/com/google/firebase/ai/generativeModel/OnDeviceGenerativeModelProviderTests.kt b/ai-logic/firebase-ai/src/test/java/com/google/firebase/ai/generativeModel/OnDeviceGenerativeModelProviderTests.kt index f9483cea73f..c1db6f6dfd9 100644 --- a/ai-logic/firebase-ai/src/test/java/com/google/firebase/ai/generativeModel/OnDeviceGenerativeModelProviderTests.kt +++ b/ai-logic/firebase-ai/src/test/java/com/google/firebase/ai/generativeModel/OnDeviceGenerativeModelProviderTests.kt @@ -18,7 +18,11 @@ package com.google.firebase.ai.generativemodel import com.google.firebase.ai.InferenceMode import com.google.firebase.ai.InferenceSource +import com.google.firebase.ai.ModelConfig +import com.google.firebase.ai.ModelPreference +import com.google.firebase.ai.ModelReleaseStage 
import com.google.firebase.ai.OnDeviceConfig +import com.google.firebase.ai.OnDeviceGenerationConfig import com.google.firebase.ai.ondevice.interop.Candidate as OnDeviceCandidate import com.google.firebase.ai.ondevice.interop.CountTokensResponse as OnDeviceCountTokensResponse import com.google.firebase.ai.ondevice.interop.FinishReason as OnDeviceFinishReason @@ -129,4 +133,37 @@ internal class OnDeviceGenerativeModelProviderTests { val exception = shouldThrow { provider.generateContent(promptNoText) } exception.cause!!::class shouldBe IllegalArgumentException::class } + + @Test + fun `OnDeviceConfig can be constructed with OnDeviceGenerationConfig`() { + val config = + OnDeviceConfig( + mode = InferenceMode.ONLY_ON_DEVICE, + generationConfig = + OnDeviceGenerationConfig( + modelConfig = + ModelConfig( + releaseStage = ModelReleaseStage.PREVIEW, + preference = ModelPreference.FAST + ) + ) + ) + config.generationConfig?.modelConfig?.releaseStage shouldBe ModelReleaseStage.PREVIEW + config.generationConfig?.modelConfig?.preference shouldBe ModelPreference.FAST + } + @Test + fun `OnDeviceGenerationConfig toInterop maps correctly`() { + val config = + OnDeviceGenerationConfig( + modelConfig = + ModelConfig(releaseStage = ModelReleaseStage.PREVIEW, preference = ModelPreference.FAST) + ) + + val interopConfig = config.toInterop() + + interopConfig.modelConfig?.releaseStage shouldBe + com.google.firebase.ai.ondevice.interop.ModelReleaseStage.PREVIEW + interopConfig.modelConfig?.preference shouldBe + com.google.firebase.ai.ondevice.interop.ModelPreference.FAST + } } From 64cb225db00dc0e2a04a8a447872a5338033d101 Mon Sep 17 00:00:00 2001 From: Rodrigo Lazo Paz Date: Wed, 29 Apr 2026 00:59:27 -0400 Subject: [PATCH 14/17] Simplify model option --- .../com/google/firebase/ai/GenerativeModel.kt | 10 +- .../com/google/firebase/ai/OnDeviceConfig.kt | 93 +++++++------------ .../OnDeviceGenerativeModelProviderTests.kt | 31 ++----- 3 files changed, 46 insertions(+), 88 deletions(-) 
diff --git a/ai-logic/firebase-ai/src/main/kotlin/com/google/firebase/ai/GenerativeModel.kt b/ai-logic/firebase-ai/src/main/kotlin/com/google/firebase/ai/GenerativeModel.kt index 9f0cc5df18e..f446bbb7845 100644 --- a/ai-logic/firebase-ai/src/main/kotlin/com/google/firebase/ai/GenerativeModel.kt +++ b/ai-logic/firebase-ai/src/main/kotlin/com/google/firebase/ai/GenerativeModel.kt @@ -346,11 +346,11 @@ internal constructor( @OptIn(PublicPreviewAPI::class) internal fun buildOnDeviceModelProvider( - generationConfig: OnDeviceGenerationConfig? + modelOption: OnDeviceModelOption? ): GenerativeModelProvider = onDeviceFactoryProvider?.let { OnDeviceGenerativeModelProvider( - it.newGenerativeModel(generationConfig?.toInterop()), + it.newGenerativeModel(modelOption?.toInterop()), onDeviceConfig ) } @@ -360,10 +360,10 @@ internal constructor( internal fun getModelProvider(): GenerativeModelProvider = when (onDeviceConfig.mode) { InferenceMode.ONLY_IN_CLOUD -> buildCloudModelProvider() - InferenceMode.ONLY_ON_DEVICE -> buildOnDeviceModelProvider(onDeviceConfig.generationConfig) + InferenceMode.ONLY_ON_DEVICE -> buildOnDeviceModelProvider(onDeviceConfig.modelOption) InferenceMode.PREFER_ON_DEVICE -> { FallbackGenerativeModelProvider( - defaultModel = buildOnDeviceModelProvider(onDeviceConfig.generationConfig), + defaultModel = buildOnDeviceModelProvider(onDeviceConfig.modelOption), fallbackModel = buildCloudModelProvider(isHybrid = true), shouldFallbackInException = true ) @@ -371,7 +371,7 @@ internal constructor( InferenceMode.PREFER_IN_CLOUD -> FallbackGenerativeModelProvider( defaultModel = buildCloudModelProvider(isHybrid = true), - fallbackModel = buildOnDeviceModelProvider(onDeviceConfig.generationConfig), + fallbackModel = buildOnDeviceModelProvider(onDeviceConfig.modelOption), precondition = NetworkStatusChecker( firebaseApp.applicationContext.getSystemService(Context.CONNECTIVITY_SERVICE) diff --git 
a/ai-logic/firebase-ai/src/main/kotlin/com/google/firebase/ai/OnDeviceConfig.kt b/ai-logic/firebase-ai/src/main/kotlin/com/google/firebase/ai/OnDeviceConfig.kt index c9ff6d50a79..1151e871e4c 100644 --- a/ai-logic/firebase-ai/src/main/kotlin/com/google/firebase/ai/OnDeviceConfig.kt +++ b/ai-logic/firebase-ai/src/main/kotlin/com/google/firebase/ai/OnDeviceConfig.kt @@ -32,7 +32,7 @@ import com.google.firebase.ai.type.PublicPreviewAPI * for more detail. * @property candidateCount The number of generated responses to return. See [GenerationConfig] for * more detail. By default it's set to `1`. - * @property generationConfig Configuration for the on-device model selection and performance. + * @property modelOption Configuration for the on-device model selection and performance. */ @PublicPreviewAPI public class OnDeviceConfig @@ -44,7 +44,7 @@ constructor( public val topK: Int? = null, public val seed: Int? = null, public val candidateCount: Int = 1, - public val generationConfig: OnDeviceGenerationConfig? = null + public val modelOption: OnDeviceModelOption? 
= null ) { public companion object { @@ -101,69 +101,40 @@ public class InferenceSource private constructor(private val value: String) { } @PublicPreviewAPI -public class ModelReleaseStage private constructor(private val value: String) { +public class OnDeviceModelOption private constructor(private val value: String) { override fun toString(): String = value - internal fun toInterop(): com.google.firebase.ai.ondevice.interop.ModelReleaseStage = - when (this) { - PREVIEW -> com.google.firebase.ai.ondevice.interop.ModelReleaseStage.PREVIEW - else -> com.google.firebase.ai.ondevice.interop.ModelReleaseStage.STABLE - } - public companion object { - @JvmField public val STABLE: ModelReleaseStage = ModelReleaseStage("stable") - @JvmField public val PREVIEW: ModelReleaseStage = ModelReleaseStage("preview") + @JvmField public val STABLE: OnDeviceModelOption = OnDeviceModelOption("stable") + @JvmField public val PREVIEW: OnDeviceModelOption = OnDeviceModelOption("preview") + @JvmField public val PREVIEW_FAST: OnDeviceModelOption = OnDeviceModelOption("preview_fast") } } -@PublicPreviewAPI -public class ModelPreference private constructor(private val value: String) { - override fun toString(): String = value - - internal fun toInterop(): com.google.firebase.ai.ondevice.interop.ModelPreference = - when (this) { - FAST -> com.google.firebase.ai.ondevice.interop.ModelPreference.FAST - else -> com.google.firebase.ai.ondevice.interop.ModelPreference.FULL - } - - public companion object { - @JvmField public val FULL: ModelPreference = ModelPreference("full") - @JvmField public val FAST: ModelPreference = ModelPreference("fast") +@OptIn(PublicPreviewAPI::class) +internal fun OnDeviceModelOption.toInterop(): + com.google.firebase.ai.ondevice.interop.GenerationConfig = + when (this) { + OnDeviceModelOption.STABLE -> + com.google.firebase.ai.ondevice.interop.GenerationConfig( + com.google.firebase.ai.ondevice.interop.ModelConfig( + 
com.google.firebase.ai.ondevice.interop.ModelReleaseStage.STABLE, + com.google.firebase.ai.ondevice.interop.ModelPreference.FULL + ) + ) + OnDeviceModelOption.PREVIEW -> + com.google.firebase.ai.ondevice.interop.GenerationConfig( + com.google.firebase.ai.ondevice.interop.ModelConfig( + com.google.firebase.ai.ondevice.interop.ModelReleaseStage.PREVIEW, + com.google.firebase.ai.ondevice.interop.ModelPreference.FULL + ) + ) + OnDeviceModelOption.PREVIEW_FAST -> + com.google.firebase.ai.ondevice.interop.GenerationConfig( + com.google.firebase.ai.ondevice.interop.ModelConfig( + com.google.firebase.ai.ondevice.interop.ModelReleaseStage.PREVIEW, + com.google.firebase.ai.ondevice.interop.ModelPreference.FAST + ) + ) + else -> throw IllegalArgumentException("Unknown option") } -} - -@PublicPreviewAPI -public class ModelConfig( - public val releaseStage: ModelReleaseStage = ModelReleaseStage.STABLE, - public val preference: ModelPreference = ModelPreference.FULL, -) { - internal fun toInterop(): com.google.firebase.ai.ondevice.interop.ModelConfig = - com.google.firebase.ai.ondevice.interop.ModelConfig( - releaseStage = releaseStage.toInterop(), - preference = preference.toInterop() - ) - - override fun equals(other: Any?): Boolean = - other is ModelConfig && releaseStage == other.releaseStage && preference == other.preference - - override fun hashCode(): Int { - var result = releaseStage.hashCode() - result = 31 * result + preference.hashCode() - return result - } - - override fun toString(): String { - return "ModelConfig(releaseStage=$releaseStage, preference=$preference)" - } -} - -@PublicPreviewAPI -public class OnDeviceGenerationConfig(public val modelConfig: ModelConfig? 
= null) { - internal fun toInterop(): com.google.firebase.ai.ondevice.interop.GenerationConfig = - com.google.firebase.ai.ondevice.interop.GenerationConfig(modelConfig = modelConfig?.toInterop()) - - override fun toString(): String = "OnDeviceGenerationConfig(modelConfig=$modelConfig)" - override fun equals(other: Any?): Boolean = - other is OnDeviceGenerationConfig && modelConfig == other.modelConfig - override fun hashCode(): Int = modelConfig?.hashCode() ?: 0 -} diff --git a/ai-logic/firebase-ai/src/test/java/com/google/firebase/ai/generativeModel/OnDeviceGenerativeModelProviderTests.kt b/ai-logic/firebase-ai/src/test/java/com/google/firebase/ai/generativeModel/OnDeviceGenerativeModelProviderTests.kt index c1db6f6dfd9..9ef22433578 100644 --- a/ai-logic/firebase-ai/src/test/java/com/google/firebase/ai/generativeModel/OnDeviceGenerativeModelProviderTests.kt +++ b/ai-logic/firebase-ai/src/test/java/com/google/firebase/ai/generativeModel/OnDeviceGenerativeModelProviderTests.kt @@ -18,17 +18,15 @@ package com.google.firebase.ai.generativemodel import com.google.firebase.ai.InferenceMode import com.google.firebase.ai.InferenceSource -import com.google.firebase.ai.ModelConfig -import com.google.firebase.ai.ModelPreference -import com.google.firebase.ai.ModelReleaseStage import com.google.firebase.ai.OnDeviceConfig -import com.google.firebase.ai.OnDeviceGenerationConfig +import com.google.firebase.ai.OnDeviceModelOption import com.google.firebase.ai.ondevice.interop.Candidate as OnDeviceCandidate import com.google.firebase.ai.ondevice.interop.CountTokensResponse as OnDeviceCountTokensResponse import com.google.firebase.ai.ondevice.interop.FinishReason as OnDeviceFinishReason import com.google.firebase.ai.ondevice.interop.FirebaseAIOnDeviceNotAvailableException import com.google.firebase.ai.ondevice.interop.GenerateContentResponse as OnDeviceGenerateContentResponse import com.google.firebase.ai.ondevice.interop.GenerativeModel as OnDeviceGenerativeModel +import 
com.google.firebase.ai.toInterop import com.google.firebase.ai.type.Content import com.google.firebase.ai.type.FirebaseAIException import com.google.firebase.ai.type.JsonSchema @@ -135,31 +133,20 @@ internal class OnDeviceGenerativeModelProviderTests { } @Test - fun `OnDeviceConfig can be constructed with OnDeviceGenerationConfig`() { + fun `OnDeviceConfig can be constructed with OnDeviceModelOption`() { val config = OnDeviceConfig( mode = InferenceMode.ONLY_ON_DEVICE, - generationConfig = - OnDeviceGenerationConfig( - modelConfig = - ModelConfig( - releaseStage = ModelReleaseStage.PREVIEW, - preference = ModelPreference.FAST - ) - ) + modelOption = OnDeviceModelOption.PREVIEW_FAST ) - config.generationConfig?.modelConfig?.releaseStage shouldBe ModelReleaseStage.PREVIEW - config.generationConfig?.modelConfig?.preference shouldBe ModelPreference.FAST + config.modelOption shouldBe OnDeviceModelOption.PREVIEW_FAST } + @Test - fun `OnDeviceGenerationConfig toInterop maps correctly`() { - val config = - OnDeviceGenerationConfig( - modelConfig = - ModelConfig(releaseStage = ModelReleaseStage.PREVIEW, preference = ModelPreference.FAST) - ) + fun `OnDeviceModelOption toInterop maps correctly`() { + val option = OnDeviceModelOption.PREVIEW_FAST - val interopConfig = config.toInterop() + val interopConfig = option.toInterop() interopConfig.modelConfig?.releaseStage shouldBe com.google.firebase.ai.ondevice.interop.ModelReleaseStage.PREVIEW From 72954b0ece83fab0c4b8be37dcae96e519521860 Mon Sep 17 00:00:00 2001 From: Rodrigo Lazo Paz Date: Wed, 29 Apr 2026 01:50:07 -0400 Subject: [PATCH 15/17] Update api.txt files --- ai-logic/firebase-ai-ondevice/api.txt | 14 ++++++++++++-- ai-logic/firebase-ai/api.txt | 13 +++++++++++++ 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/ai-logic/firebase-ai-ondevice/api.txt b/ai-logic/firebase-ai-ondevice/api.txt index df8eeaac394..112e763ae3e 100644 --- a/ai-logic/firebase-ai-ondevice/api.txt +++ 
b/ai-logic/firebase-ai-ondevice/api.txt @@ -25,11 +25,21 @@ package com.google.firebase.ai.ondevice { } public final class FirebaseAIOnDevice { - method public suspend Object? checkStatus(kotlin.coroutines.Continuation); - method public kotlinx.coroutines.flow.Flow download(); + method public suspend Object? checkStatus(com.google.firebase.ai.ondevice.OnDeviceModelOption option, kotlin.coroutines.Continuation); + method public kotlinx.coroutines.flow.Flow download(com.google.firebase.ai.ondevice.OnDeviceModelOption option); field public static final com.google.firebase.ai.ondevice.FirebaseAIOnDevice INSTANCE; } + public final class OnDeviceModelOption { + field public static final com.google.firebase.ai.ondevice.OnDeviceModelOption.Companion Companion; + field public static final com.google.firebase.ai.ondevice.OnDeviceModelOption PREVIEW; + field public static final com.google.firebase.ai.ondevice.OnDeviceModelOption PREVIEW_FAST; + field public static final com.google.firebase.ai.ondevice.OnDeviceModelOption STABLE; + } + + public static final class OnDeviceModelOption.Companion { + } + public final class OnDeviceModelStatus { field public static final com.google.firebase.ai.ondevice.OnDeviceModelStatus AVAILABLE; field public static final com.google.firebase.ai.ondevice.OnDeviceModelStatus.Companion Companion; diff --git a/ai-logic/firebase-ai/api.txt b/ai-logic/firebase-ai/api.txt index 18a650770bd..7dff0b70e60 100644 --- a/ai-logic/firebase-ai/api.txt +++ b/ai-logic/firebase-ai/api.txt @@ -121,15 +121,18 @@ package com.google.firebase.ai { ctor public OnDeviceConfig(com.google.firebase.ai.InferenceMode mode, Integer? maxOutputTokens = null, Float? temperature = null, Integer? topK = null); ctor public OnDeviceConfig(com.google.firebase.ai.InferenceMode mode, Integer? maxOutputTokens = null, Float? temperature = null, Integer? topK = null, Integer? seed = null); ctor public OnDeviceConfig(com.google.firebase.ai.InferenceMode mode, Integer? 
maxOutputTokens = null, Float? temperature = null, Integer? topK = null, Integer? seed = null, int candidateCount = 1); + ctor public OnDeviceConfig(com.google.firebase.ai.InferenceMode mode, Integer? maxOutputTokens = null, Float? temperature = null, Integer? topK = null, Integer? seed = null, int candidateCount = 1, com.google.firebase.ai.OnDeviceModelOption? modelOption = null); method public int getCandidateCount(); method public Integer? getMaxOutputTokens(); method public com.google.firebase.ai.InferenceMode getMode(); + method public com.google.firebase.ai.OnDeviceModelOption? getModelOption(); method public Integer? getSeed(); method public Float? getTemperature(); method public Integer? getTopK(); property public final int candidateCount; property public final Integer? maxOutputTokens; property public final com.google.firebase.ai.InferenceMode mode; + property public final com.google.firebase.ai.OnDeviceModelOption? modelOption; property public final Integer? seed; property public final Float? temperature; property public final Integer? topK; @@ -140,6 +143,16 @@ package com.google.firebase.ai { public static final class OnDeviceConfig.Companion { } + @com.google.firebase.ai.type.PublicPreviewAPI public final class OnDeviceModelOption { + field public static final com.google.firebase.ai.OnDeviceModelOption.Companion Companion; + field public static final com.google.firebase.ai.OnDeviceModelOption PREVIEW; + field public static final com.google.firebase.ai.OnDeviceModelOption PREVIEW_FAST; + field public static final com.google.firebase.ai.OnDeviceModelOption STABLE; + } + + public static final class OnDeviceModelOption.Companion { + } + @com.google.firebase.ai.type.PublicPreviewAPI public final class TemplateChat { method public java.util.List getHistory(); method public suspend Object? 
sendMessage(com.google.firebase.ai.type.Content prompt, kotlin.coroutines.Continuation); From 1f23d4d840ebb3af1a771bf81ab2736b521b3568 Mon Sep 17 00:00:00 2001 From: Rodrigo Lazo Paz Date: Wed, 29 Apr 2026 01:54:43 -0400 Subject: [PATCH 16/17] address gemini comments --- .../com/google/firebase/ai/ondevice/FirebaseAIOnDevice.kt | 1 + .../src/main/kotlin/com/google/firebase/ai/OnDeviceConfig.kt | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDevice.kt b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDevice.kt index 8f708b83d46..f66674e452b 100644 --- a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDevice.kt +++ b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDevice.kt @@ -94,6 +94,7 @@ internal fun OnDeviceModelOption.toMlKit(): com.google.mlkit.genai.prompt.Genera releaseStage = MlKitModelReleaseStage.PREVIEW preference = MlKitModelPreference.FAST } + else -> throw IllegalArgumentException("Unknown option: ${this@toMlKit}") } } } diff --git a/ai-logic/firebase-ai/src/main/kotlin/com/google/firebase/ai/OnDeviceConfig.kt b/ai-logic/firebase-ai/src/main/kotlin/com/google/firebase/ai/OnDeviceConfig.kt index 1151e871e4c..b127d14dd85 100644 --- a/ai-logic/firebase-ai/src/main/kotlin/com/google/firebase/ai/OnDeviceConfig.kt +++ b/ai-logic/firebase-ai/src/main/kotlin/com/google/firebase/ai/OnDeviceConfig.kt @@ -104,6 +104,10 @@ public class InferenceSource private constructor(private val value: String) { public class OnDeviceModelOption private constructor(private val value: String) { override fun toString(): String = value + override fun equals(other: Any?): Boolean = other is OnDeviceModelOption && value == other.value + + override fun hashCode(): Int = value.hashCode() + public companion object { @JvmField public val STABLE: OnDeviceModelOption = 
OnDeviceModelOption("stable") @JvmField public val PREVIEW: OnDeviceModelOption = OnDeviceModelOption("preview") From 54910f8b8112a25ec7885c18c659df7620d88a8b Mon Sep 17 00:00:00 2001 From: Rodrigo Lazo Paz Date: Wed, 29 Apr 2026 10:15:55 -0400 Subject: [PATCH 17/17] Address comments from review --- ai-logic/firebase-ai-ondevice-interop/api.txt | 6 +++--- .../google/firebase/ai/ondevice/interop/GenerationConfig.kt | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/ai-logic/firebase-ai-ondevice-interop/api.txt b/ai-logic/firebase-ai-ondevice-interop/api.txt index b160b11527c..8e4c4e26d19 100644 --- a/ai-logic/firebase-ai-ondevice-interop/api.txt +++ b/ai-logic/firebase-ai-ondevice-interop/api.txt @@ -77,9 +77,9 @@ package com.google.firebase.ai.ondevice.interop { } public final class GenerationConfig { - ctor public GenerationConfig(com.google.firebase.ai.ondevice.interop.ModelConfig? modelConfig = null); - method public com.google.firebase.ai.ondevice.interop.ModelConfig? getModelConfig(); - property public final com.google.firebase.ai.ondevice.interop.ModelConfig? 
modelConfig; + ctor public GenerationConfig(com.google.firebase.ai.ondevice.interop.ModelConfig modelConfig); + method public com.google.firebase.ai.ondevice.interop.ModelConfig getModelConfig(); + property public final com.google.firebase.ai.ondevice.interop.ModelConfig modelConfig; } public interface GenerativeModel { diff --git a/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/GenerationConfig.kt b/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/GenerationConfig.kt index ed30ef2a977..65f5f2462a5 100644 --- a/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/GenerationConfig.kt +++ b/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/GenerationConfig.kt @@ -16,13 +16,13 @@ package com.google.firebase.ai.ondevice.interop -public class GenerationConfig(public val modelConfig: ModelConfig? = null) { +public class GenerationConfig(public val modelConfig: ModelConfig) { override fun toString(): String = "GenerationConfig(modelConfig=$modelConfig)" override fun equals(other: Any?): Boolean = other is GenerationConfig && modelConfig == other.modelConfig - override fun hashCode(): Int = modelConfig?.hashCode() ?: 0 + override fun hashCode(): Int = modelConfig.hashCode() } /**