diff --git a/ai-logic/firebase-ai-ondevice-interop/CHANGELOG.md b/ai-logic/firebase-ai-ondevice-interop/CHANGELOG.md index 7f045d58617..4134d65099b 100644 --- a/ai-logic/firebase-ai-ondevice-interop/CHANGELOG.md +++ b/ai-logic/firebase-ai-ondevice-interop/CHANGELOG.md @@ -1,6 +1,7 @@ # Unreleased +- [feature] Added support for model selection, required for nano-v4. (#8043) + # 16.0.0-beta01 - [feature] Initial release. - diff --git a/ai-logic/firebase-ai-ondevice-interop/api.txt b/ai-logic/firebase-ai-ondevice-interop/api.txt index c27deeb6795..b160b11527c 100644 --- a/ai-logic/firebase-ai-ondevice-interop/api.txt +++ b/ai-logic/firebase-ai-ondevice-interop/api.txt @@ -36,7 +36,8 @@ package com.google.firebase.ai.ondevice.interop { } public interface FirebaseAIOnDeviceGenerativeModelFactory { - method public com.google.firebase.ai.ondevice.interop.GenerativeModel newGenerativeModel(); + method @Deprecated public com.google.firebase.ai.ondevice.interop.GenerativeModel newGenerativeModel(); + method public com.google.firebase.ai.ondevice.interop.GenerativeModel newGenerativeModel(com.google.firebase.ai.ondevice.interop.GenerationConfig? generationConfig); } public final class FirebaseAIOnDeviceInvalidRequestException extends com.google.firebase.ai.ondevice.interop.FirebaseAIOnDeviceException { @@ -75,6 +76,12 @@ package com.google.firebase.ai.ondevice.interop { property public final java.util.List candidates; } + public final class GenerationConfig { + ctor public GenerationConfig(com.google.firebase.ai.ondevice.interop.ModelConfig? modelConfig = null); + method public com.google.firebase.ai.ondevice.interop.ModelConfig? getModelConfig(); + property public final com.google.firebase.ai.ondevice.interop.ModelConfig? modelConfig; + } + public interface GenerativeModel { method public suspend Object? countTokens(com.google.firebase.ai.ondevice.interop.GenerateContentRequest request, kotlin.coroutines.Continuation); method public suspend Object? 
generateContent(com.google.firebase.ai.ondevice.interop.GenerateContentRequest request, kotlin.coroutines.Continuation); @@ -91,6 +98,24 @@ package com.google.firebase.ai.ondevice.interop { property public final android.graphics.Bitmap bitmap; } + public final class ModelConfig { + ctor public ModelConfig(com.google.firebase.ai.ondevice.interop.ModelReleaseStage releaseStage = com.google.firebase.ai.ondevice.interop.ModelReleaseStage.STABLE, com.google.firebase.ai.ondevice.interop.ModelPreference preference = com.google.firebase.ai.ondevice.interop.ModelPreference.FULL); + method public com.google.firebase.ai.ondevice.interop.ModelPreference getPreference(); + method public com.google.firebase.ai.ondevice.interop.ModelReleaseStage getReleaseStage(); + property public final com.google.firebase.ai.ondevice.interop.ModelPreference preference; + property public final com.google.firebase.ai.ondevice.interop.ModelReleaseStage releaseStage; + } + + public enum ModelPreference { + enum_constant public static final com.google.firebase.ai.ondevice.interop.ModelPreference FAST; + enum_constant public static final com.google.firebase.ai.ondevice.interop.ModelPreference FULL; + } + + public enum ModelReleaseStage { + enum_constant public static final com.google.firebase.ai.ondevice.interop.ModelReleaseStage PREVIEW; + enum_constant public static final com.google.firebase.ai.ondevice.interop.ModelReleaseStage STABLE; + } + public interface Part { } diff --git a/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/FirebaseAIOnDeviceGenerativeModelFactory.kt b/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/FirebaseAIOnDeviceGenerativeModelFactory.kt index aa2361371d7..8cc45e80e6f 100644 --- a/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/FirebaseAIOnDeviceGenerativeModelFactory.kt +++ 
b/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/FirebaseAIOnDeviceGenerativeModelFactory.kt @@ -28,5 +28,19 @@ public interface FirebaseAIOnDeviceGenerativeModelFactory { * * @return A new [GenerativeModel] instance ready for use. */ + @Deprecated( + message = "Use newGenerativeModel(GenerationConfig?) instead", + replaceWith = ReplaceWith("newGenerativeModel(null)") + ) public fun newGenerativeModel(): GenerativeModel + + /** + * Creates and returns a new instance of [GenerativeModel] optionally configured with + * [GenerationConfig]. + * + * @param generationConfig The configuration for the model, or `null` if the default configuration + * should be used. + * @return A new [GenerativeModel] instance ready for use. + */ + public fun newGenerativeModel(generationConfig: GenerationConfig?): GenerativeModel } diff --git a/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/GenerationConfig.kt b/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/GenerationConfig.kt new file mode 100644 index 00000000000..ed30ef2a977 --- /dev/null +++ b/ai-logic/firebase-ai-ondevice-interop/src/main/kotlin/com/google/firebase/ai/ondevice/interop/GenerationConfig.kt @@ -0,0 +1,73 @@ +/* + * Copyright 2026 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.firebase.ai.ondevice.interop + +public class GenerationConfig(public val modelConfig: ModelConfig? = null) { + override fun toString(): String = "GenerationConfig(modelConfig=$modelConfig)" + + override fun equals(other: Any?): Boolean = + other is GenerationConfig && modelConfig == other.modelConfig + + override fun hashCode(): Int = modelConfig?.hashCode() ?: 0 +} + +/** + * Configuration parameters for model selection. + * + * @property releaseStage The release stage of the model to use. + * @property preference The performance preference for the model. + */ +public class ModelConfig( + public val releaseStage: ModelReleaseStage = ModelReleaseStage.STABLE, + public val preference: ModelPreference = ModelPreference.FULL, +) { + override fun equals(other: Any?): Boolean = + other is ModelConfig && releaseStage == other.releaseStage && preference == other.preference + + override fun hashCode(): Int { + var result = releaseStage.hashCode() + result = 31 * result + preference.hashCode() + return result + } + + override fun toString(): String { + return "ModelConfig(releaseStage=$releaseStage, preference=$preference)" + } +} +/** Defines the release stage of the model. */ +public enum class ModelReleaseStage { + /** + * Selects the latest model version that is fully tested and on consumer devices. This is the + * default setting. + */ + STABLE, + + /** + * Selects the latest model version in the preview stage. This stage lets you test beta features + * or newer model architectures before they are widely deployed. + */ + PREVIEW, +} + +/** Defines the performance preference for the model. */ +public enum class ModelPreference { + /** Recommended when model accuracy and full capabilities are prioritized over speed. */ + FULL, + + /** Recommended for latency-sensitive apps that require minimal response times. 
*/ + FAST, +} diff --git a/ai-logic/firebase-ai-ondevice/CHANGELOG.md b/ai-logic/firebase-ai-ondevice/CHANGELOG.md index 7f045d58617..0a19d412041 100644 --- a/ai-logic/firebase-ai-ondevice/CHANGELOG.md +++ b/ai-logic/firebase-ai-ondevice/CHANGELOG.md @@ -1,5 +1,7 @@ # Unreleased +- [feature] Added support for model selection, required for nano-v4. (#8043) + # 16.0.0-beta01 - [feature] Initial release. diff --git a/ai-logic/firebase-ai-ondevice/firebase-ai-ondevice.gradle.kts b/ai-logic/firebase-ai-ondevice/firebase-ai-ondevice.gradle.kts index ae21bd4f955..9cb9a75df6d 100644 --- a/ai-logic/firebase-ai-ondevice/firebase-ai-ondevice.gradle.kts +++ b/ai-logic/firebase-ai-ondevice/firebase-ai-ondevice.gradle.kts @@ -68,7 +68,7 @@ kotlin { dependencies { implementation(libs.genai.prompt) - implementation("com.google.firebase:firebase-ai-ondevice-interop:16.0.0-beta01") + implementation(project(":ai-logic:firebase-ai-ondevice-interop")) implementation(libs.firebase.common) implementation(libs.firebase.components) diff --git a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/Converters.kt b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/Converters.kt index ef615423857..85e1ae61794 100644 --- a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/Converters.kt +++ b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/Converters.kt @@ -22,9 +22,15 @@ import com.google.firebase.ai.ondevice.interop.CountTokensResponse import com.google.firebase.ai.ondevice.interop.FinishReason import com.google.firebase.ai.ondevice.interop.FirebaseAIOnDeviceInvalidRequestException import com.google.firebase.ai.ondevice.interop.GenerateContentResponse +import com.google.firebase.ai.ondevice.interop.GenerationConfig +import com.google.firebase.ai.ondevice.interop.ModelConfig import com.google.mlkit.genai.prompt.GenerateContentRequest import com.google.mlkit.genai.prompt.ImagePart 
+import com.google.mlkit.genai.prompt.ModelPreference +import com.google.mlkit.genai.prompt.ModelReleaseStage import com.google.mlkit.genai.prompt.TextPart +import com.google.mlkit.genai.prompt.generationConfig +import com.google.mlkit.genai.prompt.modelConfig import kotlin.math.min // ==================================== @@ -75,6 +81,34 @@ internal fun com.google.firebase.ai.ondevice.interop.GenerateContentRequest.toMl internal fun com.google.mlkit.genai.prompt.GenerateContentResponse.toInterop(): GenerateContentResponse = GenerateContentResponse(candidates.map { it.toInterop() }) +// ================================================ +// `GenerationConfig` converter extension functions +// ================================================ +internal fun GenerationConfig.toMlKit(): com.google.mlkit.genai.prompt.GenerationConfig = + generationConfig { + this@toMlKit.modelConfig?.let { modelConfig = it.toMlKit() } + } + +// =========================================== +// `ModelConfig` converter extension functions +// =========================================== +internal fun ModelConfig.toMlKit(): com.google.mlkit.genai.prompt.ModelConfig = modelConfig { + releaseStage = this@toMlKit.releaseStage.toMlKit() + preference = this@toMlKit.preference.toMlKit() +} + +private fun com.google.firebase.ai.ondevice.interop.ModelReleaseStage.toMlKit(): Int = + when (this) { + com.google.firebase.ai.ondevice.interop.ModelReleaseStage.PREVIEW -> ModelReleaseStage.PREVIEW + com.google.firebase.ai.ondevice.interop.ModelReleaseStage.STABLE -> ModelReleaseStage.STABLE + } + +private fun com.google.firebase.ai.ondevice.interop.ModelPreference.toMlKit(): Int = + when (this) { + com.google.firebase.ai.ondevice.interop.ModelPreference.FULL -> ModelPreference.FULL + com.google.firebase.ai.ondevice.interop.ModelPreference.FAST -> ModelPreference.FAST + } + private fun generateContentRequest( text: com.google.firebase.ai.ondevice.interop.TextPart, image: 
com.google.firebase.ai.ondevice.interop.ImagePart? = null, diff --git a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDeviceComponent.kt b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDeviceComponent.kt index ab44fc1dc7c..9a98e5cab35 100644 --- a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDeviceComponent.kt +++ b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/FirebaseAIOnDeviceComponent.kt @@ -17,7 +17,9 @@ package com.google.firebase.ai.ondevice import com.google.firebase.ai.ondevice.interop.FirebaseAIOnDeviceGenerativeModelFactory +import com.google.firebase.ai.ondevice.interop.GenerationConfig import com.google.firebase.ai.ondevice.interop.GenerativeModel +import com.google.mlkit.genai.prompt.Generation /** * Factory class for Firebase AI OnDevice. @@ -26,5 +28,16 @@ import com.google.firebase.ai.ondevice.interop.GenerativeModel */ internal class FirebaseAIOnDeviceComponent : FirebaseAIOnDeviceGenerativeModelFactory { - override fun newGenerativeModel(): GenerativeModel = GenerativeModelImpl() + @Deprecated( + "Use newGenerativeModel(GenerationConfig?) 
instead", + replaceWith = ReplaceWith("newGenerativeModel(null)") + ) + override fun newGenerativeModel(): GenerativeModel = newGenerativeModel(null) + + override fun newGenerativeModel(generationConfig: GenerationConfig?): GenerativeModel = + if (generationConfig == null) { + GenerativeModelImpl(Generation.getClient()) + } else { + GenerativeModelImpl(Generation.getClient(generationConfig.toMlKit())) + } } diff --git a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/GenerativeModelImpl.kt b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/GenerativeModelImpl.kt index c2254b283a4..77a5354d667 100644 --- a/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/GenerativeModelImpl.kt +++ b/ai-logic/firebase-ai-ondevice/src/main/kotlin/com/google/firebase/ai/ondevice/GenerativeModelImpl.kt @@ -27,14 +27,13 @@ import com.google.firebase.ai.ondevice.interop.GenerativeModel import com.google.mlkit.genai.common.FeatureStatus import com.google.mlkit.genai.common.GenAiException import com.google.mlkit.genai.common.GenAiException.ErrorCode -import com.google.mlkit.genai.prompt.Generation import kotlinx.coroutines.flow.Flow import kotlinx.coroutines.flow.catch import kotlinx.coroutines.flow.map /** Implementation of [GenerativeModel] backed by MLKit's genai prompt SDK. 
*/ internal class GenerativeModelImpl( - internal val mlkitModel: com.google.mlkit.genai.prompt.GenerativeModel = Generation.getClient() + internal val mlkitModel: com.google.mlkit.genai.prompt.GenerativeModel ) : GenerativeModel { /** diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml index bf8ffbcf871..74a86cfa29c 100644 --- a/gradle/libs.versions.toml +++ b/gradle/libs.versions.toml @@ -28,7 +28,7 @@ firebaseAnnotations = "17.0.0" firebaseCommon = "22.0.1" firebaseComponents = "19.0.0" firebaseCrashlyticsGradle = "3.0.4" -genaiPrompt = "1.0.0-alpha1" +genaiPrompt = "1.0.0-beta2" glide = "5.0.5" googleApiClient = "2.8.1" googleServices = "4.3.15"