Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion ai-logic/firebase-ai-ondevice-interop/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
# Unreleased

- [feature] Added support for model selection, required for nano-v4. (#8043)

# 16.0.0-beta01

- [feature] Initial release.

27 changes: 26 additions & 1 deletion ai-logic/firebase-ai-ondevice-interop/api.txt
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,8 @@ package com.google.firebase.ai.ondevice.interop {
}

public interface FirebaseAIOnDeviceGenerativeModelFactory {
method public com.google.firebase.ai.ondevice.interop.GenerativeModel newGenerativeModel();
method @Deprecated public com.google.firebase.ai.ondevice.interop.GenerativeModel newGenerativeModel();
method public com.google.firebase.ai.ondevice.interop.GenerativeModel newGenerativeModel(com.google.firebase.ai.ondevice.interop.GenerationConfig? generationConfig);
}

public final class FirebaseAIOnDeviceInvalidRequestException extends com.google.firebase.ai.ondevice.interop.FirebaseAIOnDeviceException {
Expand Down Expand Up @@ -75,6 +76,12 @@ package com.google.firebase.ai.ondevice.interop {
property public final java.util.List<com.google.firebase.ai.ondevice.interop.Candidate> candidates;
}

public final class GenerationConfig {
ctor public GenerationConfig(com.google.firebase.ai.ondevice.interop.ModelConfig? modelConfig = null);
method public com.google.firebase.ai.ondevice.interop.ModelConfig? getModelConfig();
property public final com.google.firebase.ai.ondevice.interop.ModelConfig? modelConfig;
}

public interface GenerativeModel {
method public suspend Object? countTokens(com.google.firebase.ai.ondevice.interop.GenerateContentRequest request, kotlin.coroutines.Continuation<? super com.google.firebase.ai.ondevice.interop.CountTokensResponse>);
method public suspend Object? generateContent(com.google.firebase.ai.ondevice.interop.GenerateContentRequest request, kotlin.coroutines.Continuation<? super com.google.firebase.ai.ondevice.interop.GenerateContentResponse>);
Expand All @@ -91,6 +98,24 @@ package com.google.firebase.ai.ondevice.interop {
property public final android.graphics.Bitmap bitmap;
}

public final class ModelConfig {
ctor public ModelConfig(com.google.firebase.ai.ondevice.interop.ModelReleaseStage releaseStage = com.google.firebase.ai.ondevice.interop.ModelReleaseStage.STABLE, com.google.firebase.ai.ondevice.interop.ModelPreference preference = com.google.firebase.ai.ondevice.interop.ModelPreference.FULL);
method public com.google.firebase.ai.ondevice.interop.ModelPreference getPreference();
method public com.google.firebase.ai.ondevice.interop.ModelReleaseStage getReleaseStage();
property public final com.google.firebase.ai.ondevice.interop.ModelPreference preference;
property public final com.google.firebase.ai.ondevice.interop.ModelReleaseStage releaseStage;
}

public enum ModelPreference {
enum_constant public static final com.google.firebase.ai.ondevice.interop.ModelPreference FAST;
enum_constant public static final com.google.firebase.ai.ondevice.interop.ModelPreference FULL;
}

public enum ModelReleaseStage {
enum_constant public static final com.google.firebase.ai.ondevice.interop.ModelReleaseStage PREVIEW;
enum_constant public static final com.google.firebase.ai.ondevice.interop.ModelReleaseStage STABLE;
}

public interface Part {
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,5 +28,19 @@ public interface FirebaseAIOnDeviceGenerativeModelFactory {
*
* @return A new [GenerativeModel] instance ready for use.
*/
@Deprecated(
  message = "Use newGenerativeModel(GenerationConfig?) instead",
  replaceWith = ReplaceWith("newGenerativeModel(null)")
)
public fun newGenerativeModel(): GenerativeModel

/**
 * Creates and returns a new instance of [GenerativeModel], optionally configured with a
 * [GenerationConfig].
 *
 * @param generationConfig The configuration for the model, or `null` if the default configuration
 * should be used.
 * @return A new [GenerativeModel] instance ready for use.
 */
public fun newGenerativeModel(generationConfig: GenerationConfig?): GenerativeModel
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
/*
* Copyright 2026 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.google.firebase.ai.ondevice.interop

/**
 * Configuration options for content generation.
 *
 * @property modelConfig The model selection configuration, or `null` to use the default model
 * configuration.
 */
public class GenerationConfig(public val modelConfig: ModelConfig? = null) {
  override fun toString(): String = "GenerationConfig(modelConfig=$modelConfig)"

  override fun equals(other: Any?): Boolean =
    other is GenerationConfig && modelConfig == other.modelConfig

  override fun hashCode(): Int = modelConfig?.hashCode() ?: 0
}

/**
 * Configuration parameters for model selection.
 *
 * @property releaseStage The release stage of the model to use.
 * @property preference The performance preference for the model.
 */
public class ModelConfig(
  public val releaseStage: ModelReleaseStage = ModelReleaseStage.STABLE,
  public val preference: ModelPreference = ModelPreference.FULL,
) {
  // Value semantics: two configs are equal when both selection fields match.
  override fun equals(other: Any?): Boolean {
    if (other !is ModelConfig) return false
    return releaseStage == other.releaseStage && preference == other.preference
  }

  override fun hashCode(): Int = 31 * releaseStage.hashCode() + preference.hashCode()

  override fun toString(): String = "ModelConfig(releaseStage=$releaseStage, preference=$preference)"
}
/** Identifies which release channel of the model should be selected. */
public enum class ModelReleaseStage {
  /**
   * Picks the most recent model version that has been fully tested and rolled out to consumer
   * devices. This is the default setting.
   */
  STABLE,

  /**
   * Picks the most recent model version still in the preview stage, which allows testing beta
   * features or newer model architectures before they are widely deployed.
   */
  PREVIEW,
}

/** Identifies the speed-versus-capability trade-off for the selected model. */
public enum class ModelPreference {
  /** Best when model accuracy and full capabilities matter more than response speed. */
  FULL,

  /** Best for latency-sensitive apps where minimal response time is the priority. */
  FAST,
}
2 changes: 2 additions & 0 deletions ai-logic/firebase-ai-ondevice/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
# Unreleased

- [feature] Added support for model selection, required for nano-v4. (#8043)

# 16.0.0-beta01

- [feature] Initial release.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ kotlin {

dependencies {
implementation(libs.genai.prompt)
implementation("com.google.firebase:firebase-ai-ondevice-interop:16.0.0-beta01")
implementation(project(":ai-logic:firebase-ai-ondevice-interop"))

implementation(libs.firebase.common)
implementation(libs.firebase.components)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,9 +22,15 @@ import com.google.firebase.ai.ondevice.interop.CountTokensResponse
import com.google.firebase.ai.ondevice.interop.FinishReason
import com.google.firebase.ai.ondevice.interop.FirebaseAIOnDeviceInvalidRequestException
import com.google.firebase.ai.ondevice.interop.GenerateContentResponse
import com.google.firebase.ai.ondevice.interop.GenerationConfig
import com.google.firebase.ai.ondevice.interop.ModelConfig
import com.google.mlkit.genai.prompt.GenerateContentRequest
import com.google.mlkit.genai.prompt.ImagePart
import com.google.mlkit.genai.prompt.ModelPreference
import com.google.mlkit.genai.prompt.ModelReleaseStage
import com.google.mlkit.genai.prompt.TextPart
import com.google.mlkit.genai.prompt.generationConfig
import com.google.mlkit.genai.prompt.modelConfig
import kotlin.math.min

// ====================================
Expand Down Expand Up @@ -75,6 +81,34 @@ internal fun com.google.firebase.ai.ondevice.interop.GenerateContentRequest.toMl
internal fun com.google.mlkit.genai.prompt.GenerateContentResponse.toInterop():
GenerateContentResponse = GenerateContentResponse(candidates.map { it.toInterop() })

// ================================================
// `GenerationConfig` converter extension functions
// ================================================
/**
 * Converts this interop [GenerationConfig] into its ML Kit equivalent. The nested
 * [ModelConfig] is mapped only when present; otherwise the ML Kit builder's defaults apply.
 */
internal fun GenerationConfig.toMlKit(): com.google.mlkit.genai.prompt.GenerationConfig =
generationConfig {
this@toMlKit.modelConfig?.let { modelConfig = it.toMlKit() }
}

// ===========================================
// `ModelConfig` converter extension functions
// ===========================================
/** Converts this interop [ModelConfig] into its ML Kit equivalent, mapping both selection fields. */
internal fun ModelConfig.toMlKit(): com.google.mlkit.genai.prompt.ModelConfig = modelConfig {
releaseStage = this@toMlKit.releaseStage.toMlKit()
preference = this@toMlKit.preference.toMlKit()
}

// Maps the interop release-stage enum onto ML Kit's integer constants. The `when` is
// exhaustive over the interop enum, so adding a new stage forces a compile-time update here.
private fun com.google.firebase.ai.ondevice.interop.ModelReleaseStage.toMlKit(): Int =
when (this) {
com.google.firebase.ai.ondevice.interop.ModelReleaseStage.PREVIEW -> ModelReleaseStage.PREVIEW
com.google.firebase.ai.ondevice.interop.ModelReleaseStage.STABLE -> ModelReleaseStage.STABLE
}

// Maps the interop preference enum onto ML Kit's integer constants; exhaustive `when`
// keeps this converter in sync with the interop enum at compile time.
private fun com.google.firebase.ai.ondevice.interop.ModelPreference.toMlKit(): Int =
when (this) {
com.google.firebase.ai.ondevice.interop.ModelPreference.FULL -> ModelPreference.FULL
com.google.firebase.ai.ondevice.interop.ModelPreference.FAST -> ModelPreference.FAST
}

private fun generateContentRequest(
text: com.google.firebase.ai.ondevice.interop.TextPart,
image: com.google.firebase.ai.ondevice.interop.ImagePart? = null,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,9 @@
package com.google.firebase.ai.ondevice

import com.google.firebase.ai.ondevice.interop.FirebaseAIOnDeviceGenerativeModelFactory
import com.google.firebase.ai.ondevice.interop.GenerationConfig
import com.google.firebase.ai.ondevice.interop.GenerativeModel
import com.google.mlkit.genai.prompt.Generation

/**
* Factory class for Firebase AI OnDevice.
Expand All @@ -26,5 +28,16 @@ import com.google.firebase.ai.ondevice.interop.GenerativeModel
*/
internal class FirebaseAIOnDeviceComponent : FirebaseAIOnDeviceGenerativeModelFactory {

  @Deprecated(
    message = "Use newGenerativeModel(GenerationConfig?) instead",
    replaceWith = ReplaceWith("newGenerativeModel(null)")
  )
  override fun newGenerativeModel(): GenerativeModel = newGenerativeModel(null)

  /**
   * Creates a [GenerativeModel] backed by ML Kit's on-device generation client.
   *
   * @param generationConfig Optional model configuration; when `null` the default ML Kit client
   * is used, otherwise the config is converted and passed through to `Generation.getClient`.
   */
  override fun newGenerativeModel(generationConfig: GenerationConfig?): GenerativeModel =
    if (generationConfig == null) {
      GenerativeModelImpl(Generation.getClient())
    } else {
      GenerativeModelImpl(Generation.getClient(generationConfig.toMlKit()))
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -27,14 +27,13 @@ import com.google.firebase.ai.ondevice.interop.GenerativeModel
import com.google.mlkit.genai.common.FeatureStatus
import com.google.mlkit.genai.common.GenAiException
import com.google.mlkit.genai.common.GenAiException.ErrorCode
import com.google.mlkit.genai.prompt.Generation
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.catch
import kotlinx.coroutines.flow.map

/** Implementation of [GenerativeModel] backed by MLKit's genai prompt SDK. */
internal class GenerativeModelImpl(
internal val mlkitModel: com.google.mlkit.genai.prompt.GenerativeModel = Generation.getClient()
internal val mlkitModel: com.google.mlkit.genai.prompt.GenerativeModel
) : GenerativeModel {

/**
Expand Down
2 changes: 1 addition & 1 deletion gradle/libs.versions.toml
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ firebaseAnnotations = "17.0.0"
firebaseCommon = "22.0.1"
firebaseComponents = "19.0.0"
firebaseCrashlyticsGradle = "3.0.4"
genaiPrompt = "1.0.0-alpha1"
genaiPrompt = "1.0.0-beta2"
glide = "5.0.5"
googleApiClient = "2.8.1"
googleServices = "4.3.15"
Expand Down
Loading