Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
231 changes: 231 additions & 0 deletions examples/tracing/google-gemini/gemini_tracing.ipynb
Original file line number Diff line number Diff line change
@@ -0,0 +1,231 @@
{
"cells": [
{
"cell_type": "markdown",
"id": "2722b419",
"metadata": {},
"source": [
"[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/openlayer-ai/openlayer-python/blob/main/examples/tracing/google-gemini/gemini_tracing.ipynb)\n",
"\n",
"\n",
"# <a id=\"top\">Google Gemini API tracing</a>\n",
"\n",
"This notebook illustrates how to get started tracing Google Gemini API calls with Openlayer."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "020c8f6a",
"metadata": {},
"outputs": [],
"source": [
"%pip install google-generativeai openlayer"
]
},
{
"cell_type": "markdown",
"id": "75c2a473",
"metadata": {},
"source": [
"## 1. Set the environment variables"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "f3f4fa13",
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"\n",
"import google.generativeai as genai\n",
"\n",
"# Gemini API key\n",
"os.environ[\"GOOGLE_API_KEY\"] = \"YOUR_GOOGLE_API_KEY_HERE\"\n",
"\n",
"# Openlayer env variables\n",
"os.environ[\"OPENLAYER_API_KEY\"] = \"YOUR_OPENLAYER_API_KEY_HERE\"\n",
"os.environ[\"OPENLAYER_INFERENCE_PIPELINE_ID\"] = \"YOUR_OPENLAYER_INFERENCE_PIPELINE_ID_HERE\""
]
},
{
"cell_type": "markdown",
"id": "9758533f",
"metadata": {},
"source": [
"## 2. Configure Gemini and create a traced model"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c35d9860-dc41-4f7c-8d69-cc2ac7e5e485",
"metadata": {},
"outputs": [],
"source": [
"from openlayer.lib import trace_gemini\n",
"\n",
"genai.configure(api_key=os.environ[\"GOOGLE_API_KEY\"])\n",
"\n",
"model = genai.GenerativeModel(\"gemini-pro\")\n",
"traced_model = trace_gemini(model)"
]
},
{
"cell_type": "markdown",
"id": "72a6b954",
"metadata": {},
"source": [
"## 3. Use the traced Gemini model normally"
]
},
{
"cell_type": "markdown",
"id": "76a350b4",
"metadata": {},
"source": [
"That's it! Now you can continue using the traced Gemini model normally. The data is automatically published to Openlayer and you can start creating tests around it!"
]
},
{
"cell_type": "markdown",
"id": "fb5ebdad",
"metadata": {},
"source": [
"### 3.1 Non-streaming generation"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e00c1c79",
"metadata": {},
"outputs": [],
"source": [
"response = traced_model.generate_content(\"What is the meaning of life?\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b5e8c9f0",
"metadata": {},
"outputs": [],
"source": [
"response.text"
]
},
{
"cell_type": "markdown",
"id": "09d39983",
"metadata": {},
"source": [
"### 3.2 Streaming generation"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "9a86642c",
"metadata": {},
"outputs": [],
"source": [
"response = traced_model.generate_content(\"Tell me a short story.\", stream=True)\n",
"\n",
"for chunk in response:\n",
"    if hasattr(chunk, 'text'):\n",
"        print(chunk.text, end=\"\")"
]
},
{
"cell_type": "markdown",
"id": "4e6fb396",
"metadata": {},
"source": [
"### 3.3 Multi-turn conversation"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "21369c42",
"metadata": {},
"outputs": [],
"source": [
"chat = traced_model.start_chat(history=[])\n",
"\n",
"response1 = chat.send_message(\"Hello, I'm learning about AI.\")\n",
"response2 = chat.send_message(\"Can you explain neural networks?\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "22369c43",
"metadata": {},
"outputs": [],
"source": [
"response2.text"
]
},
{
"cell_type": "markdown",
"id": "5e6fb397",
"metadata": {},
"source": [
"### 3.4 With generation configuration"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "31369c44",
"metadata": {},
"outputs": [],
"source": [
"response = traced_model.generate_content(\n",
" \"Write a haiku about technology.\",\n",
" generation_config=genai.types.GenerationConfig(\n",
" temperature=0.7,\n",
" top_p=0.9,\n",
" top_k=40,\n",
" max_output_tokens=100,\n",
" ),\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "41369c45",
"metadata": {},
"outputs": [],
"source": [
"response.text"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.18"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
19 changes: 19 additions & 0 deletions src/openlayer/lib/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
"trace_litellm",
"trace_google_adk",
"unpatch_google_adk",
"trace_gemini",
"update_current_trace",
"update_current_step",
# Offline buffer management functions
Expand Down Expand Up @@ -235,3 +236,21 @@ def unpatch_google_adk():
from .integrations import google_adk_tracer

return google_adk_tracer.unpatch_google_adk()


# -------------------------------- Google Gemini --------------------------------- #
def trace_gemini(client):
    """Trace Google Gemini chat completions.

    Wraps a ``google.generativeai.GenerativeModel`` so its calls are traced
    by Openlayer via :mod:`.integrations.gemini_tracer`.

    Args:
        client: A ``google.generativeai.GenerativeModel`` instance to trace.

    Returns:
        The traced model produced by ``gemini_tracer.trace_gemini``.

    Raises:
        ImportError: If the ``google-generativeai`` package is not installed.
        ValueError: If ``client`` is not a ``GenerativeModel`` instance.
    """
    # pylint: disable=import-outside-toplevel
    try:
        import google.generativeai as genai
    except ImportError as exc:
        # Chain the original exception so users see the underlying import failure.
        raise ImportError(
            "google-generativeai is required for Gemini tracing. Install with: pip install google-generativeai"
        ) from exc

    from .integrations import gemini_tracer

    if not isinstance(client, genai.GenerativeModel):
        raise ValueError("Invalid client. Please provide a google.generativeai.GenerativeModel instance.")
    return gemini_tracer.trace_gemini(client)
7 changes: 7 additions & 0 deletions src/openlayer/lib/integrations/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,3 +31,10 @@
__all__.extend(["trace_google_adk", "unpatch_google_adk"])
except ImportError:
pass

# Google Gemini tracing is an optional integration: expose it only when the
# google-generativeai dependency (imported inside gemini_tracer) is available.
try:
    from .gemini_tracer import trace_gemini

    __all__.append("trace_gemini")
except ImportError:
    pass
Loading