diff --git a/.github/workflows/check-notebookes.yaml b/.github/workflows/check-notebooks.yaml similarity index 72% rename from .github/workflows/check-notebookes.yaml rename to .github/workflows/check-notebooks.yaml index fa424e6..6641df6 100644 --- a/.github/workflows/check-notebookes.yaml +++ b/.github/workflows/check-notebooks.yaml @@ -6,13 +6,17 @@ on: - main paths: - 'notebooks/**' - - '.github/workflows/check-notebookes.yaml' + - 'scripts/**' + - 'snippets/quickstart/**' + - '.github/workflows/check-notebooks.yaml' pull_request: branches: - main paths: - 'notebooks/**' - - '.github/workflows/check-notebookes.yaml' + - 'scripts/**' + - 'snippets/quickstart/**' + - '.github/workflows/check-notebooks.yaml' workflow_dispatch: jobs: @@ -51,6 +55,13 @@ jobs: uv run nbqa mypy . echo "✅ Mypy type checks passed!" + - name: Check snippet freshness + working-directory: . + run: | + echo "🔍 Checking that generated snippets match notebook source..." + python3 scripts/generate_snippets.py --check + echo "✅ Snippets are up to date!" 
+ - name: Summary if: success() run: | diff --git a/docs.json b/docs.json index 1376dda..1970f90 100644 --- a/docs.json +++ b/docs.json @@ -36,7 +36,7 @@ "logo": { "light": "/logo/light.svg", "dark": "/logo/dark.svg", - "href": "https://liquid.ai" + "href": "/docs/getting-started/welcome" }, "navbar": { "links": [ diff --git a/docs/fine-tuning/trl.mdx b/docs/fine-tuning/trl.mdx index 243218b..c63ac28 100644 --- a/docs/fine-tuning/trl.mdx +++ b/docs/fine-tuning/trl.mdx @@ -385,23 +385,12 @@ trainer.train() ``` -## Other Training Methods[​](#other-training-methods "Direct link to Other Training Methods") - -TRL also provides additional trainers that work seamlessly with LFM models: - -* **RewardTrainer**: Train reward models for RLHF -* **PPOTrainer**: Proximal Policy Optimization for reinforcement learning from human feedback -* **ORPOTrainer**: Odds Ratio Preference Optimization, an alternative to DPO -* **KTOTrainer**: Kahneman-Tversky Optimization for alignment - -Refer to the [TRL documentation](https://huggingface.co/docs/trl) for detailed guides on these methods. - ## Tips[​](#tips "Direct link to Tips") * **Learning Rates**: SFT typically uses higher learning rates (1e-5 to 5e-5) than DPO (1e-7 to 1e-6) * **Batch Size**: DPO requires larger effective batch sizes; increase `gradient_accumulation_steps` if GPU memory is limited -* **LoRA Ranks**: Start with `r=16` for experimentation; increase to `r=64` or higher for better quality -* **DPO Beta**: The `beta` parameter controls the deviation from the reference model; typical values range from 0.1 to 0.5 +* **LoRA Ranks**: Start with `r=16`. Higher ranks increase adapter memory and parameter count. Set `lora_alpha` (`a`) to `2 * r` +* **DPO Beta**: The `beta` parameter controls the deviation from the reference model. 
Start with `0.1` *** diff --git a/docs/getting-started/quickstart.mdx b/docs/getting-started/quickstart.mdx deleted file mode 100644 index 8cffe1d..0000000 --- a/docs/getting-started/quickstart.mdx +++ /dev/null @@ -1,46 +0,0 @@ ---- -title: "From zero to inference in less than 5 minutes" -description: "Select your use case and deployment platform to receive ready-to-run instructions for the LFM model you need." ---- - -## 🚀 Step 1. Choose your use case - -Get personalized code snippets for your specific model and deployment platform. - -💬 - -### Chat Completions - -Conversational AI and text generation for chatbots and assistants - -👁️ - -### Vision Understanding - -Analyze images, describe visual content, and answer questions about pictures - -🎵 - -### Audio & Transcription - -Process audio, transcribe speech, and audio-based conversations - -💻 - -### Code Generation - -Generate, debug, and explain code across multiple programming languages - -🔍 - -### Text Embeddings - -Generate vector representations of text for search and similarity tasks - -🛠️ - -### Function Calling & Agents - -Build agentic workflows with structured outputs and tool integration - -[Edit this page](https://github.com/Liquid4All/docs/tree/main/lfm/getting-started/quickstart.md) diff --git a/docs/models/complete-library.mdx b/docs/models/complete-library.mdx index c68325f..a78bd7e 100644 --- a/docs/models/complete-library.mdx +++ b/docs/models/complete-library.mdx @@ -49,9 +49,9 @@ All LFM2 models are available in multiple formats for flexible deployment: Quantization reduces model size and speeds up inference with minimal quality loss. Available options by format: -- **GGUF** — Supports `Q2_K`, `Q3_K_M`, `Q4_K_M`, `Q5_K_M`, `Q6_K`, and `Q8_0` quantization levels. `Q4_K_M` offers the best balance of size and quality. `Q8_0` preserves near-full precision. -- **MLX** — Available in `4bit` and `8bit` variants. `8bit` is the default for most models. 
-- **ONNX** — Supports `FP16` and `INT8` quantization. `INT8` is best for CPU inference; `FP16` for GPU acceleration. +- **GGUF** — Supports `Q4_0`, `Q4_K_M`, `Q5_K_M`, `Q6_K`, `Q8_0`, `BF16`, and `F16`. `Q4_K_M` offers the best balance of size and quality. +- **MLX** — Available in `3bit`, `4bit`, `5bit`, `6bit`, `8bit`, and `BF16`. `8bit` is recommended. +- **ONNX** — Supports `FP32`, `FP16`, `Q4`, and `Q8` (MoE models also support `Q4F16`). `Q4` is recommended for most deployments. ## Model Chart diff --git a/notebooks/pyproject.toml b/notebooks/pyproject.toml index be83f3a..c8e6a70 100644 --- a/notebooks/pyproject.toml +++ b/notebooks/pyproject.toml @@ -12,10 +12,6 @@ dev = [ "mypy>=1.14.1", ] -[tool.ruff] -# Exclude certain directories -extend-exclude = ["runnable-examples"] - [tool.ruff.lint] # Select rules to check - focus on correctness, not style select = [ diff --git a/notebooks/quickstart_snippets.ipynb b/notebooks/quickstart_snippets.ipynb new file mode 100644 index 0000000..559e5b9 --- /dev/null +++ b/notebooks/quickstart_snippets.ipynb @@ -0,0 +1,179 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Quickstart Snippet Sources\n", + "\n", + "This notebook is the **source of truth** for the Python code in `snippets/quickstart/*.mdx`.\n", + "\n", + "Each code cell is tagged with `\"snippet\": \"<name>\"` in its cell metadata.\n", + "The generation script (`scripts/generate_snippets.py`) reads these cells, replaces\n", + "default model names with template variables, and generates the MDX snippet files.\n", + "\n", + "**Do not edit the MDX files directly.** Edit the code cells here, then run:\n", + "```bash\n", + "python3 scripts/generate_snippets.py\n", + "```" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Text Model Snippets" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "snippet": "text-transformers" + }, + "outputs": [], + "source": [ + "from transformers 
import AutoModelForCausalLM, AutoTokenizer\n", + "\n", + "model_id = \"LiquidAI/LFM2.5-1.2B-Instruct\"\n", + "model = AutoModelForCausalLM.from_pretrained(\n", + "    model_id,\n", + "    device_map=\"auto\",\n", + "    dtype=\"bfloat16\",\n", + ")\n", + "tokenizer = AutoTokenizer.from_pretrained(model_id)\n", + "\n", + "input_ids = tokenizer.apply_chat_template(\n", + "    [{\"role\": \"user\", \"content\": \"What is machine learning?\"}],\n", + "    add_generation_prompt=True,\n", + "    return_tensors=\"pt\",\n", + "    tokenize=True,\n", + ").to(model.device)\n", + "\n", + "output = model.generate(input_ids, max_new_tokens=512)\n", + "response = tokenizer.decode(output[0][len(input_ids[0]):], skip_special_tokens=True)\n", + "print(response)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "snippet": "text-vllm" + }, + "outputs": [], + "source": [ + "from vllm import LLM, SamplingParams\n", + "\n", + "llm = LLM(model=\"LiquidAI/LFM2.5-1.2B-Instruct\")\n", + "\n", + "sampling_params = SamplingParams(\n", + "    temperature=0.3,\n", + "    min_p=0.15,\n", + "    repetition_penalty=1.05,\n", + "    max_tokens=512,\n", + ")\n", + "\n", + "output = llm.chat([{\"role\": \"user\", \"content\": \"What is machine learning?\"}], sampling_params)\n", + "print(output[0].outputs[0].text)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Vision Model Snippets" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "snippet": "vl-transformers" + }, + "outputs": [], + "source": [ + "from transformers import AutoProcessor, AutoModelForImageTextToText\n", + "from transformers.image_utils import load_image\n", + "\n", + "model_id = \"LiquidAI/LFM2.5-VL-1.6B\"\n", + "model = AutoModelForImageTextToText.from_pretrained(\n", + "    model_id,\n", + "    device_map=\"auto\",\n", + "    dtype=\"bfloat16\",\n", + ")\n", + "processor = AutoProcessor.from_pretrained(model_id)\n", + "\n", + "url = 
\"https://cdn.britannica.com/61/93061-050-99147DCE/Statue-of-Liberty-Island-New-York-Bay.jpg\"\n", + "image = load_image(url)\n", + "\n", + "conversation = [\n", + " {\n", + " \"role\": \"user\",\n", + " \"content\": [\n", + " {\"type\": \"image\", \"image\": image},\n", + " {\"type\": \"text\", \"text\": \"What is in this image?\"},\n", + " ],\n", + " },\n", + "]\n", + "\n", + "inputs = processor.apply_chat_template(\n", + " conversation,\n", + " add_generation_prompt=True,\n", + " return_tensors=\"pt\",\n", + " return_dict=True,\n", + " tokenize=True,\n", + ").to(model.device)\n", + "\n", + "outputs = model.generate(**inputs, max_new_tokens=256)\n", + "response = processor.batch_decode(outputs, skip_special_tokens=True)[0]\n", + "print(response)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "snippet": "vl-vllm" + }, + "outputs": [], + "source": [ + "from vllm import LLM, SamplingParams\n", + "\n", + "IMAGE_URL = \"http://images.cocodataset.org/val2017/000000039769.jpg\"\n", + "\n", + "llm = LLM(\n", + " model=\"LiquidAI/LFM2.5-VL-1.6B\",\n", + " max_model_len=1024,\n", + ")\n", + "\n", + "sampling_params = SamplingParams(\n", + " temperature=0.0,\n", + " max_tokens=256,\n", + ")\n", + "\n", + "messages = [{\n", + " \"role\": \"user\",\n", + " \"content\": [\n", + " {\"type\": \"image_url\", \"image_url\": {\"url\": IMAGE_URL}},\n", + " {\"type\": \"text\", \"text\": \"Describe what you see in this image.\"},\n", + " ],\n", + "}]\n", + "\n", + "outputs = llm.chat(messages, sampling_params)\n", + "print(outputs[0].outputs[0].text)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + }, + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/quickstarts/LFM2-1.2B__ollama.md b/quickstarts/LFM2-1.2B__ollama.md deleted file mode 100644 index 6d3bd01..0000000 --- a/quickstarts/LFM2-1.2B__ollama.md +++ /dev/null @@ 
-1,77 +0,0 @@ -# LiquidAI/LFM2-2.6B with Ollama - -Perfect for fast local development using an OpenAI API compatible server. -Ollama is not intended for production-ready deployments. - -## Install Ollama - -Go to [ollama.com/download](https://ollama.com/download) and follow the installation instructions -for your operating system. MacOS, Linux and Windows are supported. - -## Pull the model checkpoint from Hugging Face and start the server - -```shell -ollama run hf.co/LiquidAI/LFM2-2.6B-GGUF -``` - -After running this command for the first time, the model weights will be cached in your local drive. When you run the command for the second time, Ollama will directly load the model weights from disk into memory without trigger a re-download from Hugging Face. - -You can check the list of models weights available in your cache with -```shell -ollama list -``` - -## Request chat completions - -Install the OpenAI Python SDK -```shell -pip install openai -``` - -Generate chat completion with the model, either streaming or non-streaming the response. -```python -def generate_chat_completion_with_ollama( - model_name: str = 'hf.co/LiquidAI/LFM2-2.6B-GGUF', - stream: bool = False, -): - from openai import OpenAI - - # Point to local Ollama server - client = OpenAI( - base_url='http://localhost:11434/v1', - api_key='ollama', # required but unused - ) - - response = client.chat.completions.create( - model=model_name, # model name is ignored by Ollama - messages=[ - { - 'role': 'user', - 'content': 'Why is C.Elegans?' - } - ], - stream=stream, - ) - - if stream: - for chunk in response: - if chunk.choices[0].delta.content: - print(chunk.choices[0].delta.content, end='', flush=True) - print() - else: - print(response.choices[0].message.content) - -# Print the full completion at once - useful for offline jobs and applications that do not -# require instant feedback to the user. 
-generate_chat_completion_with_ollama( - model_name='hf.co/LiquidAI/LFM2-2.6B-GGUF', - stream=False, -) - -# Stream tokens to the console as they are produced - useful for user-facing applications -# that need to provide feedback to the user in real-time. -generate_chat_completion_with_ollama( - model_name='hf.co/LiquidAI/LFM2-2.6B-GGUF', - stream=True, -) -``` diff --git a/quickstarts/LFM2-1.2B__transformers.md b/quickstarts/LFM2-1.2B__transformers.md deleted file mode 100644 index 80dfa77..0000000 --- a/quickstarts/LFM2-1.2B__transformers.md +++ /dev/null @@ -1,60 +0,0 @@ -# LiquidAI/LFM2-1.2B with Transformers - -Perfect for research, prototyping, and quick experimentation in Jupyter notebooks. - -
- - Open In Colab - -
- -## Install Python dependencies - -```shell -pip install transformers -# if you are using uv do -# uv pip install transformers -``` - -## Run inference - -```python -from transformers import AutoModelForCausalLM, AutoTokenizer - -# Load model and tokenizer -model_id = "LiquidAI/LFM2-1.2B" -model = AutoModelForCausalLM.from_pretrained( - model_id, - device_map="auto", - torch_dtype="bfloat16", -# attn_implementation="flash_attention_2" <- uncomment on compatible GPU -) -tokenizer = AutoTokenizer.from_pretrained(model_id) - -# Generate answer -prompt = "What is C. elegans?" -input_ids = tokenizer.apply_chat_template( - [{"role": "user", "content": prompt}], - add_generation_prompt=True, - return_tensors="pt", - tokenize=True, -).to(model.device) - -output = model.generate( - input_ids, - do_sample=True, - temperature=0.3, - min_p=0.15, - repetition_penalty=1.05, - max_new_tokens=512, -) - -print(tokenizer.decode(output[0], skip_special_tokens=False)) - -# <|startoftext|><|im_start|>user -# What is C. elegans?<|im_end|> -# <|im_start|>assistant -# C. elegans, also known as Caenorhabditis elegans, is a small, free-living -# nematode worm (roundworm) that belongs to the phylum Nematoda. - -``` diff --git a/quickstarts/LFM2-2.6B__ollama.md b/quickstarts/LFM2-2.6B__ollama.md deleted file mode 100644 index 6d3bd01..0000000 --- a/quickstarts/LFM2-2.6B__ollama.md +++ /dev/null @@ -1,77 +0,0 @@ -# LiquidAI/LFM2-2.6B with Ollama - -Perfect for fast local development using an OpenAI API compatible server. -Ollama is not intended for production-ready deployments. - -## Install Ollama - -Go to [ollama.com/download](https://ollama.com/download) and follow the installation instructions -for your operating system. MacOS, Linux and Windows are supported. 
- -## Pull the model checkpoint from Hugging Face and start the server - -```shell -ollama run hf.co/LiquidAI/LFM2-2.6B-GGUF -``` - -After running this command for the first time, the model weights will be cached in your local drive. When you run the command for the second time, Ollama will directly load the model weights from disk into memory without trigger a re-download from Hugging Face. - -You can check the list of models weights available in your cache with -```shell -ollama list -``` - -## Request chat completions - -Install the OpenAI Python SDK -```shell -pip install openai -``` - -Generate chat completion with the model, either streaming or non-streaming the response. -```python -def generate_chat_completion_with_ollama( - model_name: str = 'hf.co/LiquidAI/LFM2-2.6B-GGUF', - stream: bool = False, -): - from openai import OpenAI - - # Point to local Ollama server - client = OpenAI( - base_url='http://localhost:11434/v1', - api_key='ollama', # required but unused - ) - - response = client.chat.completions.create( - model=model_name, # model name is ignored by Ollama - messages=[ - { - 'role': 'user', - 'content': 'Why is C.Elegans?' - } - ], - stream=stream, - ) - - if stream: - for chunk in response: - if chunk.choices[0].delta.content: - print(chunk.choices[0].delta.content, end='', flush=True) - print() - else: - print(response.choices[0].message.content) - -# Print the full completion at once - useful for offline jobs and applications that do not -# require instant feedback to the user. -generate_chat_completion_with_ollama( - model_name='hf.co/LiquidAI/LFM2-2.6B-GGUF', - stream=False, -) - -# Stream tokens to the console as they are produced - useful for user-facing applications -# that need to provide feedback to the user in real-time. 
-generate_chat_completion_with_ollama( - model_name='hf.co/LiquidAI/LFM2-2.6B-GGUF', - stream=True, -) -``` diff --git a/quickstarts/LFM2-2.6B__transformers.md b/quickstarts/LFM2-2.6B__transformers.md deleted file mode 100644 index e05b33b..0000000 --- a/quickstarts/LFM2-2.6B__transformers.md +++ /dev/null @@ -1,60 +0,0 @@ -# LiquidAI/LFM2-2.6B with Transformers - -Perfect for research, prototyping, and quick experimentation in Jupyter notebooks. - -
- - Open In Colab - -
- -## Install Python dependencies - -```shell -pip install transformers -# if you are using uv do -# uv pip install transformers -``` - -## Run inference - -```python -from transformers import AutoModelForCausalLM, AutoTokenizer - -# Load model and tokenizer -model_id = "LiquidAI/LFM2-2.6B" -model = AutoModelForCausalLM.from_pretrained( - model_id, - device_map="auto", - torch_dtype="bfloat16", -# attn_implementation="flash_attention_2" <- uncomment on compatible GPU -) -tokenizer = AutoTokenizer.from_pretrained(model_id) - -# Generate answer -prompt = "What is C. elegans?" -input_ids = tokenizer.apply_chat_template( - [{"role": "user", "content": prompt}], - add_generation_prompt=True, - return_tensors="pt", - tokenize=True, -).to(model.device) - -output = model.generate( - input_ids, - do_sample=True, - temperature=0.3, - min_p=0.15, - repetition_penalty=1.05, - max_new_tokens=512, -) - -print(tokenizer.decode(output[0], skip_special_tokens=False)) - -# <|startoftext|><|im_start|>user -# What is C. elegans?<|im_end|> -# <|im_start|>assistant -# C. elegans, also known as Caenorhabditis elegans, is a small, free-living -# nematode worm (roundworm) that belongs to the phylum Nematoda. - -``` diff --git a/quickstarts/LFM2-350M__transformers.md b/quickstarts/LFM2-350M__transformers.md deleted file mode 100644 index 26268af..0000000 --- a/quickstarts/LFM2-350M__transformers.md +++ /dev/null @@ -1,60 +0,0 @@ -# LiquidAI/LFM2-350M with Transformers - -Perfect for research, prototyping, and quick experimentation in Jupyter notebooks. - -
- - Open In Colab - -
- -## Install Python dependencies - -```shell -pip install transformers -# if you are using uv do -# uv pip install transformers -``` - -## Run inference - -```python -from transformers import AutoModelForCausalLM, AutoTokenizer - -# Load model and tokenizer -model_id = "LiquidAI/LFM2-350M" -model = AutoModelForCausalLM.from_pretrained( - model_id, - device_map="auto", - torch_dtype="bfloat16", -# attn_implementation="flash_attention_2" <- uncomment on compatible GPU -) -tokenizer = AutoTokenizer.from_pretrained(model_id) - -# Generate answer -prompt = "What is C. elegans?" -input_ids = tokenizer.apply_chat_template( - [{"role": "user", "content": prompt}], - add_generation_prompt=True, - return_tensors="pt", - tokenize=True, -).to(model.device) - -output = model.generate( - input_ids, - do_sample=True, - temperature=0.3, - min_p=0.15, - repetition_penalty=1.05, - max_new_tokens=512, -) - -print(tokenizer.decode(output[0], skip_special_tokens=False)) - -# <|startoftext|><|im_start|>user -# What is C. elegans?<|im_end|> -# <|im_start|>assistant -# C. elegans, also known as Caenorhabditis elegans, is a small, free-living -# nematode worm (roundworm) that belongs to the phylum Nematoda. - -``` diff --git a/quickstarts/LFM2-700M__transformers.md b/quickstarts/LFM2-700M__transformers.md deleted file mode 100644 index a2e43da..0000000 --- a/quickstarts/LFM2-700M__transformers.md +++ /dev/null @@ -1,60 +0,0 @@ -# LiquidAI/LFM2-700M with Transformers - -Perfect for research, prototyping, and quick experimentation in Jupyter notebooks. - -
- - Open In Colab - -
- -## Install Python dependencies - -```shell -pip install transformers -# if you are using uv do -# uv pip install transformers -``` - -## Run inference - -```python -from transformers import AutoModelForCausalLM, AutoTokenizer - -# Load model and tokenizer -model_id = "LiquidAI/LFM2-700M" -model = AutoModelForCausalLM.from_pretrained( - model_id, - device_map="auto", - torch_dtype="bfloat16", -# attn_implementation="flash_attention_2" <- uncomment on compatible GPU -) -tokenizer = AutoTokenizer.from_pretrained(model_id) - -# Generate answer -prompt = "What is C. elegans?" -input_ids = tokenizer.apply_chat_template( - [{"role": "user", "content": prompt}], - add_generation_prompt=True, - return_tensors="pt", - tokenize=True, -).to(model.device) - -output = model.generate( - input_ids, - do_sample=True, - temperature=0.3, - min_p=0.15, - repetition_penalty=1.05, - max_new_tokens=512, -) - -print(tokenizer.decode(output[0], skip_special_tokens=False)) - -# <|startoftext|><|im_start|>user -# What is C. elegans?<|im_end|> -# <|im_start|>assistant -# C. elegans, also known as Caenorhabditis elegans, is a small, free-living -# nematode worm (roundworm) that belongs to the phylum Nematoda. - -``` diff --git a/quickstarts/LFM2-8B-A1B__ollama.md b/quickstarts/LFM2-8B-A1B__ollama.md deleted file mode 100644 index 1273d66..0000000 --- a/quickstarts/LFM2-8B-A1B__ollama.md +++ /dev/null @@ -1,77 +0,0 @@ -# LiquidAI/LFM2-8B-A1B with Ollama - -Perfect for fast local development using an OpenAI API compatible server. -Ollama is not intended for production-ready deployments. - -## Install Ollama - -Go to [ollama.com/download](https://ollama.com/download) and follow the installation instructions -for your operating system. MacOS, Linux and Windows are supported. 
- -## Pull the model checkpoint from Hugging Face and start the server - -```shell -ollama run hf.co/LiquidAI/LFM2-8B-A1B-GGUF -``` - -After running this command for the first time, the model weights will be cached in your local drive. When you run the command for the second time, Ollama will directly load the model weights from disk into memory without trigger a re-download from Hugging Face. - -You can check the list of models weights available in your cache with -```shell -ollama list -``` - -## Request chat completions - -Install the OpenAI Python SDK -```shell -pip install openai -``` - -Generate chat completion with the model, either streaming or non-streaming the response. -```python -def generate_chat_completion_with_ollama( - model_name: str = 'hf.co/LiquidAI/LFM2-8B-A1B-GGUF', - stream: bool = False, -): - from openai import OpenAI - - # Point to local Ollama server - client = OpenAI( - base_url='http://localhost:11434/v1', - api_key='ollama', # required but unused - ) - - response = client.chat.completions.create( - model=model_name, # model name is ignored by Ollama - messages=[ - { - 'role': 'user', - 'content': 'Why is C.Elegans?' - } - ], - stream=stream, - ) - - if stream: - for chunk in response: - if chunk.choices[0].delta.content: - print(chunk.choices[0].delta.content, end='', flush=True) - print() - else: - print(response.choices[0].message.content) - -# Print the full completion at once - useful for offline jobs and applications that do not -# require instant feedback to the user. -generate_chat_completion_with_ollama( - model_name='hf.co/LiquidAI/LFM2-8B-A1B-GGUF', - stream=False, -) - -# Stream tokens to the console as they are produced - useful for user-facing applications -# that need to provide feedback to the user in real-time. 
-generate_chat_completion_with_ollama( - model_name='hf.co/LiquidAI/LFM2-8B-A1B-GGUF', - stream=True, -) -``` diff --git a/quickstarts/LFM2-8B-A1B__transformers.md b/quickstarts/LFM2-8B-A1B__transformers.md deleted file mode 100644 index b891bd5..0000000 --- a/quickstarts/LFM2-8B-A1B__transformers.md +++ /dev/null @@ -1,63 +0,0 @@ -# LiquidAI/LFM2-8B-A1B with Transformers - -Perfect for research, prototyping, and quick experimentation in Jupyter notebooks. - -
- - Open In Colab - -
- -## Install Python dependencies - -```shell -pip install "transformers>=5.0.0" bitsandbytes -``` - -> **Note:** Transformers v5 is newly released. If you encounter issues, fall back to the pinned git source: -> ```shell -> pip install git+https://github.com/huggingface/transformers.git@0c9a72e4576fe4c84077f066e585129c97bfd4e6 bitsandbytes -> ``` - -## Run inference - -```python -from transformers import AutoModelForCausalLM, AutoTokenizer - -# Load model and tokenizer -model_id = "LiquidAI/LFM2-8B-A1B" -model = AutoModelForCausalLM.from_pretrained( - model_id, - device_map="auto", - dtype="bfloat16", - load_in_8bit=True, -# attn_implementation="flash_attention_2" <- uncomment on compatible GPU -) -tokenizer = AutoTokenizer.from_pretrained(model_id) - -# Generate answer -prompt = "What is C. elegans?" -input_ids = tokenizer.apply_chat_template( - [{"role": "user", "content": prompt}], - add_generation_prompt=True, - return_tensors="pt", - tokenize=True, -).to(model.device) - -output = model.generate( - input_ids, - do_sample=True, - temperature=0.3, - min_p=0.15, - repetition_penalty=1.05, - max_new_tokens=512, -) - -print(tokenizer.decode(output[0], skip_special_tokens=False)) - -# <|startoftext|><|im_start|>user -# What is C. elegans?<|im_end|> -# <|im_start|>assistant -# C. elegans, also known as Caenorhabditis elegans, is a small, free-living -# nematode worm (roundworm) that belongs to the phylum Nematoda. 
-``` diff --git a/runnable-examples/.python-version b/runnable-examples/.python-version deleted file mode 100644 index e4fba21..0000000 --- a/runnable-examples/.python-version +++ /dev/null @@ -1 +0,0 @@ -3.12 diff --git a/runnable-examples/README.md b/runnable-examples/README.md deleted file mode 100644 index e69de29..0000000 diff --git a/runnable-examples/main.py b/runnable-examples/main.py deleted file mode 100644 index 151a599..0000000 --- a/runnable-examples/main.py +++ /dev/null @@ -1,46 +0,0 @@ -def generate_chat_completion_with_ollama( - model_name: str = 'hf.co/LiquidAI/LFM2-2.6B-GGUF', - stream: bool = False, -): - print("Hello from runnable-examples!") - - from openai import OpenAI - - # Point to local Ollama server - client = OpenAI( - base_url='http://localhost:11434/v1', - api_key='ollama', # required but unused - ) - - response = client.chat.completions.create( - model=model_name, # model name is ignored by Ollama - messages=[ - { - 'role': 'user', - 'content': 'Why is C.Elegans?' 
- } - ], - stream=stream, - ) - - if stream: - for chunk in response: - if chunk.choices[0].delta.content: - print(chunk.choices[0].delta.content, end='', flush=True) - print() - else: - print(response.choices[0].message.content) - - - -if __name__ == "__main__": - generate_chat_completion_with_ollama( - model_name='hf.co/LiquidAI/LFM2-2.6B-GGUF', - stream=False, - ) - - generate_chat_completion_with_ollama( - model_name='hf.co/LiquidAI/LFM2-2.6B-GGUF', - stream=True, - ) - diff --git a/runnable-examples/pyproject.toml b/runnable-examples/pyproject.toml deleted file mode 100644 index c904f2e..0000000 --- a/runnable-examples/pyproject.toml +++ /dev/null @@ -1,10 +0,0 @@ -[project] -name = "runnable-examples" -version = "0.1.0" -description = "Add your description here" -readme = "README.md" -requires-python = ">=3.12" -dependencies = [ - "ollama>=0.6.1", - "openai>=2.11.0", -] diff --git a/runnable-examples/uv.lock b/runnable-examples/uv.lock deleted file mode 100644 index 59fec59..0000000 --- a/runnable-examples/uv.lock +++ /dev/null @@ -1,341 +0,0 @@ -version = 1 -revision = 3 -requires-python = ">=3.12" - -[[package]] -name = "annotated-types" -version = "0.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, -] - -[[package]] -name = "anyio" -version = "4.12.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "idna" }, - { name = 
"typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/16/ce/8a777047513153587e5434fd752e89334ac33e379aa3497db860eeb60377/anyio-4.12.0.tar.gz", hash = "sha256:73c693b567b0c55130c104d0b43a9baf3aa6a31fc6110116509f27bf75e21ec0", size = 228266, upload-time = "2025-11-28T23:37:38.911Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7f/9c/36c5c37947ebfb8c7f22e0eb6e4d188ee2d53aa3880f3f2744fb894f0cb1/anyio-4.12.0-py3-none-any.whl", hash = "sha256:dad2376a628f98eeca4881fc56cd06affd18f659b17a747d3ff0307ced94b1bb", size = 113362, upload-time = "2025-11-28T23:36:57.897Z" }, -] - -[[package]] -name = "certifi" -version = "2025.11.12" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" }, -] - -[[package]] -name = "colorama" -version = "0.4.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = 
"sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, -] - -[[package]] -name = "distro" -version = "1.9.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, -] - -[[package]] -name = "h11" -version = "0.16.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, -] - -[[package]] -name = "httpcore" -version = "1.0.9" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "h11" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, -] - -[[package]] -name = "httpx" -version = "0.28.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "certifi" }, - { name = "httpcore" }, - { name = "idna" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, -] - -[[package]] -name = "idna" -version = "3.11" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, -] - -[[package]] -name = "jiter" -version = "0.12.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/45/9d/e0660989c1370e25848bb4c52d061c71837239738ad937e83edca174c273/jiter-0.12.0.tar.gz", hash = "sha256:64dfcd7d5c168b38d3f9f8bba7fc639edb3418abcc74f22fdbe6b8938293f30b", size = 168294, upload-time = "2025-11-09T20:49:23.302Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/92/c9/5b9f7b4983f1b542c64e84165075335e8a236fa9e2ea03a0c79780062be8/jiter-0.12.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:305e061fa82f4680607a775b2e8e0bcb071cd2205ac38e6ef48c8dd5ebe1cf37", size = 314449, upload-time = "2025-11-09T20:47:22.999Z" }, - { url = "https://files.pythonhosted.org/packages/98/6e/e8efa0e78de00db0aee82c0cf9e8b3f2027efd7f8a71f859d8f4be8e98ef/jiter-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5c1860627048e302a528333c9307c818c547f214d8659b0705d2195e1a94b274", size = 319855, upload-time = "2025-11-09T20:47:24.779Z" }, - { url = "https://files.pythonhosted.org/packages/20/26/894cd88e60b5d58af53bec5c6759d1292bd0b37a8b5f60f07abf7a63ae5f/jiter-0.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df37577a4f8408f7e0ec3205d2a8f87672af8f17008358063a4d6425b6081ce3", size = 350171, upload-time = "2025-11-09T20:47:26.469Z" }, - { url = "https://files.pythonhosted.org/packages/f5/27/a7b818b9979ac31b3763d25f3653ec3a954044d5e9f5d87f2f247d679fd1/jiter-0.12.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:75fdd787356c1c13a4f40b43c2156276ef7a71eb487d98472476476d803fb2cf", size = 365590, upload-time = "2025-11-09T20:47:27.918Z" }, - { url = "https://files.pythonhosted.org/packages/ba/7e/e46195801a97673a83746170b17984aa8ac4a455746354516d02ca5541b4/jiter-0.12.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1eb5db8d9c65b112aacf14fcd0faae9913d07a8afea5ed06ccdd12b724e966a1", size = 479462, upload-time = "2025-11-09T20:47:29.654Z" }, - { url = 
"https://files.pythonhosted.org/packages/ca/75/f833bfb009ab4bd11b1c9406d333e3b4357709ed0570bb48c7c06d78c7dd/jiter-0.12.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73c568cc27c473f82480abc15d1301adf333a7ea4f2e813d6a2c7d8b6ba8d0df", size = 378983, upload-time = "2025-11-09T20:47:31.026Z" }, - { url = "https://files.pythonhosted.org/packages/71/b3/7a69d77943cc837d30165643db753471aff5df39692d598da880a6e51c24/jiter-0.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4321e8a3d868919bcb1abb1db550d41f2b5b326f72df29e53b2df8b006eb9403", size = 361328, upload-time = "2025-11-09T20:47:33.286Z" }, - { url = "https://files.pythonhosted.org/packages/b0/ac/a78f90caf48d65ba70d8c6efc6f23150bc39dc3389d65bbec2a95c7bc628/jiter-0.12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a51bad79f8cc9cac2b4b705039f814049142e0050f30d91695a2d9a6611f126", size = 386740, upload-time = "2025-11-09T20:47:34.703Z" }, - { url = "https://files.pythonhosted.org/packages/39/b6/5d31c2cc8e1b6a6bcf3c5721e4ca0a3633d1ab4754b09bc7084f6c4f5327/jiter-0.12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2a67b678f6a5f1dd6c36d642d7db83e456bc8b104788262aaefc11a22339f5a9", size = 520875, upload-time = "2025-11-09T20:47:36.058Z" }, - { url = "https://files.pythonhosted.org/packages/30/b5/4df540fae4e9f68c54b8dab004bd8c943a752f0b00efd6e7d64aa3850339/jiter-0.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efe1a211fe1fd14762adea941e3cfd6c611a136e28da6c39272dbb7a1bbe6a86", size = 511457, upload-time = "2025-11-09T20:47:37.932Z" }, - { url = "https://files.pythonhosted.org/packages/07/65/86b74010e450a1a77b2c1aabb91d4a91dd3cd5afce99f34d75fd1ac64b19/jiter-0.12.0-cp312-cp312-win32.whl", hash = "sha256:d779d97c834b4278276ec703dc3fc1735fca50af63eb7262f05bdb4e62203d44", size = 204546, upload-time = "2025-11-09T20:47:40.47Z" }, - { url = 
"https://files.pythonhosted.org/packages/1c/c7/6659f537f9562d963488e3e55573498a442503ced01f7e169e96a6110383/jiter-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e8269062060212b373316fe69236096aaf4c49022d267c6736eebd66bbbc60bb", size = 205196, upload-time = "2025-11-09T20:47:41.794Z" }, - { url = "https://files.pythonhosted.org/packages/21/f4/935304f5169edadfec7f9c01eacbce4c90bb9a82035ac1de1f3bd2d40be6/jiter-0.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:06cb970936c65de926d648af0ed3d21857f026b1cf5525cb2947aa5e01e05789", size = 186100, upload-time = "2025-11-09T20:47:43.007Z" }, - { url = "https://files.pythonhosted.org/packages/3d/a6/97209693b177716e22576ee1161674d1d58029eb178e01866a0422b69224/jiter-0.12.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6cc49d5130a14b732e0612bc76ae8db3b49898732223ef8b7599aa8d9810683e", size = 313658, upload-time = "2025-11-09T20:47:44.424Z" }, - { url = "https://files.pythonhosted.org/packages/06/4d/125c5c1537c7d8ee73ad3d530a442d6c619714b95027143f1b61c0b4dfe0/jiter-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:37f27a32ce36364d2fa4f7fdc507279db604d27d239ea2e044c8f148410defe1", size = 318605, upload-time = "2025-11-09T20:47:45.973Z" }, - { url = "https://files.pythonhosted.org/packages/99/bf/a840b89847885064c41a5f52de6e312e91fa84a520848ee56c97e4fa0205/jiter-0.12.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbc0944aa3d4b4773e348cda635252824a78f4ba44328e042ef1ff3f6080d1cf", size = 349803, upload-time = "2025-11-09T20:47:47.535Z" }, - { url = "https://files.pythonhosted.org/packages/8a/88/e63441c28e0db50e305ae23e19c1d8fae012d78ed55365da392c1f34b09c/jiter-0.12.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:da25c62d4ee1ffbacb97fac6dfe4dcd6759ebdc9015991e92a6eae5816287f44", size = 365120, upload-time = "2025-11-09T20:47:49.284Z" }, - { url = 
"https://files.pythonhosted.org/packages/0a/7c/49b02714af4343970eb8aca63396bc1c82fa01197dbb1e9b0d274b550d4e/jiter-0.12.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:048485c654b838140b007390b8182ba9774621103bd4d77c9c3f6f117474ba45", size = 479918, upload-time = "2025-11-09T20:47:50.807Z" }, - { url = "https://files.pythonhosted.org/packages/69/ba/0a809817fdd5a1db80490b9150645f3aae16afad166960bcd562be194f3b/jiter-0.12.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:635e737fbb7315bef0037c19b88b799143d2d7d3507e61a76751025226b3ac87", size = 379008, upload-time = "2025-11-09T20:47:52.211Z" }, - { url = "https://files.pythonhosted.org/packages/5f/c3/c9fc0232e736c8877d9e6d83d6eeb0ba4e90c6c073835cc2e8f73fdeef51/jiter-0.12.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e017c417b1ebda911bd13b1e40612704b1f5420e30695112efdbed8a4b389ed", size = 361785, upload-time = "2025-11-09T20:47:53.512Z" }, - { url = "https://files.pythonhosted.org/packages/96/61/61f69b7e442e97ca6cd53086ddc1cf59fb830549bc72c0a293713a60c525/jiter-0.12.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:89b0bfb8b2bf2351fba36bb211ef8bfceba73ef58e7f0c68fb67b5a2795ca2f9", size = 386108, upload-time = "2025-11-09T20:47:54.893Z" }, - { url = "https://files.pythonhosted.org/packages/e9/2e/76bb3332f28550c8f1eba3bf6e5efe211efda0ddbbaf24976bc7078d42a5/jiter-0.12.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:f5aa5427a629a824a543672778c9ce0c5e556550d1569bb6ea28a85015287626", size = 519937, upload-time = "2025-11-09T20:47:56.253Z" }, - { url = "https://files.pythonhosted.org/packages/84/d6/fa96efa87dc8bff2094fb947f51f66368fa56d8d4fc9e77b25d7fbb23375/jiter-0.12.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed53b3d6acbcb0fd0b90f20c7cb3b24c357fe82a3518934d4edfa8c6898e498c", size = 510853, upload-time = "2025-11-09T20:47:58.32Z" }, - { url = 
"https://files.pythonhosted.org/packages/8a/28/93f67fdb4d5904a708119a6ab58a8f1ec226ff10a94a282e0215402a8462/jiter-0.12.0-cp313-cp313-win32.whl", hash = "sha256:4747de73d6b8c78f2e253a2787930f4fffc68da7fa319739f57437f95963c4de", size = 204699, upload-time = "2025-11-09T20:47:59.686Z" }, - { url = "https://files.pythonhosted.org/packages/c4/1f/30b0eb087045a0abe2a5c9c0c0c8da110875a1d3be83afd4a9a4e548be3c/jiter-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:e25012eb0c456fcc13354255d0338cd5397cce26c77b2832b3c4e2e255ea5d9a", size = 204258, upload-time = "2025-11-09T20:48:01.01Z" }, - { url = "https://files.pythonhosted.org/packages/2c/f4/2b4daf99b96bce6fc47971890b14b2a36aef88d7beb9f057fafa032c6141/jiter-0.12.0-cp313-cp313-win_arm64.whl", hash = "sha256:c97b92c54fe6110138c872add030a1f99aea2401ddcdaa21edf74705a646dd60", size = 185503, upload-time = "2025-11-09T20:48:02.35Z" }, - { url = "https://files.pythonhosted.org/packages/39/ca/67bb15a7061d6fe20b9b2a2fd783e296a1e0f93468252c093481a2f00efa/jiter-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:53839b35a38f56b8be26a7851a48b89bc47e5d88e900929df10ed93b95fea3d6", size = 317965, upload-time = "2025-11-09T20:48:03.783Z" }, - { url = "https://files.pythonhosted.org/packages/18/af/1788031cd22e29c3b14bc6ca80b16a39a0b10e611367ffd480c06a259831/jiter-0.12.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94f669548e55c91ab47fef8bddd9c954dab1938644e715ea49d7e117015110a4", size = 345831, upload-time = "2025-11-09T20:48:05.55Z" }, - { url = "https://files.pythonhosted.org/packages/05/17/710bf8472d1dff0d3caf4ced6031060091c1320f84ee7d5dcbed1f352417/jiter-0.12.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:351d54f2b09a41600ffea43d081522d792e81dcfb915f6d2d242744c1cc48beb", size = 361272, upload-time = "2025-11-09T20:48:06.951Z" }, - { url = 
"https://files.pythonhosted.org/packages/fb/f1/1dcc4618b59761fef92d10bcbb0b038b5160be653b003651566a185f1a5c/jiter-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2a5e90604620f94bf62264e7c2c038704d38217b7465b863896c6d7c902b06c7", size = 204604, upload-time = "2025-11-09T20:48:08.328Z" }, - { url = "https://files.pythonhosted.org/packages/d9/32/63cb1d9f1c5c6632a783c0052cde9ef7ba82688f7065e2f0d5f10a7e3edb/jiter-0.12.0-cp313-cp313t-win_arm64.whl", hash = "sha256:88ef757017e78d2860f96250f9393b7b577b06a956ad102c29c8237554380db3", size = 185628, upload-time = "2025-11-09T20:48:09.572Z" }, - { url = "https://files.pythonhosted.org/packages/a8/99/45c9f0dbe4a1416b2b9a8a6d1236459540f43d7fb8883cff769a8db0612d/jiter-0.12.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:c46d927acd09c67a9fb1416df45c5a04c27e83aae969267e98fba35b74e99525", size = 312478, upload-time = "2025-11-09T20:48:10.898Z" }, - { url = "https://files.pythonhosted.org/packages/4c/a7/54ae75613ba9e0f55fcb0bc5d1f807823b5167cc944e9333ff322e9f07dd/jiter-0.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:774ff60b27a84a85b27b88cd5583899c59940bcc126caca97eb2a9df6aa00c49", size = 318706, upload-time = "2025-11-09T20:48:12.266Z" }, - { url = "https://files.pythonhosted.org/packages/59/31/2aa241ad2c10774baf6c37f8b8e1f39c07db358f1329f4eb40eba179c2a2/jiter-0.12.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5433fab222fb072237df3f637d01b81f040a07dcac1cb4a5c75c7aa9ed0bef1", size = 351894, upload-time = "2025-11-09T20:48:13.673Z" }, - { url = "https://files.pythonhosted.org/packages/54/4f/0f2759522719133a9042781b18cc94e335b6d290f5e2d3e6899d6af933e3/jiter-0.12.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f8c593c6e71c07866ec6bfb790e202a833eeec885022296aff6b9e0b92d6a70e", size = 365714, upload-time = "2025-11-09T20:48:15.083Z" }, - { url = 
"https://files.pythonhosted.org/packages/dc/6f/806b895f476582c62a2f52c453151edd8a0fde5411b0497baaa41018e878/jiter-0.12.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:90d32894d4c6877a87ae00c6b915b609406819dce8bc0d4e962e4de2784e567e", size = 478989, upload-time = "2025-11-09T20:48:16.706Z" }, - { url = "https://files.pythonhosted.org/packages/86/6c/012d894dc6e1033acd8db2b8346add33e413ec1c7c002598915278a37f79/jiter-0.12.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:798e46eed9eb10c3adbbacbd3bdb5ecd4cf7064e453d00dbef08802dae6937ff", size = 378615, upload-time = "2025-11-09T20:48:18.614Z" }, - { url = "https://files.pythonhosted.org/packages/87/30/d718d599f6700163e28e2c71c0bbaf6dace692e7df2592fd793ac9276717/jiter-0.12.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3f1368f0a6719ea80013a4eb90ba72e75d7ea67cfc7846db2ca504f3df0169a", size = 364745, upload-time = "2025-11-09T20:48:20.117Z" }, - { url = "https://files.pythonhosted.org/packages/8f/85/315b45ce4b6ddc7d7fceca24068543b02bdc8782942f4ee49d652e2cc89f/jiter-0.12.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65f04a9d0b4406f7e51279710b27484af411896246200e461d80d3ba0caa901a", size = 386502, upload-time = "2025-11-09T20:48:21.543Z" }, - { url = "https://files.pythonhosted.org/packages/74/0b/ce0434fb40c5b24b368fe81b17074d2840748b4952256bab451b72290a49/jiter-0.12.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:fd990541982a24281d12b67a335e44f117e4c6cbad3c3b75c7dea68bf4ce3a67", size = 519845, upload-time = "2025-11-09T20:48:22.964Z" }, - { url = "https://files.pythonhosted.org/packages/e8/a3/7a7a4488ba052767846b9c916d208b3ed114e3eb670ee984e4c565b9cf0d/jiter-0.12.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:b111b0e9152fa7df870ecaebb0bd30240d9f7fff1f2003bcb4ed0f519941820b", size = 510701, upload-time = "2025-11-09T20:48:24.483Z" }, - { url = 
"https://files.pythonhosted.org/packages/c3/16/052ffbf9d0467b70af24e30f91e0579e13ded0c17bb4a8eb2aed3cb60131/jiter-0.12.0-cp314-cp314-win32.whl", hash = "sha256:a78befb9cc0a45b5a5a0d537b06f8544c2ebb60d19d02c41ff15da28a9e22d42", size = 205029, upload-time = "2025-11-09T20:48:25.749Z" }, - { url = "https://files.pythonhosted.org/packages/e4/18/3cf1f3f0ccc789f76b9a754bdb7a6977e5d1d671ee97a9e14f7eb728d80e/jiter-0.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:e1fe01c082f6aafbe5c8faf0ff074f38dfb911d53f07ec333ca03f8f6226debf", size = 204960, upload-time = "2025-11-09T20:48:27.415Z" }, - { url = "https://files.pythonhosted.org/packages/02/68/736821e52ecfdeeb0f024b8ab01b5a229f6b9293bbdb444c27efade50b0f/jiter-0.12.0-cp314-cp314-win_arm64.whl", hash = "sha256:d72f3b5a432a4c546ea4bedc84cce0c3404874f1d1676260b9c7f048a9855451", size = 185529, upload-time = "2025-11-09T20:48:29.125Z" }, - { url = "https://files.pythonhosted.org/packages/30/61/12ed8ee7a643cce29ac97c2281f9ce3956eb76b037e88d290f4ed0d41480/jiter-0.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:e6ded41aeba3603f9728ed2b6196e4df875348ab97b28fc8afff115ed42ba7a7", size = 318974, upload-time = "2025-11-09T20:48:30.87Z" }, - { url = "https://files.pythonhosted.org/packages/2d/c6/f3041ede6d0ed5e0e79ff0de4c8f14f401bbf196f2ef3971cdbe5fd08d1d/jiter-0.12.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a947920902420a6ada6ad51892082521978e9dd44a802663b001436e4b771684", size = 345932, upload-time = "2025-11-09T20:48:32.658Z" }, - { url = "https://files.pythonhosted.org/packages/d5/5d/4d94835889edd01ad0e2dbfc05f7bdfaed46292e7b504a6ac7839aa00edb/jiter-0.12.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:add5e227e0554d3a52cf390a7635edaffdf4f8fce4fdbcef3cc2055bb396a30c", size = 367243, upload-time = "2025-11-09T20:48:34.093Z" }, - { url = 
"https://files.pythonhosted.org/packages/fd/76/0051b0ac2816253a99d27baf3dda198663aff882fa6ea7deeb94046da24e/jiter-0.12.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f9b1cda8fcb736250d7e8711d4580ebf004a46771432be0ae4796944b5dfa5d", size = 479315, upload-time = "2025-11-09T20:48:35.507Z" }, - { url = "https://files.pythonhosted.org/packages/70/ae/83f793acd68e5cb24e483f44f482a1a15601848b9b6f199dacb970098f77/jiter-0.12.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:deeb12a2223fe0135c7ff1356a143d57f95bbf1f4a66584f1fc74df21d86b993", size = 380714, upload-time = "2025-11-09T20:48:40.014Z" }, - { url = "https://files.pythonhosted.org/packages/b1/5e/4808a88338ad2c228b1126b93fcd8ba145e919e886fe910d578230dabe3b/jiter-0.12.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c596cc0f4cb574877550ce4ecd51f8037469146addd676d7c1a30ebe6391923f", size = 365168, upload-time = "2025-11-09T20:48:41.462Z" }, - { url = "https://files.pythonhosted.org/packages/0c/d4/04619a9e8095b42aef436b5aeb4c0282b4ff1b27d1db1508df9f5dc82750/jiter-0.12.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ab4c823b216a4aeab3fdbf579c5843165756bd9ad87cc6b1c65919c4715f783", size = 387893, upload-time = "2025-11-09T20:48:42.921Z" }, - { url = "https://files.pythonhosted.org/packages/17/ea/d3c7e62e4546fdc39197fa4a4315a563a89b95b6d54c0d25373842a59cbe/jiter-0.12.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:e427eee51149edf962203ff8db75a7514ab89be5cb623fb9cea1f20b54f1107b", size = 520828, upload-time = "2025-11-09T20:48:44.278Z" }, - { url = "https://files.pythonhosted.org/packages/cc/0b/c6d3562a03fd767e31cb119d9041ea7958c3c80cb3d753eafb19b3b18349/jiter-0.12.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:edb868841f84c111255ba5e80339d386d937ec1fdce419518ce1bd9370fac5b6", size = 511009, upload-time = "2025-11-09T20:48:45.726Z" }, - { url = 
"https://files.pythonhosted.org/packages/aa/51/2cb4468b3448a8385ebcd15059d325c9ce67df4e2758d133ab9442b19834/jiter-0.12.0-cp314-cp314t-win32.whl", hash = "sha256:8bbcfe2791dfdb7c5e48baf646d37a6a3dcb5a97a032017741dea9f817dca183", size = 205110, upload-time = "2025-11-09T20:48:47.033Z" }, - { url = "https://files.pythonhosted.org/packages/b2/c5/ae5ec83dec9c2d1af805fd5fe8f74ebded9c8670c5210ec7820ce0dbeb1e/jiter-0.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2fa940963bf02e1d8226027ef461e36af472dea85d36054ff835aeed944dd873", size = 205223, upload-time = "2025-11-09T20:48:49.076Z" }, - { url = "https://files.pythonhosted.org/packages/97/9a/3c5391907277f0e55195550cf3fa8e293ae9ee0c00fb402fec1e38c0c82f/jiter-0.12.0-cp314-cp314t-win_arm64.whl", hash = "sha256:506c9708dd29b27288f9f8f1140c3cb0e3d8ddb045956d7757b1fa0e0f39a473", size = 185564, upload-time = "2025-11-09T20:48:50.376Z" }, - { url = "https://files.pythonhosted.org/packages/cb/f5/12efb8ada5f5c9edc1d4555fe383c1fb2eac05ac5859258a72d61981d999/jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:e8547883d7b96ef2e5fe22b88f8a4c8725a56e7f4abafff20fd5272d634c7ecb", size = 309974, upload-time = "2025-11-09T20:49:17.187Z" }, - { url = "https://files.pythonhosted.org/packages/85/15/d6eb3b770f6a0d332675141ab3962fd4a7c270ede3515d9f3583e1d28276/jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:89163163c0934854a668ed783a2546a0617f71706a2551a4a0666d91ab365d6b", size = 304233, upload-time = "2025-11-09T20:49:18.734Z" }, - { url = "https://files.pythonhosted.org/packages/8c/3e/e7e06743294eea2cf02ced6aa0ff2ad237367394e37a0e2b4a1108c67a36/jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d96b264ab7d34bbb2312dedc47ce07cd53f06835eacbc16dde3761f47c3a9e7f", size = 338537, upload-time = "2025-11-09T20:49:20.317Z" }, - { url = 
"https://files.pythonhosted.org/packages/2f/9c/6753e6522b8d0ef07d3a3d239426669e984fb0eba15a315cdbc1253904e4/jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24e864cb30ab82311c6425655b0cdab0a98c5d973b065c66a3f020740c2324c", size = 346110, upload-time = "2025-11-09T20:49:21.817Z" }, -] - -[[package]] -name = "ollama" -version = "0.6.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "httpx" }, - { name = "pydantic" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/9d/5a/652dac4b7affc2b37b95386f8ae78f22808af09d720689e3d7a86b6ed98e/ollama-0.6.1.tar.gz", hash = "sha256:478c67546836430034b415ed64fa890fd3d1ff91781a9d548b3325274e69d7c6", size = 51620, upload-time = "2025-11-13T23:02:17.416Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/47/4f/4a617ee93d8208d2bcf26b2d8b9402ceaed03e3853c754940e2290fed063/ollama-0.6.1-py3-none-any.whl", hash = "sha256:fc4c984b345735c5486faeee67d8a265214a31cbb828167782dc642ce0a2bf8c", size = 14354, upload-time = "2025-11-13T23:02:16.292Z" }, -] - -[[package]] -name = "openai" -version = "2.11.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "distro" }, - { name = "httpx" }, - { name = "jiter" }, - { name = "pydantic" }, - { name = "sniffio" }, - { name = "tqdm" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f4/8c/aa6aea6072f985ace9d6515046b9088ff00c157f9654da0c7b1e129d9506/openai-2.11.0.tar.gz", hash = "sha256:b3da01d92eda31524930b6ec9d7167c535e843918d7ba8a76b1c38f1104f321e", size = 624540, upload-time = "2025-12-11T19:11:58.539Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/f1/d9251b565fce9f8daeb45611e3e0d2f7f248429e40908dcee3b6fe1b5944/openai-2.11.0-py3-none-any.whl", hash = "sha256:21189da44d2e3d027b08c7a920ba4454b8b7d6d30ae7e64d9de11dbe946d4faa", size = 1064131, upload-time = 
"2025-12-11T19:11:56.816Z" }, -] - -[[package]] -name = "pydantic" -version = "2.12.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "annotated-types" }, - { name = "pydantic-core" }, - { name = "typing-extensions" }, - { name = "typing-inspection" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, -] - -[[package]] -name = "pydantic-core" -version = "2.41.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, - { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, - { 
url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, - { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, - { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, - { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, - { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, - { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, 
upload-time = "2025-11-04T13:40:12.004Z" }, - { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, - { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, - { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, - { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, - { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, - { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, - { url = 
"https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, - { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, - { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, - { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, - { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, - { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, - { 
url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, - { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, - { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, - { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, - { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, - { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, - { url = 
"https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, - { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, - { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, - { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, - { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, - { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, - { url = 
"https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, - { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, - { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, - { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, - { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, - { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, - { url 
= "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, - { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, - { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, - { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, - { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, - { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, - { url = 
"https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, - { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, - { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, - { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, - { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, - { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, 
upload-time = "2025-11-04T13:41:45.221Z" }, - { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, - { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, - { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, - { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, - { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, - { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, - { url = 
"https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, - { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, - { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, - { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, -] - -[[package]] -name = "runnable-examples" -version = "0.1.0" -source = { virtual = "." 
} -dependencies = [ - { name = "ollama" }, - { name = "openai" }, -] - -[package.metadata] -requires-dist = [ - { name = "ollama", specifier = ">=0.6.1" }, - { name = "openai", specifier = ">=2.11.0" }, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, -] - -[[package]] -name = "tqdm" -version = "4.67.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, -] - -[[package]] -name = "typing-extensions" -version = "4.15.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = 
"sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, -] - -[[package]] -name = "typing-inspection" -version = "0.4.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, -] diff --git a/scripts/generate_snippets.py b/scripts/generate_snippets.py new file mode 100644 index 0000000..0375829 --- /dev/null +++ b/scripts/generate_snippets.py @@ -0,0 +1,317 @@ +#!/usr/bin/env python3 +import argparse +import json +import logging +import pathlib +import sys + +logger = logging.getLogger(__name__) + +REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent +NOTEBOOK_PATH = REPO_ROOT / "notebooks" / "quickstart_snippets.ipynb" +SNIPPETS_DIR = REPO_ROOT / "snippets" / "quickstart" + +SHIKI_STYLE = ( + 'className="shiki shiki-themes github-light github-dark" ' + "style={{backgroundColor: '#fff', '--shiki-dark-bg': '#24292e', " + "color: '#24292e', '--shiki-dark': '#e1e4e8'}}" +) + +SHIKI_STYLE_WITH_MARGIN = ( + 'className="shiki 
shiki-themes github-light github-dark" ' + "style={{backgroundColor: '#fff', '--shiki-dark-bg': '#24292e', " + "color: '#24292e', '--shiki-dark': '#e1e4e8', marginTop: '0.5rem'}}" +) + +REPLACEMENTS = { + "text": [ + ("LiquidAI/LFM2.5-1.2B-Instruct-GGUF", "${ggufRepo}"), + ("LiquidAI/LFM2.5-1.2B-Instruct", "${modelId}"), + ], + "vl": [ + ("LiquidAI/LFM2.5-VL-1.6B-GGUF", "${ggufRepo}"), + ("LiquidAI/LFM2.5-VL-1.6B", "${modelId}"), + ], +} + +# === Snippet Configuration === + +SNIPPET_CONFIG = { + "text-transformers": { + "component_name": "TextTransformers", + "props": "{ modelId }", + "replacement_group": "text", + "source": "notebook", + "sections": [ + {"type": "label", "text": "Install:"}, + {"type": "code_block", "language": "bash", + "code": 'pip install "transformers>=5.0.0" torch accelerate'}, + {"type": "label", "text": "Download & Run:"}, + {"type": "notebook_code", "language": "python"}, + ], + }, + "text-vllm": { + "component_name": "TextVllm", + "props": "{ modelId }", + "replacement_group": "text", + "source": "notebook", + "sections": [ + {"type": "label", "text": "Install:"}, + {"type": "code_block", "language": "bash", + "code": "pip install vllm==0.14"}, + {"type": "label", "text": "Run:"}, + {"type": "notebook_code", "language": "python"}, + ], + }, + "text-llamacpp": { + "component_name": "TextLlamacpp", + "props": "{ ggufRepo }", + "replacement_group": "text", + "source": "config", + "sections": [ + {"type": "label", "text": "Install:"}, + {"type": "code_block", "language": "bash", + "code": "brew install llama.cpp"}, + {"type": "label", "text": "Run:"}, + {"type": "code_block", "language": "bash", + "code": "llama-cli -hf ${ggufRepo} -c 4096 --color -i"}, + {"type": "raw_html", + "html": '

The -hf flag downloads the model directly from Hugging Face. For other installation methods and advanced usage, see the llama.cpp guide.

'}, + ], + }, + "vl-transformers": { + "component_name": "VlTransformers", + "props": "{ modelId }", + "replacement_group": "vl", + "source": "notebook", + "sections": [ + {"type": "label", "text": "Install:"}, + {"type": "code_block", "language": "bash", + "code": 'pip install "transformers>=5.0.0" pillow torch'}, + {"type": "note", "children": [ + {"type": "text", + "text": "Transformers v5 is newly released. If you encounter issues, fall back to the pinned git source:"}, + {"type": "code_block_margin", "language": "bash", + "code": "pip install git+https://github.com/huggingface/transformers.git@3c2517727ce28a30f5044e01663ee204deb1cdbe pillow torch"}, + ]}, + {"type": "label", "text": "Download & Run:"}, + {"type": "notebook_code", "language": "python"}, + ], + }, + "vl-vllm": { + "component_name": "VlVllm", + "props": "{ modelId }", + "replacement_group": "vl", + "source": "notebook", + "sections": [ + {"type": "warning", + "text": "vLLM support for LFM Vision Models requires a specific version. Install from the custom source below."}, + {"type": "label", "text": "Install:"}, + {"type": "code_block", "language": "bash", + "code": "VLLM_PRECOMPILED_WHEEL_COMMIT=72506c98349d6bcd32b4e33eec7b5513453c1502 \\\n VLLM_USE_PRECOMPILED=1 \\\n pip install git+https://github.com/vllm-project/vllm.git"}, + {"type": "code_block", "language": "bash", + "code": 'pip install "transformers>=5.0.0" pillow'}, + {"type": "note", "children": [ + {"type": "text", + "text": "Transformers v5 is newly released. 
If you encounter issues, fall back to the pinned git source:"}, + {"type": "code_block_margin", "language": "bash", + "code": "pip install git+https://github.com/huggingface/transformers.git@3c2517727ce28a30f5044e01663ee204deb1cdbe pillow"}, + ]}, + {"type": "label", "text": "Run:"}, + {"type": "notebook_code", "language": "python"}, + ], + }, + "vl-llamacpp": { + "component_name": "VlLlamacpp", + "props": "{ ggufRepo }", + "replacement_group": "vl", + "source": "config", + "sections": [ + {"type": "raw_html", + "html": "

llama.cpp enables efficient CPU inference for vision models.

"}, + {"type": "label", "text": "Install:"}, + {"type": "code_block", "language": "bash", + "code": "brew install llama.cpp"}, + {"type": "raw_html", + "html": '

Or download pre-built binaries from llama.cpp releases.

'}, + {"type": "label", "text": "Run:"}, + {"type": "code_block", "language": "bash", + "code": "llama-cli \\\n -hf ${ggufRepo}:Q4_0 \\\n --image test_image.jpg \\\n -p \"What's in this image?\" \\\n -n 128"}, + {"type": "raw_html", + "html": '

The -hf flag downloads the model directly from Hugging Face. Use --image-max-tokens to control image token budget.

'}, + {"type": "raw_html", + "html": '

For server deployment and advanced usage, see the llama.cpp guide.

'}, + ], + }, +} + + +def read_notebook_cells(): + with open(NOTEBOOK_PATH) as f: + nb = json.load(f) + + cells = {} + for cell in nb["cells"]: + snippet_name = cell.get("metadata", {}).get("snippet") + if snippet_name and cell["cell_type"] == "code": + source_lines = cell["source"] + code = "".join(source_lines) + cells[snippet_name] = code + return cells + + +def apply_replacements(code, group): + for old, new in REPLACEMENTS[group]: + code = code.replace(old, new) + return code + + +def render_code_block(code, language, style=None): + if style is None: + style = SHIKI_STYLE + # Backslashes must be escaped inside JS template literals + escaped_code = code.replace("\\", "\\\\") + lines = [ + f'
',
+        f'',
+        "{`" + escaped_code + "`.split('\\n').map((line, i) => {line}{'\\n'})}",
+        "",
+        "
", + ] + return "\n".join(lines) + + +def render_section(section, notebook_code=None): + section_type = section["type"] + + if section_type == "label": + return f"

{section['text']}

" + + if section_type == "code_block": + return render_code_block(section["code"], section["language"]) + + if section_type == "code_block_margin": + return render_code_block( + section["code"], section["language"], style=SHIKI_STYLE_WITH_MARGIN + ) + + if section_type == "notebook_code": + if notebook_code is None: + msg = "notebook_code section requires notebook code" + raise ValueError(msg) + return render_code_block(notebook_code, section["language"]) + + if section_type == "raw_html": + return section["html"] + + if section_type == "warning": + return f"\n{section['text']}\n" + + if section_type == "note": + inner_parts = [] + for child in section["children"]: + if child["type"] == "text": + inner_parts.append(child["text"]) + else: + inner_parts.append(render_section(child)) + return "\n" + "\n".join(inner_parts) + "\n" + + if section_type == "text": + return section["text"] + + msg = f"Unknown section type: {section_type}" + raise ValueError(msg) + + +def generate_snippet(name, config, notebook_cells): + group = config["replacement_group"] + notebook_code = None + + if config["source"] == "notebook": + raw_code = notebook_cells.get(name) + if raw_code is None: + logger.error("No notebook cell found for snippet %s", name) + return None + notebook_code = apply_replacements(raw_code, group) + + parts = [] + for section in config["sections"]: + parts.append(render_section(section, notebook_code)) + + body = "\n".join(parts) + component_name = config["component_name"] + props = config["props"] + + return f"export const {component_name} = ({props}) => (\n
\n{body}\n
\n);\n" + + +def generate_all(): + notebook_cells = read_notebook_cells() + results = {} + + for name, config in SNIPPET_CONFIG.items(): + content = generate_snippet(name, config, notebook_cells) + if content is None: + return None + results[name] = content + + return results + + +def write_snippets(results): + for name, content in results.items(): + output_path = SNIPPETS_DIR / f"{name}.mdx" + output_path.write_text(content) + logger.info("Generated %s", output_path) + + +def check_freshness(results): + mismatches = [] + + for name, expected in results.items(): + output_path = SNIPPETS_DIR / f"{name}.mdx" + if not output_path.exists(): + mismatches.append((name, "file does not exist")) + continue + + actual = output_path.read_text() + if actual != expected: + mismatches.append((name, "content differs")) + logger.error("Snippet %s is out of date. Regenerate with:", name) + logger.error(" python3 scripts/generate_snippets.py") + + return mismatches + + +def main(): + parser = argparse.ArgumentParser( + description="Generate quickstart snippet MDX files from notebook source" + ) + parser.add_argument( + "--check", + action="store_true", + help="Check that committed snippets match generated output (for CI)", + ) + args = parser.parse_args() + + logging.basicConfig(level=logging.INFO, format="%(message)s") + + results = generate_all() + if results is None: + sys.exit(1) + + if args.check: + mismatches = check_freshness(results) + if mismatches: + logger.error("Snippet freshness check failed:") + for name, reason in mismatches: + logger.error(" %s: %s", name, reason) + sys.exit(1) + logger.info("All snippets are up to date.") + else: + write_snippets(results) + logger.info("All snippets generated successfully.") + + +if __name__ == "__main__": + main()