diff --git a/Examples/Examples/Chat/ChatExampleXai.cs b/Examples/Examples/Chat/ChatExampleXai.cs
new file mode 100644
index 0000000..9f0e758
--- /dev/null
+++ b/Examples/Examples/Chat/ChatExampleXai.cs
@@ -0,0 +1,18 @@
+using Examples.Utils;
+using MaIN.Core.Hub;
+
+namespace Examples;
+
+public class ChatExampleXai : IExample
+{
+    public async Task Start()
+    {
+        XaiExample.Setup(); // We need to provide xAI API key
+        Console.WriteLine("(xAI) ChatExample is running!");
+
+        await AIHub.Chat()
+            .WithModel("grok-3-beta")
+            .WithMessage("Is the killer whale cute?")
+            .CompleteAsync(interactive: true);
+    }
+}
\ No newline at end of file
diff --git a/Examples/Examples/Program.cs b/Examples/Examples/Program.cs
index e3274ec..80cc3d6 100644
--- a/Examples/Examples/Program.cs
+++ b/Examples/Examples/Program.cs
@@ -78,6 +78,7 @@ static void RegisterExamples(IServiceCollection services)
     services.AddTransient();
     services.AddTransient();
     services.AddTransient();
+    services.AddTransient<ChatExampleXai>();
 }
 
 async Task RunSelectedExample(IServiceProvider serviceProvider)
@@ -172,6 +173,7 @@ public class ExampleRegistry(IServiceProvider serviceProvider)
         ("\u25a0 DeepSeek Chat with reasoning", serviceProvider.GetRequiredService()),
         ("\u25a0 GroqCloud Chat", serviceProvider.GetRequiredService()),
         ("\u25a0 Anthropic Chat", serviceProvider.GetRequiredService()),
+        ("\u25a0 xAI Chat", serviceProvider.GetRequiredService<ChatExampleXai>()),
         ("\u25a0 McpClient example", serviceProvider.GetRequiredService()),
         ("\u25a0 McpAgent example", serviceProvider.GetRequiredService()),
         ("\u25a0 Chat with TTS example", serviceProvider.GetRequiredService()),
diff --git a/Examples/Examples/Utils/XaiExample.cs b/Examples/Examples/Utils/XaiExample.cs
new file mode 100644
index 0000000..537fec6
--- /dev/null
+++ b/Examples/Examples/Utils/XaiExample.cs
@@ -0,0 +1,16 @@
+using MaIN.Core;
+using MaIN.Domain.Configuration;
+
+namespace Examples.Utils;
+
+public class XaiExample
+{
+    public static void Setup()
+    {
+        MaINBootstrapper.Initialize(configureSettings: (options) =>
+        {
+            options.BackendType = BackendType.Xai;
+            options.XaiKey = "";
+        });
+    }
+}
\ No newline at end of file
diff --git a/Releases/0.7.9.md b/Releases/0.7.9.md
new file mode 100644
index 0000000..1d04d60
--- /dev/null
+++ b/Releases/0.7.9.md
@@ -0,0 +1,3 @@
+# 0.7.9 release
+
+xAI integration has been added.
\ No newline at end of file
diff --git a/src/MaIN.Core/.nuspec b/src/MaIN.Core/.nuspec
index da1f63a..78f647b 100644
--- a/src/MaIN.Core/.nuspec
+++ b/src/MaIN.Core/.nuspec
@@ -2,7 +2,7 @@
     <id>MaIN.NET</id>
-    <version>0.7.8</version>
+    <version>0.7.9</version>
     <authors>Wisedev</authors>
     <owners>Wisedev</owners>
     <icon>favicon.png</icon>
diff --git a/src/MaIN.Domain/Configuration/MaINSettings.cs b/src/MaIN.Domain/Configuration/MaINSettings.cs
index 96e6d45..17f7e06 100644
--- a/src/MaIN.Domain/Configuration/MaINSettings.cs
+++ b/src/MaIN.Domain/Configuration/MaINSettings.cs
@@ -13,6 +13,7 @@ public class MaINSettings
     public string? DeepSeekKey { get; set; }
     public string? AnthropicKey { get; set; }
     public string? GroqCloudKey { get; set; }
+    public string? XaiKey { get; set; }
     public MongoDbSettings? MongoDbSettings { get; set; }
     public FileSystemSettings? FileSystemSettings { get; set; }
     public SqliteSettings? SqliteSettings { get; set; }
@@ -28,4 +29,5 @@ public enum BackendType
     DeepSeek = 3,
     GroqCloud = 4,
     Anthropic = 5,
+    Xai = 6,
 }
\ No newline at end of file
diff --git a/src/MaIN.Services/Constants/ServiceConstants.cs b/src/MaIN.Services/Constants/ServiceConstants.cs
index e9ad39d..33aa25e 100644
--- a/src/MaIN.Services/Constants/ServiceConstants.cs
+++ b/src/MaIN.Services/Constants/ServiceConstants.cs
@@ -10,6 +10,7 @@ public static class HttpClients
     public const string DeepSeekClient = "DeepSeekClient";
     public const string GroqCloudClient = "GroqCloudClient";
     public const string AnthropicClient = "AnthropicClient";
+    public const string XaiClient = "XaiClient";
     public const string ImageDownloadClient = "ImageDownloadClient";
     public const string ModelContextDownloadClient = "ModelContextDownloadClient";
 }
@@ -36,6 +37,10 @@ public static class ApiUrls
 
     public const string AnthropicChatMessages = "https://api.anthropic.com/v1/messages";
     public const string AnthropicModels = "https://api.anthropic.com/v1/models";
+
+    public const string XaiImageGenerations = "https://api.x.ai/v1/images/generations";
+    public const string XaiOpenAiChatCompletions = "https://api.x.ai/v1/chat/completions";
+    public const string XaiModels = "https://api.x.ai/v1/models";
 }
 
 public static class Messages
diff --git a/src/MaIN.Services/Services/ImageGenServices/XaiImageGenService.cs b/src/MaIN.Services/Services/ImageGenServices/XaiImageGenService.cs
new file mode 100644
index 0000000..e6be9f6
--- /dev/null
+++ b/src/MaIN.Services/Services/ImageGenServices/XaiImageGenService.cs
@@ -0,0 +1,112 @@
+using MaIN.Domain.Configuration;
+using MaIN.Domain.Entities;
+using MaIN.Services.Constants;
+using MaIN.Services.Services.Abstract;
+using MaIN.Services.Services.Models;
+using System.Net.Http.Headers;
+using System.Net.Http.Json;
+using System.Text.Json;
+
+namespace MaIN.Services.Services.ImageGenServices;
+
+public class XaiImageGenService(
+    IHttpClientFactory httpClientFactory,
+    MaINSettings settings)
+    : IImageGenService
+{
+    private readonly IHttpClientFactory _httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory));
+    private readonly MaINSettings _settings = settings ?? throw new ArgumentNullException(nameof(settings));
+
+    public async Task<ChatResult> Send(Chat chat)
+    {
+        var client = _httpClientFactory.CreateClient(ServiceConstants.HttpClients.XaiClient);
+        string apiKey = _settings.XaiKey ?? Environment.GetEnvironmentVariable("XAI_API_KEY") ??
+                        throw new InvalidOperationException("xAI Key not configured");
+
+        client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", apiKey);
+        var requestBody = new
+        {
+            model = string.IsNullOrWhiteSpace(chat.Model) ? Models.GROK_IMAGE : chat.Model,
+            prompt = BuildPromptFromChat(chat),
+            n = 1,
+            response_format = "b64_json" // or "url"
+        };
+
+        using var response = await client.PostAsJsonAsync(ServiceConstants.ApiUrls.XaiImageGenerations, requestBody);
+        var imageBytes = await ProcessXaiResponse(response);
+        return CreateChatResult(imageBytes);
+    }
+
+    private static string BuildPromptFromChat(Chat chat)
+    {
+        return chat.Messages
+            .Select((msg, index) => index == 0 ? msg.Content : $"&& {msg.Content}")
+            .Aggregate((current, next) => $"{current} {next}");
+    }
+
+    private async Task<byte[]> ProcessXaiResponse(HttpResponseMessage response)
+    {
+        response.EnsureSuccessStatusCode();
+        var responseData = await response.Content.ReadFromJsonAsync<XaiImageResponse>(new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower });
+
+        var first = responseData?.Data.FirstOrDefault()
+                    ?? throw new InvalidOperationException("No image data returned from xAI");
+
+        if (!string.IsNullOrEmpty(first.B64Json))
+        {
+            return Convert.FromBase64String(first.B64Json);
+        }
+
+        if (!string.IsNullOrEmpty(first.Url))
+        {
+            return await DownloadImageAsync(first.Url);
+        }
+
+        throw new InvalidOperationException("No image content returned from xAI");
+    }
+
+    private async Task<byte[]> DownloadImageAsync(string imageUrl)
+    {
+        var imageClient = _httpClientFactory.CreateClient(ServiceConstants.HttpClients.ImageDownloadClient);
+
+        using var imageResponse = await imageClient.GetAsync(imageUrl);
+        imageResponse.EnsureSuccessStatusCode();
+
+        return await imageResponse.Content.ReadAsByteArrayAsync();
+    }
+
+    private static ChatResult CreateChatResult(byte[] imageBytes)
+    {
+        return new ChatResult
+        {
+            Done = true,
+            Message = new Message
+            {
+                Content = ServiceConstants.Messages.GeneratedImageContent,
+                Role = ServiceConstants.Roles.Assistant,
+                Image = imageBytes,
+                Type = MessageType.Image
+            },
+            Model = Models.GROK_IMAGE,
+            CreatedAt = DateTime.UtcNow
+        };
+    }
+
+    private struct Models
+    {
+        public const string GROK_IMAGE = "grok-2-image";
+    }
+}
+
+
+file class XaiImageResponse
+{
+    public XaiImageData[] Data { get; set; } = [];
+}
+
+file class XaiImageData
+{
+    public string? Url { get; set; }
+    public string? B64Json { get; set; }
+    public string? RevisedPrompt { get; set; }
+}
\ No newline at end of file
diff --git a/src/MaIN.Services/Services/LLMService/Factory/ImageGenServiceFactory.cs b/src/MaIN.Services/Services/LLMService/Factory/ImageGenServiceFactory.cs
index 2d44c5c..1bedeeb 100644
--- a/src/MaIN.Services/Services/LLMService/Factory/ImageGenServiceFactory.cs
+++ b/src/MaIN.Services/Services/LLMService/Factory/ImageGenServiceFactory.cs
@@ -18,6 +18,8 @@ public class ImageGenServiceFactory(IServiceProvider serviceProvider) : IImageGe
         BackendType.DeepSeek => null,
         BackendType.GroqCloud => null,
         BackendType.Anthropic => null,
+        BackendType.Xai => new XaiImageGenService(serviceProvider.GetRequiredService<IHttpClientFactory>(),
+            serviceProvider.GetRequiredService<MaINSettings>()),
         BackendType.Self => new ImageGenService(serviceProvider.GetRequiredService(),
             serviceProvider.GetRequiredService()),
 
diff --git a/src/MaIN.Services/Services/LLMService/Factory/LLMServiceFactory.cs b/src/MaIN.Services/Services/LLMService/Factory/LLMServiceFactory.cs
index 132dfb0..bff97bb 100644
--- a/src/MaIN.Services/Services/LLMService/Factory/LLMServiceFactory.cs
+++ b/src/MaIN.Services/Services/LLMService/Factory/LLMServiceFactory.cs
@@ -39,6 +39,13 @@ public ILLMService CreateService(BackendType backendType)
             serviceProvider.GetRequiredService(),
             serviceProvider.GetRequiredService()),
 
+        BackendType.Xai => new XaiService(
+            serviceProvider.GetRequiredService<MaINSettings>(),
+            serviceProvider.GetRequiredService<INotificationService>(),
+            serviceProvider.GetRequiredService<IHttpClientFactory>(),
+            serviceProvider.GetRequiredService<IMemoryFactory>(),
+            serviceProvider.GetRequiredService<IMemoryService>()),
+
         BackendType.Anthropic => new AnthropicService(
             serviceProvider.GetRequiredService(),
             serviceProvider.GetRequiredService(),
diff --git a/src/MaIN.Services/Services/LLMService/GroqCloudService.cs b/src/MaIN.Services/Services/LLMService/GroqCloudService.cs
index 724f251..12ca5d2 100644
--- a/src/MaIN.Services/Services/LLMService/GroqCloudService.cs
+++ b/src/MaIN.Services/Services/LLMService/GroqCloudService.cs
@@ -19,7 +19,6 @@ public sealed class GroqCloudService(
     : OpenAiCompatibleService(notificationService, httpClientFactory, memoryFactory, memoryService, logger)
 {
     private readonly MaINSettings _settings = settings ?? throw new ArgumentNullException(nameof(settings));
-    private readonly IHttpClientFactory _httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory));
 
     protected override string HttpClientName => ServiceConstants.HttpClients.GroqCloudClient;
     protected override string ChatCompletionsUrl => ServiceConstants.ApiUrls.GroqCloudOpenAiChatCompletions;
diff --git a/src/MaIN.Services/Services/LLMService/OpenAiService.cs b/src/MaIN.Services/Services/LLMService/OpenAiService.cs
index 4edbdd5..8bd68b3 100644
--- a/src/MaIN.Services/Services/LLMService/OpenAiService.cs
+++ b/src/MaIN.Services/Services/LLMService/OpenAiService.cs
@@ -2,8 +2,6 @@
 using MaIN.Services.Services.Abstract;
 using Microsoft.Extensions.Logging;
 using MaIN.Services.Services.LLMService.Memory;
-using System.Net.Http.Headers;
-using System.Text.Json;
 
 namespace MaIN.Services.Services.LLMService;
 
diff --git a/src/MaIN.Services/Services/LLMService/XaiService.cs b/src/MaIN.Services/Services/LLMService/XaiService.cs
new file mode 100644
index 0000000..0b1cd5b
--- /dev/null
+++ b/src/MaIN.Services/Services/LLMService/XaiService.cs
@@ -0,0 +1,77 @@
+using MaIN.Domain.Configuration;
+using MaIN.Services.Constants;
+using MaIN.Services.Services.Models;
+using MaIN.Domain.Entities;
+using MaIN.Services.Services.Abstract;
+using MaIN.Services.Services.LLMService.Memory;
+using Microsoft.Extensions.Logging;
+using System.Text;
+
+namespace MaIN.Services.Services.LLMService;
+
+public sealed class XaiService(
+    MaINSettings settings,
+    INotificationService notificationService,
+    IHttpClientFactory httpClientFactory,
+    IMemoryFactory memoryFactory,
+    IMemoryService memoryService,
+    ILogger? logger = null)
+    : OpenAiCompatibleService(notificationService, httpClientFactory, memoryFactory, memoryService, logger)
+{
+    private readonly MaINSettings _settings = settings ?? throw new ArgumentNullException(nameof(settings));
+
+    protected override string HttpClientName => ServiceConstants.HttpClients.XaiClient;
+    protected override string ChatCompletionsUrl => ServiceConstants.ApiUrls.XaiOpenAiChatCompletions;
+    protected override string ModelsUrl => ServiceConstants.ApiUrls.XaiModels;
+
+    protected override string GetApiKey()
+    {
+        return _settings.XaiKey ?? Environment.GetEnvironmentVariable("XAI_API_KEY") ??
+               throw new InvalidOperationException("xAI Key not configured");
+    }
+
+    protected override void ValidateApiKey()
+    {
+        if (string.IsNullOrEmpty(_settings.XaiKey) && string.IsNullOrEmpty(Environment.GetEnvironmentVariable("XAI_API_KEY")))
+        {
+            throw new InvalidOperationException("xAI Key not configured");
+        }
+    }
+
+    public override async Task<ChatResult> AskMemory(
+        Chat chat,
+        ChatMemoryOptions memoryOptions,
+        CancellationToken cancellationToken = default)
+    {
+        var lastMsg = chat.Messages.Last();
+        var filePaths = await DocumentProcessor.ConvertToFilesContent(memoryOptions);
+        var message = new Message()
+        {
+            Role = ServiceConstants.Roles.User,
+            Content = ComposeMessage(lastMsg, filePaths),
+            Type = MessageType.CloudLLM
+        };
+
+        chat.Messages.Last().Content = message.Content;
+        chat.Messages.Last().Files = [];
+        var result = await Send(chat, new ChatRequestOptions(), cancellationToken);
+        chat.Messages.Last().Content = lastMsg.Content;
+        return result;
+    }
+
+    private string ComposeMessage(Message lastMsg, string[] filePaths)
+    {
+        var stringBuilder = new StringBuilder();
+        stringBuilder.AppendLine($"== FILES IN MEMORY");
+        foreach (var path in filePaths)
+        {
+            var doc = DocumentProcessor.ProcessDocument(path);
+            stringBuilder.Append(doc);
+            stringBuilder.AppendLine();
+        }
+        stringBuilder.AppendLine($"== END OF FILES");
+        stringBuilder.AppendLine();
+        stringBuilder.Append(lastMsg.Content);
+        return stringBuilder.ToString();
+    }
+}
\ No newline at end of file
diff --git a/src/MaIN.Services/Services/McpService.cs b/src/MaIN.Services/Services/McpService.cs
index 4ec74f0..06de96b 100644
--- a/src/MaIN.Services/Services/McpService.cs
+++ b/src/MaIN.Services/Services/McpService.cs
@@ -107,6 +107,16 @@ private PromptExecutionSettings InitializeChatCompletions(IKernelBuilder kernelB
                     ExtensionData = new Dictionary<string, object> { ["max_tokens"] = 4096 }
                 };
 
+            case BackendType.Xai:
+                kernelBuilder.Services.AddOpenAIChatCompletion(
+                    modelId: model,
+                    apiKey: GetXaiKey() ?? throw new ArgumentNullException(nameof(GetXaiKey)),
+                    endpoint: new Uri("https://api.x.ai/v1"));
+                return new OpenAIPromptExecutionSettings()
+                {
+                    FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(options: new() { RetainArgumentTypes = true })
+                };
+
             case BackendType.Self:
                 throw new NotSupportedException("Self backend (local models) does not support MCP integration.");
 
@@ -123,4 +133,6 @@ private PromptExecutionSettings InitializeChatCompletions(IKernelBuilder kernelB
         => settings.GroqCloudKey ?? Environment.GetEnvironmentVariable("GROQ_API_KEY");
     string? GetAnthropicKey()
         => settings.AnthropicKey ?? Environment.GetEnvironmentVariable("ANTHROPIC_API_KEY");
+    string? GetXaiKey()
+        => settings.XaiKey ?? Environment.GetEnvironmentVariable("XAI_API_KEY");
 }
\ No newline at end of file
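
Usage note: both XaiService and XaiImageGenService fall back to the XAI_API_KEY environment variable when MaINSettings.XaiKey is not set, so an example setup does not have to hardcode the key. Below is a minimal sketch of that variant, reusing only types introduced in this diff; the EnvXaiExample class name is illustrative and not part of the change.

using MaIN.Core;
using MaIN.Domain.Configuration;

namespace Examples.Utils;

// Illustrative variant of XaiExample: the key comes from the
// XAI_API_KEY environment variable instead of being hardcoded.
public class EnvXaiExample
{
    public static void Setup()
    {
        MaINBootstrapper.Initialize(configureSettings: (options) =>
        {
            options.BackendType = BackendType.Xai;
            // A null value here is tolerated: XaiService and XaiImageGenService
            // also check the XAI_API_KEY environment variable before throwing.
            options.XaiKey = Environment.GetEnvironmentVariable("XAI_API_KEY");
        });
    }
}

Calling EnvXaiExample.Setup() in place of XaiExample.Setup() inside ChatExampleXai.Start() should behave the same way, with the key supplied through the environment rather than source code.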