diff --git a/Examples/Package.resolved b/Examples/Package.resolved index c4bf91aa3..c0fdb4a9d 100644 --- a/Examples/Package.resolved +++ b/Examples/Package.resolved @@ -72,6 +72,15 @@ "version" : "1.4.1" } }, + { + "identity" : "openai", + "kind" : "remoteSourceControl", + "location" : "https://github.com/MacPaw/OpenAI", + "state" : { + "revision" : "cedec2fc80aedafa332f7395b7004f79f547c794", + "version" : "0.4.4" + } + }, { "identity" : "pathkit", "kind" : "remoteSourceControl", @@ -197,6 +206,15 @@ "version" : "0.6.0" } }, + { + "identity" : "swift-http-types", + "kind" : "remoteSourceControl", + "location" : "https://github.com/apple/swift-http-types", + "state" : { + "revision" : "a0a57e949a8903563aba4615869310c0ebf14c03", + "version" : "1.4.0" + } + }, { "identity" : "swift-image-formats", "kind" : "remoteSourceControl", @@ -242,6 +260,15 @@ "version" : "2.76.0" } }, + { + "identity" : "swift-openapi-runtime", + "kind" : "remoteSourceControl", + "location" : "https://github.com/apple/swift-openapi-runtime", + "state" : { + "revision" : "8f33cc5dfe81169fb167da73584b9c72c3e8bc23", + "version" : "1.8.2" + } + }, { "identity" : "swift-overture", "kind" : "remoteSourceControl", diff --git a/Examples/Package.swift b/Examples/Package.swift index 11f210b82..da14c11a0 100644 --- a/Examples/Package.swift +++ b/Examples/Package.swift @@ -22,6 +22,10 @@ let package = Package( url: "https://github.com/stackotter/swift-bundler", revision: "d42d7ffda684cfed9edcfd3581b8127f1dc55c2e" ), + .package( + url: "https://github.com/MacPaw/OpenAI", + from: "0.4.4" + ), ], targets: [ .executableTarget( @@ -72,6 +76,12 @@ let package = Package( .executableTarget( name: "WebViewExample", dependencies: exampleDependencies + ), + .executableTarget( + name: "ChatbotExample", + dependencies: exampleDependencies + [ + .product(name: "OpenAI", package: "OpenAI") + ] ) ] ) diff --git a/Examples/Sources/ChatbotExample/ChatbotApp.swift b/Examples/Sources/ChatbotExample/ChatbotApp.swift new file mode 100644 index 000000000..93da0347d --- /dev/null +++ b/Examples/Sources/ChatbotExample/ChatbotApp.swift @@ -0,0 +1,47 @@ +import DefaultBackend +import SwiftCrossUI + +#if canImport(SwiftBundlerRuntime) + import SwiftBundlerRuntime +#endif + +// MARK: - Main App + +@main +@HotReloadable +struct ChatbotApp: App { + @SwiftCrossUI.State private var viewModel = ChatbotViewModel() + + var body: some Scene { + WindowGroup("ChatBot") { + #hotReloadable { + NavigationSplitView { + // Sidebar content + ThreadSidebarView( + threads: viewModel.threadsBinding, + selectedThread: viewModel.selectedThreadBinding, + onNewThread: viewModel.createNewThread, + onSelectThread: viewModel.selectThread, + onDeleteThread: viewModel.deleteThread + ) + } detail: { + // Main chat area + MainChatView(viewModel: viewModel) + } + .overlay { + // Settings Overlay + if viewModel.showSettings { + ChatSettingsDialog( + isPresented: viewModel.showSettingsBinding, + selectedModel: viewModel.selectedLLMBinding, + openAIService: viewModel.openAIService, + apiKeyStorage: viewModel.apiKeyStorage, + onSave: viewModel.reloadAPIKey + ) + } + } + } + } + .defaultSize(width: 1200, height: 800) + } +} diff --git a/Examples/Sources/ChatbotExample/Models/ChatError.swift b/Examples/Sources/ChatbotExample/Models/ChatError.swift new file mode 100644 index 000000000..c90d64d5d --- /dev/null +++ b/Examples/Sources/ChatbotExample/Models/ChatError.swift @@ -0,0 +1,38 @@ +import Foundation + +// MARK: - Error Types + +enum ChatError: Error, LocalizedError { + case missingAPIKey + 
case invalidURL + case encodingError + case decodingError + case invalidResponse + case apiError(Int) + + var errorDescription: String? { + switch self { + case .missingAPIKey: + return "Please enter your OpenAI API key" + case .invalidURL: + return "Invalid URL" + case .encodingError: + return "Failed to encode request" + case .decodingError: + return "Failed to decode response" + case .invalidResponse: + return "Invalid response from server" + case .apiError(let code): + switch code { + case 401: + return "API Error 401: Invalid API key. Please check your OpenAI API key and make sure it's valid and has sufficient credits." + case 429: + return "API Error 429: Rate limit exceeded. Please wait a moment and try again." + case 500, 502, 503: + return "API Error \(code): OpenAI server error. Please try again later." + default: + return "API error: \(code)" + } + } + } +} diff --git a/Examples/Sources/ChatbotExample/Models/ChatMessage.swift b/Examples/Sources/ChatbotExample/Models/ChatMessage.swift new file mode 100644 index 000000000..65a6ce571 --- /dev/null +++ b/Examples/Sources/ChatbotExample/Models/ChatMessage.swift @@ -0,0 +1,10 @@ +import Foundation + +// MARK: - Data Models + +struct ChatMessage: Identifiable { + let id = UUID() + let content: String + let isUser: Bool + let timestamp: Date +} diff --git a/Examples/Sources/ChatbotExample/Models/ChatThread.swift b/Examples/Sources/ChatbotExample/Models/ChatThread.swift new file mode 100644 index 000000000..231f0b238 --- /dev/null +++ b/Examples/Sources/ChatbotExample/Models/ChatThread.swift @@ -0,0 +1,47 @@ +import Foundation + +// MARK: - Thread Models + +struct ChatThread: Identifiable, Codable { + let id: String + let title: String + let createdAt: Date + let lastMessageAt: Date + let openAIThreadId: String? // OpenAI Thread ID for API integration + + init(id: String = UUID().uuidString, title: String, openAIThreadId: String? = nil) { + self.id = id + self.title = title + self.createdAt = Date() + self.lastMessageAt = Date() + self.openAIThreadId = openAIThreadId + } + + func updated(with lastMessageTime: Date = Date()) -> ChatThread { + return ChatThread( + id: self.id, + title: self.title, + openAIThreadId: self.openAIThreadId + ) + } +} + +// MARK: - Thread Message + +struct ThreadMessage: Identifiable, Codable { + let id: String + let threadId: String + let content: String + let isUser: Bool + let timestamp: Date + let openAIMessageId: String? // OpenAI Message ID for API integration + + init(id: String = UUID().uuidString, threadId: String, content: String, isUser: Bool, openAIMessageId: String? 
= nil) { + self.id = id + self.threadId = threadId + self.content = content + self.isUser = isUser + self.timestamp = Date() + self.openAIMessageId = openAIMessageId + } +} diff --git a/Examples/Sources/ChatbotExample/Models/LLMType.swift b/Examples/Sources/ChatbotExample/Models/LLMType.swift new file mode 100644 index 000000000..4ab4601a6 --- /dev/null +++ b/Examples/Sources/ChatbotExample/Models/LLMType.swift @@ -0,0 +1,4 @@ +import OpenAI + +// Type alias to avoid confusion between OpenAI's Model and our ViewModels +typealias LLM = Model diff --git a/Examples/Sources/ChatbotExample/Services/APIKeyStorage.swift b/Examples/Sources/ChatbotExample/Services/APIKeyStorage.swift new file mode 100644 index 000000000..1d211c43f --- /dev/null +++ b/Examples/Sources/ChatbotExample/Services/APIKeyStorage.swift @@ -0,0 +1,29 @@ +import Foundation + +// MARK: - API Key Storage + +class APIKeyStorage { + private let userDefaults = UserDefaults.standard + private let apiKeyKey = "OpenAI_API_Key" + + func saveAPIKey(_ key: String) { + userDefaults.set(key, forKey: apiKeyKey) + userDefaults.synchronize() // Force immediate synchronization + print("🔑 API key saved to disk") + } + + func loadAPIKey() -> String? { + let key = userDefaults.string(forKey: apiKeyKey) + if let key = key, !key.isEmpty { + print("🔑 API key loaded from disk successfully") + return key + } + return nil + } + + func deleteAPIKey() { + userDefaults.removeObject(forKey: apiKeyKey) + userDefaults.synchronize() // Force immediate synchronization + print("🗑️ API key deleted from disk") + } +} diff --git a/Examples/Sources/ChatbotExample/Services/OpenAIService.swift b/Examples/Sources/ChatbotExample/Services/OpenAIService.swift new file mode 100644 index 000000000..4769571af --- /dev/null +++ b/Examples/Sources/ChatbotExample/Services/OpenAIService.swift @@ -0,0 +1,130 @@ +import Foundation +import SwiftCrossUI +import OpenAI + +// MARK: - OpenAI Service + +class OpenAIService { + private var openAI: OpenAI? + + func configure(apiKey: String) { + openAI = OpenAI(apiToken: apiKey) + print("🔑 OpenAI client configured successfully") + } + + // MARK: - Thread-based Chat + + func sendMessageToThread(_ message: String, threadMessages: [ThreadMessage], model: LLM) async throws -> String { + guard let openAI = openAI else { + print("❌ OpenAI client not configured") + throw ChatError.missingAPIKey + } + + print("📤 Sending message to thread conversation") + print("📝 Message length: \(message.count) characters") + print("🤖 Model: \(model)") + print("📚 Context messages: \(threadMessages.count)") + + // Build conversation messages array from thread history + var allMessages = threadMessages.compactMap { threadMessage in + if threadMessage.isUser { + return ChatQuery.ChatCompletionMessageParam(role: .user, content: threadMessage.content) + } else { + return ChatQuery.ChatCompletionMessageParam(role: .assistant, content: threadMessage.content) + } + } + + // Add the new user message + if let newMessage = ChatQuery.ChatCompletionMessageParam(role: .user, content: message) { + allMessages.append(newMessage) + } + + let query = ChatQuery( + messages: allMessages, + model: model + ) + + do { + let result = try await openAI.chats(query: query) + let response = result.choices.first?.message.content ?? 
"No response" + print("✅ Successfully received response from thread conversation") + return response + } catch let error as APIError { + print("❌ OpenAI API Error: \(error)") + throw ChatError.invalidResponse + } catch { + print("❌ Network error: \(error)") + throw ChatError.invalidResponse + } + } + + func fetchAvailableModels() async throws -> [Model] { + guard let openAI = openAI else { + print("❌ OpenAI client not configured") + throw ChatError.missingAPIKey + } + + print("📋 Fetching available models from OpenAI API") + + do { + let modelsResponse = try await openAI.models() + let availableModels = modelsResponse.data.compactMap { modelData -> Model? in + // Filter for the 5 core chat models we support + let id = modelData.id + switch id { + case "gpt-4o": + return .gpt4_o + case "gpt-4o-mini": + return .gpt4_o_mini + case "gpt-4-turbo": + return .gpt4_turbo + case "gpt-4": + return .gpt4 + case "gpt-3.5-turbo": + return .gpt3_5Turbo + default: + return nil + } + } + + print("✅ Found \(availableModels.count) available chat models") + return Array(Set(availableModels)) // Remove duplicates + } catch { + print("❌ Failed to fetch models: \(error)") + // Return default models as fallback + return [.gpt4_o, .gpt4_o_mini, .gpt4_turbo, .gpt4, .gpt3_5Turbo] + } + } + + func sendMessage(_ message: String, model: LLM) async throws -> String { + guard let openAI = openAI else { + print("❌ OpenAI client not configured") + throw ChatError.missingAPIKey + } + + print("📤 Sending message to OpenAI API") + print("📝 Message length: \(message.count) characters") + print("🤖 Model: \(model)") + + let query = ChatQuery( + messages: [ + .user(.init(content: .string(message))) + ], + model: model + ) + + do { + let result = try await openAI.chats(query: query) + let response = result.choices.first?.message.content ?? "No response" + print("✅ Successfully received response (\(response.count) characters)") + return response + } catch let error as APIError { + print("❌ OpenAI API Error: \(error)") + // Handle different API errors + throw ChatError.invalidResponse + } catch { + print("❌ Network error: \(error)") + throw ChatError.invalidResponse + } + } +} diff --git a/Examples/Sources/ChatbotExample/Services/ThreadStorage.swift b/Examples/Sources/ChatbotExample/Services/ThreadStorage.swift new file mode 100644 index 000000000..74bc52586 --- /dev/null +++ b/Examples/Sources/ChatbotExample/Services/ThreadStorage.swift @@ -0,0 +1,107 @@ +import Foundation +import SwiftCrossUI + +// MARK: - Thread Storage Service + +class ThreadStorage { + private let threadsKey = "chatThreads" + private let messagesKey = "threadMessages" + + // MARK: - Thread Management + + func saveThread(_ thread: ChatThread) { + var threads = loadThreads() + if let index = threads.firstIndex(where: { $0.id == thread.id }) { + threads[index] = thread + } else { + threads.append(thread) + } + + // Sort threads by last message time (most recent first) + threads.sort { $0.lastMessageAt > $1.lastMessageAt } + + if let data = try? JSONEncoder().encode(threads) { + UserDefaults.standard.set(data, forKey: threadsKey) + print("💾 Saved thread: \(thread.title)") + } + } + + func loadThreads() -> [ChatThread] { + guard let data = UserDefaults.standard.data(forKey: threadsKey), + let threads = try? 
JSONDecoder().decode([ChatThread].self, from: data) else { + return [] + } + return threads.sorted { $0.lastMessageAt > $1.lastMessageAt } + } + + func deleteThread(_ threadId: String) { + var threads = loadThreads() + threads.removeAll { $0.id == threadId } + + if let data = try? JSONEncoder().encode(threads) { + UserDefaults.standard.set(data, forKey: threadsKey) + } + + // Also delete all messages for this thread + deleteMessagesForThread(threadId) + print("🗑️ Deleted thread: \(threadId)") + } + + // MARK: - Message Management + + func saveMessage(_ message: ThreadMessage) { + var messages = loadMessages() + messages.append(message) + + if let data = try? JSONEncoder().encode(messages) { + UserDefaults.standard.set(data, forKey: messagesKey) + print("💬 Saved message to thread: \(message.threadId)") + } + + // Update thread's last message time + updateThreadLastMessageTime(message.threadId) + } + + func loadMessages(for threadId: String) -> [ThreadMessage] { + let allMessages = loadMessages() + return allMessages.filter { $0.threadId == threadId }.sorted { $0.timestamp < $1.timestamp } + } + + private func loadMessages() -> [ThreadMessage] { + guard let data = UserDefaults.standard.data(forKey: messagesKey), + let messages = try? JSONDecoder().decode([ThreadMessage].self, from: data) else { + return [] + } + return messages + } + + private func deleteMessagesForThread(_ threadId: String) { + var messages = loadMessages() + messages.removeAll { $0.threadId == threadId } + + if let data = try? JSONEncoder().encode(messages) { + UserDefaults.standard.set(data, forKey: messagesKey) + } + } + + private func updateThreadLastMessageTime(_ threadId: String) { + var threads = loadThreads() + if let index = threads.firstIndex(where: { $0.id == threadId }) { + let updatedThread = threads[index].updated() + threads[index] = updatedThread + + if let data = try? JSONEncoder().encode(threads) { + UserDefaults.standard.set(data, forKey: threadsKey) + } + } + } + + // MARK: - Utility + + func generateThreadTitle(from firstMessage: String) -> String { + let words = firstMessage.components(separatedBy: .whitespacesAndNewlines) + let limitedWords = Array(words.prefix(4)) + let title = limitedWords.joined(separator: " ") + return title.isEmpty ? 
"New Conversation" : title + } +} diff --git a/Examples/Sources/ChatbotExample/Utilities/LLMUtilities.swift b/Examples/Sources/ChatbotExample/Utilities/LLMUtilities.swift new file mode 100644 index 000000000..1631c4ca8 --- /dev/null +++ b/Examples/Sources/ChatbotExample/Utilities/LLMUtilities.swift @@ -0,0 +1,195 @@ +import OpenAI + +// MARK: - LLM Utilities + +struct LLMUtilities { + + // MARK: - Display Names + + static func displayName(for model: LLM) -> String { + switch model { + case .gpt4_o: + return "GPT-4o" + case .gpt4_o_mini: + return "GPT-4o Mini" + case .gpt4_turbo: + return "GPT-4 Turbo" + case .gpt4: + return "GPT-4" + case .gpt3_5Turbo: + return "GPT-3.5 Turbo" + default: + return "Unknown Model" + } + } + + static func id(for model: LLM) -> String { + switch model { + case .gpt4_o: + return "gpt-4o" + case .gpt4_o_mini: + return "gpt-4o-mini" + case .gpt4_turbo: + return "gpt-4-turbo" + case .gpt4: + return "gpt-4" + case .gpt3_5Turbo: + return "gpt-3.5-turbo" + default: + return "gpt-3.5-turbo" + } + } + + static func description(for model: LLM) -> String { + switch model { + case .gpt4_o: + return "Flagship multimodal model with vision capabilities" + case .gpt4_o_mini: + return "Fast multimodal model for lightweight tasks" + case .gpt4_turbo: + return "Advanced GPT-4 with enhanced capabilities" + case .gpt4: + return "Most capable model for complex tasks" + case .gpt3_5Turbo: + return "Fast and efficient for most tasks" + default: + return "OpenAI chat model" + } + } + + // MARK: - Model Conversion + + static func model(from id: String) -> LLM { + switch id.lowercased() { + case "gpt-4o": + return .gpt4_o + case "gpt-4o-mini": + return .gpt4_o_mini + case "gpt-4-turbo": + return .gpt4_turbo + case "gpt-4": + return .gpt4 + case "gpt-3.5-turbo": + return .gpt3_5Turbo + default: + return .gpt3_5Turbo // Default fallback + } + } + + // MARK: - Default Models + + static let defaultModels: [LLM] = [ + .gpt4_o, .gpt4_o_mini, .gpt4_turbo, .gpt4, .gpt3_5Turbo + ] + + // MARK: - Model Recommendations + + static func recommendedModel(for taskType: TaskType) -> LLM { + switch taskType { + case .creative: + return .gpt4_o + case .analytical: + return .gpt4 + case .coding: + return .gpt4_turbo + case .conversation: + return .gpt4_o_mini + case .quickQuestions: + return .gpt3_5Turbo + case .multimodal: + return .gpt4_o + } + } + + enum TaskType { + case creative + case analytical + case coding + case conversation + case quickQuestions + case multimodal + } + + // MARK: - Model Capabilities + + static func hasVisionCapabilities(_ model: LLM) -> Bool { + switch model { + case .gpt4_o, .gpt4_o_mini: + return true + default: + return false + } + } + + static func contextWindow(for model: LLM) -> Int { + switch model { + case .gpt4_o, .gpt4_o_mini, .gpt4_turbo: + return 128_000 + case .gpt4: + return 8_192 + case .gpt3_5Turbo: + return 16_385 + default: + return 8_192 + } + } + + static func costTier(for model: LLM) -> CostTier { + switch model { + case .gpt4_o: + return .premium + case .gpt4, .gpt4_turbo: + return .high + case .gpt4_o_mini: + return .medium + case .gpt3_5Turbo: + return .low + default: + return .medium + } + } + + enum CostTier { + case low, medium, high, premium + + var description: String { + switch self { + case .low: return "Most affordable" + case .medium: return "Balanced cost" + case .high: return "Higher cost" + case .premium: return "Premium pricing" + } + } + } + + // MARK: - Enhanced Model Information + + static func detailedDescription(for model: LLM) -> 
String { + let baseDescription = description(for: model) + let contextSize = contextWindow(for: model) + let cost = costTier(for: model) + let hasVision = hasVisionCapabilities(model) + + var details = [baseDescription] + details.append("Context: \(contextSize/1000)k tokens") + details.append(cost.description) + if hasVision { + details.append("Vision capable") + } + + return details.joined(separator: " • ") + } + + // MARK: - Summary + + static func getSupportedModelSummary() -> String { + return """ + Supported Models: 5 core OpenAI models + • GPT-4o (flagship multimodal) + • GPT-4o Mini (fast multimodal) + • GPT-4 Turbo (enhanced capabilities) + • GPT-4 (most capable) + • GPT-3.5 Turbo (fast and efficient) + """ + } +} diff --git a/Examples/Sources/ChatbotExample/ViewModels/ChatSettingsViewModel.swift b/Examples/Sources/ChatbotExample/ViewModels/ChatSettingsViewModel.swift new file mode 100644 index 000000000..16f01f366 --- /dev/null +++ b/Examples/Sources/ChatbotExample/ViewModels/ChatSettingsViewModel.swift @@ -0,0 +1,108 @@ +import Foundation +import SwiftCrossUI +import OpenAI + +// MARK: - Chat Settings View Model + +@MainActor +class ChatSettingsViewModel: SwiftCrossUI.ObservableObject { + @SwiftCrossUI.Published var apiKey: String = "" + @SwiftCrossUI.Published var apiEndpoint: String = "https://api.openai.com/v1/chat/completions" + @SwiftCrossUI.Published var temperature: Double = 0.7 + @SwiftCrossUI.Published var maxTokens: String = "1000" + @SwiftCrossUI.Published var selectedModel: LLM = .gpt4_o + @SwiftCrossUI.Published var showCopiedFeedback: Bool = false + @SwiftCrossUI.Published var availableModels: [ModelInfo] = [] + @SwiftCrossUI.Published var isLoadingModels: Bool = false + @SwiftCrossUI.Published var modelLoadError: String? + + private let openAIService: OpenAIService + private let apiKeyStorage: APIKeyStorage + + init(openAIService: OpenAIService, apiKeyStorage: APIKeyStorage) { + self.openAIService = openAIService + self.apiKeyStorage = apiKeyStorage + } + + // MARK: - Actions + + func loadCurrentSettings() { + apiKey = apiKeyStorage.loadAPIKey() ?? "" + } + + func loadAvailableModels() { + guard !apiKey.isEmpty else { + // Use default models if no API key + availableModels = getDefaultModels() + return + } + + isLoadingModels = true + modelLoadError = nil + + Task { + do { + // Configure the OpenAI service with the current API key + openAIService.configure(apiKey: apiKey) + + // Fetch available models from the API + let models = try await openAIService.fetchAvailableModels() + + await MainActor.run { + self.availableModels = convertModelsToModelInfo(models) + self.isLoadingModels = false + } + } catch { + await MainActor.run { + self.modelLoadError = error.localizedDescription + self.isLoadingModels = false + // Fallback to default models on error + self.availableModels = getDefaultModels() + } + } + } + } + + func saveSettings() { + if !apiKey.isEmpty { + apiKeyStorage.saveAPIKey(apiKey) + openAIService.configure(apiKey: apiKey) + } + } + + func copyToClipboard(_ text: String) { + // Note: Clipboard functionality would need to be implemented based on platform + showCopiedFeedback = true + Task { + try? 
await Task.sleep(nanoseconds: 1_500_000_000) + await MainActor.run { + showCopiedFeedback = false + } + } + } + + // MARK: - Helper Methods + + private func convertModelsToModelInfo(_ models: [LLM]) -> [ModelInfo] { + return models.map { model in + let id = LLMUtilities.id(for: model) + return ModelInfo( + id: id, + displayName: LLMUtilities.displayName(for: model), + description: LLMUtilities.description(for: model), + model: model + ) + } + } + + private func getDefaultModels() -> [ModelInfo] { + return LLMUtilities.defaultModels.map { model in + ModelInfo( + id: LLMUtilities.id(for: model), + displayName: LLMUtilities.displayName(for: model), + description: LLMUtilities.description(for: model), + model: model + ) + } + } +} diff --git a/Examples/Sources/ChatbotExample/ViewModels/ChatbotViewModel.swift b/Examples/Sources/ChatbotExample/ViewModels/ChatbotViewModel.swift new file mode 100644 index 000000000..5f3383261 --- /dev/null +++ b/Examples/Sources/ChatbotExample/ViewModels/ChatbotViewModel.swift @@ -0,0 +1,237 @@ +import Foundation +import SwiftCrossUI +import OpenAI + +// MARK: - ChatBot ViewModel + +@MainActor +class ChatbotViewModel: SwiftCrossUI.ObservableObject { + // MARK: - Published Properties + @SwiftCrossUI.Published var selectedThread: ChatThread? + @SwiftCrossUI.Published var threads: [ChatThread] = [] + @SwiftCrossUI.Published var currentMessage = "" + @SwiftCrossUI.Published var selectedLLM: LLM = .gpt3_5Turbo + @SwiftCrossUI.Published var isLoading = false + @SwiftCrossUI.Published var errorMessage: String? + @SwiftCrossUI.Published var showSettings = false + + // MARK: - Dependencies + let threadStorage = ThreadStorage() + let apiKeyStorage = APIKeyStorage() + let openAIService = OpenAIService() + + // MARK: - Computed Properties + + /// Current thread messages converted to ChatMessage format for compatibility + var currentThreadMessages: [ChatMessage] { + guard let thread = selectedThread else { return [] } + let threadMessages = threadStorage.loadMessages(for: thread.id) + return threadMessages.map { threadMessage in + ChatMessage( + content: threadMessage.content, + isUser: threadMessage.isUser, + timestamp: threadMessage.timestamp + ) + } + } + + var isThreadSelected: Bool { + selectedThread != nil + } + + // MARK: - Initialization + + init() { + loadSavedAPIKey() + loadThreads() + } + + // MARK: - API Key Management + + private func loadSavedAPIKey() { + if let savedKey = apiKeyStorage.loadAPIKey() { + openAIService.configure(apiKey: savedKey) + } + } + + func reloadAPIKey() { + if let savedKey = apiKeyStorage.loadAPIKey() { + openAIService.configure(apiKey: savedKey) + } + } + + // MARK: - Thread Management + + private func loadThreads() { + threads = threadStorage.loadThreads() + print("📂 Loaded \(threads.count) threads from storage") + } + + func createNewThread() { + let newThread = ChatThread(title: "New Chat") + print("🆕 Creating new thread: \(newThread.id)") + threadStorage.saveThread(newThread) + loadThreads() + selectedThread = newThread + clearError() + print("✅ New thread created and selected") + } + + func selectThread(_ thread: ChatThread) { + selectedThread = thread + clearError() + print("📌 Selected thread: \(thread.title)") + } + + func deleteThread(_ thread: ChatThread) { + threadStorage.deleteThread(thread.id) + loadThreads() + + // If the deleted thread was selected, clear selection + if selectedThread?.id == thread.id { + selectedThread = nil + } + print("🗑️ Deleted thread: \(thread.title)") + } + + // MARK: - Message Sending + + func 
sendMessage() {
+        guard let thread = selectedThread else {
+            setError("No thread selected")
+            return
+        }
+
+        let messageText = currentMessage.trimmingCharacters(in: .whitespacesAndNewlines)
+        guard !messageText.isEmpty else { return }
+
+        // Save user message
+        let userMessage = ThreadMessage(
+            threadId: thread.id,
+            content: messageText,
+            isUser: true
+        )
+        threadStorage.saveMessage(userMessage)
+
+        // Update thread title if this is the first message
+        updateThreadTitleIfNeeded(thread, firstMessage: messageText)
+
+        // Clear input and prepare for response
+        currentMessage = ""
+        clearError()
+        isLoading = true
+
+        Task {
+            await sendMessageToOpenAI(messageText, thread: thread)
+        }
+    }
+
+    private func sendMessageToOpenAI(_ messageText: String, thread: ChatThread) async {
+        do {
+            let threadMessages = threadStorage.loadMessages(for: thread.id)
+            let response = try await openAIService.sendMessageToThread(
+                messageText,
+                threadMessages: threadMessages,
+                model: selectedLLM
+            )
+
+            // Save bot response
+            let botMessage = ThreadMessage(
+                threadId: thread.id,
+                content: response,
+                isUser: false
+            )
+            threadStorage.saveMessage(botMessage)
+
+            // Update UI
+            loadThreads() // Refresh threads to update last message time
+            isLoading = false
+
+        } catch {
+            setError(error.localizedDescription)
+            isLoading = false
+        }
+    }
+
+    // MARK: - UI State Management
+
+    func toggleSettings() {
+        showSettings.toggle()
+    }
+
+    private func clearError() {
+        errorMessage = nil
+    }
+
+    private func setError(_ message: String) {
+        errorMessage = message
+        isLoading = false
+    }
+
+    // MARK: - Thread Title Generation
+
+    private func generateThreadTitle(from message: String) -> String {
+        return threadStorage.generateThreadTitle(from: message)
+    }
+
+    private func updateThreadTitleIfNeeded(_ thread: ChatThread, firstMessage: String) {
+        if thread.title == "New Chat" || thread.title == "New Conversation" {
+            let newTitle = generateThreadTitle(from: firstMessage)
+            let updatedThread = ChatThread(
+                id: thread.id,
+                title: newTitle,
+                openAIThreadId: thread.openAIThreadId
+            )
+            threadStorage.saveThread(updatedThread)
+
+            selectedThread = updatedThread
+            loadThreads()
+        }
+    }
+}
+
+// MARK: - Binding Extensions
+
+extension ChatbotViewModel {
+    var threadsBinding: Binding<[ChatThread]> {
+        Binding(
+            get: { self.threads },
+            set: { self.threads = $0 }
+        )
+    }
+
+    var selectedThreadBinding: Binding<ChatThread?> {
+        Binding(
+            get: { self.selectedThread },
+            set: { self.selectedThread = $0 }
+        )
+    }
+
+    var currentMessageBinding: Binding<String> {
+        Binding(
+            get: { self.currentMessage },
+            set: { self.currentMessage = $0 }
+        )
+    }
+
+    var errorMessageBinding: Binding<String?> {
+        Binding(
+            get: { self.errorMessage },
+            set: { self.errorMessage = $0 }
+        )
+    }
+
+    var showSettingsBinding: Binding<Bool> {
+        Binding(
+            get: { self.showSettings },
+            set: { self.showSettings = $0 }
+        )
+    }
+
+    var selectedLLMBinding: Binding<LLM> {
+        Binding(
+            get: { self.selectedLLM },
+            set: { self.selectedLLM = $0 }
+        )
+    }
+}
diff --git a/Examples/Sources/ChatbotExample/Views/ChatInputView.swift b/Examples/Sources/ChatbotExample/Views/ChatInputView.swift
new file mode 100644
index 000000000..7ef4250e0
--- /dev/null
+++ b/Examples/Sources/ChatbotExample/Views/ChatInputView.swift
@@ -0,0 +1,54 @@
+import SwiftCrossUI
+
+// MARK: - Chat Input View
+
+struct ChatInputView: View {
+    @Binding var currentMessage: String
+    @Binding var errorMessage: String?
+    let isLoading: Bool
+    let messageCount: Int
+    let onSend: () -> Void
+
+    var body: some View {
+        VStack(spacing: AppSpacing.md) {
+            HStack(spacing: AppSpacing.md) {
+                TextEditor(text: $currentMessage)
+                    .padding(AppSpacing.sm)
+                    .frame(minHeight: 30, maxHeight: 80) // Compact input area
+                    .background(AppColors.background)
+                    .cornerRadius(AppCornerRadius.large)
+
+                VStack(spacing: AppSpacing.sm) {
+                    Button("Send") {
+                        onSend()
+                    }
+                    .primaryButtonStyle()
+                    .disabled(currentMessage.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty || isLoading)
+
+                    Button("Clear") {
+                        currentMessage = ""
+                        errorMessage = nil
+                    }
+                    .secondaryButtonStyle()
+                    .disabled(isLoading)
+                }
+            }
+
+            HStack {
+                if messageCount == 0 {
+                    Text("Type your message...")
+                        .font(AppFonts.caption)
+                        .foregroundColor(AppColors.textSecondary)
+                } else {
+                    Text("\(messageCount) messages")
+                        .font(AppFonts.caption)
+                        .foregroundColor(AppColors.textSecondary)
+                }
+
+                Spacer()
+            }
+        }
+        .padding(AppSpacing.xl)
+        .background(AppColors.surface)
+    }
+}
diff --git a/Examples/Sources/ChatbotExample/Views/ChatMessagesView.swift b/Examples/Sources/ChatbotExample/Views/ChatMessagesView.swift
new file mode 100644
index 000000000..0fbad5255
--- /dev/null
+++ b/Examples/Sources/ChatbotExample/Views/ChatMessagesView.swift
@@ -0,0 +1,67 @@
+import SwiftCrossUI
+
+// MARK: - Chat Messages View
+
+struct ChatMessagesView: View {
+    let messages: [ChatMessage]
+    let isLoading: Bool
+
+    var body: some View {
+        ScrollView {
+            VStack(spacing: AppSpacing.sm) {
+                if messages.isEmpty {
+                    EmptyStateView()
+                } else {
+                    ForEach(messages) { message in
+                        MessageBubble(message: message)
+                    }
+                }
+
+                if isLoading {
+                    LoadingIndicatorView()
+                }
+            }
+            .padding(.horizontal, AppSpacing.lg)
+            .padding(.vertical, AppSpacing.md)
+        }
+    }
+}
+
+// MARK: - Empty State View
+
+struct EmptyStateView: View {
+    var body: some View {
+        VStack(spacing: AppSpacing.lg) {
+            Text("👋")
+                .font(.system(size: 50))
+
+            Text("Welcome!")
+                .font(AppFonts.title2)
+                .fontWeight(.semibold)
+                .foregroundColor(AppColors.text)
+
+            Text("Start a conversation by typing a message below.")
+                .font(AppFonts.body)
+                .foregroundColor(AppColors.textSecondary)
+                .multilineTextAlignment(.center)
+        }
+        .padding(.top, 50)
+    }
+}
+
+// MARK: - Loading Indicator View
+
+struct LoadingIndicatorView: View {
+    var body: some View {
+        HStack(spacing: AppSpacing.sm) {
+            Text("🤖")
+                .font(AppFonts.title2)
+            Text("Thinking...")
+                .font(AppFonts.body)
+                .foregroundColor(AppColors.textSecondary)
+            Spacer()
+        }
+        .padding(.horizontal, AppSpacing.lg)
+        .surfaceStyle()
+    }
+}
diff --git a/Examples/Sources/ChatbotExample/Views/ChatSettingsDialog.swift b/Examples/Sources/ChatbotExample/Views/ChatSettingsDialog.swift
new file mode 100644
index 000000000..d9d5f484a
--- /dev/null
+++ b/Examples/Sources/ChatbotExample/Views/ChatSettingsDialog.swift
@@ -0,0 +1,466 @@
+import SwiftCrossUI
+import OpenAI
+
+// MARK: - Model Info
+
+struct ModelInfo: Identifiable, Equatable {
+    let id: String
+    let displayName: String
+    let description: String
+    let model: LLM
+
+    static func == (lhs: ModelInfo, rhs: ModelInfo) -> Bool {
+        lhs.id == rhs.id
+    }
+}
+
+// MARK: - Chat Settings Dialog
+
+struct ChatSettingsDialog: View {
+    @Binding var isPresented: Bool
+    @Binding var selectedModel: LLM
+    @SwiftCrossUI.State private var model: ChatSettingsViewModel
+
+    let onSave: () -> Void
+
+    init(
+        isPresented: Binding<Bool>,
+        selectedModel: Binding<LLM>,
+        openAIService: OpenAIService,
+        apiKeyStorage: APIKeyStorage,
+        onSave: @escaping
() -> Void + ) { + self._isPresented = isPresented + self._selectedModel = selectedModel + self._model = State(wrappedValue: ChatSettingsViewModel( + openAIService: openAIService, + apiKeyStorage: apiKeyStorage + )) + self.onSave = onSave + } + + var body: some View { + ZStack { + // Background overlay + Color.black.opacity(0.4) + .onTapGesture { + isPresented = false + } + + // Main dialog + VStack(spacing: 0) { + // Header + headerSection + + // Content + ScrollView { + VStack(spacing: 24) { + apiKeySection + apiEndpointSection + llmSelectionSection + temperatureSection + maxTokensSection + } + .padding(24) + } + .frame(maxHeight: .infinity) + + // Footer buttons - ensure it's always visible + Rectangle() + .fill(Color.gray.opacity(0.2)) + .frame(height: 1) + + footerSection + } + .frame(maxWidth: 500, maxHeight: 600) + .background(Color.white) + .cornerRadius(16) + .padding(20) + } + .onAppear { + model.loadCurrentSettings() + model.loadAvailableModels() + // Sync with parent's selectedModel + model.selectedModel = selectedModel + } + .onChange(of: model.selectedModel) { + selectedModel = model.selectedModel + } + .onChange(of: selectedModel) { + model.selectedModel = selectedModel + } + } + + // MARK: - Header Section + + private var headerSection: some View { + VStack(spacing: 8) { + HStack { + VStack(alignment: .leading, spacing: 4) { + Text("Chat Settings") + .font(.title2) + .fontWeight(.bold) + .foregroundColor(.white) + + Text("Configure your AI assistant.") + .font(.subheadline) + .foregroundColor(.white) + } + + Spacer() + + Button("✕") { + isPresented = false + } + .iconButtonLargeStyle() + } + .padding(20) + } + .background(Color.blue) + } + + // MARK: - API Key Section + + private var apiKeySection: some View { + VStack(alignment: .leading, spacing: 12) { + Text("API Key") + .font(.headline) + .fontWeight(.semibold) + + HStack { + TextField("Enter your API key", text: $model.apiKey) + .foregroundColor(.black) + .padding(12) + .background(Color.gray.opacity(0.1)) + .cornerRadius(8) + .onChange(of: model.apiKey) { + // Reload models when API key changes + if !model.apiKey.isEmpty && model.apiKey.count > 20 { // Basic validation + model.loadAvailableModels() + } + } + + Button(model.showCopiedFeedback ? 
"✓" : "📋") { + model.copyToClipboard(model.apiKey) + } + .iconButtonStyle() + } + + Text("Your API key is stored locally and never shared.") + .font(.caption) + .foregroundColor(.gray) + } + } + + // MARK: - API Endpoint Section + + private var apiEndpointSection: some View { + VStack(alignment: .leading, spacing: 12) { + Text("API Endpoint") + .font(.headline) + .fontWeight(.semibold) + + TextField("API Endpoint", text: $model.apiEndpoint) + .foregroundColor(.black) + .padding(12) + .background(Color.gray.opacity(0.1)) + .cornerRadius(8) + } + } + + // MARK: - LLM Selection Section + + private var llmSelectionSection: some View { + VStack(alignment: .leading, spacing: 12) { + HStack { + Text("Language Model") + .font(.headline) + .fontWeight(.semibold) + + Spacer() + + if !model.apiKey.isEmpty && !model.isLoadingModels { + Button("🔄") { + model.loadAvailableModels() + } + .iconButtonStyle() + } + } + + // Quick Recommendations Section + if !model.availableModels.isEmpty || !model.apiKey.isEmpty { + VStack(alignment: .leading, spacing: 8) { + Text("Quick Recommendations:") + .font(.subheadline) + .fontWeight(.medium) + .foregroundColor(AppColors.textSecondary) + + HStack(spacing: 8) { + RecommendationChip( + title: "Chat", + model: LLMUtilities.recommendedModel(for: .conversation), + selectedModel: $model.selectedModel + ) + RecommendationChip( + title: "Analysis", + model: LLMUtilities.recommendedModel(for: .analytical), + selectedModel: $model.selectedModel + ) + RecommendationChip( + title: "Creative", + model: LLMUtilities.recommendedModel(for: .creative), + selectedModel: $model.selectedModel + ) + } + } + .padding(.bottom, 8) + } + + if model.isLoadingModels { + HStack { + Text("Loading models...") + .font(.subheadline) + .foregroundColor(.gray) + Spacer() + } + .padding(12) + .background(Color.gray.opacity(0.1)) + .cornerRadius(8) + } else if let error = model.modelLoadError { + VStack(alignment: .leading, spacing: 8) { + Text("Failed to load models: \(error)") + .font(.caption) + .foregroundColor(.red) + + Button("Retry") { + model.loadAvailableModels() + } + .font(.caption) + .padding(8) + .background(Color.blue) + .foregroundColor(.white) + .cornerRadius(4) + } + } else if model.availableModels.isEmpty { + VStack(spacing: 8) { + // Fallback to default models if API call fails + LLMOptionButton( + title: "GPT-3.5 Turbo", + subtitle: "Fast and efficient", + model: .gpt3_5Turbo, + selectedModel: $model.selectedModel + ) + + LLMOptionButton( + title: "GPT-4", + subtitle: "Most capable", + model: .gpt4, + selectedModel: $model.selectedModel + ) + + LLMOptionButton( + title: "GPT-4 Turbo", + subtitle: "Latest and fastest GPT-4", + model: .gpt4_turbo, + selectedModel: $model.selectedModel + ) + } + } else { + VStack(spacing: 8) { + ForEach(model.availableModels) { modelInfo in + DynamicLLMOptionButton( + modelInfo: modelInfo, + selectedModel: $model.selectedModel + ) + } + } + } + } + } + + // MARK: - Temperature Section + + private var temperatureSection: some View { + VStack(alignment: .leading, spacing: 12) { + Text("Temperature") + .font(.headline) + .fontWeight(.semibold) + + VStack(spacing: 8) { + HStack { + Text("More focused") + .font(.caption) + .foregroundColor(.gray) + + Spacer() + + Text("\(String(format: "%.1f", model.temperature))") + .font(.caption) + .fontWeight(.medium) + .foregroundColor(AppColors.primary) + .padding(4) + + Spacer() + + Text("More creative") + .font(.caption) + .foregroundColor(.gray) + } + + // Temperature slider + Slider($model.temperature, minimum: 
0.0, maximum: 1.0) + } + } + } + + // MARK: - Max Tokens Section + + private var maxTokensSection: some View { + VStack(alignment: .leading, spacing: 12) { + Text("Max Tokens") + .font(.headline) + .fontWeight(.semibold) + + TextField("1000", text: $model.maxTokens) + .foregroundColor(.black) + .padding(12) + .background(Color.gray.opacity(0.1)) + .cornerRadius(8) + } + } + + // MARK: - Footer Section + + private var footerSection: some View { + HStack(spacing: 16) { + Button("❌ ") { + isPresented = false + } + .foregroundColor(.black) + .secondaryButtonStyle() + .frame(maxWidth: .infinity) + + Button("💾 ") { + model.saveSettings() + onSave() + isPresented = false + } + .foregroundColor(.white) + .secondaryButtonStyle() + .frame(maxWidth: .infinity) + } + .padding(20) + .background(Color.gray.opacity(0.2)) + .frame(maxWidth: .infinity) + } +} + +// MARK: - Model Option Button + +struct LLMOptionButton: View { + let title: String + let subtitle: String + let model: LLM + @Binding var selectedModel: LLM + + var isSelected: Bool { + selectedModel == model + } + + var body: some View { + VStack { + HStack { + VStack(alignment: .leading, spacing: 4) { + Text(title) + .font(.subheadline) + .fontWeight(isSelected ? .semibold : .medium) + .foregroundColor(isSelected ? AppColors.primary : AppColors.text) + + Text(subtitle) + .font(.caption) + .foregroundColor(isSelected ? AppColors.primary : AppColors.textSecondary) + } + + Spacer() + + if isSelected { + Text("✓") + .font(.subheadline) + .foregroundColor(AppColors.primary) + } + } + .padding(12) + .cornerRadius(8) + } + .onTapGesture { + selectedModel = model + } + } +} + +// MARK: - Dynamic Model Option Button + +struct DynamicLLMOptionButton: View { + let modelInfo: ModelInfo + @Binding var selectedModel: LLM + + var isSelected: Bool { + selectedModel == modelInfo.model + } + + var body: some View { + VStack { + HStack { + VStack(alignment: .leading, spacing: 4) { + Text(modelInfo.displayName) + .font(.subheadline) + .fontWeight(isSelected ? .semibold : .medium) + .foregroundColor(isSelected ? AppColors.primary : AppColors.text) + + Text(modelInfo.description) + .font(.caption) + .foregroundColor(isSelected ? AppColors.primary : AppColors.textSecondary) + } + + Spacer() + + if isSelected { + Text("✓") + .font(.subheadline) + .foregroundColor(AppColors.primary) + } + } + .padding(12) + .background(isSelected ? AppColors.primary.opacity(0.1) : Color.gray.opacity(0.05)) + .cornerRadius(8) + } + .onTapGesture { + selectedModel = modelInfo.model + } + } +} + +// MARK: - Recommendation Chip + +struct RecommendationChip: View { + let title: String + let model: LLM + @Binding var selectedModel: LLM + + var isSelected: Bool { + selectedModel == model + } + + var body: some View { + Button(title) { + selectedModel = model + } + .font(AppFonts.caption) + .fontWeight(.medium) + .foregroundColor(isSelected ? AppColors.primary : AppColors.textSecondary) + .padding(.horizontal, AppSpacing.sm) + .padding(.vertical, AppSpacing.xs) + .background(isSelected ? 
AppColors.primary.opacity(0.1) : AppColors.surface) + .cornerRadius(AppCornerRadius.small) + } +} diff --git a/Examples/Sources/ChatbotExample/Views/DesignSystem.swift b/Examples/Sources/ChatbotExample/Views/DesignSystem.swift new file mode 100644 index 000000000..d33ddd741 --- /dev/null +++ b/Examples/Sources/ChatbotExample/Views/DesignSystem.swift @@ -0,0 +1,132 @@ +import SwiftCrossUI + +// MARK: - Design System + +struct AppColors { + static let primary = Color.blue + static let secondary = Color.purple + static let accent = Color.green + static let background = Color.white + static let surface = Color.gray.opacity(0.08) + static let border = Color.gray.opacity(0.15) + static let text = Color.black + static let textSecondary = Color.gray + static let error = Color.red + static let success = Color.green +} + +struct AppFonts { + static let title = Font.title + static let title2 = Font.title2 + static let headline = Font.headline + static let subheadline = Font.subheadline + static let body = Font.body + static let caption = Font.caption + static let caption2 = Font.caption2 +} + +struct AppSpacing { + static let xs = 4 + static let sm = 8 + static let md = 12 + static let lg = 16 + static let xl = 24 + static let xxl = 32 +} + +struct AppCornerRadius { + static let small = 6 + static let medium = 10 + static let large = 16 + static let extraLarge = 24 +} + +// MARK: - Button Styles + +extension View { + // Primary action buttons - main calls to action + func primaryButtonStyle() -> some View { + self + .font(AppFonts.body) + .fontWeight(.semibold) + .foregroundColor(AppColors.primary) + .padding(.horizontal, AppSpacing.lg) + .padding(.vertical, AppSpacing.md) + .frame(minHeight: 44) + .cornerRadius(AppCornerRadius.medium) + } + + // Secondary action buttons - supporting actions + func secondaryButtonStyle() -> some View { + self + .font(AppFonts.body) + .fontWeight(.medium) + .foregroundColor(AppColors.textSecondary) + .padding(.horizontal, AppSpacing.lg) + .padding(.vertical, AppSpacing.md) + .frame(minHeight: 44) + .cornerRadius(AppCornerRadius.medium) + } + + // Large icon buttons - for header actions, close buttons + func iconButtonLargeStyle() -> some View { + self + .font(AppFonts.headline) + .fontWeight(.medium) + .foregroundColor(AppColors.textSecondary) + .frame(width: 44, height: 44) + .cornerRadius(AppCornerRadius.medium) + } + + // Regular icon buttons - standard icon interactions + func iconButtonStyle() -> some View { + self + .font(AppFonts.body) + .foregroundColor(AppColors.textSecondary) + .frame(width: 36, height: 36) + .cornerRadius(AppCornerRadius.small) + } + + // Small destructive buttons - for delete actions + func destructiveButtonStyle() -> some View { + self + .font(AppFonts.caption) + .fontWeight(.medium) + .foregroundColor(AppColors.error) + .frame(width: 32, height: 32) + .cornerRadius(AppCornerRadius.small) + } + + // Large destructive buttons - for major destructive actions + func destructiveButtonLargeStyle() -> some View { + self + .font(AppFonts.body) + .fontWeight(.semibold) + .foregroundColor(AppColors.error) + .padding(.horizontal, AppSpacing.lg) + .padding(.vertical, AppSpacing.md) + .frame(minHeight: 44) + .cornerRadius(AppCornerRadius.medium) + } + + func textFieldStyle() -> some View { + self + .padding(AppSpacing.md) + .background(AppColors.surface) + .cornerRadius(AppCornerRadius.medium) + } + + func cardStyle() -> some View { + self + .padding(AppSpacing.lg) + .background(AppColors.background) + .cornerRadius(AppCornerRadius.large) + } + + 
func surfaceStyle() -> some View {
+        self
+            .padding(AppSpacing.lg)
+            .background(AppColors.surface)
+            .cornerRadius(AppCornerRadius.medium)
+    }
+}
diff --git a/Examples/Sources/ChatbotExample/Views/LLMSelectionView.swift b/Examples/Sources/ChatbotExample/Views/LLMSelectionView.swift
new file mode 100644
index 000000000..3b6fcd674
--- /dev/null
+++ b/Examples/Sources/ChatbotExample/Views/LLMSelectionView.swift
@@ -0,0 +1,164 @@
+import SwiftCrossUI
+import OpenAI
+
+// MARK: - Array Extension for Chunking
+
+extension Array {
+    func chunked(into size: Int) -> [[Element]] {
+        return stride(from: 0, to: count, by: size).map {
+            Array(self[$0..<Swift.min($0 + size, count)])
+        }
+    }
+}
+
+// MARK: - LLM Selection View
+
+struct LLMSelectionView: View {
+    @Binding var selectedModel: LLM
+    @SwiftCrossUI.State private var availableModels: [LLM] = LLMUtilities.defaultModels
+    @SwiftCrossUI.State private var isLoading = false
+    @SwiftCrossUI.State private var errorMessage: String?
+
+    let openAIService: OpenAIService
+
+    init(selectedModel: Binding<LLM>, openAIService: OpenAIService) {
+        self._selectedModel = selectedModel
+        self.openAIService = openAIService
+    }
+
+    var body: some View {
+        VStack(spacing: AppSpacing.sm) {
+            HStack {
+                Text("Model:")
+                    .font(AppFonts.subheadline)
+                    .fontWeight(.medium)
+                    .foregroundColor(AppColors.text)
+
+                Spacer()
+
+                if isLoading {
+                    Text("Loading...")
+                        .font(AppFonts.caption2)
+                        .foregroundColor(AppColors.textSecondary)
+                }
+            }
+
+            if let errorMessage = errorMessage {
+                HStack(spacing: AppSpacing.xs) {
+                    Text("⚠️")
+                    Text(errorMessage)
+                        .font(AppFonts.caption2)
+                        .foregroundColor(AppColors.error)
+                }
+                .padding(AppSpacing.sm)
+                .background(AppColors.error.opacity(0.1))
+                .cornerRadius(AppCornerRadius.small)
+            }
+
+            VStack(spacing: AppSpacing.xs) {
+                // Simple dynamic model layout
+                ForEach(availableModels) { model in
+                    LLMButton(
+                        title: LLMUtilities.displayName(for: model),
+                        model: model,
+                        selectedModel: $selectedModel
+                    )
+                }
+            }
+
+            Text("Selected: \(LLMUtilities.displayName(for: selectedModel))")
+                .font(AppFonts.caption2)
+                .foregroundColor(AppColors.textSecondary)
+        }
+        .surfaceStyle()
+        .onAppear {
+            loadAvailableModels()
+        }
+    }
+
+    private func loadAvailableModels() {
+        isLoading = true
+        errorMessage = nil
+
+        Task {
+            do {
+                let models = try await openAIService.fetchAvailableModels()
+                await MainActor.run {
+                    self.availableModels = models.isEmpty ? LLMUtilities.defaultModels : models
+                    self.isLoading = false
+                }
+            } catch {
+                await MainActor.run {
+                    self.errorMessage = "Failed to load models"
+                    self.isLoading = false
+                    // Keep default models on error
+                }
+            }
+        }
+    }
+}
+
+// MARK: - Model Button
+
+struct LLMButton: View {
+    let title: String
+    let model: LLM
+    @Binding var selectedModel: LLM
+
+    var isSelected: Bool {
+        selectedModel == model
+    }
+
+    var body: some View {
+        VStack(alignment: .leading, spacing: AppSpacing.xs) {
+            HStack {
+                Text(title)
+                    .font(AppFonts.subheadline)
+                    .fontWeight(isSelected ? .semibold : .medium)
+                    .foregroundColor(isSelected ?
AppColors.primary : AppColors.text) + + Spacer() + + // Cost indicator + Text(LLMUtilities.costTier(for: model).description) + .font(AppFonts.caption2) + .foregroundColor(AppColors.textSecondary) + .padding(.horizontal, 6) + .padding(.vertical, 2) + .background(AppColors.surface) + .cornerRadius(4) + } + + Text(LLMUtilities.description(for: model)) + .font(AppFonts.caption) + .foregroundColor(AppColors.textSecondary) + + // Capabilities row + HStack(spacing: AppSpacing.xs) { + Text("\(LLMUtilities.contextWindow(for: model)/1000)k") + .font(AppFonts.caption2) + .foregroundColor(AppColors.textSecondary) + + if LLMUtilities.hasVisionCapabilities(model) { + Text("👁️ Vision") + .font(AppFonts.caption2) + .foregroundColor(AppColors.textSecondary) + } + + Spacer() + + if isSelected { + Text("✓") + .font(AppFonts.subheadline) + .foregroundColor(AppColors.primary) + } + } + } + .padding(AppSpacing.sm) + .background(isSelected ? AppColors.primary.opacity(0.1) : AppColors.surface) + .cornerRadius(AppCornerRadius.medium) + .onTapGesture { + selectedModel = model + } + } +} diff --git a/Examples/Sources/ChatbotExample/Views/MainChatView.swift b/Examples/Sources/ChatbotExample/Views/MainChatView.swift new file mode 100644 index 000000000..d3a7a6dfc --- /dev/null +++ b/Examples/Sources/ChatbotExample/Views/MainChatView.swift @@ -0,0 +1,156 @@ +import SwiftCrossUI + +// MARK: - Main Chat View + +struct MainChatView: View { + var viewModel: ChatbotViewModel + + var body: some View { + VStack(spacing: 0) { + // Header + ChatHeaderView(viewModel: viewModel) + + // Divider + Rectangle() + .fill(AppColors.border) + .frame(height: 1) + + // Content area + if viewModel.isThreadSelected { + ChatContentView(viewModel: viewModel) + } else { + EmptyThreadView(viewModel: viewModel) + } + + // Error message + if let errorMessage = viewModel.errorMessage { + ErrorMessageView(message: errorMessage) + } + + // Input area + if viewModel.isThreadSelected { + ChatInputView( + currentMessage: viewModel.currentMessageBinding, + errorMessage: viewModel.errorMessageBinding, + isLoading: viewModel.isLoading, + messageCount: viewModel.currentThreadMessages.count, + onSend: viewModel.sendMessage + ) + } + } + } +} + +// MARK: - Chat Header View + +struct ChatHeaderView: View { + var viewModel: ChatbotViewModel + + var body: some View { + HStack { + // Thread title or placeholder + VStack(alignment: .leading, spacing: AppSpacing.xs) { + if let thread = viewModel.selectedThread { + Text(thread.title) + .font(AppFonts.title2) + .fontWeight(.semibold) + .foregroundColor(AppColors.text) + Text("Created today") + .font(AppFonts.caption) + .foregroundColor(AppColors.textSecondary) + } else { + Text("AI Chat Assistant") + .font(AppFonts.title2) + .fontWeight(.semibold) + .foregroundColor(AppColors.text) + } + } + + Spacer() + + Button("⚙️") { + viewModel.toggleSettings() + } + .iconButtonLargeStyle() + } + .padding(.horizontal, AppSpacing.xl) + .padding(.vertical, AppSpacing.lg) + .background(AppColors.background) + } +} + +// MARK: - Chat Content View + +struct ChatContentView: View { + var viewModel: ChatbotViewModel + + var body: some View { + ChatMessagesView( + messages: viewModel.currentThreadMessages, + isLoading: viewModel.isLoading + ) + } +} + +// MARK: - Empty Thread View + +struct EmptyThreadView: View { + var viewModel: ChatbotViewModel + + var body: some View { + VStack { + Spacer() + + VStack(spacing: AppSpacing.xl) { + VStack(spacing: AppSpacing.lg) { + Text("🤖") + .font(.system(size: 64)) + + VStack(spacing: 
AppSpacing.md) { + Text("Ready to Chat") + .font(AppFonts.title) + .fontWeight(.bold) + .foregroundColor(AppColors.text) + + Text("Create a new conversation to start chatting with AI.\nYour conversations will be saved and you can continue them anytime.") + .font(AppFonts.body) + .foregroundColor(AppColors.textSecondary) + .multilineTextAlignment(.center) + } + } + + Button("+ Start New Conversation") { + viewModel.createNewThread() + } + .primaryButtonStyle() + } + + Spacer() + } + .frame(maxWidth: .infinity, maxHeight: .infinity) + .background(AppColors.background) + .padding(AppSpacing.xl) + } +} + +// MARK: - Error Message View + +struct ErrorMessageView: View { + let message: String + + var body: some View { + HStack(spacing: AppSpacing.sm) { + Text("⚠️") + .font(AppFonts.body) + Text(message) + .font(AppFonts.body) + .foregroundColor(AppColors.error) + Spacer() + } + .padding(AppSpacing.md) + .background(AppColors.error.opacity(0.1)) + .cornerRadius(AppCornerRadius.medium) + .padding(.horizontal, AppSpacing.lg) + .padding(.bottom, AppSpacing.sm) + } +} diff --git a/Examples/Sources/ChatbotExample/Views/MessageBubble.swift b/Examples/Sources/ChatbotExample/Views/MessageBubble.swift new file mode 100644 index 000000000..03193603d --- /dev/null +++ b/Examples/Sources/ChatbotExample/Views/MessageBubble.swift @@ -0,0 +1,59 @@ +import SwiftCrossUI +import Foundation + +// MARK: - Message Bubble View + +struct MessageBubble: View { + let message: ChatMessage + + var body: some View { + HStack(spacing: AppSpacing.sm) { + if message.isUser { + Spacer() + + VStack(alignment: .trailing, spacing: AppSpacing.xs) { + Text(message.content) + .font(AppFonts.body) + .foregroundColor(.white) + .padding(AppSpacing.md) + .background(AppColors.primary) + .cornerRadius(AppCornerRadius.large) + .frame(maxWidth: 600, alignment: .trailing) + + Text(formatTime(message.timestamp)) + .font(AppFonts.caption2) + .foregroundColor(AppColors.textSecondary) + } + + Text("👤") + .font(AppFonts.title2) + } else { + Text("🤖") + .font(AppFonts.title2) + + VStack(alignment: .leading, spacing: AppSpacing.xs) { + Text(message.content) + .font(AppFonts.body) + .foregroundColor(AppColors.text) + .padding(AppSpacing.md) + .background(AppColors.surface) + .cornerRadius(AppCornerRadius.large) + .frame(maxWidth: 600, alignment: .leading) + + Text(formatTime(message.timestamp)) + .font(AppFonts.caption2) + .foregroundColor(AppColors.textSecondary) + } + + Spacer() + } + } + .padding(.vertical, AppSpacing.xs) + } + + private func formatTime(_ date: Date) -> String { + let formatter = DateFormatter() + formatter.timeStyle = .short + return formatter.string(from: date) + } +} diff --git a/Examples/Sources/ChatbotExample/Views/ThreadSidebarView.swift b/Examples/Sources/ChatbotExample/Views/ThreadSidebarView.swift new file mode 100644 index 000000000..31915f041 --- /dev/null +++ b/Examples/Sources/ChatbotExample/Views/ThreadSidebarView.swift @@ -0,0 +1,159 @@ +import SwiftCrossUI +import Foundation + +// MARK: - Thread Sidebar View + +struct ThreadSidebarView: View { + @Binding var threads: [ChatThread] + @Binding var selectedThread: ChatThread? 
+ + let onNewThread: () -> Void + let onSelectThread: (ChatThread) -> Void + let onDeleteThread: (ChatThread) -> Void + + var body: some View { + VStack(spacing: 0) { + // Header + HStack { + Text("💬 Conversations") + .font(AppFonts.title2) + .fontWeight(.bold) + .foregroundColor(AppColors.text) + .fixedSize(horizontal: true, vertical: false) + + Spacer() + } + .padding(.horizontal, AppSpacing.xl) + .padding(.vertical, AppSpacing.sm) + .background(AppColors.surface) + + // Thread List + ScrollView { + VStack(spacing: AppSpacing.sm) { + if threads.isEmpty { + EmptyThreadsView() + } else { + ForEach(threads) { thread in + ThreadRowView( + thread: thread, + isSelected: selectedThread?.id == thread.id, + onSelect: { onSelectThread(thread) }, + onDelete: { onDeleteThread(thread) } + ) + } + } + } + .padding(AppSpacing.lg) + } + .frame(maxHeight: .infinity) + .background(AppColors.surface) + + // Footer with action buttons + VStack(spacing: 0) { + Rectangle() + .fill(AppColors.border) + .frame(height: 1) + + Button("💬 New Conversation") { + onNewThread() + } + .primaryButtonStyle() + .frame(maxWidth: .infinity) + .padding(AppSpacing.lg) + } + .background(AppColors.surface) + } + .frame(maxHeight: .infinity) + .background(AppColors.surface) + } +} + +// MARK: - Thread Row View + +struct ThreadRowView: View { + let thread: ChatThread + let isSelected: Bool + let onSelect: () -> Void + let onDelete: () -> Void + + var body: some View { + HStack(spacing: AppSpacing.md) { + // Thread indicator + Circle() + .fill(isSelected ? AppColors.primary : AppColors.border) + .frame(width: 8, height: 8) + + VStack(alignment: .leading, spacing: AppSpacing.xs) { + Text(thread.title) + .font(AppFonts.body) + .fontWeight(isSelected ? .semibold : .medium) + .foregroundColor(isSelected ? AppColors.primary : AppColors.text) + .fixedSize(horizontal: false, vertical: true) + + Text(relativeTime(from: thread.lastMessageAt)) + .font(AppFonts.caption) + .foregroundColor(AppColors.textSecondary) + } + .frame(maxWidth: .infinity, alignment: .leading) + .onTapGesture { + onSelect() + } + + Button("×") { + onDelete() + } + .destructiveButtonStyle() + } + .padding(.horizontal, AppSpacing.lg) + .padding(.vertical, AppSpacing.xl) + .background(isSelected ? AppColors.primary.opacity(0.1) : Color.clear) + .cornerRadius(AppCornerRadius.medium) + } + + private func relativeTime(from date: Date) -> String { + let now = Date() + let interval = now.timeIntervalSince(date) + + if interval < 60 { + return "now" + } else if interval < 3600 { + let minutes = Int(interval / 60) + return "\(minutes)m ago" + } else if interval < 86400 { + let hours = Int(interval / 3600) + return "\(hours)h ago" + } else if interval < 604800 { + let days = Int(interval / 86400) + return "\(days)d ago" + } else { + let formatter = DateFormatter() + formatter.dateStyle = .short + return formatter.string(from: date) + } + } +} + +// MARK: - Empty Threads View + +struct EmptyThreadsView: View { + var body: some View { + VStack(spacing: AppSpacing.xl) { + Text("💭") + .font(.system(size: 48)) + + VStack(spacing: AppSpacing.md) { + Text("No conversations") + .font(AppFonts.headline) + .fontWeight(.semibold) + .foregroundColor(AppColors.text) + + Text("Start a new conversation to\nbegin chatting with AI!") + .font(AppFonts.body) + .foregroundColor(AppColors.textSecondary) + .multilineTextAlignment(.center) + } + } + .padding(.vertical, AppSpacing.xxl) + .frame(maxWidth: .infinity) + } +}