OrbitAI provides a sophisticated Large Language Model integration layer that enables seamless interaction with multiple AI providers through a unified interface.
Multi-Provider Support
OpenAI, Anthropic, and extensible architecture for custom providers
Intelligent Routing
Automatic provider selection based on latency and success rate
Unified Interface
Single API for all LLM operations regardless of provider
Advanced Features
Streaming, tool calling, structured output, and caching
Performance Monitoring
Built-in metrics tracking and health monitoring
Type Safety
Full Swift type safety with compile-time guarantees
// Ollama exposes an OpenAI-compatible endpoint, so the stock OpenAIProvider works as-is.
let ollamaProvider = try OpenAIProvider(
    model: .gpt4o,                         // Use any model enum
    apiKey: "ollama",                      // Ollama doesn't require a real API key
    apiBase: "http://localhost:11434/v1",  // Ollama's local OpenAI-compatible endpoint
    allowInsecureAPIBase: true             // Allow HTTP for local development
)
await manager.registerProvider(ollamaProvider)
Ollama provides a local, OpenAI-compatible API endpoint, making integration seamless.
Implement the LLMProvider protocol for custom APIs:
Example:
/// Skeleton provider showing how to integrate a custom LLM API via the
/// `LLMProvider` protocol. Replace `generateCompletion` with a real implementation.
public actor CustomModelProvider: LLMProvider, LLMProviderIdentifiable {
    public let providerName: String = "CustomModel"
    public let providerID: LLMProviderID = .custom
    public let modelName: String
    public let maxTokens: Int = 4096
    public let supportsStreaming: Bool = true
    public let supportsToolCalling: Bool = false

    private let endpoint: String
    private let apiKey: String

    public init(modelName: String, endpoint: String, apiKey: String) {
        self.modelName = modelName
        self.endpoint = endpoint
        self.apiKey = apiKey
    }

    public func generateCompletion(
        messages: [ChatMessage],
        temperature: Double?,
        maxTokens: Int?,
        tools: [ToolSchema]?
    ) async throws -> LLMResponse {
        // Implement your custom API integration:
        // 1. Convert ChatMessages to your API format
        // 2. Make an HTTP request to your endpoint
        // 3. Parse the response and return an LLMResponse
        // The declared return type requires a value on every path, so the
        // template fails loudly until you replace this placeholder.
        fatalError("CustomModelProvider.generateCompletion is not implemented")
    }
}
// Example agent configuration for analytical work.
let agent = Agent(
    role: "Financial Analyst",
    purpose: "Analyze financial data and provide investment insights",
    context: """
        You are a senior financial analyst with 15 years of experience in equity research.

        Your expertise includes:
        - Financial statement analysis
        - Market trend identification
        - Risk assessment
        - Investment recommendations

        Always provide specific, actionable insights backed by data.
        Use appropriate financial terminology and explain complex concepts clearly.
        """,
    temperature: 0.3 // Lower temperature for analytical tasks
)
Use lower temperatures (0.1-0.3) for analytical tasks and higher temperatures (0.7-0.9) for creative tasks.
// Schema describing a web-search tool the model may call.
let webSearchTool = ToolSchema(
    function: FunctionSchema(
        name: "web_search",
        description: "Search the web for current information",
        parameters: JSONSchema(
            type: .object,
            properties: [
                "query": JSONSchema(
                    type: .string,
                    description: "Search query"
                ),
                "num_results": JSONSchema(
                    type: .integer,
                    description: "Number of results (1-10)"
                )
            ],
            required: ["query"]
        )
    )
)

// Attach the tool to a request so the model can decide to invoke it.
let requestWithTools = LLMRequest(
    messages: [
        .system("You can search the web for current information."),
        .user("What are the latest developments in AI this week?")
    ],
    tools: [webSearchTool]
)
Batch Processing
Process multiple requests concurrently:
Example:
/// Runs one completion per input concurrently and returns the responses
/// in the same order as `inputs`.
func processBatch(_ inputs: [String]) async throws -> [String] {
    try await withThrowingTaskGroup(of: (Int, String).self) { group in
        // Tag each task with its original position so results can be re-ordered.
        for (position, prompt) in inputs.enumerated() {
            group.addTask {
                let reply = try await manager.generateCompletion(
                    request: LLMRequest(messages: [.user(prompt)])
                )
                return (position, reply.content)
            }
        }

        // Tasks complete out of order; place each result back into its slot.
        var ordered = Array(repeating: "", count: inputs.count)
        for try await (position, content) in group {
            ordered[position] = content
        }
        return ordered
    }
}
Conversation Management
Multi-turn conversations with context:
Example:
/// Maintains a multi-turn conversation, pruning old context when the
/// estimated token count exceeds the configured budget.
actor ConversationManager {
    private var messages: [ChatMessage] = []
    private let manager: LLMManager
    private let maxContextTokens: Int

    init(manager: LLMManager, maxContextTokens: Int = 8000) {
        self.manager = manager
        self.maxContextTokens = maxContextTokens
    }

    /// Appends a user message, prunes context if needed, requests a completion,
    /// records the assistant reply, and returns it.
    func addUserMessage(_ content: String) async throws -> String {
        messages.append(.user(content))

        // Prune context if needed before sending the request.
        await pruneContextIfNeeded()

        let response = try await manager.generateCompletion(
            request: LLMRequest(messages: messages)
        )
        messages.append(.assistant(response.content))
        return response.content
    }

    /// Drops old non-system messages once the token count exceeds the budget.
    private func pruneContextIfNeeded() async {
        // Best-effort: if token counting fails we skip pruning for this turn.
        let tokenCount = try? await manager.countTokens(
            request: LLMRequest(messages: messages)
        )
        guard let count = tokenCount, count > maxContextTokens else { return }

        // Keep every system message plus the 10 most recent non-system messages.
        // Taking the suffix of the *non-system* messages (rather than of the whole
        // array) avoids duplicating a system message that happens to fall within
        // the last 10 entries, and keeps system prompts at the front.
        let systemMessages = messages.filter { $0.role == .system }
        let recentMessages = messages.filter { $0.role != .system }.suffix(10)
        messages = systemMessages + recentMessages
    }
}
Never hardcode API keys in your source code. Use environment variables or secure key storage solutions.
Example:
import Security

/// Stores and retrieves per-provider API keys in the system Keychain.
/// Never hardcode API keys in source; use this (or environment variables) instead.
final class SecureAPIKeyManager {
    private let service = "com.yourapp.orbitai"

    /// Saves `key` for `provider`, replacing any existing entry.
    /// - Returns: `true` if the item was stored successfully.
    @discardableResult
    func storeAPIKey(_ key: String, for provider: String) -> Bool {
        // UTF-8 encoding of a Swift String cannot fail in practice,
        // but avoid a force-unwrap on principle.
        guard let data = key.data(using: .utf8) else { return false }

        let query: [String: Any] = [
            kSecClass as String: kSecClassGenericPassword,
            kSecAttrService as String: service,
            kSecAttrAccount as String: provider,
            kSecValueData as String: data
        ]

        // Remove any stale item first so SecItemAdd doesn't fail
        // with errSecDuplicateItem on overwrite.
        SecItemDelete(query as CFDictionary)

        let status = SecItemAdd(query as CFDictionary, nil)
        return status == errSecSuccess
    }

    /// Returns the stored key for `provider`, or `nil` if absent or unreadable.
    func retrieveAPIKey(for provider: String) -> String? {
        let query: [String: Any] = [
            kSecClass as String: kSecClassGenericPassword,
            kSecAttrService as String: service,
            kSecAttrAccount as String: provider,
            kSecReturnData as String: true
        ]

        var result: AnyObject?
        let status = SecItemCopyMatching(query as CFDictionary, &result)

        guard status == errSecSuccess,
              let data = result as? Data,
              let key = String(data: data, encoding: .utf8) else {
            return nil
        }
        return key
    }
}