MoFA provides native Kotlin bindings generated through Mozilla UniFFI. These bindings offer idiomatic Kotlin APIs with coroutine support, null safety, and seamless interoperability with Java.
import org.mofa.*

/**
 * Basic MoFA usage: configure an agent via the UniFFI builder, run a
 * one-shot question, hold a multi-turn conversation, then inspect and
 * clear the conversation history.
 */
fun main() {
    // The API key must be present in the environment; fail fast if not.
    val apiKey = System.getenv("OPENAI_API_KEY")
        ?: error("OPENAI_API_KEY not set")

    // Build the agent with the fluent builder exposed through UniFFI.
    // OPENAI_BASE_URL may be null, in which case the provider default is used.
    val agent = UniFFI.INSTANCE.newLlmAgentBuilder()
        .setId("my-agent")
        .setName("Kotlin Agent")
        .setSystemPrompt("You are a helpful assistant.")
        .setTemperature(0.7f)
        .setMaxTokens(1000)
        .setOpenaiProvider(
            apiKey,
            System.getenv("OPENAI_BASE_URL"),
            "gpt-3.5-turbo",
        )
        .build()

    // One-shot Q&A: ask() does not retain any conversational context.
    val answer = agent.ask("What is Kotlin?")
    println("Answer: $answer")

    // Multi-turn chat: chat() keeps context across calls.
    agent.chat("My favorite language is Kotlin.")
    val response = agent.chat("What did I just tell you?")
    println("Response: $response")

    // Inspect the accumulated conversation history.
    val history = agent.getHistory()
    println("Total messages: ${history.size}")

    // Reset the conversation state.
    agent.clearHistory()
}
Example 1: Idiomatic Kotlin with Extension Functions
import org.mofa.*

/**
 * Applies a common agent configuration in one call, replacing a chain of
 * individual setters. Defaults mirror the library's usual starting values.
 */
fun LLMAgentBuilder.configure(
    id: String,
    name: String,
    systemPrompt: String = "You are a helpful assistant.",
    temperature: Float = 0.7f,
    maxTokens: Int = 1000,
): LLMAgentBuilder =
    setId(id)
        .setName(name)
        .setSystemPrompt(systemPrompt)
        .setTemperature(temperature)
        .setMaxTokens(maxTokens)

/**
 * Runs [block] inside a [ConversationScope] bound to this agent,
 * giving callers a small DSL for multi-turn chat.
 */
fun LLMAgent.conversation(block: ConversationScope.() -> Unit) {
    ConversationScope(this).block()
}

/** Receiver type for the [LLMAgent.conversation] DSL. */
class ConversationScope(private val agent: LLMAgent) {
    // NOTE(review): this infix form is part of the public API but is not
    // exercised by the example below.
    infix fun String.ask(agent: LLMAgent): String = agent.chat(this)

    /** Sends a user message to the bound agent and returns its reply. */
    fun user(message: String): String {
        return agent.chat(message)
    }
}

fun main() {
    val apiKey = System.getenv("OPENAI_API_KEY") ?: error("API key not set")

    val agent = UniFFI.INSTANCE.newLlmAgentBuilder()
        .configure(
            id = "kotlin-agent",
            name = "Kotlin Assistant",
            systemPrompt = "You are a Kotlin expert.",
        )
        .setOpenaiProvider(apiKey, null, "gpt-4")
        .build()

    // Drive a short conversation through the DSL.
    agent.conversation {
        println(user("What is Kotlin?"))
        println(user("Why should I use it?"))
    }
}
import org.mofa.*

/**
 * Session management walkthrough: create an in-memory session store, add
 * and read messages, attach JSON metadata, then persist, list, and delete
 * the session.
 */
fun main() {
    // Backing store lives only for the lifetime of this process.
    val manager = SessionManager.newInMemory()

    // Fetches the session for this key, creating it on first use.
    val session = manager.getOrCreate("user-123")

    // Record a short exchange.
    session.addMessage("user", "Hello!")
    session.addMessage("assistant", "Hi there! How can I help?")
    session.addMessage("user", "What's the weather like?")

    // Read back up to 10 entries; each entry destructures into
    // (role, content, timestamp).
    for ((role, content, timestamp) in session.getHistory(10)) {
        println("[$timestamp] $role: $content")
    }

    // Metadata values are stored as JSON-encoded strings.
    session.setMetadata("user_name", "\"Alice\"")
    session.setMetadata("preferences", """{"theme": "dark"}""")

    val userName = session.getMetadata("user_name")
    println("User name: $userName")

    // Persist the session back through the manager.
    manager.saveSession(session)

    // Enumerate every session the manager currently knows about.
    val allSessions = manager.listSessions()
    println("Total sessions: ${allSessions.size}")

    // Remove the session; returns whether anything was deleted.
    val deleted = manager.deleteSession("user-123")
    println("Session deleted: $deleted")
}
import org.mofa.*

/**
 * Immutable bundle of agent settings, so configurations can be built,
 * copied, and reused independently of the FFI builder.
 *
 * @property contextWindowSize optional; when null the builder default applies.
 */
data class AgentConfig(
    val id: String,
    val name: String,
    val systemPrompt: String = "You are a helpful assistant.",
    val temperature: Float = 0.7f,
    val maxTokens: Int = 1000,
    val contextWindowSize: Int? = null,
)

/**
 * Transfers every field of [config] onto this builder. The optional
 * context window size is applied only when present.
 */
fun LLMAgentBuilder.applyConfig(config: AgentConfig): LLMAgentBuilder {
    val configured = setId(config.id)
        .setName(config.name)
        .setSystemPrompt(config.systemPrompt)
        .setTemperature(config.temperature)
        .setMaxTokens(config.maxTokens)
    // Elvis keeps the chain expression-oriented instead of mutating a var.
    return config.contextWindowSize
        ?.let { configured.setContextWindowSize(it) }
        ?: configured
}

fun main() {
    val config = AgentConfig(
        id = "kotlin-agent",
        name = "Kotlin Assistant",
        systemPrompt = "You are an expert Kotlin developer.",
        temperature = 0.8f,
        maxTokens = 2000,
        contextWindowSize = 20,
    )

    val apiKey = System.getenv("OPENAI_API_KEY") ?: error("API key not set")

    val agent = UniFFI.INSTANCE.newLlmAgentBuilder()
        .applyConfig(config)
        .setOpenaiProvider(apiKey, null, "gpt-4")
        .build()

    println("Agent ${agent.name()} created with ID: ${agent.agentId()}")
}
# If you get: UnsatisfiedLinkError: no mofa_ffi in java.library.path
# Rebuild the native FFI library with UniFFI support enabled:
cd mofa
cargo build --release --features uniffi -p mofa-ffi

# Set library path when running so the JVM can locate the shared library:
export LD_LIBRARY_PATH=/path/to/mofa/target/release:$LD_LIBRARY_PATH