Ollama + SwiftUI in a ContentView
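A single SwiftUI view that chats with a local Ollama server: it looks up the locally installed models with GET /api/tags, uses the first one it finds (falling back to llama3.2 if none is reported), and streams the reply from POST /api/chat into the view line by line.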
//
//  OllamaContentView.swift
//
//  Created by Saamer Mansoor on 4/1/25.
//

import SwiftUI
import Foundation

struct OllamaContentView: View {
    @State var prompt: PromptModel = .init(prompt: "", model: "", system: "")
    @State var chatHistory = ChatModel(model: "", messages: [])
    @State var errorModel: ErrorModel = .init(showError: false, errorTitle: "", errorMessage: "")
    @State var sentPrompt: [String] = []
    @State var tags: tagsParent?
    @State var receivedResponse: [String] = []
    @AppStorage("host") var host = DefaultValues.host
    @AppStorage("port") var port = DefaultValues.port
    @AppStorage("timeoutRequest") var timeoutRequest = DefaultValues.timeoutRequest
    @AppStorage("timeoutResource") var timeoutResource = DefaultValues.timeoutResource
    @State var disabledEditor: Bool = false
    @State var errorMessage = ""

    // Base address of the local Ollama server, e.g. "http://127.0.0.1:11434".
    var apiAddress: String { "\(host):\(port)" }

    var body: some View {
        VStack {
            Image(systemName: "globe")
                .imageScale(.large)
                .foregroundStyle(.tint)
            Text("Hello, world!")
            Button("My name is Saamer") {
                send("My name is Saamer")
            }
            Text(receivedResponse.count > 0 && receivedResponse.last != "" ? receivedResponse.last! : "Response goes here")
            Button("What's my name?") {
                send("What's my name?")
            }
            Text(errorMessage)
        }
        .padding()
    }

    func send(_ promptMessage: String) {
        Task {
            do {
                self.errorModel.showError = false
                self.disabledEditor = true
                self.sentPrompt.append(promptMessage)

                // Ask the Ollama server which models are installed and use the first one.
                tags = try await getLocalModels()
                if let tags = self.tags, tags.models.count > 0 {
                    self.prompt.model = tags.models[0].name
                    print("self.prompt.model: \(self.prompt.model)")
                } else {
                    self.prompt.model = ""
                    self.errorModel = noModelsError(error: nil)
                }

                self.chatHistory.model = self.prompt.model
                if chatHistory.model == "" {
                    chatHistory.model = "llama3.2"
                }
                self.chatHistory.messages.append(ChatMessage(role: "user", content: promptMessage))
                self.receivedResponse.append("")

                print("Sending request")
                let endpoint = apiAddress + "/api/chat"
                guard let url = URL(string: endpoint) else {
                    throw NetError.invalidURL(error: nil)
                }
                var request = URLRequest(url: url)
                request.httpMethod = "POST"
                request.setValue("application/json", forHTTPHeaderField: "Content-Type")
                let encoder = JSONEncoder()
                encoder.keyEncodingStrategy = .convertToSnakeCase
                request.httpBody = try encoder.encode(chatHistory)

                let data: URLSession.AsyncBytes
                let response: URLResponse
                print("chatHistory: \(chatHistory)")
                do {
                    let sessionConfig = URLSessionConfiguration.default
                    sessionConfig.timeoutIntervalForRequest = Double(timeoutRequest) ?? 60
                    sessionConfig.timeoutIntervalForResource = Double(timeoutResource) ?? 604800
                    (data, response) = try await URLSession(configuration: sessionConfig).bytes(for: request)
                } catch {
                    throw NetError.unreachable(error: error)
                }
                guard let response = response as? HTTPURLResponse, response.statusCode == 200 else {
                    throw NetError.invalidResponse(error: nil)
                }

                // Ollama streams one JSON object per line; append each chunk to the latest response.
                for try await line in data.lines {
                    let decoder = JSONDecoder()
                    decoder.keyDecodingStrategy = .convertFromSnakeCase
                    let data = line.data(using: .utf8)!
                    let decoded = try decoder.decode(ResponseModel.self, from: data)
                    self.receivedResponse[self.receivedResponse.count - 1].append(decoded.message.content)
                }
                self.chatHistory.messages.append(ChatMessage(role: "assistant", content: self.receivedResponse.last!))
                self.disabledEditor = false
                self.prompt.prompt = ""
                errorMessage = ""
            } catch let NetError.invalidURL(error) {
                errorModel = invalidURLError(error: error)
            } catch let NetError.invalidData(error) {
                errorModel = invalidDataError(error: error)
            } catch let NetError.invalidResponse(error) {
                errorModel = invalidResponseError(error: error)
            } catch let NetError.unreachable(error) {
                errorModel = unreachableError(error: error)
            } catch {
                self.errorModel = genericError(error: error)
            }
            errorMessage = errorModel.errorTitle
        }
    }

    // Queries GET /api/tags to list the models installed on the local Ollama server.
    func getLocalModels() async throws -> tagsParent {
        let endpoint = apiAddress + "/api/tags"
        guard let url = URL(string: endpoint) else {
            throw NetError.invalidURL(error: nil)
        }
        let data: Data
        let response: URLResponse
        do {
            let sessionConfig = URLSessionConfiguration.default
            sessionConfig.timeoutIntervalForRequest = Double(self.timeoutRequest) ?? 60
            sessionConfig.timeoutIntervalForResource = Double(self.timeoutResource) ?? 604800
            (data, response) = try await URLSession(configuration: sessionConfig).data(from: url)
        } catch {
            throw NetError.unreachable(error: error)
        }
        guard let response = response as? HTTPURLResponse, response.statusCode == 200 else {
            throw NetError.invalidResponse(error: nil)
        }
        do {
            let decoder = JSONDecoder()
            decoder.keyDecodingStrategy = .convertFromSnakeCase
            return try decoder.decode(tagsParent.self, from: data)
        } catch {
            throw NetError.invalidData(error: error)
        }
    }
}

#Preview {
    OllamaContentView()
}

struct ErrorModel {
    var showError: Bool
    var errorTitle: String
    var errorMessage: String
}

enum DefaultValues {
    static let host = "http://127.0.0.1"
    static let port = "11434"
    static let timeoutRequest = "60"
    static let timeoutResource = "604800"
}

enum NetError: Error {
    case invalidURL(error: Error?)
    case invalidResponse(error: Error?)
    case invalidData(error: Error?)
    case unreachable(error: Error?)
    case general(error: Error?)
}

// One streamed chunk from /api/chat; decoded with .convertFromSnakeCase,
// so the snake_case JSON keys map onto these camelCase properties.
struct ResponseModel: Decodable, Hashable {
    let model: String
    let createdAt: String
    let response: String?
    let done: Bool
    let message: ChatMessage
    let context: [Int]?
    let totalDuration: Int?
    let loadDuration: Int?
    let promptEvalCount: Int?
    let evalCount: Int?
    let evalDuration: Int?
}

struct ChatMessage: Codable, Hashable {
    var role: String
    var content: String
    var images: [String]?
}

struct ChatModel: Encodable {
    var model: String
    var messages: [ChatMessage]
}

struct PromptModel: Encodable {
    var prompt: String
    var model: String
    var system: String
}

func invalidURLError(error: Error?) -> ErrorModel {
    return ErrorModel(showError: true, errorTitle: "Invalid URL given", errorMessage: "Make sure that Ollama is installed and online. Check Help for further info. \(String(describing: error?.localizedDescription))")
}

func invalidDataError(error: Error?) -> ErrorModel {
    return ErrorModel(showError: true, errorTitle: "Invalid Data received", errorMessage: "Looks like there is a problem retrieving the data. \(String(describing: error?.localizedDescription))")
}

func invalidTagsDataError(error: Error?) -> ErrorModel {
    return ErrorModel(showError: true, errorTitle: "No models have been detected!", errorMessage: "To download your first model, click on 'Manage Models', enter a model name in the 'Add Model' field, and click download. \(String(describing: error?.localizedDescription))")
}

func invalidResponseError(error: Error?) -> ErrorModel {
    return ErrorModel(showError: true, errorTitle: "Invalid Response", errorMessage: "Looks like you are receiving a response other than 200! \(String(describing: error?.localizedDescription))")
}

func unreachableError(error: Error?) -> ErrorModel {
    return ErrorModel(showError: true, errorTitle: "Server is unreachable - Timeout", errorMessage: "Make sure Ollama ( https://ollama.ai/ ) is installed and running. If a different IP/port is used than the default, change it in the app settings. Adjust the timeout value in the settings. \(String(describing: error?.localizedDescription))")
}

func genericError(error: Error?) -> ErrorModel {
    return ErrorModel(showError: true, errorTitle: "An error has occurred", errorMessage: "If restarting Ollama does not fix it, please report the bug. \(String(describing: error?.localizedDescription))")
}

func noModelsError(error: Error?) -> ErrorModel {
    return ErrorModel(showError: true, errorTitle: "No models found", errorMessage: "Click the gear icon and download a model")
}

// Response shape of GET /api/tags: the models installed locally.
struct tagsParent: Decodable, Hashable {
    let models: [tagsModel]
}

struct tagsModel: Decodable, Hashable {
    let name: String
    let modifiedAt: String
    let size: Double
    let digest: String
}
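
To try the view, Ollama needs to be running locally (default http://127.0.0.1:11434) with at least one model pulled; the view falls back to llama3.2 if none is reported. A minimal sketch of an app entry point hosting the view could look like the following; OllamaDemoApp is a placeholder name, not part of the gist:

import SwiftUI

@main
struct OllamaDemoApp: App {
    var body: some Scene {
        WindowGroup {
            // Hosts the gist's view; assumes an Ollama server is reachable at the default host/port.
            OllamaContentView()
        }
    }
}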