Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 24 additions & 0 deletions mac/FreeChat.xcodeproj/project.pbxproj
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,13 @@
A1F617562A782E4F00F2048C /* ConversationView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A1F617552A782E4F00F2048C /* ConversationView.swift */; };
A1F617582A7836AE00F2048C /* Message+Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = A1F617572A7836AE00F2048C /* Message+Extensions.swift */; };
A1F6175B2A7838F700F2048C /* Conversation+Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = A1F6175A2A7838F700F2048C /* Conversation+Extensions.swift */; };
DE16617B2B8A40D100826556 /* OpenAIBackend.swift in Sources */ = {isa = PBXBuildFile; fileRef = DE16617A2B8A40D100826556 /* OpenAIBackend.swift */; };
DE7250E12B966D23006A76DF /* String+TrimQuotes.swift in Sources */ = {isa = PBXBuildFile; fileRef = DE7250E02B966D22006A76DF /* String+TrimQuotes.swift */; };
DEA8CF572B51938B007A4CE7 /* FreeChatAppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = DEA8CF562B51938B007A4CE7 /* FreeChatAppDelegate.swift */; };
DEAE3D482B987DE700257A69 /* Backend.swift in Sources */ = {isa = PBXBuildFile; fileRef = DEAE3D472B987DE700257A69 /* Backend.swift */; };
DEAE3D4A2B987EA400257A69 /* OllamaBackend.swift in Sources */ = {isa = PBXBuildFile; fileRef = DEAE3D492B987EA400257A69 /* OllamaBackend.swift */; };
DEAE3D4C2B987EB300257A69 /* LlamaBackend.swift in Sources */ = {isa = PBXBuildFile; fileRef = DEAE3D4B2B987EB300257A69 /* LlamaBackend.swift */; };
DEAE3D4E2B987EBC00257A69 /* LocalBackend.swift in Sources */ = {isa = PBXBuildFile; fileRef = DEAE3D4D2B987EBC00257A69 /* LocalBackend.swift */; };
DEEA39CC2B586F3800992592 /* ServerHealth.swift in Sources */ = {isa = PBXBuildFile; fileRef = DEEA39CB2B586F3800992592 /* ServerHealth.swift */; };
/* End PBXBuildFile section */

Expand Down Expand Up @@ -176,7 +182,13 @@
A1F617552A782E4F00F2048C /* ConversationView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConversationView.swift; sourceTree = "<group>"; };
A1F617572A7836AE00F2048C /* Message+Extensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Message+Extensions.swift"; sourceTree = "<group>"; };
A1F6175A2A7838F700F2048C /* Conversation+Extensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Conversation+Extensions.swift"; sourceTree = "<group>"; };
DE16617A2B8A40D100826556 /* OpenAIBackend.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = OpenAIBackend.swift; path = FreeChat/Models/NPC/OpenAIBackend.swift; sourceTree = SOURCE_ROOT; };
DE7250E02B966D22006A76DF /* String+TrimQuotes.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "String+TrimQuotes.swift"; sourceTree = "<group>"; };
DEA8CF562B51938B007A4CE7 /* FreeChatAppDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FreeChatAppDelegate.swift; sourceTree = "<group>"; };
DEAE3D472B987DE700257A69 /* Backend.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Backend.swift; sourceTree = "<group>"; };
DEAE3D492B987EA400257A69 /* OllamaBackend.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OllamaBackend.swift; sourceTree = "<group>"; };
DEAE3D4B2B987EB300257A69 /* LlamaBackend.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LlamaBackend.swift; sourceTree = "<group>"; };
DEAE3D4D2B987EBC00257A69 /* LocalBackend.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocalBackend.swift; sourceTree = "<group>"; };
DEEA39CB2B586F3800992592 /* ServerHealth.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ServerHealth.swift; sourceTree = "<group>"; };
/* End PBXFileReference section */

Expand Down Expand Up @@ -259,7 +271,13 @@
A137A3872AB502DB00BE1AE0 /* ggml-metal.metal */,
A17A2E122A79A005006CDD90 /* Agent.swift */,
A17A2E132A79A005006CDD90 /* LlamaServer.swift */,
DEAE3D472B987DE700257A69 /* Backend.swift */,
DE16617A2B8A40D100826556 /* OpenAIBackend.swift */,
DEAE3D492B987EA400257A69 /* OllamaBackend.swift */,
DEAE3D4B2B987EB300257A69 /* LlamaBackend.swift */,
DEAE3D4D2B987EBC00257A69 /* LocalBackend.swift */,
DEEA39CB2B586F3800992592 /* ServerHealth.swift */,
DE7250E02B966D22006A76DF /* String+TrimQuotes.swift */,
A137A3822AB4FD4800BE1AE0 /* freechat-server */,
A1A286F32A7E17750004967A /* server-watchdog */,
A1A286F92A7E197F0004967A /* README.md */,
Expand Down Expand Up @@ -577,12 +595,17 @@
A1F617582A7836AE00F2048C /* Message+Extensions.swift in Sources */,
A13C8C682A902A1200EC18D8 /* CGKeycode+Extensions.swift in Sources */,
A15D50D22A7F539800FC1681 /* NavList.swift in Sources */,
DEAE3D4C2B987EB300257A69 /* LlamaBackend.swift in Sources */,
DE16617B2B8A40D100826556 /* OpenAIBackend.swift in Sources */,
A1156D342AD1F5EF00081313 /* Templates.swift in Sources */,
A1D4B49D2B9A780B00B9C4BE /* AgentDefaults.swift in Sources */,
A1F617262A782AA100F2048C /* FreeChat.swift in Sources */,
A1156D2F2AD0954C00081313 /* TemplateManager.swift in Sources */,
A1E4A6942A82B41F00BF9D34 /* Model+Extensions.swift in Sources */,
DEAE3D4A2B987EA400257A69 /* OllamaBackend.swift in Sources */,
A12B52DE2AA5228100658707 /* EditModels.swift in Sources */,
DEAE3D482B987DE700257A69 /* Backend.swift in Sources */,
DEAE3D4E2B987EBC00257A69 /* LocalBackend.swift in Sources */,
A17AB1C22ABB4B5E00CD3100 /* CircleMenuStyle.swift in Sources */,
A15D50CF2A7EF73E00FC1681 /* MessageTextField.swift in Sources */,
A1CA32442AAF877600F9D488 /* ConversationManager.swift in Sources */,
Expand All @@ -595,6 +618,7 @@
A16FFF8B2B2E35D200E6AAE2 /* GPU.swift in Sources */,
A18A8BB32B24FC0400D2197C /* AISettingsView.swift in Sources */,
A1F617562A782E4F00F2048C /* ConversationView.swift in Sources */,
DE7250E12B966D23006A76DF /* String+TrimQuotes.swift in Sources */,
A13C8C5A2A8FEEE400EC18D8 /* SplashCodeSyntaxHighlighter.swift in Sources */,
A15D50D42A80BCA900FC1681 /* SettingsView.swift in Sources */,
);
Expand Down

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

15 changes: 13 additions & 2 deletions mac/FreeChat/Chats.xcdatamodeld/Mantras.xcdatamodel/contents
Original file line number Diff line number Diff line change
@@ -1,5 +1,16 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<model type="com.apple.IDECoreDataModeler.DataModel" documentVersion="1.0" lastSavedToolsVersion="22225" systemVersion="23A344" minimumToolsVersion="Automatic" sourceLanguage="Swift" usedWithCloudKit="YES" userDefinedModelVersionIdentifier="">
<model type="com.apple.IDECoreDataModeler.DataModel" documentVersion="1.0" lastSavedToolsVersion="22522" systemVersion="22G513" minimumToolsVersion="Automatic" sourceLanguage="Swift" usedWithCloudKit="YES" userDefinedModelVersionIdentifier="">
<entity name="BackendConfig" representedClassName="BackendConfig" syncable="YES" codeGenerationType="class">
<attribute name="apiKey" optional="YES" attributeType="String"/>
<attribute name="backendType" optional="YES" attributeType="String"/>
<attribute name="baseURL" optional="YES" attributeType="URI"/>
<attribute name="model" optional="YES" attributeType="String"/>
<uniquenessConstraints>
<uniquenessConstraint>
<constraint value="backendType"/>
</uniquenessConstraint>
</uniquenessConstraints>
</entity>
<entity name="Conversation" representedClassName="Conversation" syncable="YES" codeGenerationType="class">
<attribute name="createdAt" attributeType="Date" defaultDateTimeInterval="712519080" usesScalarValueType="NO"/>
<attribute name="lastMessageAt" optional="YES" attributeType="Date" usesScalarValueType="NO"/>
Expand Down Expand Up @@ -37,4 +48,4 @@
<attribute name="text" optional="YES" attributeType="String"/>
<attribute name="updatedAt" optional="YES" attributeType="Date" usesScalarValueType="NO"/>
</entity>
</model>
</model>
14 changes: 9 additions & 5 deletions mac/FreeChat/FreeChatAppDelegate.swift
Original file line number Diff line number Diff line change
Expand Up @@ -7,21 +7,25 @@ import SwiftUI

class FreeChatAppDelegate: NSObject, NSApplicationDelegate, ObservableObject {
@AppStorage("selectedModelId") private var selectedModelId: String?

@AppStorage("backendTypeID") private var backendTypeID: String = BackendType.local.rawValue

func application(_ application: NSApplication, open urls: [URL]) {
backendTypeID = BackendType.local.rawValue
let viewContext = PersistenceController.shared.container.viewContext
do {
let req = Model.fetchRequest()
req.predicate = NSPredicate(format: "name IN %@", urls.map({ $0.lastPathComponent }))
let existingModels = try viewContext.fetch(req).compactMap({ $0.url })
let existingModels = try viewContext.fetch(req)

for url in urls {
guard !existingModels.contains(url) else { continue }
guard !existingModels.compactMap({ $0.url }).contains(url) else { continue }
let insertedModel = try Model.create(context: viewContext, fileURL: url)
selectedModelId = insertedModel.id?.uuidString
}

NotificationCenter.default.post(name: NSNotification.Name("selectedModelDidChange"), object: selectedModelId)

if urls.count == 1, let modelID = existingModels.first(where: { $0.url == urls.first })?.id?.uuidString { selectedModelId = modelID }

NotificationCenter.default.post(name: NSNotification.Name("selectedLocalModelDidChange"), object: selectedModelId)
NotificationCenter.default.post(name: NSNotification.Name("needStartNewConversation"), object: selectedModelId)
} catch {
print("error saving model:", error)
Expand Down
22 changes: 17 additions & 5 deletions mac/FreeChat/Models/ConversationManager.swift
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ class ConversationManager: ObservableObject {

var summonRegistered = false

@AppStorage("backendTypeID") private var backendTypeID: String?
@AppStorage("systemPrompt") private var systemPrompt: String = DEFAULT_SYSTEM_PROMPT
@AppStorage("contextLength") private var contextLength: Int = DEFAULT_CONTEXT_LENGTH

Expand Down Expand Up @@ -72,23 +73,34 @@ class ConversationManager: ObservableObject {

@MainActor
func rebootAgent(systemPrompt: String? = nil, model: Model, viewContext: NSManagedObjectContext) {
guard let url = model.url else { return }
let systemPrompt = systemPrompt ?? self.systemPrompt
guard let url = model.url else {
return
}

Task {
await agent.llama.stopServer()

let messages = currentConversation.orderedMessages.map { $0.text ?? "" }
let convoPrompt = model.template.run(systemPrompt: systemPrompt, messages: messages)
agent = Agent(id: "Llama", prompt: convoPrompt, systemPrompt: systemPrompt, modelPath: url.path, contextLength: contextLength)
loadingModelId = model.id?.uuidString

model.error = nil
do {
let backendType: BackendType = BackendType(rawValue: backendTypeID ?? "") ?? .local
let context = PersistenceController.shared.container.newBackgroundContext()
let config = try fetchBackendConfig(context: context) ?? BackendConfig(context: context)
agent.createBackend(backendType, contextLength: contextLength, config: config)
} catch { print("error fetching backend config", error) }
loadingModelId = model.id?.uuidString

model.error = nil
loadingModelId = nil
try? viewContext.save()
}
}

/// Loads the persisted `BackendConfig` row matching the currently selected
/// backend type, or `nil` when none has been saved yet.
/// - Parameter context: The managed object context used for the fetch.
/// - Throws: Rethrows any Core Data fetch error.
/// - Note: Falls back to `.local` when `backendTypeID` is unset or does not
///   map to a known `BackendType` raw value.
private func fetchBackendConfig(context: NSManagedObjectContext) throws -> BackendConfig? {
  let resolvedType = BackendType(rawValue: backendTypeID ?? "") ?? .local
  let request = BackendConfig.fetchRequest()
  request.predicate = NSPredicate(format: "backendType == %@", resolvedType.rawValue)
  let matches = try context.fetch(request)
  return matches.first
}
}
50 changes: 26 additions & 24 deletions mac/FreeChat/Models/NPC/Agent.swift
Original file line number Diff line number Diff line change
Expand Up @@ -21,42 +21,44 @@ class Agent: ObservableObject {

// each agent runs their own server
var llama: LlamaServer
private var backend: Backend

init(id: String, prompt: String, systemPrompt: String, modelPath: String, contextLength: Int) {
self.id = id
self.prompt = prompt
self.systemPrompt = systemPrompt
llama = LlamaServer(modelPath: modelPath, contextLength: contextLength)
self.llama = LlamaServer(modelPath: modelPath, contextLength: contextLength)
self.backend = LocalBackend(baseURL: BackendType.local.defaultURL, apiKey: nil)
}

/// Replaces the agent's backend with a fresh instance for the given type.
/// The config's `baseURL` wins when present; otherwise the type's default URL is used.
/// - Parameters:
///   - backend: Which backend implementation to instantiate.
///   - contextLength: NOTE(review): currently unused in this method — kept for
///     interface compatibility; confirm whether backends should receive it.
///   - config: Persisted settings supplying the base URL and API key.
func createBackend(_ backend: BackendType, contextLength: Int, config: BackendConfig) {
  let url = config.baseURL ?? backend.defaultURL
  let key = config.apiKey

  self.backend = {
    switch backend {
    case .local:
      return LocalBackend(baseURL: url, apiKey: key)
    case .llama:
      return LlamaBackend(baseURL: url, apiKey: key)
    case .openai:
      return OpenAIBackend(baseURL: url, apiKey: key)
    case .ollama:
      return OllamaBackend(baseURL: url, apiKey: key)
    }
  }()
}

// this is the main loop of the agent
// listen -> respond -> update mental model and save checkpoint
// we respond before updating to avoid a long delay after user input
func listenThinkRespond(
speakerId: String, messages: [String], template: Template, temperature: Double?
) async throws -> LlamaServer.CompleteResponse {
if status == .cold {
status = .coldProcessing
} else {
status = .processing
}

prompt = template.run(systemPrompt: systemPrompt, messages: messages)

func listenThinkRespond(speakerId: String, params: CompleteParams) async throws -> CompleteResponseSummary {
status = status == .cold ? .coldProcessing : .processing
pendingMessage = ""

let response = try await llama.complete(
prompt: prompt, stop: template.stopWords, temperature: temperature
) { partialResponse in
DispatchQueue.main.async {
self.handleCompletionProgress(partialResponse: partialResponse)
}
for try await partialResponse in try await backend.complete(params: params) {
self.pendingMessage += partialResponse
self.prompt = pendingMessage
}

pendingMessage = response.text
status = .ready

return response
return CompleteResponseSummary(text: pendingMessage, responseStartSeconds: 0)
}

func handleCompletionProgress(partialResponse: String) {
Expand All @@ -66,13 +68,13 @@ class Agent: ObservableObject {

func interrupt() async {
if status != .processing, status != .coldProcessing { return }
await llama.interrupt()
await backend.interrupt()
}

func warmup() async throws {
if prompt.isEmpty, systemPrompt.isEmpty { return }
do {
_ = try await llama.complete(prompt: prompt, stop: nil, temperature: nil)
_ = try await backend.complete(params: CompleteParams(messages: [], model: "", numCTX: 2048, temperature: 0.7))
status = .ready
} catch {
status = .cold
Expand Down
Loading