diff --git a/Chowder/Chowder/Info.plist b/Chowder/Chowder/Info.plist
index 6a6654d..f874666 100644
--- a/Chowder/Chowder/Info.plist
+++ b/Chowder/Chowder/Info.plist
@@ -7,5 +7,13 @@
NSAllowsArbitraryLoads
+ NSMicrophoneUsageDescription
+ Chowder uses the microphone for voice input so you can speak your messages.
+ NSSpeechRecognitionUsageDescription
+ Chowder uses speech recognition to transcribe your voice into text messages.
+ NSCameraUsageDescription
+ Chowder uses the camera to take photos for the agent to analyze.
+ NSPhotoLibraryUsageDescription
+ Chowder accesses your photo library to send images to the agent.
diff --git a/Chowder/Chowder/Models/Message.swift b/Chowder/Chowder/Models/Message.swift
index 26fb86c..b37c07c 100644
--- a/Chowder/Chowder/Models/Message.swift
+++ b/Chowder/Chowder/Models/Message.swift
@@ -10,11 +10,13 @@ struct Message: Identifiable, Codable {
let role: MessageRole
var content: String
let timestamp: Date
+ var imageData: Data?
- init(id: UUID = UUID(), role: MessageRole, content: String, timestamp: Date = Date()) {
+ init(id: UUID = UUID(), role: MessageRole, content: String, timestamp: Date = Date(), imageData: Data? = nil) {
self.id = id
self.role = role
self.content = content
self.timestamp = timestamp
+ self.imageData = imageData
}
}
diff --git a/Chowder/Chowder/Models/SavedSession.swift b/Chowder/Chowder/Models/SavedSession.swift
new file mode 100644
index 0000000..7e5270b
--- /dev/null
+++ b/Chowder/Chowder/Models/SavedSession.swift
@@ -0,0 +1,22 @@
+import Foundation
+
+struct SavedSession: Identifiable, Codable, Equatable {
+ let id: UUID
+ var key: String // e.g. "agent:main:main"
+ var label: String // user-friendly label e.g. "Main", "Research"
+ var lastUsed: Date
+ var messageCount: Int
+
+ init(id: UUID = UUID(), key: String, label: String, lastUsed: Date = Date(), messageCount: Int = 0) {
+ self.id = id
+ self.key = key
+ self.label = label
+ self.lastUsed = lastUsed
+ self.messageCount = messageCount
+ }
+
+ /// Default session matching the app's initial config.
+ static var defaultSession: SavedSession {
+ SavedSession(key: "agent:main:main", label: "Main")
+ }
+}
diff --git a/Chowder/Chowder/Services/ChatService.swift b/Chowder/Chowder/Services/ChatService.swift
index f8653f8..90a1e4a 100644
--- a/Chowder/Chowder/Services/ChatService.swift
+++ b/Chowder/Chowder/Services/ChatService.swift
@@ -11,8 +11,11 @@ protocol ChatServiceDelegate: AnyObject {
func chatServiceDidReceiveThinkingDelta(_ text: String)
func chatServiceDidReceiveToolEvent(name: String, path: String?, args: [String: Any]?)
func chatServiceDidUpdateBotIdentity(_ identity: BotIdentity)
+ func chatServiceDidUpdateAvatar(_ image: UIImage)
func chatServiceDidUpdateUserProfile(_ profile: UserProfile)
func chatServiceDidReceiveHistoryMessages(_ messages: [[String: Any]])
+ func chatServiceDidReceiveApproval(_ request: ApprovalRequest)
+ func chatServiceDidReceiveNotPaired()
}
final class ChatService: NSObject {
@@ -211,6 +214,89 @@ final class ChatService: NSObject {
}
}
+ /// Send a message with an image attachment.
+ /// The gateway expects `message` as a string plus an `attachments` array
+ /// whose `content` field carries the raw base64-encoded image bytes.
+ func sendWithImage(text: String, imageData: Data) {
+ guard isConnected else {
+ log("[SEND] ⚠️ Not connected — dropping message")
+ return
+ }
+
+ let requestId = makeRequestId()
+ let idempotencyKey = UUID().uuidString
+ let base64Image = imageData.base64EncodedString()
+
+ // Gateway expects: message (string), attachments (array of {mimeType, content})
+ // content must be raw base64, NOT a data URI
+ let messageText = text.isEmpty ? "[Attached image]" : text
+ let frame: [String: Any] = [
+ "type": "req",
+ "id": requestId,
+ "method": "chat.send",
+ "params": [
+ "message": messageText,
+ "sessionKey": sessionKey,
+ "idempotencyKey": idempotencyKey,
+ "deliver": true,
+ "attachments": [
+ [
+ "type": "image",
+ "mimeType": "image/jpeg",
+ "fileName": "photo.jpg",
+ "content": base64Image
+ ]
+ ]
+ ]
+ ]
+
+ guard let data = try? JSONSerialization.data(withJSONObject: frame),
+ let jsonString = String(data: data, encoding: .utf8) else { return }
+
+ log("[SEND] Sending chat.send with image id=\(requestId) (\(text.count) chars + \(imageData.count) bytes image)")
+ webSocketTask?.send(.string(jsonString)) { [weak self] error in
+ if let error {
+ self?.log("[SEND] ❌ Error: \(error.localizedDescription)")
+ DispatchQueue.main.async {
+ self?.delegate?.chatServiceDidReceiveError(error)
+ }
+ } else {
+ self?.log("[SEND] ✅ chat.send with image sent OK")
+ }
+ }
+ }
+
+ /// Respond to an agent approval request.
+ func respondToApproval(requestId: String, approved: Bool) {
+ guard isConnected else {
+ log("[APPROVAL] ⚠️ Not connected — dropping response")
+ return
+ }
+
+ let reqId = makeRequestId()
+ let frame: [String: Any] = [
+ "type": "req",
+ "id": reqId,
+ "method": "exec.approval.resolve",
+ "params": [
+ "id": requestId,
+ "decision": approved ? "allow-once" : "deny"
+ ]
+ ]
+
+ guard let data = try? JSONSerialization.data(withJSONObject: frame),
+ let jsonString = String(data: data, encoding: .utf8) else { return }
+
+ log("[APPROVAL] Sending \(approved ? "approve" : "deny") for \(requestId)")
+ webSocketTask?.send(.string(jsonString)) { [weak self] error in
+ if let error {
+ self?.log("[APPROVAL] ❌ Error: \(error.localizedDescription)")
+ } else {
+ self?.log("[APPROVAL] ✅ Response sent")
+ }
+ }
+ }
+
/// Request chat history for the current session (for polling during active runs)
private func requestChatHistory() {
guard isConnected, activeRunId != nil, !historyRequestInFlight else {
@@ -319,6 +405,181 @@ final class ChatService: NSObject {
}
}
+ // MARK: - Private: Workspace File Sync
+
+ /// Fetch a workspace file from the gateway after connecting.
+ private func fetchWorkspaceFile(_ fileName: String) {
+ let requestId = makeRequestId()
+ let frame: [String: Any] = [
+ "type": "req",
+ "id": requestId,
+ "method": "agents.files.get",
+ "params": [
+ "agentId": "main",
+ "name": fileName
+ ]
+ ]
+ guard let data = try? JSONSerialization.data(withJSONObject: frame),
+ let jsonString = String(data: data, encoding: .utf8) else {
+ log("[SYNC] ❌ Failed to serialize \(fileName) fetch request")
+ return
+ }
+ log("[SYNC] Fetching \(fileName)")
+ webSocketTask?.send(.string(jsonString)) { [weak self] error in
+ if let error {
+ self?.log("[SYNC] ❌ Error fetching \(fileName): \(error.localizedDescription)")
+ }
+ }
+ }
+
+ // MARK: - Private: Avatar Fetch
+
+ /// Fetch the agent's avatar image from IDENTITY.md's avatar field.
+ /// Supports http/https URLs and workspace-relative paths (fetched via agents.files.get).
+ private func fetchAvatarImage(from avatarString: String) {
+ let trimmed = avatarString.trimmingCharacters(in: .whitespacesAndNewlines)
+ guard !trimmed.isEmpty else { return }
+
+ if trimmed.lowercased().hasPrefix("http://") || trimmed.lowercased().hasPrefix("https://") {
+ // Direct URL download
+ guard let url = URL(string: trimmed) else {
+ log("[AVATAR] ❌ Invalid URL: \(trimmed)")
+ return
+ }
+ log("[AVATAR] Downloading from URL: \(trimmed)")
+ URLSession.shared.dataTask(with: url) { [weak self] data, _, error in
+ if let error {
+ self?.log("[AVATAR] ❌ Download error: \(error.localizedDescription)")
+ return
+ }
+ guard let data, let image = UIImage(data: data) else {
+ self?.log("[AVATAR] ❌ Could not create image from data")
+ return
+ }
+ self?.log("[AVATAR] ✅ Downloaded avatar image")
+ DispatchQueue.main.async {
+ self?.delegate?.chatServiceDidUpdateAvatar(image)
+ }
+ }.resume()
+ } else if trimmed.hasPrefix("data:") {
+ // Data URI (e.g. data:image/png;base64,...)
+ if let commaIndex = trimmed.firstIndex(of: ",") {
+ let base64 = String(trimmed[trimmed.index(after: commaIndex)...])
+ if let data = Data(base64Encoded: base64), let image = UIImage(data: data) {
+ log("[AVATAR] ✅ Decoded data URI avatar")
+ DispatchQueue.main.async { [weak self] in
+ self?.delegate?.chatServiceDidUpdateAvatar(image)
+ }
+ }
+ }
+ } else {
+ // Workspace-relative path — fetch via gateway
+ log("[AVATAR] Fetching workspace file: \(trimmed)")
+ fetchAvatarWorkspaceFile(trimmed)
+ }
+ }
+
+ /// Fetch an avatar image file from the gateway workspace using agents.files.get.
+ private func fetchAvatarWorkspaceFile(_ path: String) {
+ let requestId = makeRequestId()
+ let frame: [String: Any] = [
+ "type": "req",
+ "id": requestId,
+ "method": "agents.files.get",
+ "params": [
+ "agentId": "main",
+ "name": path
+ ]
+ ]
+ guard let data = try? JSONSerialization.data(withJSONObject: frame),
+ let jsonString = String(data: data, encoding: .utf8) else { return }
+ log("[AVATAR] Requesting workspace file: \(path)")
+ // Tag the request so handleResponse can identify avatar file responses
+ pendingAvatarFileRequest = requestId
+ webSocketTask?.send(.string(jsonString)) { [weak self] error in
+ if let error {
+ self?.log("[AVATAR] ❌ Error: \(error.localizedDescription)")
+ }
+ }
+ }
+
+ /// Tracks the request ID for an in-flight avatar file fetch.
+ private var pendingAvatarFileRequest: String?
+
+ /// Generic response handlers keyed by request ID.
+ /// When a `type:"res"` frame arrives, if a handler exists for its ID,
+ /// the handler is called and removed — bypassing the default response logic.
+ private var responseHandlers: [String: (Bool, [String: Any]?) -> Void] = [:]
+
+ /// Send a generic request and register a one-shot response handler.
+ func sendRequest(method: String, params: [String: Any] = [:], completion: @escaping (Bool, [String: Any]?) -> Void) {
+ guard isConnected else {
+ log("[REQUEST] \u{26a0}\u{fe0f} Not connected — dropping \(method)")
+ completion(false, nil)
+ return
+ }
+
+ let requestId = makeRequestId()
+ responseHandlers[requestId] = completion
+
+ let frame: [String: Any] = [
+ "type": "req",
+ "id": requestId,
+ "method": method,
+ "params": params
+ ]
+
+ guard let data = try? JSONSerialization.data(withJSONObject: frame),
+ let jsonString = String(data: data, encoding: .utf8) else {
+ responseHandlers.removeValue(forKey: requestId)
+ completion(false, nil)
+ return
+ }
+
+ log("[REQUEST] Sending \(method) id=\(requestId)")
+ webSocketTask?.send(.string(jsonString)) { [weak self] error in
+ if let error {
+ self?.log("[REQUEST] \u{274c} Error sending \(method): \(error.localizedDescription)")
+ DispatchQueue.main.async {
+ self?.responseHandlers.removeValue(forKey: requestId)
+ completion(false, nil)
+ }
+ }
+ }
+ }
+
+ // MARK: - Cron Job Methods
+
+ /// Fetch all cron jobs (including disabled ones).
+ func fetchCronJobs(completion: @escaping (Bool, [[String: Any]]?) -> Void) {
+ sendRequest(method: "cron.list", params: ["includeDisabled": true]) { ok, payload in
+ let jobs = payload?["jobs"] as? [[String: Any]]
+ completion(ok, jobs)
+ }
+ }
+
+ /// Fetch run history for a specific cron job.
+ func fetchCronRuns(jobId: String, limit: Int = 20, completion: @escaping (Bool, [[String: Any]]?) -> Void) {
+ sendRequest(method: "cron.runs", params: ["jobId": jobId, "limit": limit]) { ok, payload in
+ let runs = payload?["runs"] as? [[String: Any]]
+ completion(ok, runs)
+ }
+ }
+
+ /// Fetch cron status overview.
+ func fetchCronStatus(completion: @escaping (Bool, [String: Any]?) -> Void) {
+ sendRequest(method: "cron.status") { ok, payload in
+ completion(ok, payload)
+ }
+ }
+
+ /// Trigger a manual cron job run.
+ func runCronJob(jobId: String, completion: @escaping (Bool, [String: Any]?) -> Void) {
+ sendRequest(method: "cron.run", params: ["jobId": jobId]) { ok, payload in
+ completion(ok, payload)
+ }
+ }
+
// MARK: - Private: Connect Handshake
/// Send the `connect` request after receiving the gateway's challenge nonce.
@@ -328,7 +589,20 @@ final class ChatService: NSObject {
// Valid client IDs: webchat-ui, openclaw-control-ui, webchat, cli,
// gateway-client, openclaw-macos, openclaw-ios, openclaw-android, node-host, test
// Valid client modes: webchat, cli, ui, backend, node, probe, test
- // Device identity is schema-optional; omit until we implement keypair signing.
+ let clientId = "openclaw-ios"
+ let clientMode = "ui"
+ let role = "operator"
+ let scopes = ["operator.read", "operator.write", "operator.approvals", "operator.admin"]
+
+ let signed = DeviceIdentity.sign(
+ clientId: clientId,
+ clientMode: clientMode,
+ role: role,
+ scopes: scopes,
+ token: token,
+ nonce: nonce
+ )
+
let frame: [String: Any] = [
"type": "req",
"id": requestId,
@@ -337,16 +611,24 @@ final class ChatService: NSObject {
"minProtocol": 3,
"maxProtocol": 3,
"client": [
- "id": "openclaw-ios",
+ "id": clientId,
"version": "1.0.0",
"platform": "ios",
- "mode": "ui"
+ "mode": clientMode
],
- "role": "operator",
- "scopes": ["operator.read", "operator.write"],
+ "role": role,
+ "scopes": scopes,
"auth": [
"token": token
],
+ "device": [
+ "id": DeviceIdentity.deviceId,
+ "publicKey": DeviceIdentity.publicKeyBase64Url,
+ "signature": signed.signature,
+ "signedAt": signed.signedAt,
+ "nonce": nonce
+ ],
+ "caps": ["tool-events"],
"locale": Locale.current.identifier,
"userAgent": "chowder-ios/1.0.0"
]
@@ -522,8 +804,11 @@ final class ChatService: NSObject {
let content = args?["content"] as? String {
if filePath.hasSuffix("IDENTITY.md") {
let identity = BotIdentity.from(markdown: content)
- self.log("[SYNC] Detected write to IDENTITY.md — name=\(identity.name)")
+ self.log("[SYNC] Detected write to IDENTITY.md — name=\(identity.name) avatar=\(identity.avatar)")
self.delegate?.chatServiceDidUpdateBotIdentity(identity)
+ if !identity.avatar.isEmpty {
+ self.fetchAvatarImage(from: identity.avatar)
+ }
} else if filePath.hasSuffix("USER.md") {
let profile = UserProfile.from(markdown: content)
self.log("[SYNC] Detected write to USER.md — name=\(profile.name)")
@@ -587,7 +872,30 @@ final class ChatService: NSObject {
self.delegate?.chatServiceDidReceiveError(ChatServiceError.gatewayError(msg))
default:
- self.log("[HANDLE] Event: \(event)")
+ // Check for approval request events (e.g. exec.approval.requested)
+ if event.contains("approval") {
+ self.log("[HANDLE] 🔐 Approval event: \(event)")
+ let requestId = payload?["id"] as? String ?? UUID().uuidString
+ // The payload has a nested "request" object with command, host, cwd, etc.
+ let innerRequest = payload?["request"] as? [String: Any]
+ let command = innerRequest?["command"] as? String ?? "Unknown command"
+ let cwd = innerRequest?["cwd"] as? String
+ let host = innerRequest?["host"] as? String ?? "node"
+ let desc = cwd != nil
+ ? "Run \(command) in \(cwd!)"
+ : "Run \(command)"
+
+ let request = ApprovalRequest(
+ id: requestId,
+ toolName: "\(host): \(command)",
+ description: desc,
+ args: innerRequest,
+ timestamp: Date()
+ )
+ self.delegate?.chatServiceDidReceiveApproval(request)
+ } else {
+ self.log("[HANDLE] Event: \(event)")
+ }
}
}
}
@@ -599,6 +907,20 @@ final class ChatService: NSObject {
let payload = json["payload"] as? [String: Any]
let error = json["error"] as? [String: Any]
+ // Check for a registered one-shot response handler first.
+ if let handler = responseHandlers.removeValue(forKey: id) {
+ let result = ok ? payload : error
+ if !ok {
+ let code = error?["code"] as? String ?? "unknown"
+ let message = error?["message"] as? String ?? "Request failed"
+ log("[REQUEST] \u{274c} Response error id=\(id) code=\(code) message=\(message)")
+ }
+ DispatchQueue.main.async {
+ handler(ok, result)
+ }
+ return
+ }
+
if ok {
let payloadType = payload?["type"] as? String
@@ -610,6 +932,50 @@ final class ChatService: NSObject {
self?.isConnected = true
self?.delegate?.chatServiceDidConnect()
}
+
+ // Fetch IDENTITY.md and USER.md from workspace
+ fetchWorkspaceFile("IDENTITY.md")
+ fetchWorkspaceFile("USER.md")
+ return
+ }
+
+ // Handle agents.files.get response (workspace file sync)
+ if let file = payload?["file"] as? [String: Any],
+ let content = file["content"] as? String,
+ let filePath = (file["name"] as? String) ?? (file["path"] as? String) {
+ if filePath.hasSuffix("IDENTITY.md") {
+ let identity = BotIdentity.from(markdown: content)
+ log("[SYNC] Fetched IDENTITY.md — name=\(identity.name) avatar=\(identity.avatar)")
+ DispatchQueue.main.async { [weak self] in
+ self?.delegate?.chatServiceDidUpdateBotIdentity(identity)
+ }
+ if !identity.avatar.isEmpty {
+ fetchAvatarImage(from: identity.avatar)
+ }
+ } else if filePath.hasSuffix("USER.md") {
+ let profile = UserProfile.from(markdown: content)
+ log("[SYNC] Fetched USER.md — name=\(profile.name)")
+ DispatchQueue.main.async { [weak self] in
+ self?.delegate?.chatServiceDidUpdateUserProfile(profile)
+ }
+ }
+ return
+ }
+
+ // Handle avatar workspace file response (binary content as base64)
+ if let reqId = pendingAvatarFileRequest, id == reqId {
+ pendingAvatarFileRequest = nil
+ if let file = payload?["file"] as? [String: Any],
+ let contentB64 = file["content"] as? String,
+ let data = Data(base64Encoded: contentB64),
+ let image = UIImage(data: data) {
+ log("[AVATAR] ✅ Loaded avatar from workspace file")
+ DispatchQueue.main.async { [weak self] in
+ self?.delegate?.chatServiceDidUpdateAvatar(image)
+ }
+ } else {
+ log("[AVATAR] ⚠️ Could not decode avatar from workspace response")
+ }
return
}
@@ -657,8 +1023,20 @@ final class ChatService: NSObject {
let code = error?["code"] as? String ?? "unknown"
let message = error?["message"] as? String ?? json["error"] as? String ?? "Request failed"
log("[HANDLE] ❌ res error id=\(id) code=\(code) message=\(message)")
- DispatchQueue.main.async { [weak self] in
- self?.delegate?.chatServiceDidReceiveError(ChatServiceError.gatewayError("\(code): \(message)"))
+
+ if code == "NOT_PAIRED" {
+ // Device identity changed (e.g. Keychain wiped on reinstall).
+ // Stop reconnect loop and notify delegate to show re-pair UI.
+ shouldReconnect = false
+ stopHistoryPolling()
+ DispatchQueue.main.async { [weak self] in
+ self?.isConnected = false
+ self?.delegate?.chatServiceDidReceiveNotPaired()
+ }
+ } else {
+ DispatchQueue.main.async { [weak self] in
+ self?.delegate?.chatServiceDidReceiveError(ChatServiceError.gatewayError("\(code): \(message)"))
+ }
}
}
}
diff --git a/Chowder/Chowder/Services/DeviceIdentity.swift b/Chowder/Chowder/Services/DeviceIdentity.swift
new file mode 100644
index 0000000..c99f5ca
--- /dev/null
+++ b/Chowder/Chowder/Services/DeviceIdentity.swift
@@ -0,0 +1,93 @@
+import Foundation
+import CryptoKit
+import CommonCrypto
+
+/// Manages Ed25519 device identity for OpenClaw gateway authentication.
+/// The keypair is generated once and stored in Keychain. The device ID
+/// is the SHA-256 hash of the raw public key bytes (hex-encoded).
+enum DeviceIdentity {
+
+ private static let privateKeyTag = "deviceEd25519PrivateKey"
+
+ // MARK: - Public API
+
+ /// Returns the device ID (SHA-256 of raw public key, hex-encoded).
+ static var deviceId: String {
+ let key = loadOrCreatePrivateKey()
+ let rawPublicKey = key.publicKey.rawRepresentation
+ return SHA256.hash(data: rawPublicKey)
+ .map { String(format: "%02x", $0) }
+ .joined()
+ }
+
+ /// Returns the raw public key as base64url (no padding).
+ static var publicKeyBase64Url: String {
+ let key = loadOrCreatePrivateKey()
+ return base64UrlEncode(key.publicKey.rawRepresentation)
+ }
+
+ /// Signs the device auth payload for the gateway connect handshake.
+ /// Payload format (v2): version|deviceId|clientId|clientMode|role|scopes|signedAtMs|token|nonce
+ static func sign(
+ clientId: String,
+ clientMode: String,
+ role: String,
+ scopes: [String],
+ token: String,
+ nonce: String
+ ) -> (signature: String, signedAt: Int64) {
+ let key = loadOrCreatePrivateKey()
+ let signedAtMs = Int64(Date().timeIntervalSince1970 * 1000)
+ let scopesStr = scopes.joined(separator: ",")
+
+ let payload = [
+ "v2",
+ deviceId,
+ clientId,
+ clientMode,
+ role,
+ scopesStr,
+ String(signedAtMs),
+ token,
+ nonce
+ ].joined(separator: "|")
+
+ let payloadData = Data(payload.utf8)
+ let signatureRaw = try! key.signature(for: payloadData)
+ let signatureData = signatureRaw.withUnsafeBytes { Data($0) }
+ let signatureBase64Url = base64UrlEncode(signatureData)
+
+ return (signatureBase64Url, signedAtMs)
+ }
+
+ // MARK: - Private
+
+ private static func loadOrCreatePrivateKey() -> Curve25519.Signing.PrivateKey {
+ if let stored = loadPrivateKey() {
+ return stored
+ }
+ let newKey = Curve25519.Signing.PrivateKey()
+ savePrivateKey(newKey)
+ return newKey
+ }
+
+ private static func loadPrivateKey() -> Curve25519.Signing.PrivateKey? {
+ guard let b64 = KeychainService.load(key: privateKeyTag),
+ let data = Data(base64Encoded: b64) else {
+ return nil
+ }
+ return try? Curve25519.Signing.PrivateKey(rawRepresentation: data)
+ }
+
+ private static func savePrivateKey(_ key: Curve25519.Signing.PrivateKey) {
+ let b64 = key.rawRepresentation.base64EncodedString()
+ KeychainService.save(key: privateKeyTag, value: b64)
+ }
+
+ private static func base64UrlEncode(_ data: Data) -> String {
+ data.base64EncodedString()
+ .replacingOccurrences(of: "+", with: "-")
+ .replacingOccurrences(of: "/", with: "_")
+ .replacingOccurrences(of: "=", with: "")
+ }
+}
diff --git a/Chowder/Chowder/Services/LocalStorage.swift b/Chowder/Chowder/Services/LocalStorage.swift
index d221948..e18ce9f 100644
--- a/Chowder/Chowder/Services/LocalStorage.swift
+++ b/Chowder/Chowder/Services/LocalStorage.swift
@@ -11,34 +11,99 @@ enum LocalStorage {
FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
}
- // MARK: - Chat History
-
- private static var chatHistoryURL: URL {
- documentsURL.appendingPathComponent("chat_history.json")
+ /// Per-session directory for message storage.
+ private static func sessionDirectory(for sessionKey: String) -> URL {
+ let sanitized = sessionKey
+ .replacingOccurrences(of: ":", with: "_")
+ .replacingOccurrences(of: "/", with: "_")
+ let dir = documentsURL.appendingPathComponent("sessions/\(sanitized)")
+ try? FileManager.default.createDirectory(at: dir, withIntermediateDirectories: true)
+ return dir
}
- static func saveMessages(_ messages: [Message]) {
+ // MARK: - Chat History (per-session)
+
+ static func saveMessages(_ messages: [Message], forSession sessionKey: String) {
+ let url = sessionDirectory(for: sessionKey).appendingPathComponent("chat_history.json")
do {
let data = try JSONEncoder().encode(messages)
- try data.write(to: chatHistoryURL, options: .atomic)
+ try data.write(to: url, options: .atomic)
} catch {
print("[LocalStorage] Failed to save messages: \(error)")
}
}
- static func loadMessages() -> [Message] {
+ static func loadMessages(forSession sessionKey: String) -> [Message] {
+ let url = sessionDirectory(for: sessionKey).appendingPathComponent("chat_history.json")
+ if FileManager.default.fileExists(atPath: url.path) {
+ do {
+ let data = try Data(contentsOf: url)
+ let messages = try JSONDecoder().decode([Message].self, from: data)
+ if !messages.isEmpty {
+ return messages
+ }
+ // File exists but is empty — fall through to legacy
+ } catch {
+ print("[LocalStorage] Failed to load per-session messages: \(error)")
+ // Fall through to legacy on decode error (e.g. schema change)
+ }
+ }
+
+ // Migration: try loading from legacy location
+ let legacy = loadLegacyMessages()
+ if !legacy.isEmpty {
+ print("[LocalStorage] Migrated \(legacy.count) messages from legacy to session \(sessionKey)")
+ // Save to per-session location so we don't migrate again
+ saveMessages(legacy, forSession: sessionKey)
+ }
+ return legacy
+ }
+
+ static func deleteMessages(forSession sessionKey: String) {
+ let url = sessionDirectory(for: sessionKey).appendingPathComponent("chat_history.json")
+ try? FileManager.default.removeItem(at: url)
+ }
+
+ // Legacy support (single-session)
+ private static var chatHistoryURL: URL {
+ documentsURL.appendingPathComponent("chat_history.json")
+ }
+
+ private static func loadLegacyMessages() -> [Message] {
guard FileManager.default.fileExists(atPath: chatHistoryURL.path) else { return [] }
do {
let data = try Data(contentsOf: chatHistoryURL)
return try JSONDecoder().decode([Message].self, from: data)
} catch {
- print("[LocalStorage] Failed to load messages: \(error)")
+ print("[LocalStorage] Failed to load legacy messages: \(error)")
return []
}
}
- static func deleteMessages() {
- try? FileManager.default.removeItem(at: chatHistoryURL)
+ // MARK: - Saved Sessions
+
+ private static var sessionsURL: URL {
+ documentsURL.appendingPathComponent("saved_sessions.json")
+ }
+
+ static func saveSessions(_ sessions: [SavedSession]) {
+ do {
+ let data = try JSONEncoder().encode(sessions)
+ try data.write(to: sessionsURL, options: .atomic)
+ } catch {
+ print("[LocalStorage] Failed to save sessions: \(error)")
+ }
+ }
+
+ static func loadSessions() -> [SavedSession] {
+ guard FileManager.default.fileExists(atPath: sessionsURL.path) else { return [] }
+ do {
+ let data = try Data(contentsOf: sessionsURL)
+ return try JSONDecoder().decode([SavedSession].self, from: data)
+ } catch {
+ print("[LocalStorage] Failed to load sessions: \(error)")
+ return []
+ }
}
// MARK: - Agent Avatar
diff --git a/Chowder/Chowder/Services/VoiceInputManager.swift b/Chowder/Chowder/Services/VoiceInputManager.swift
new file mode 100644
index 0000000..ddc3e4b
--- /dev/null
+++ b/Chowder/Chowder/Services/VoiceInputManager.swift
@@ -0,0 +1,122 @@
+import Foundation
+import Speech
+import AVFoundation
+
+final class VoiceInputManager {
+ private(set) var isListening = false
+ var transcribedText = ""
+ var error: String?
+
+ private var speechRecognizer: SFSpeechRecognizer?
+ private var recognitionRequest: SFSpeechAudioBufferRecognitionRequest?
+ private var recognitionTask: SFSpeechRecognitionTask?
+ private let audioEngine = AVAudioEngine()
+
+ /// Called on main thread when listening stops (either manually or from timeout/error).
+ var onStoppedListening: (() -> Void)?
+
+ init() {
+ speechRecognizer = SFSpeechRecognizer(locale: Locale.current)
+ }
+
+ /// Request speech recognition and microphone permissions.
+ func requestPermissions(completion: @escaping (Bool) -> Void) {
+ SFSpeechRecognizer.requestAuthorization { status in
+ DispatchQueue.main.async {
+ guard status == .authorized else {
+ self.error = "Speech recognition not authorized"
+ completion(false)
+ return
+ }
+ AVAudioApplication.requestRecordPermission { granted in
+ DispatchQueue.main.async {
+ if !granted {
+ self.error = "Microphone access not authorized"
+ }
+ completion(granted)
+ }
+ }
+ }
+ }
+ }
+
+ /// Start speech recognition.
+ func startListening(onTranscription: @escaping (String) -> Void) {
+ guard let speechRecognizer, speechRecognizer.isAvailable else {
+ error = "Speech recognition unavailable"
+ return
+ }
+
+ // Stop any existing task
+ if isListening {
+ stopListening()
+ }
+ recognitionTask?.cancel()
+ recognitionTask = nil
+
+ let audioSession = AVAudioSession.sharedInstance()
+ do {
+ try audioSession.setCategory(.record, mode: .measurement, options: .duckOthers)
+ try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
+ } catch {
+ self.error = "Audio session setup failed: \(error.localizedDescription)"
+ return
+ }
+
+ recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
+ guard let recognitionRequest else { return }
+ recognitionRequest.shouldReportPartialResults = true
+
+ if speechRecognizer.supportsOnDeviceRecognition {
+ recognitionRequest.requiresOnDeviceRecognition = true
+ }
+
+ recognitionTask = speechRecognizer.recognitionTask(with: recognitionRequest) { [weak self] result, error in
+ guard let self else { return }
+ if let result {
+ let text = result.bestTranscription.formattedString
+ DispatchQueue.main.async {
+ self.transcribedText = text
+ onTranscription(text)
+ }
+ }
+ if error != nil || (result?.isFinal ?? false) {
+ DispatchQueue.main.async {
+ self.stopListening()
+ }
+ }
+ }
+
+ let inputNode = audioEngine.inputNode
+ let recordingFormat = inputNode.outputFormat(forBus: 0)
+ inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { [weak self] buffer, _ in
+ self?.recognitionRequest?.append(buffer)
+ }
+
+ do {
+ audioEngine.prepare()
+ try audioEngine.start()
+ isListening = true
+ transcribedText = ""
+ } catch {
+ self.error = "Audio engine failed to start: \(error.localizedDescription)"
+ }
+ }
+
+ /// Stop speech recognition.
+ func stopListening() {
+ guard isListening else { return }
+ audioEngine.stop()
+ audioEngine.inputNode.removeTap(onBus: 0)
+ recognitionRequest?.endAudio()
+ recognitionRequest = nil
+ recognitionTask?.cancel()
+ recognitionTask = nil
+ isListening = false
+
+ // Deactivate audio session so TTS or other audio can resume
+ try? AVAudioSession.sharedInstance().setActive(false, options: .notifyOthersOnDeactivation)
+
+ onStoppedListening?()
+ }
+}
diff --git a/Chowder/Chowder/Services/VoiceOutputManager.swift b/Chowder/Chowder/Services/VoiceOutputManager.swift
new file mode 100644
index 0000000..5038fcb
--- /dev/null
+++ b/Chowder/Chowder/Services/VoiceOutputManager.swift
@@ -0,0 +1,91 @@
+import Foundation
+import AVFoundation
+
+@Observable
+final class VoiceOutputManager: NSObject, AVSpeechSynthesizerDelegate {
+ var isEnabled = false
+ var isSpeaking = false
+
+ @ObservationIgnored private let synthesizer = AVSpeechSynthesizer()
+ @ObservationIgnored private var queue: [String] = []
+
+ override init() {
+ super.init()
+ synthesizer.delegate = self
+ }
+
+ /// Toggle TTS on/off. When disabled, stops any current speech.
+ func toggle() {
+ isEnabled.toggle()
+ if !isEnabled {
+ stop()
+ }
+ }
+
+ /// Speak a message. If already speaking, queues it.
+ func speak(_ text: String) {
+ guard isEnabled, !text.isEmpty else { return }
+
+ // Strip markdown formatting for cleaner speech
+ let cleaned = text
+ .replacingOccurrences(of: "**", with: "")
+ .replacingOccurrences(of: "__", with: "")
+ .replacingOccurrences(of: "```", with: "")
+ .replacingOccurrences(of: "`", with: "")
+ .replacingOccurrences(of: "###", with: "")
+ .replacingOccurrences(of: "##", with: "")
+ .replacingOccurrences(of: "#", with: "")
+
+ if synthesizer.isSpeaking {
+ queue.append(cleaned)
+ } else {
+ speakNow(cleaned)
+ }
+ }
+
+ /// Stop all speech and clear the queue.
+ func stop() {
+ synthesizer.stopSpeaking(at: .immediate)
+ queue.removeAll()
+ isSpeaking = false
+ }
+
+ private func speakNow(_ text: String) {
+ let utterance = AVSpeechUtterance(string: text)
+ utterance.rate = AVSpeechUtteranceDefaultSpeechRate
+ utterance.pitchMultiplier = 1.0
+ utterance.volume = 1.0
+
+ // Use a high-quality voice if available
+ if let voice = AVSpeechSynthesisVoice(language: Locale.current.language.languageCode?.identifier ?? "en") {
+ utterance.voice = voice
+ }
+
+ do {
+ let session = AVAudioSession.sharedInstance()
+ try session.setCategory(.playback, mode: .default, options: .duckOthers)
+ try session.setActive(true)
+ } catch {
+ print("[VoiceOutput] Audio session error: \(error)")
+ }
+
+ isSpeaking = true
+ synthesizer.speak(utterance)
+ }
+
+ // MARK: - AVSpeechSynthesizerDelegate
+
+ func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, didFinish utterance: AVSpeechUtterance) {
+ if let next = queue.first {
+ queue.removeFirst()
+ speakNow(next)
+ } else {
+ isSpeaking = false
+ try? AVAudioSession.sharedInstance().setActive(false, options: .notifyOthersOnDeactivation)
+ }
+ }
+
+ func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, didCancel utterance: AVSpeechUtterance) {
+ isSpeaking = false
+ }
+}
diff --git a/Chowder/Chowder/ViewModels/ChatViewModel.swift b/Chowder/Chowder/ViewModels/ChatViewModel.swift
index d43058b..9a912cb 100644
--- a/Chowder/Chowder/ViewModels/ChatViewModel.swift
+++ b/Chowder/Chowder/ViewModels/ChatViewModel.swift
@@ -1,5 +1,6 @@
import SwiftUI
import UIKit
+import UserNotifications
@Observable
final class ChatViewModel: ChatServiceDelegate {
@@ -35,11 +36,170 @@ final class ChatViewModel: ChatServiceDelegate {
var showSettings: Bool = false
var debugLog: [String] = []
var showDebugLog: Bool = false
+ var isInBackground: Bool = false
+ var pendingApprovals: [ApprovalRequest] = []
+ var showNotPairedAlert: Bool = false
+ @ObservationIgnored private var hasAttemptedIdentityReset = false
+
+ // MARK: - Voice Input / Output
+
+ @ObservationIgnored private let voiceInput = VoiceInputManager()
+ @ObservationIgnored private let voiceOutput = VoiceOutputManager()
+ @ObservationIgnored private var voicePermissionGranted: Bool?
+
+ /// Observable state — updated manually when voice manager changes.
+ var isListening: Bool = false
+ var isSpeakerEnabled: Bool = false
+
+ func toggleVoiceInput() {
+ // If already listening, just stop
+ if voiceInput.isListening {
+ voiceInput.stopListening()
+ isListening = false
+ return
+ }
+
+ // Check if we already know the permission result
+ if let granted = voicePermissionGranted {
+ if granted {
+ startVoiceListening()
+ } else {
+ log("Voice permissions denied — cannot start listening")
+ }
+ return
+ }
+
+ // First time: request permissions
+ voiceInput.requestPermissions { [weak self] granted in
+ guard let self else { return }
+ self.voicePermissionGranted = granted
+ if granted {
+ self.startVoiceListening()
+ } else {
+ self.log("Voice permissions denied: \(self.voiceInput.error ?? "unknown")")
+ }
+ }
+ }
+
+ private func startVoiceListening() {
+ voiceInput.onStoppedListening = { [weak self] in
+ self?.isListening = false
+ }
+ voiceInput.startListening { [weak self] text in
+ self?.inputText = text
+ }
+ isListening = true
+ }
+
+ func toggleSpeaker() {
+ voiceOutput.toggle()
+ isSpeakerEnabled = voiceOutput.isEnabled
+ }
+
+ // MARK: - Image Input
+
+ var stagedImage: UIImage?
+ var showImagePicker: Bool = false
+
+    /// Whether the send button should be enabled (text or image staged, and no request in flight).
+ var canSend: Bool {
+ let hasText = !inputText.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty
+ let hasImage = stagedImage != nil
+ return (hasText || hasImage) && !isLoading
+ }
+
+ // MARK: - Multi-Session
+
+ var savedSessions: [SavedSession] = []
+ var currentSessionKey: String = ConnectionConfig().sessionKey
+
+ func switchToSession(_ session: SavedSession) {
+ // Save current session's messages
+ LocalStorage.saveMessages(messages, forSession: currentSessionKey)
+ updateSessionMessageCount()
+
+ // Switch
+ currentSessionKey = session.key
+ messages = LocalStorage.loadMessages(forSession: session.key)
+ displayLimit = 50
+
+ // Update last used
+ if let idx = savedSessions.firstIndex(where: { $0.id == session.id }) {
+ savedSessions[idx].lastUsed = Date()
+ }
+ LocalStorage.saveSessions(savedSessions)
+
+ // Reconnect with new session key
+ chatService?.disconnect()
+ chatService = nil
+ isConnected = false
+
+ let config = ConnectionConfig()
+ let service = ChatService(
+ gatewayURL: config.gatewayURL,
+ token: config.token,
+ sessionKey: session.key
+ )
+ service.delegate = self
+ self.chatService = service
+ service.connect()
+ log("Switched to session: \(session.label) (\(session.key))")
+ }
+
+ func createSession(label: String, key: String) {
+ let session = SavedSession(key: key, label: label)
+ savedSessions.append(session)
+ LocalStorage.saveSessions(savedSessions)
+ log("Created session: \(label) (\(key))")
+ }
+
+ func deleteSession(_ session: SavedSession) {
+ savedSessions.removeAll { $0.id == session.id }
+ LocalStorage.saveSessions(savedSessions)
+ LocalStorage.deleteMessages(forSession: session.key)
+ log("Deleted session: \(session.label)")
+ }
+
+ func renameSession(_ session: SavedSession, newLabel: String) {
+ if let idx = savedSessions.firstIndex(where: { $0.id == session.id }) {
+ savedSessions[idx].label = newLabel
+ LocalStorage.saveSessions(savedSessions)
+ }
+ }
+
+ private func updateSessionMessageCount() {
+ if let idx = savedSessions.firstIndex(where: { $0.key == currentSessionKey }) {
+ savedSessions[idx].messageCount = messages.count
+ savedSessions[idx].lastUsed = Date()
+ LocalStorage.saveSessions(savedSessions)
+ }
+ }
+
+ private func loadSessions() {
+ savedSessions = LocalStorage.loadSessions()
+ if savedSessions.isEmpty {
+ // Create default session
+ let defaultSession = SavedSession.defaultSession
+ savedSessions = [defaultSession]
+ LocalStorage.saveSessions(savedSessions)
+ }
+ }
+
+ /// Background task identifier to keep WebSocket alive briefly after backgrounding.
+ @ObservationIgnored private var backgroundTaskId: UIBackgroundTaskIdentifier = .invalid
+ /// Tracks whether the agent was running when we entered background, so we can
+ /// check for a missed completion on return to foreground.
+ @ObservationIgnored private var wasLoadingWhenBackgrounded: Bool = false
+ /// Snapshot of assistant message count when we backgrounded, to detect new responses.
+ @ObservationIgnored private var assistantMessageCountAtBackground: Int = 0
// Workspace-synced data from the gateway
var botIdentity: BotIdentity = LocalStorage.loadBotIdentity()
var userProfile: UserProfile = LocalStorage.loadUserProfile()
+ /// Current avatar image — observable so views update reactively.
+ var avatarImage: UIImage? = LocalStorage.loadAvatar()
+
/// The bot's display name — uses IDENTITY.md name, falls back to "Chowder".
var botName: String {
botIdentity.name.isEmpty ? "Chowder" : botIdentity.name
@@ -70,6 +230,9 @@ final class ChatViewModel: ChatServiceDelegate {
private var chatService: ChatService?
+ /// Expose the chat service so other tabs can make requests (e.g. cron).
+ var exposedChatService: ChatService? { chatService }
+
var isConfigured: Bool {
ConnectionConfig().isConfigured
}
@@ -163,6 +326,7 @@ final class ChatViewModel: ChatServiceDelegate {
/// Push current tracking state to the Live Activity.
private func pushLiveActivityUpdate(isAISubject: Bool = false) {
+ let approvalTool = pendingApprovals.first(where: { !$0.resolved })?.toolName
LiveActivityManager.shared.update(
subject: liveActivitySubject,
currentIntent: liveActivityBottomText,
@@ -171,7 +335,8 @@ final class ChatViewModel: ChatServiceDelegate {
secondPreviousIntent: liveActivityGreyIntent,
stepNumber: liveActivityStepNumber,
costTotal: liveActivityCost,
- isAISubject: isAISubject
+ isAISubject: isAISubject,
+ pendingApprovalTool: approvalTool
)
}
@@ -238,11 +403,14 @@ final class ChatViewModel: ChatServiceDelegate {
func connect() {
log("connect() called")
+ // Load saved sessions
+ loadSessions()
+
// Restore chat history from disk on first launch
if messages.isEmpty {
- messages = LocalStorage.loadMessages()
+ messages = LocalStorage.loadMessages(forSession: currentSessionKey)
if !messages.isEmpty {
- log("Restored \(messages.count) messages from disk")
+ log("Restored \(messages.count) messages from disk for session \(currentSessionKey)")
}
}
@@ -278,26 +446,36 @@ final class ChatViewModel: ChatServiceDelegate {
func send() {
log("send() — isConnected=\(isConnected) isLoading=\(isLoading)")
let text = inputText.trimmingCharacters(in: .whitespacesAndNewlines)
- guard !text.isEmpty, !isLoading else { return }
+ let image = stagedImage
+ guard (!text.isEmpty || image != nil), !isLoading else { return }
+
+ // Stop voice input if active
+ if voiceInput.isListening {
+ voiceInput.stopListening()
+ isListening = false
+ }
hasPlayedResponseHaptic = false
hasReceivedAnyDelta = false
responseHaptic.prepare()
- messages.append(Message(role: .user, content: text))
+ // Build the user message (with optional image attachment)
+ let userMessage = Message(role: .user, content: text, imageData: image?.jpegData(compressionQuality: 0.7))
+ messages.append(userMessage)
inputText = ""
+ stagedImage = nil
isLoading = true
// Start a fresh activity tracker for this agent turn
currentActivity = AgentActivity()
currentActivity?.currentLabel = "Thinking..."
shimmerStartTime = Date()
-
+
// Increment generation counter and capture start time to filter old items
currentRunGeneration += 1
currentRunStartTime = Date()
log("Starting new run generation \(currentRunGeneration) at \(currentRunStartTime!)")
-
+
// Clear history parsing state for new run
seenThinkingIds.removeAll()
seenToolCallIds.removeAll()
@@ -308,11 +486,13 @@ final class ChatViewModel: ChatServiceDelegate {
messages.append(Message(role: .assistant, content: ""))
- LocalStorage.saveMessages(messages)
+ LocalStorage.saveMessages(messages, forSession: currentSessionKey)
+ updateSessionMessageCount()
// Start the Live Activity immediately (subject will be updated when ready)
let agentName = botName
- LiveActivityManager.shared.startActivity(agentName: agentName, userTask: text, subject: nil)
+ let displayText = text.isEmpty ? "[Image]" : text
+ LiveActivityManager.shared.startActivity(agentName: agentName, userTask: displayText, subject: nil, avatarImage: avatarImage)
// Generate AI summary for every message sent
// Include up to the last 5 user messages to identify the overall task
@@ -331,14 +511,21 @@ final class ChatViewModel: ChatServiceDelegate {
}
}
- chatService?.send(text: text)
- log("chatService.send() called")
+ // Send with or without image
+ if let imageData = image?.jpegData(compressionQuality: 0.7) {
+ chatService?.sendWithImage(text: text, imageData: imageData)
+ log("chatService.sendWithImage() called")
+ } else {
+ chatService?.send(text: text)
+ log("chatService.send() called")
+ }
}
func clearMessages() {
messages.removeAll()
- LocalStorage.deleteMessages()
- log("Chat history cleared")
+ LocalStorage.deleteMessages(forSession: currentSessionKey)
+ updateSessionMessageCount()
+ log("Chat history cleared for session \(currentSessionKey)")
}
// MARK: - ChatServiceDelegate (main chat session)
@@ -346,6 +533,7 @@ final class ChatViewModel: ChatServiceDelegate {
func chatServiceDidConnect() {
log("CONNECTED")
isConnected = true
+ hasAttemptedIdentityReset = false
// Workspace sync disabled - identity/profile are updated via tool events
// when the agent writes to IDENTITY.md or USER.md
@@ -381,14 +569,8 @@ final class ChatViewModel: ChatServiceDelegate {
lastCompletedActivity = currentActivity
currentActivity = nil
shimmerStartTime = nil
- // End the Lock Screen Live Activity now that the answer is streaming
- let taskTitle = liveActivitySubject
- Task {
- let completionSummary = await generateCompletionSummary(from: taskTitle)
- await MainActor.run {
- LiveActivityManager.shared.endActivity(completionSummary: completionSummary)
- }
- }
+ // Don't end the Live Activity here — wait for chatServiceDidFinishMessage
+ // so we can show the full response in the Live Activity.
log("Cleared activity on first delta")
}
}
@@ -399,12 +581,18 @@ final class ChatViewModel: ChatServiceDelegate {
}
func chatServiceDidFinishMessage() {
- log("message.done - isLoading was \(isLoading)")
-
+ log("message.done - isLoading was \(isLoading), isInBackground=\(isInBackground)")
+
+        // Agent finished while backgrounded (WebSocket kept alive via background task) — clear state and release the task
+ if isInBackground {
+ wasLoadingWhenBackgrounded = false
+ endBackgroundTask()
+ }
+
// Force isLoading false
isLoading = false
hasPlayedResponseHaptic = false
-
+
log("Set isLoading=false, hasPlayedResponseHaptic=false, hasReceivedAnyDelta=\(hasReceivedAnyDelta)")
// Mark all remaining in-progress steps as completed
@@ -415,13 +603,14 @@ final class ChatViewModel: ChatServiceDelegate {
lastCompletedActivity = activity
log("Preserved activity with \(activity.steps.count) steps")
}
-
- // End the Lock Screen Live Activity with completion summary
+
+ // End the Lock Screen Live Activity: show "Complete", then the response preview
let taskTitle = liveActivitySubject
+ let responsePreview = messages.last(where: { $0.role == .assistant })?.content
Task {
let completionSummary = await generateCompletionSummary(from: taskTitle)
await MainActor.run {
- LiveActivityManager.shared.endActivity(completionSummary: completionSummary)
+ LiveActivityManager.shared.endActivity(completionSummary: completionSummary, responsePreview: responsePreview)
}
}
@@ -453,13 +642,20 @@ final class ChatViewModel: ChatServiceDelegate {
self.messages[lastIdx].content.isEmpty {
self.messages.remove(at: lastIdx)
self.log("Removed empty assistant bubble (final fetch timeout)")
- LocalStorage.saveMessages(self.messages)
+ LocalStorage.saveMessages(self.messages, forSession: self.currentSessionKey)
}
}
}
}
-
- LocalStorage.saveMessages(messages)
+
+ // Auto-speak the assistant's response if TTS is enabled
+ if let lastMsg = messages.last(where: { $0.role == .assistant }),
+ !lastMsg.content.isEmpty {
+ voiceOutput.speak(lastMsg.content)
+ }
+
+ LocalStorage.saveMessages(messages, forSession: currentSessionKey)
+ updateSessionMessageCount()
}
func chatServiceDidReceiveError(_ error: Error) {
@@ -473,7 +669,7 @@ final class ChatViewModel: ChatServiceDelegate {
isLoading = false
currentActivity = nil
LiveActivityManager.shared.endActivity()
- LocalStorage.saveMessages(messages)
+ LocalStorage.saveMessages(messages, forSession: currentSessionKey)
}
/// Map raw system errors into short, human-friendly messages.
@@ -667,6 +863,59 @@ final class ChatViewModel: ChatServiceDelegate {
LocalStorage.saveBotIdentity(identity)
}
+ func chatServiceDidUpdateAvatar(_ image: UIImage) {
+ log("Avatar image updated — saving to local and shared storage")
+ LocalStorage.saveAvatar(image)
+ avatarImage = image
+ }
+
+ func chatServiceDidReceiveApproval(_ request: ApprovalRequest) {
+ log("🔐 Approval request received: \(request.toolName) — \(request.description)")
+ pendingApprovals.append(request)
+
+ // Update Live Activity to show waiting for approval
+ LiveActivityManager.shared.updateIntent("Waiting for approval: \(request.toolName)")
+
+ // Fire notification if backgrounded
+ if isInBackground {
+ let content = UNMutableNotificationContent()
+ content.title = "\(botName) needs approval"
+ content.body = "\(request.toolName): \(request.description)"
+ content.sound = .default
+ let notifRequest = UNNotificationRequest(identifier: request.id, content: content, trigger: nil)
+ UNUserNotificationCenter.current().add(notifRequest)
+ }
+ }
+
+ func handleApprovalResponse(id: String, approved: Bool) {
+ log("🔐 Approval response: \(id) — \(approved ? "approved" : "denied")")
+ chatService?.respondToApproval(requestId: id, approved: approved)
+
+ // Mark as resolved in the list
+ if let index = pendingApprovals.firstIndex(where: { $0.id == id }) {
+ pendingApprovals[index].resolved = true
+ pendingApprovals[index].approved = approved
+ }
+ }
+
+ func chatServiceDidReceiveNotPaired() {
+ isConnected = false
+ isLoading = false
+
+ if !hasAttemptedIdentityReset {
+ // First attempt: reset keypair and reconnect automatically.
+ // The gateway will see a fresh device with a valid token and auto-pair it.
+ hasAttemptedIdentityReset = true
+ log("🔐 NOT_PAIRED — resetting device identity and reconnecting")
+ DeviceIdentity.resetIdentity()
+ reconnect()
+ } else {
+ // Already tried resetting — gateway requires manual approval.
+ log("🔐 NOT_PAIRED again — showing alert for manual re-pair")
+ showNotPairedAlert = true
+ }
+ }
+
func chatServiceDidUpdateUserProfile(_ profile: UserProfile) {
log("User profile updated via tool event — name=\(profile.name)")
self.userProfile = profile
@@ -738,7 +987,7 @@ final class ChatViewModel: ChatServiceDelegate {
hasPlayedResponseHaptic = true
responseHaptic.impactOccurred()
}
- LocalStorage.saveMessages(self.messages)
+ LocalStorage.saveMessages(self.messages, forSession: self.currentSessionKey)
}
/// Parse a single history item and update activity
@@ -782,7 +1031,7 @@ final class ChatViewModel: ChatServiceDelegate {
messages[lastIndex].role == .assistant,
messages[lastIndex].content.isEmpty {
messages[lastIndex].content = "Error: \(errorMessage)"
- LocalStorage.saveMessages(messages)
+ LocalStorage.saveMessages(messages, forSession: currentSessionKey)
}
return
}
@@ -1210,6 +1459,90 @@ final class ChatViewModel: ChatServiceDelegate {
// MARK: - Workspace Data Management
+ // MARK: - Background / Foreground Lifecycle
+
+ /// Called when the app enters background. Starts a background task to keep the
+ /// WebSocket alive so `chatServiceDidFinishMessage` can fire and send a notification.
+ func didEnterBackground() {
+ isInBackground = true
+ wasLoadingWhenBackgrounded = isLoading
+ assistantMessageCountAtBackground = messages.filter { $0.role == .assistant }.count
+ log("📱 Entered background — isLoading=\(isLoading)")
+
+ guard isLoading else { return }
+
+ // Request background execution time (~30s) so the WebSocket can receive the finish event
+ backgroundTaskId = UIApplication.shared.beginBackgroundTask(withName: "AgentResponse") { [weak self] in
+ // Expiration handler — OS is about to suspend us
+ self?.log("📱 Background task expired")
+ self?.endBackgroundTask()
+ }
+ log("📱 Started background task \(backgroundTaskId.rawValue)")
+ }
+
+ /// Called when the app returns to foreground. Checks if we missed a completion while away.
+ func didReturnToForeground() {
+ let wasBg = isInBackground
+ isInBackground = false
+ endBackgroundTask()
+ log("📱 Returned to foreground — wasLoading=\(wasLoadingWhenBackgrounded)")
+
+ // Clear delivered notifications from the lock screen / notification center
+ UNUserNotificationCenter.current().removeAllDeliveredNotifications()
+
+ // Dismiss only ended Live Activities — active ones (agent still running) stay.
+ // This means the activity persists on the lock screen after task completion,
+ // and clears once the user opens the app (by tapping it or otherwise).
+ LiveActivityManager.shared.dismissEndedActivities()
+
+ // If agent was running when we backgrounded and finished while we were away,
+ // the notification should have already fired from chatServiceDidFinishMessage.
+ // But if the WebSocket died before the event arrived, detect it on reconnect:
+ // reconnect() is called separately by ChatView, which will re-establish the
+ // connection. If the agent finished, lifecycle.end will arrive and
+ // chatServiceDidFinishMessage will fire. But isInBackground is now false.
+ // So we need a different approach: check after reconnect if loading ended.
+ if wasLoadingWhenBackgrounded && wasBg {
+ // Schedule a check after reconnect settles — if the agent completed while
+ // we were suspended (no lifecycle.end received), the history fetch or
+ // reconnect will set isLoading=false. Fire notification then.
+ let gen = currentRunGeneration
+ DispatchQueue.main.asyncAfter(deadline: .now() + 3.0) { [weak self] in
+ guard let self, self.currentRunGeneration == gen else { return }
+ if !self.isLoading && self.wasLoadingWhenBackgrounded {
+ // Agent finished while we were away — check if new assistant message appeared
+ let currentAssistantCount = self.messages.filter { $0.role == .assistant }.count
+ let lastMsg = self.messages.last(where: { $0.role == .assistant })?.content ?? ""
+ if currentAssistantCount > self.assistantMessageCountAtBackground || !lastMsg.isEmpty {
+ // Notification disabled — response is shown in Live Activity instead
+ }
+ self.wasLoadingWhenBackgrounded = false
+ }
+ }
+ }
+ }
+
+ /// Fire a local notification with the latest agent response.
+ private func fireBackgroundNotification() {
+ let lastMsg = messages.last(where: { $0.role == .assistant })?.content ?? ""
+ let body = lastMsg.isEmpty ? "Your agent has replied." : String(lastMsg.prefix(100))
+
+ let content = UNMutableNotificationContent()
+ content.title = botName
+ content.body = body
+ content.sound = .default
+ let request = UNNotificationRequest(identifier: UUID().uuidString, content: content, trigger: nil)
+ UNUserNotificationCenter.current().add(request)
+ log("🔔 Fired background notification: \(body.prefix(50))")
+ }
+
+ private func endBackgroundTask() {
+ guard backgroundTaskId != .invalid else { return }
+ log("📱 Ending background task \(backgroundTaskId.rawValue)")
+ UIApplication.shared.endBackgroundTask(backgroundTaskId)
+ backgroundTaskId = .invalid
+ }
+
/// Save workspace data to local cache (used by Settings save).
func saveWorkspaceData(identity: BotIdentity, profile: UserProfile) {
self.botIdentity = identity
@@ -1218,4 +1551,18 @@ final class ChatViewModel: ChatServiceDelegate {
LocalStorage.saveUserProfile(profile)
log("Settings saved to local cache")
}
+
+ /// Save a manually uploaded avatar image (from Settings photo picker).
+ func saveManualAvatar(_ image: UIImage) {
+ LocalStorage.saveAvatar(image)
+ avatarImage = image
+ log("Manual avatar saved")
+ }
+
+ /// Delete the avatar image (from Settings).
+ func deleteAvatar() {
+ LocalStorage.deleteAvatar()
+ avatarImage = nil
+ log("Avatar deleted")
+ }
}
diff --git a/Chowder/Chowder/Views/ChatHeaderView.swift b/Chowder/Chowder/Views/ChatHeaderView.swift
index 2c88b01..3b34a63 100644
--- a/Chowder/Chowder/Views/ChatHeaderView.swift
+++ b/Chowder/Chowder/Views/ChatHeaderView.swift
@@ -3,19 +3,23 @@ import SwiftUI
struct ChatHeaderView: View {
let botName: String
let isOnline: Bool
- var taskSummary: String?
+ var avatarImage: UIImage?
+ @Binding var selectedTab: Tab
var onSettingsTapped: (() -> Void)?
var onDebugTapped: (() -> Void)?
+ var onSearchTapped: (() -> Void)?
+ var isSpeakerEnabled: Bool = false
+ var onSpeakerToggle: (() -> Void)?
var body: some View {
VStack(spacing: 0) {
- HStack(spacing: 10) {
- // Avatar + name tappable to open settings
+ HStack(spacing: 8) {
+ // Avatar tappable to open settings
Button {
onSettingsTapped?()
} label: {
- HStack(spacing: 10) {
- if let customAvatar = LocalStorage.loadAvatar() {
+ HStack(spacing: 8) {
+ if let customAvatar = avatarImage {
Image(uiImage: customAvatar)
.resizable()
.scaledToFill()
@@ -42,48 +46,97 @@ struct ChatHeaderView: View {
Text(botName)
.font(.system(size: 16, weight: .semibold))
.foregroundStyle(.primary)
+ .lineLimit(1)
- HStack(spacing: 4) {
- Text(isOnline ? "Online" : "Offline")
- .font(.system(size: 13, weight: .regular))
- .foregroundStyle(.gray)
-
- if let summary = taskSummary {
- Text("·")
- .font(.system(size: 13, weight: .regular))
- .foregroundStyle(.gray)
- Text(summary)
- .font(.system(size: 13, weight: .medium))
- .foregroundStyle(.secondary)
- .lineLimit(1)
- }
- }
+ Text(isOnline ? "Online" : "Offline")
+ .font(.system(size: 12))
+ .foregroundStyle(.gray)
}
}
}
.buttonStyle(.plain)
- Spacer()
+ Spacer(minLength: 4)
+ // Speaker toggle (TTS)
+ Button {
+ onSpeakerToggle?()
+ } label: {
+ Image(systemName: isSpeakerEnabled ? "speaker.wave.2.fill" : "speaker.slash")
+ .font(.system(size: 16))
+ .foregroundStyle(isSpeakerEnabled ? .blue : .gray)
+ .frame(width: 34, height: 34)
+ }
+
+ // Search button
+ Button {
+ onSearchTapped?()
+ } label: {
+ Image(systemName: "magnifyingglass")
+ .font(.system(size: 16))
+ .foregroundStyle(.gray)
+ .frame(width: 34, height: 34)
+ }
+
+ // Sessions / Chat / Cron pill toggle
+ TabPillToggle(selectedTab: $selectedTab)
+
+ // Debug button
Button {
onDebugTapped?()
} label: {
Image(systemName: "ant")
.font(.system(size: 16))
.foregroundStyle(.gray)
+ .frame(width: 34, height: 34)
}
}
- .padding(.horizontal, 16)
- .padding(.vertical, 12)
-
- Rectangle()
- .fill(Color(.systemGray5))
- .frame(height: 0.5)
+ .padding(.horizontal, 12)
+ .padding(.vertical, 10)
}
.background {
- Color.white.opacity(0.75)
- .background(.thinMaterial)
+ Rectangle()
+ .fill(.ultraThinMaterial)
.ignoresSafeArea(edges: .top)
}
}
}
+
+// MARK: - Compact Pill Toggle
+
+struct TabPillToggle: View {
+ @Binding var selectedTab: Tab
+
+ var body: some View {
+ HStack(spacing: 0) {
+ tabButton(tab: .sessions, icon: "tray.full")
+ tabButton(tab: .chat, icon: "bubble.left.and.bubble.right")
+ tabButton(tab: .cron, icon: "clock.arrow.circlepath")
+ }
+ .padding(3)
+ .background(
+ Capsule()
+ .fill(.ultraThinMaterial)
+ )
+ }
+
+ @ViewBuilder
+ private func tabButton(tab: Tab, icon: String) -> some View {
+ Button {
+ withAnimation(.easeInOut(duration: 0.2)) {
+ selectedTab = tab
+ }
+ } label: {
+ Image(systemName: icon)
+ .font(.system(size: 13, weight: .medium))
+ .foregroundStyle(selectedTab == tab ? .white : .gray)
+ .frame(width: 30, height: 30)
+ .background(
+ selectedTab == tab
+ ? Capsule().fill(Color.blue)
+ : Capsule().fill(Color.clear)
+ )
+ }
+ .buttonStyle(.plain)
+ }
+}
diff --git a/Chowder/Chowder/Views/ChatView.swift b/Chowder/Chowder/Views/ChatView.swift
index 60f49ad..24b562d 100644
--- a/Chowder/Chowder/Views/ChatView.swift
+++ b/Chowder/Chowder/Views/ChatView.swift
@@ -1,10 +1,11 @@
import SwiftUI
struct ChatView: View {
- @State private var viewModel = ChatViewModel()
+ @Bindable var viewModel: ChatViewModel
+ @Binding var selectedTab: Tab
@State private var isAtBottom = true
+ @State private var showSearch = false
@FocusState private var isInputFocused: Bool
- @Environment(\.scenePhase) private var scenePhase
var body: some View {
VStack(spacing: 0) {
@@ -53,6 +54,16 @@ struct ChatView: View {
.transition(.opacity.animation(.easeOut(duration: 0.15)))
}
+ // Approval cards — shown inline when agent needs user approval
+ ForEach(viewModel.pendingApprovals, id: \.id) { request in
+ ApprovalCardView(
+ request: request,
+ onApprove: { viewModel.handleApprovalResponse(id: request.id, approved: true) },
+ onDeny: { viewModel.handleApprovalResponse(id: request.id, approved: false) }
+ )
+ .transition(.opacity.combined(with: .move(edge: .bottom)))
+ }
+
// Thinking shimmer — shown while the agent is working
if let activity = viewModel.currentActivity,
!activity.currentLabel.isEmpty {
@@ -77,6 +88,7 @@ struct ChatView: View {
.padding(.horizontal, 16)
.padding(.bottom, 16)
}
+ .defaultScrollAnchor(.bottom)
.overlay(alignment: .bottom) {
if !isAtBottom {
Button {
@@ -98,12 +110,75 @@ struct ChatView: View {
.transition(.scale(scale: 0.5).combined(with: .opacity))
}
}
- // -- Auto-scroll handlers removed; add back as needed --
+ .onChange(of: viewModel.messages.count) {
+ // Scroll to bottom when new messages arrive
+ if isAtBottom {
+ DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) {
+ withAnimation(.easeOut(duration: 0.15)) {
+ proxy.scrollTo("bottom", anchor: .bottom)
+ }
+ }
+ }
+ }
+ .onChange(of: viewModel.messages.last?.content) {
+ // Auto-scroll as streaming message content grows
+ if isAtBottom {
+ proxy.scrollTo("bottom", anchor: .bottom)
+ }
+ }
.scrollDismissesKeyboard(.interactively)
+ // Search overlay
+ .overlay(alignment: .top) {
+ if showSearch {
+ MessageSearchView(
+ messages: viewModel.messages,
+ isPresented: $showSearch,
+ onResultTapped: { messageId in
+ showSearch = false
+ DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) {
+ withAnimation {
+ proxy.scrollTo(messageId, anchor: .center)
+ }
+ }
+ }
+ )
+ .transition(.move(edge: .top).combined(with: .opacity))
+ }
+ }
}
// Input bar
- HStack(spacing: 12) {
+ HStack(spacing: 8) {
+ // Image picker button
+ Button {
+ viewModel.showImagePicker = true
+ } label: {
+ Image(systemName: "photo.on.rectangle.angled")
+ .font(.system(size: 20))
+ .foregroundStyle(.gray)
+ }
+
+ // Image thumbnail preview (if image is staged)
+ if let stagedImage = viewModel.stagedImage {
+ ZStack(alignment: .topTrailing) {
+ Image(uiImage: stagedImage)
+ .resizable()
+ .scaledToFill()
+ .frame(width: 40, height: 40)
+ .clipShape(RoundedRectangle(cornerRadius: 8))
+
+ Button {
+ viewModel.stagedImage = nil
+ } label: {
+ Image(systemName: "xmark.circle.fill")
+ .font(.system(size: 14))
+ .foregroundStyle(.white)
+ .background(Circle().fill(Color.black.opacity(0.6)))
+ }
+ .offset(x: 4, y: -4)
+ }
+ }
+
TextField("Message...", text: $viewModel.inputText, axis: .vertical)
.focused($isInputFocused)
.lineLimit(1...5)
@@ -112,6 +187,15 @@ struct ChatView: View {
.background(Color(.systemGray6))
.clipShape(RoundedRectangle(cornerRadius: 20))
+ // Mic button for voice input
+ Button {
+ viewModel.toggleVoiceInput()
+ } label: {
+ Image(systemName: viewModel.isListening ? "mic.fill" : "mic")
+ .font(.system(size: 20))
+ .foregroundStyle(viewModel.isListening ? .red : .gray)
+ }
+
Button {
isInputFocused = false
viewModel.send()
@@ -119,12 +203,12 @@ struct ChatView: View {
Image(systemName: "arrow.up.circle.fill")
.font(.system(size: 32))
.foregroundStyle(
- viewModel.inputText.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty || viewModel.isLoading
- ? Color(.systemGray4)
- : Color.blue
+ viewModel.canSend
+ ? Color.blue
+ : Color(.systemGray4)
)
}
- .disabled(viewModel.inputText.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty || viewModel.isLoading)
+ .disabled(!viewModel.canSend)
}
.padding(.horizontal, 16)
.padding(.vertical, 8)
@@ -134,28 +218,35 @@ struct ChatView: View {
ChatHeaderView(
botName: viewModel.botName,
isOnline: viewModel.isConnected,
- taskSummary: viewModel.currentTaskSummary,
+ avatarImage: viewModel.avatarImage,
+ selectedTab: $selectedTab,
onSettingsTapped: { viewModel.showSettings = true },
- onDebugTapped: { viewModel.showDebugLog = true }
+ onDebugTapped: { viewModel.showDebugLog = true },
+ onSearchTapped: {
+ withAnimation(.easeInOut(duration: 0.25)) {
+ showSearch.toggle()
+ }
+ },
+ isSpeakerEnabled: viewModel.isSpeakerEnabled,
+ onSpeakerToggle: { viewModel.toggleSpeaker() }
)
}
.navigationBarHidden(true)
- .onAppear {
- viewModel.connect()
- }
- .onChange(of: scenePhase) { oldPhase, newPhase in
- if oldPhase == .background && newPhase == .active {
- viewModel.reconnect()
- }
- }
.sheet(isPresented: $viewModel.showSettings) {
SettingsView(
currentIdentity: viewModel.botIdentity,
currentProfile: viewModel.userProfile,
+ currentAvatar: viewModel.avatarImage,
isConnected: viewModel.isConnected,
onSave: { identity, profile in
viewModel.saveWorkspaceData(identity: identity, profile: profile)
},
+ onSaveAvatar: { image in
+ viewModel.saveManualAvatar(image)
+ },
+ onDeleteAvatar: {
+ viewModel.deleteAvatar()
+ },
onSaveConnection: {
viewModel.reconnect()
},
@@ -198,9 +289,14 @@ struct ChatView: View {
}
}
}
+ .sheet(isPresented: $viewModel.showImagePicker) {
+ ImagePickerView { image in
+ viewModel.stagedImage = image
+ }
+ }
}
}
#Preview {
- ChatView()
+ ChatView(viewModel: ChatViewModel(), selectedTab: .constant(.chat))
}
diff --git a/Chowder/Chowder/Views/CronJobsView.swift b/Chowder/Chowder/Views/CronJobsView.swift
new file mode 100644
index 0000000..d5e45e5
--- /dev/null
+++ b/Chowder/Chowder/Views/CronJobsView.swift
@@ -0,0 +1,155 @@
+import SwiftUI
+
+struct CronJobsView: View {
+ var chatService: ChatService?
+ var isConnected: Bool
+ @Binding var selectedTab: Tab
+ var viewModel: ChatViewModel
+
+ @State private var jobs: [CronJob] = []
+ @State private var isLoading = false
+ @State private var errorMessage: String?
+
+ var body: some View {
+ NavigationStack {
+ VStack(spacing: 0) {
+ // Reuse the same header with the pill toggle
+ ChatHeaderView(
+ botName: viewModel.botName,
+ isOnline: viewModel.isConnected,
+ avatarImage: viewModel.avatarImage,
+ selectedTab: $selectedTab,
+ onSettingsTapped: { viewModel.showSettings = true },
+ onDebugTapped: { viewModel.showDebugLog = true }
+ )
+
+ Group {
+ if !isConnected {
+ ContentUnavailableView(
+ "Not Connected",
+ systemImage: "wifi.slash",
+ description: Text("Connect to the gateway to view cron jobs.")
+ )
+ } else if isLoading && jobs.isEmpty {
+ ProgressView("Loading cron jobs...")
+ } else if let error = errorMessage, jobs.isEmpty {
+ ContentUnavailableView(
+ "Error",
+ systemImage: "exclamationmark.triangle",
+ description: Text(error)
+ )
+ } else if jobs.isEmpty {
+ ContentUnavailableView(
+ "No Cron Jobs",
+ systemImage: "clock.arrow.circlepath",
+ description: Text("No cron jobs configured on the gateway.")
+ )
+ } else {
+ List {
+ ForEach(jobs) { job in
+ NavigationLink(destination: CronJobDetailView(job: job, chatService: chatService)) {
+ CronJobRow(job: job)
+ }
+ }
+ }
+ .refreshable {
+ await fetchJobs()
+ }
+ }
+ }
+ }
+ .navigationBarHidden(true)
+ .onAppear {
+ if jobs.isEmpty {
+ Task { await fetchJobs() }
+ }
+ }
+ .onChange(of: isConnected) { _, connected in
+ if connected && jobs.isEmpty {
+ Task { await fetchJobs() }
+ }
+ }
+ }
+ }
+
+ @MainActor
+ private func fetchJobs() async {
+ guard let chatService, isConnected else { return }
+ isLoading = true
+ errorMessage = nil
+
+ await withCheckedContinuation { continuation in
+ chatService.fetchCronJobs { ok, rawJobs in
+ if ok, let rawJobs {
+ self.jobs = rawJobs.compactMap { CronJob.from(dict: $0) }
+ .sorted { $0.name.localizedCaseInsensitiveCompare($1.name) == .orderedAscending }
+ } else if !ok {
+ self.errorMessage = "Failed to fetch cron jobs."
+ }
+ self.isLoading = false
+ continuation.resume()
+ }
+ }
+ }
+}
+
+// MARK: - Row
+
+private struct CronJobRow: View {
+ let job: CronJob
+
+ var body: some View {
+ HStack(spacing: 12) {
+ // Status indicator
+ Circle()
+ .fill(statusColor)
+ .frame(width: 10, height: 10)
+
+ VStack(alignment: .leading, spacing: 4) {
+ HStack {
+ Text(job.name)
+ .font(.system(size: 16, weight: .medium))
+ .lineLimit(1)
+
+ if !job.enabled {
+ Text("DISABLED")
+ .font(.system(size: 10, weight: .semibold))
+ .foregroundStyle(.secondary)
+ .padding(.horizontal, 6)
+ .padding(.vertical, 2)
+ .background(Color(.systemGray5))
+ .clipShape(Capsule())
+ }
+ }
+
+ Text(job.schedule.humanReadable)
+ .font(.system(size: 13))
+ .foregroundStyle(.secondary)
+ }
+
+ Spacer()
+
+ // Next run
+ if let nextMs = job.state.nextRunAtMs {
+ VStack(alignment: .trailing, spacing: 2) {
+ Text("Next")
+ .font(.system(size: 10))
+ .foregroundStyle(.tertiary)
+ Text(CronJob.relativeTime(fromMs: nextMs))
+ .font(.system(size: 12, weight: .medium))
+ .foregroundStyle(.secondary)
+ }
+ }
+ }
+ .padding(.vertical, 4)
+ }
+
+ private var statusColor: Color {
+ guard job.enabled else { return Color(.systemGray4) }
+ switch job.state.lastRunStatus {
+ case "ok": return .green
+ case "error": return .red
+ default: return Color(.systemGray4)
+ }
+ }
+}
diff --git a/Chowder/Chowder/Views/ImagePickerView.swift b/Chowder/Chowder/Views/ImagePickerView.swift
new file mode 100644
index 0000000..d531007
--- /dev/null
+++ b/Chowder/Chowder/Views/ImagePickerView.swift
@@ -0,0 +1,127 @@
+import SwiftUI
+import PhotosUI
+
+struct ImagePickerView: View {
+ @Environment(\.dismiss) private var dismiss
+ @State private var selectedItem: PhotosPickerItem?
+ @State private var showCamera = false
+ var onImageSelected: (UIImage) -> Void
+
+ var body: some View {
+ NavigationStack {
+ VStack(spacing: 24) {
+ // Camera option
+ Button {
+ showCamera = true
+ } label: {
+ Label("Take Photo", systemImage: "camera.fill")
+ .font(.system(size: 17, weight: .medium))
+ .frame(maxWidth: .infinity)
+ .padding(.vertical, 14)
+ .background(Color(.systemGray6))
+ .clipShape(RoundedRectangle(cornerRadius: 12))
+ }
+
+ // Photo library picker
+ PhotosPicker(selection: $selectedItem, matching: .images) {
+ Label("Choose from Library", systemImage: "photo.on.rectangle")
+ .font(.system(size: 17, weight: .medium))
+ .frame(maxWidth: .infinity)
+ .padding(.vertical, 14)
+ .background(Color(.systemGray6))
+ .clipShape(RoundedRectangle(cornerRadius: 12))
+ }
+
+ Spacer()
+ }
+ .padding(24)
+ .navigationTitle("Add Image")
+ .navigationBarTitleDisplayMode(.inline)
+ .toolbar {
+ ToolbarItem(placement: .cancellationAction) {
+ Button("Cancel") { dismiss() }
+ }
+ }
+ .onChange(of: selectedItem) { _, newItem in
+ guard let newItem else { return }
+ Task {
+ if let data = try? await newItem.loadTransferable(type: Data.self),
+ let uiImage = UIImage(data: data) {
+ let compressed = compressImage(uiImage)
+ onImageSelected(compressed)
+ dismiss()
+ }
+ }
+ }
+ .fullScreenCover(isPresented: $showCamera) {
+ CameraView { image in
+ let compressed = compressImage(image)
+ onImageSelected(compressed)
+ dismiss()
+ }
+ .ignoresSafeArea()
+ }
+ }
+ }
+
+ /// Downscale the image to at most 1920px on its longest side, then re-encode as JPEG at 0.7 quality.
+ private func compressImage(_ image: UIImage) -> UIImage {
+ let maxDimension: CGFloat = 1920
+ var resized = image
+
+ // Downscale if too large
+ if image.size.width > maxDimension || image.size.height > maxDimension {
+ let scale = min(maxDimension / image.size.width, maxDimension / image.size.height)
+ let newSize = CGSize(width: image.size.width * scale, height: image.size.height * scale)
+ let renderer = UIGraphicsImageRenderer(size: newSize)
+ resized = renderer.image { _ in
+ image.draw(in: CGRect(origin: .zero, size: newSize))
+ }
+ }
+
+ // Compress to JPEG
+ if let data = resized.jpegData(compressionQuality: 0.7),
+ let compressed = UIImage(data: data) {
+ return compressed
+ }
+ return resized
+ }
+}
+
+// MARK: - Camera View (UIKit wrapper)
+
+struct CameraView: UIViewControllerRepresentable {
+ var onImageCaptured: (UIImage) -> Void
+
+ func makeUIViewController(context: Context) -> UIImagePickerController {
+ let picker = UIImagePickerController()
+ picker.sourceType = .camera
+ picker.delegate = context.coordinator
+ return picker
+ }
+
+ func updateUIViewController(_ uiViewController: UIImagePickerController, context: Context) {}
+
+ func makeCoordinator() -> Coordinator {
+ Coordinator(onImageCaptured: onImageCaptured)
+ }
+
+ class Coordinator: NSObject, UIImagePickerControllerDelegate, UINavigationControllerDelegate {
+ let onImageCaptured: (UIImage) -> Void
+
+ init(onImageCaptured: @escaping (UIImage) -> Void) {
+ self.onImageCaptured = onImageCaptured
+ }
+
+ func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey: Any]) {
+ if let image = info[.originalImage] as? UIImage {
+ onImageCaptured(image)
+ }
+ picker.dismiss(animated: true)
+ }
+
+ func imagePickerControllerDidCancel(_ picker: UIImagePickerController) {
+ picker.dismiss(animated: true)
+ }
+ }
+}
diff --git a/Chowder/Chowder/Views/MainTabView.swift b/Chowder/Chowder/Views/MainTabView.swift
new file mode 100644
index 0000000..7693f36
--- /dev/null
+++ b/Chowder/Chowder/Views/MainTabView.swift
@@ -0,0 +1,61 @@
+import SwiftUI
+
+enum Tab: String, CaseIterable {
+ case sessions, chat, cron
+}
+
+struct MainTabView: View {
+ @State private var viewModel = ChatViewModel()
+ @State private var selectedTab: Tab = .chat
+ @Environment(\.scenePhase) private var scenePhase
+
+ var body: some View {
+ ZStack {
+ SessionListView(viewModel: viewModel, selectedTab: $selectedTab)
+ .opacity(selectedTab == .sessions ? 1 : 0)
+ .allowsHitTesting(selectedTab == .sessions)
+
+ ChatView(viewModel: viewModel, selectedTab: $selectedTab)
+ .opacity(selectedTab == .chat ? 1 : 0)
+ .allowsHitTesting(selectedTab == .chat)
+
+ CronJobsView(
+ chatService: viewModel.exposedChatService,
+ isConnected: viewModel.isConnected,
+ selectedTab: $selectedTab,
+ viewModel: viewModel
+ )
+ .opacity(selectedTab == .cron ? 1 : 0)
+ .allowsHitTesting(selectedTab == .cron)
+ }
+ .onAppear {
+ viewModel.connect()
+ }
+ .onChange(of: scenePhase) { oldPhase, newPhase in
+ if newPhase == .background {
+ viewModel.didEnterBackground()
+ } else if newPhase == .active && oldPhase != .active {
+ viewModel.didReturnToForeground()
+ if oldPhase == .background {
+ viewModel.reconnect()
+ }
+ }
+ }
+ .alert("Device Not Paired", isPresented: $viewModel.showNotPairedAlert) {
+ Button("Open Gateway") {
+ let urlString = ConnectionConfig().gatewayURL
+ .replacingOccurrences(of: "ws://", with: "http://")
+ .replacingOccurrences(of: "wss://", with: "https://")
+ if let url = URL(string: urlString) {
+ UIApplication.shared.open(url)
+ }
+ }
+ Button("Open Settings") {
+ viewModel.showSettings = true
+ }
+ Button("Cancel", role: .cancel) {}
+ } message: {
+ Text("This device's identity has changed (e.g. after reinstall). Please re-approve it in your gateway's device management, then reconnect.")
+ }
+ }
+}
diff --git a/Chowder/Chowder/Views/MessageBubbleView.swift b/Chowder/Chowder/Views/MessageBubbleView.swift
index 1fcdfdf..47b6002 100644
--- a/Chowder/Chowder/Views/MessageBubbleView.swift
+++ b/Chowder/Chowder/Views/MessageBubbleView.swift
@@ -9,24 +9,39 @@ struct MessageBubbleView: View {
Spacer(minLength: 60)
}
- Group {
- if message.role == .assistant {
- MarkdownContentView(message.content, foregroundColor: Color(.label))
- .font(.system(size: 17))
- .textSelection(.enabled)
- } else {
- Text(message.content)
- .font(.system(size: 17, weight: .regular, design: .default))
- .foregroundStyle(.white)
+ VStack(alignment: message.role == .user ? .trailing : .leading, spacing: 6) {
+ // Image attachment (if any)
+ if let imageData = message.imageData,
+ let uiImage = UIImage(data: imageData) {
+ Image(uiImage: uiImage)
+ .resizable()
+ .scaledToFit()
+ .frame(maxWidth: 220, maxHeight: 220)
+ .clipShape(RoundedRectangle(cornerRadius: 14))
+ }
+
+ // Text content
+ if !message.content.isEmpty {
+ Group {
+ if message.role == .assistant {
+ MarkdownContentView(message.content, foregroundColor: Color(.label))
+ .font(.system(size: 17))
+ .textSelection(.enabled)
+ } else {
+ Text(message.content)
+ .font(.system(size: 17, weight: .regular, design: .default))
+ .foregroundStyle(.white)
+ }
+ }
+ .padding(message.role == .user ? 12 : 0)
+ .background(
+ message.role == .user
+ ? RoundedRectangle(cornerRadius: 18)
+ .fill(Color.blue)
+ : nil
+ )
}
}
- .padding(message.role == .user ? 12 : 0)
- .background(
- message.role == .user
- ? RoundedRectangle(cornerRadius: 18)
- .fill(Color.blue)
- : nil
- )
.contextMenu {
Button("Copy") {
UIPasteboard.general.string = message.content
diff --git a/Chowder/Chowder/Views/MessageSearchView.swift b/Chowder/Chowder/Views/MessageSearchView.swift
new file mode 100644
index 0000000..4d0e4e0
--- /dev/null
+++ b/Chowder/Chowder/Views/MessageSearchView.swift
@@ -0,0 +1,131 @@
+import SwiftUI
+import Combine
+
+struct MessageSearchView: View {
+ let messages: [Message]
+ @Binding var isPresented: Bool
+ var onResultTapped: (UUID) -> Void
+
+ @State private var query = ""
+ @State private var results: [Message] = []
+ @FocusState private var isFocused: Bool
+
+ var body: some View {
+ VStack(spacing: 0) {
+ // Search bar
+ HStack(spacing: 10) {
+ HStack(spacing: 8) {
+ Image(systemName: "magnifyingglass")
+ .font(.system(size: 14))
+ .foregroundStyle(.gray)
+
+ TextField("Search messages...", text: $query)
+ .font(.system(size: 15))
+ .focused($isFocused)
+ .autocorrectionDisabled()
+ }
+ .padding(.horizontal, 12)
+ .padding(.vertical, 8)
+ .background(Color(.systemGray5))
+ .clipShape(RoundedRectangle(cornerRadius: 10))
+
+ Button("Cancel") {
+ isPresented = false
+ }
+ .font(.system(size: 15))
+ }
+ .padding(.horizontal, 16)
+ .padding(.vertical, 10)
+ .background(.ultraThinMaterial)
+
+ // Results
+ if !query.isEmpty {
+ if results.isEmpty {
+ VStack(spacing: 8) {
+ Text("No results")
+ .font(.system(size: 15, weight: .medium))
+ .foregroundStyle(.secondary)
+ }
+ .frame(maxWidth: .infinity)
+ .padding(.top, 24)
+ .background(Color(.systemBackground).opacity(0.95))
+ } else {
+ ScrollView {
+ LazyVStack(alignment: .leading, spacing: 2) {
+ ForEach(results) { message in
+ Button {
+ onResultTapped(message.id)
+ } label: {
+ VStack(alignment: .leading, spacing: 4) {
+ HStack(spacing: 6) {
+ Text(message.role == .user ? "You" : "Assistant")
+ .font(.system(size: 12, weight: .semibold))
+ .foregroundStyle(message.role == .user ? .blue : .green)
+
+ Text(message.timestamp, style: .relative)
+ .font(.system(size: 11))
+ .foregroundStyle(.tertiary)
+ }
+
+ Text(highlightedSnippet(message.content, query: query))
+ .font(.system(size: 14))
+ .foregroundStyle(.primary)
+ .lineLimit(3)
+ }
+ .padding(.horizontal, 16)
+ .padding(.vertical, 10)
+ .frame(maxWidth: .infinity, alignment: .leading)
+ }
+ .buttonStyle(.plain)
+
+ Divider().padding(.leading, 16)
+ }
+ }
+ }
+ .background(Color(.systemBackground).opacity(0.95))
+ .frame(maxHeight: 300)
+ }
+ }
+ }
+ .onAppear {
+ isFocused = true
+ }
+ .onChange(of: query) {
+ performSearch()
+ }
+ }
+
+ /// Filters messages whose content contains the query (case-insensitive); runs on every keystroke (no debounce).
+ private func performSearch() {
+ let trimmed = query.trimmingCharacters(in: .whitespacesAndNewlines).lowercased()
+ guard !trimmed.isEmpty else {
+ results = []
+ return
+ }
+ results = messages.filter { msg in
+ msg.content.lowercased().contains(trimmed)
+ }
+ }
+
+ /// Build an attributed snippet with the query highlighted.
+ private func highlightedSnippet(_ content: String, query: String) -> AttributedString {
+ let snippet = String(content.prefix(200))
+ var attributed = AttributedString(snippet)
+
+ let lowSnippet = snippet.lowercased()
+ let lowQuery = query.lowercased()
+
+ var searchStart = lowSnippet.startIndex
+ while let range = lowSnippet.range(of: lowQuery, range: searchStart..