Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Adds support for on-device LLMs with SpeziLLMLocal #39

Merged
merged 14 commits
May 13, 2024
Prev Previous commit
Next Next commit
Update UI tests
vishnuravi committed Apr 4, 2024
commit ad5196cea216c962f167f69dfb790164e4be38b4
1 change: 0 additions & 1 deletion HealthGPT/Onboarding/LLMSource.swift
Original file line number Diff line number Diff line change
@@ -7,7 +7,6 @@
//

import Foundation
import SpeziLLMOpenAI


enum LLMSource: String, CaseIterable, Identifiable, Codable {
10 changes: 9 additions & 1 deletion HealthGPTUITests/OnboardingUITests.swift
Original file line number Diff line number Diff line change
@@ -73,8 +73,16 @@ extension XCUIApplication {

let picker = pickers["llmSourcePicker"]
let optionToSelect = picker.pickerWheels.element(boundBy: 0)
optionToSelect.adjust(toPickerWheelValue: "Open AI LLM")
optionToSelect.adjust(toPickerWheelValue: "On-device LLM")

XCTAssertTrue(buttons["Save Choice"].waitForExistence(timeout: 5))
buttons["Save Choice"].tap()

XCTAssertTrue(staticTexts["LLM Download"].waitForExistence(timeout: 5))
XCTAssertTrue(buttons["Back"].waitForExistence(timeout: 2))
buttons["Back"].tap()

optionToSelect.adjust(toPickerWheelValue: "Open AI LLM")
XCTAssertTrue(buttons["Save Choice"].waitForExistence(timeout: 5))
buttons["Save Choice"].tap()
}

Unchanged files with check annotations Beta

if FeatureFlags.mockMode {
await healthDataInterpreter.prepareLLM(with: LLMMockSchema())
} else if FeatureFlags.localLLM || llmSource == .local {
await healthDataInterpreter.prepareLLM(with: LLMLocalSchema(modelPath: .cachesDirectory.appending(path: "llm.gguf")))

Check warning on line 74 in HealthGPT/HealthGPT/HealthGPTView.swift

Codecov / codecov/patch

HealthGPT/HealthGPT/HealthGPTView.swift#L74

Added line #L74 was not covered by tests
} else {
await healthDataInterpreter.prepareLLM(with: LLMOpenAISchema(parameters: .init(modelType: openAIModel)))
}
) { model in
Task {
openAIModel = model
await healthDataInterpreter.prepareLLM(with: LLMOpenAISchema(parameters: .init(modelType: model)))

Check warning on line 109 in HealthGPT/HealthGPT/SettingsView.swift

Codecov / codecov/patch

HealthGPT/HealthGPT/SettingsView.swift#L109

Added line #L109 was not covered by tests
path.removeLast()
}
}
llmDownloadUrl: LLMLocalDownloadManager.LLMUrlDefaults.llama2ChatModelUrl,
llmStorageUrl: .cachesDirectory.appending(path: "llm.gguf")
) {
onboardingNavigationPath.nextStep()
}

Check warning on line 24 in HealthGPT/Onboarding/LLMLocalDownload.swift

Codecov / codecov/patch

HealthGPT/Onboarding/LLMLocalDownload.swift#L23-L24

Added lines #L23 - L24 were not covered by tests
}
}
Disclaimer()
if FeatureFlags.localLLM {
LLMLocalDownload()

Check warning on line 27 in HealthGPT/Onboarding/OnboardingFlow.swift

Codecov / codecov/patch

HealthGPT/Onboarding/OnboardingFlow.swift#L27

Added line #L27 was not covered by tests
} else {
LLMSourceSelection()
}