
Commit

Bring the vulkan backend to the GUI.
manyoso committed Sep 13, 2023
1 parent f0735ef commit 8f99dca
Showing 6 changed files with 133 additions and 17 deletions.
2 changes: 1 addition & 1 deletion gpt4all-backend/llama.cpp-mainline
34 changes: 34 additions & 0 deletions gpt4all-chat/chatllm.cpp
@@ -81,6 +81,7 @@ ChatLLM::ChatLLM(Chat *parent, bool isServer)
connect(parent, &Chat::idChanged, this, &ChatLLM::handleChatIdChanged);
connect(&m_llmThread, &QThread::started, this, &ChatLLM::handleThreadStarted);
connect(MySettings::globalInstance(), &MySettings::forceMetalChanged, this, &ChatLLM::handleForceMetalChanged);
connect(MySettings::globalInstance(), &MySettings::deviceChanged, this, &ChatLLM::handleDeviceChanged);

// The following are blocking operations and will block the llm thread
connect(this, &ChatLLM::requestRetrieveFromDB, LocalDocs::globalInstance()->database(), &Database::retrieveFromDB,
@@ -124,6 +125,16 @@ void ChatLLM::handleForceMetalChanged(bool forceMetal)
#endif
}

void ChatLLM::handleDeviceChanged()
{
if (isModelLoaded() && m_shouldBeLoaded) {
m_reloadingToChangeVariant = true;
unloadModel();
reloadModel();
m_reloadingToChangeVariant = false;
}
}

bool ChatLLM::loadDefaultModel()
{
ModelInfo defaultModel = ModelList::globalInstance()->defaultModelInfo();
@@ -250,7 +261,30 @@ bool ChatLLM::loadModel(const ModelInfo &modelInfo)
#endif

if (m_llModelInfo.model) {
// Update the settings to indicate that a model is being loaded and refresh the device list
MySettings::globalInstance()->setAttemptModelLoad(filePath);
std::vector<LLModel::GPUDevice> devices = m_llModelInfo.model->availableGPUDevices(0);
QVector<QString> deviceList{ "Auto" };
for (LLModel::GPUDevice &d : devices)
deviceList << QString::fromStdString(d.name);
deviceList << "CPU";
MySettings::globalInstance()->setDeviceList(deviceList);

// Pick the best match for the device
const QString requestedDevice = MySettings::globalInstance()->device();
if (requestedDevice != "CPU") {
const size_t requiredMemory = m_llModelInfo.model->requiredMem(filePath.toStdString());
std::vector<LLModel::GPUDevice> availableDevices = m_llModelInfo.model->availableGPUDevices(requiredMemory);
if (!availableDevices.empty() && requestedDevice == "Auto") {
m_llModelInfo.model->initializeGPUDevice(devices.front());
} else {
for (LLModel::GPUDevice &d : availableDevices) {
if (QString::fromStdString(d.name) == requestedDevice)
m_llModelInfo.model->initializeGPUDevice(d);
}
}
}

bool success = m_llModelInfo.model->loadModel(filePath.toStdString());
MySettings::globalInstance()->setAttemptModelLoad(QString());
if (!success) {
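Taken together, the hunk above implements a simple selection order for the new device setting: an explicit "CPU" choice skips GPU initialization entirely, "Auto" takes the first reported device, and any other value must match a reported device name, otherwise loading falls through to the CPU path. The sketch below restates that order in isolation; GPUDevice and pickDevice are illustrative stand-ins rather than the backend's real API, and where the commit's "Auto" branch uses the first entry of the unfiltered device list, the sketch uses the memory-filtered one.

// Illustrative sketch of the selection order in the hunk above; not the backend's API.
#include <optional>
#include <string>
#include <vector>

struct GPUDevice { std::string name; };   // stand-in for LLModel::GPUDevice

// Returns the GPU to initialize, or std::nullopt to fall back to the CPU path.
std::optional<GPUDevice> pickDevice(const std::string &requested,
                                    const std::vector<GPUDevice> &available)
{
    if (requested == "CPU" || available.empty())
        return std::nullopt;          // explicit CPU choice, or no device satisfies the memory requirement
    if (requested == "Auto")
        return available.front();     // first device that fits (the commit takes the first unfiltered device here)
    for (const GPUDevice &d : available)
        if (d.name == requested)
            return d;                 // exact match on the name stored in settings
    return std::nullopt;              // requested device not found: fall back to CPU
}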
1 change: 1 addition & 0 deletions gpt4all-chat/chatllm.h
@@ -111,6 +111,7 @@ public Q_SLOTS:
void handleShouldBeLoadedChanged();
void handleThreadStarted();
void handleForceMetalChanged(bool forceMetal);
void handleDeviceChanged();
void processSystemPrompt();

Q_SIGNALS:
37 changes: 35 additions & 2 deletions gpt4all-chat/mysettings.cpp
@@ -23,6 +23,7 @@ static bool default_localDocsShowReferences = true;
static QString default_networkAttribution = "";
static bool default_networkIsActive = false;
static bool default_networkUsageStatsActive = false;
static QString default_device = "Auto";

static QString defaultLocalModelsPath()
{
@@ -64,6 +65,17 @@ MySettings::MySettings()
QSettings::setDefaultFormat(QSettings::IniFormat);
}

Q_INVOKABLE QVector<QString> MySettings::deviceList() const
{
return m_deviceList;
}

void MySettings::setDeviceList(const QVector<QString> &deviceList)
{
m_deviceList = deviceList;
emit deviceListChanged();
}

void MySettings::restoreModelDefaults(const ModelInfo &model)
{
setModelTemperature(model, model.m_temperature);
@@ -79,6 +91,9 @@ void MySettings::restoreModelDefaults(const ModelInfo &model)

void MySettings::restoreApplicationDefaults()
{
setChatTheme(default_chatTheme);
setFontSize(default_fontSize);
setDevice(default_device);
setThreadCount(default_threadCount);
setSaveChats(default_saveChats);
setSaveChatGPTChats(default_saveChatGPTChats);
@@ -485,7 +500,7 @@ QString MySettings::chatTheme() const

void MySettings::setChatTheme(const QString &u)
{
if(chatTheme() == u)
if (chatTheme() == u)
return;

QSettings setting;
Expand All @@ -503,7 +518,7 @@ QString MySettings::fontSize() const

void MySettings::setFontSize(const QString &u)
{
if(fontSize() == u)
if (fontSize() == u)
return;

QSettings setting;
@@ -512,6 +527,24 @@ void MySettings::setFontSize(const QString &u)
emit fontSizeChanged();
}

QString MySettings::device() const
{
QSettings setting;
setting.sync();
return setting.value("device", default_device).toString();
}

void MySettings::setDevice(const QString &u)
{
if (device() == u)
return;

QSettings setting;
setting.setValue("device", u);
setting.sync();
emit deviceChanged();
}

bool MySettings::forceMetal() const
{
return m_forceMetal;
10 changes: 10 additions & 0 deletions gpt4all-chat/mysettings.h
@@ -25,6 +25,8 @@ class MySettings : public QObject
Q_PROPERTY(QString networkAttribution READ networkAttribution WRITE setNetworkAttribution NOTIFY networkAttributionChanged)
Q_PROPERTY(bool networkIsActive READ networkIsActive WRITE setNetworkIsActive NOTIFY networkIsActiveChanged)
Q_PROPERTY(bool networkUsageStatsActive READ networkUsageStatsActive WRITE setNetworkUsageStatsActive NOTIFY networkUsageStatsActiveChanged)
Q_PROPERTY(QString device READ device WRITE setDevice NOTIFY deviceChanged)
Q_PROPERTY(QVector<QString> deviceList READ deviceList NOTIFY deviceListChanged)

public:
static MySettings *globalInstance();
@@ -78,6 +80,8 @@ class MySettings : public QObject
void setFontSize(const QString &u);
bool forceMetal() const;
void setForceMetal(bool b);
QString device() const;
void setDevice(const QString &u);

// Release/Download settings
QString lastVersionStarted() const;
@@ -102,6 +106,9 @@
QString attemptModelLoad() const;
void setAttemptModelLoad(const QString &modelFile);

QVector<QString> deviceList() const;
void setDeviceList(const QVector<QString> &deviceList);

Q_SIGNALS:
void nameChanged(const ModelInfo &model);
void filenameChanged(const ModelInfo &model);
@@ -131,9 +138,12 @@ class MySettings : public QObject
void networkIsActiveChanged();
void networkUsageStatsActiveChanged();
void attemptModelLoadChanged();
void deviceChanged();
void deviceListChanged();

private:
bool m_forceMetal;
QVector<QString> m_deviceList;

private:
explicit MySettings();
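For context, the device and deviceList properties declared in this header are what the QML settings page below binds against: because they are Q_PROPERTYs with NOTIFY signals on a singleton exposed to QML, a binding such as model: MySettings.deviceList re-evaluates whenever deviceListChanged() is emitted. The following sketch shows one way such a singleton can be exposed; the context-property registration and the qrc path are illustrative assumptions, not necessarily how gpt4all-chat's main() does it.

// Hypothetical exposure of the settings singleton to QML; registration details are assumed.
#include <QGuiApplication>
#include <QQmlApplicationEngine>
#include <QQmlContext>
#include "mysettings.h"   // the header shown above

int main(int argc, char *argv[])
{
    QGuiApplication app(argc, argv);
    QQmlApplicationEngine engine;
    // Make the singleton reachable as "MySettings" from QML, so bindings like
    // model: MySettings.deviceList update live when the NOTIFY signals fire.
    engine.rootContext()->setContextProperty("MySettings", MySettings::globalInstance());
    engine.load(QUrl(QStringLiteral("qrc:/main.qml")));   // illustrative path
    return app.exec();
}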
66 changes: 52 additions & 14 deletions gpt4all-chat/qml/ApplicationSettings.qml
@@ -88,17 +88,55 @@ MySettingsTab {
MySettings.fontSize = fontBox.currentText
}
}
Label {
id: deviceLabel
text: qsTr("Device:")
color: theme.textColor
font.pixelSize: theme.fontSizeLarge
Layout.row: 3
Layout.column: 0
}
MyComboBox {
id: deviceBox
Layout.row: 3
Layout.column: 1
Layout.columnSpan: 1
Layout.minimumWidth: 350
Layout.fillWidth: false
model: MySettings.deviceList
Accessible.role: Accessible.ComboBox
Accessible.name: qsTr("ComboBox for displaying/picking the device")
Accessible.description: qsTr("Use this for picking the device of the chat client")
function updateModel() {
deviceBox.currentIndex = deviceBox.indexOfValue(MySettings.device);
}
Component.onCompleted: {
deviceBox.updateModel()
}
Connections {
target: MySettings
function onDeviceChanged() {
deviceBox.updateModel()
}
function onDeviceListChanged() {
deviceBox.updateModel()
}
}
onActivated: {
MySettings.device = deviceBox.currentText
}
}
Label {
id: defaultModelLabel
text: qsTr("Default model:")
color: theme.textColor
font.pixelSize: theme.fontSizeLarge
Layout.row: 3
Layout.row: 4
Layout.column: 0
}
MyComboBox {
id: comboBox
Layout.row: 3
Layout.row: 4
Layout.column: 1
Layout.columnSpan: 2
Layout.minimumWidth: 350
@@ -128,15 +166,15 @@
text: qsTr("Download path:")
color: theme.textColor
font.pixelSize: theme.fontSizeLarge
Layout.row: 4
Layout.row: 5
Layout.column: 0
}
MyDirectoryField {
id: modelPathDisplayField
text: MySettings.modelPath
font.pixelSize: theme.fontSizeLarge
implicitWidth: 300
Layout.row: 4
Layout.row: 5
Layout.column: 1
Layout.fillWidth: true
ToolTip.text: qsTr("Path where model files will be downloaded to")
@@ -153,7 +191,7 @@
}
}
MyButton {
Layout.row: 4
Layout.row: 5
Layout.column: 2
text: qsTr("Browse")
Accessible.description: qsTr("Opens a folder picker dialog to choose where to save model files")
@@ -168,7 +206,7 @@
text: qsTr("CPU Threads:")
color: theme.textColor
font.pixelSize: theme.fontSizeLarge
Layout.row: 5
Layout.row: 6
Layout.column: 0
}
MyTextField {
@@ -177,7 +215,7 @@
font.pixelSize: theme.fontSizeLarge
ToolTip.text: qsTr("Amount of processing threads to use bounded by 1 and number of logical processors")
ToolTip.visible: hovered
Layout.row: 5
Layout.row: 6
Layout.column: 1
validator: IntValidator {
bottom: 1
@@ -200,12 +238,12 @@
text: qsTr("Save chats to disk:")
color: theme.textColor
font.pixelSize: theme.fontSizeLarge
Layout.row: 6
Layout.row: 7
Layout.column: 0
}
MyCheckBox {
id: saveChatsBox
Layout.row: 6
Layout.row: 7
Layout.column: 1
checked: MySettings.saveChats
onClicked: {
@@ -220,12 +258,12 @@
text: qsTr("Save ChatGPT chats to disk:")
color: theme.textColor
font.pixelSize: theme.fontSizeLarge
Layout.row: 7
Layout.row: 8
Layout.column: 0
}
MyCheckBox {
id: saveChatGPTChatsBox
Layout.row: 7
Layout.row: 8
Layout.column: 1
checked: MySettings.saveChatGPTChats
onClicked: {
@@ -237,12 +275,12 @@
text: qsTr("Enable API server:")
color: theme.textColor
font.pixelSize: theme.fontSizeLarge
Layout.row: 8
Layout.row: 9
Layout.column: 0
}
MyCheckBox {
id: serverChatBox
Layout.row: 8
Layout.row: 9
Layout.column: 1
checked: MySettings.serverChat
onClicked: {
@@ -252,7 +290,7 @@
ToolTip.visible: hovered
}
Rectangle {
Layout.row: 9
Layout.row: 10
Layout.column: 0
Layout.columnSpan: 3
Layout.fillWidth: true
