chat: set search path early
This fixes the issues with installed versions of v2.6.0.
cebtenzzre committed Jan 11, 2024
1 parent f7aeeca commit 7e9786f
Showing 5 changed files with 27 additions and 27 deletions.
6 changes: 3 additions & 3 deletions gpt4all-backend/llmodel.cpp
@@ -192,7 +192,7 @@ LLModel *LLModel::Implementation::construct(const std::string &modelPath, std::s
     return fres;
 }

-LLModel *LLModel::Implementation::constructCpuLlama() {
+LLModel *LLModel::Implementation::constructDefaultLlama() {
     const LLModel::Implementation *impl = nullptr;
     for (const auto &i : implementationList()) {
         if (i.m_buildVariant == "metal" || i.m_modelType != "LLaMA") continue;
@@ -208,8 +208,8 @@ LLModel *LLModel::Implementation::constructCpuLlama() {
 }

 std::vector<LLModel::GPUDevice> LLModel::Implementation::availableGPUDevices() {
-    static LLModel *cpuLlama = LLModel::Implementation::constructCpuLlama(); // (memory leak)
-    if (cpuLlama) { return cpuLlama->availableGPUDevices(0); }
+    static LLModel *llama = LLModel::Implementation::constructDefaultLlama(); // (memory leak)
+    if (llama) { return llama->availableGPUDevices(0); }
     return {};
 }
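For context, the function-local static above constructs the default llama backend exactly once, against whatever implementations search path happens to be set at the moment of the first call; a path set afterwards is never seen. A minimal sketch of that ordering hazard, using hypothetical names rather than the real backend API:

#include <iostream>
#include <string>

// Hypothetical stand-ins for LLModel::Implementation internals.
namespace backend {
    std::string s_searchPath = ".";

    void setImplementationsSearchPath(const std::string &p) { s_searchPath = p; }

    const std::string &scannedSearchPath() {
        // Function-local static: the search path is captured on the first
        // call and never re-read, mirroring the cached backend above.
        static std::string cached = s_searchPath;
        return cached;
    }
}

int main() {
    backend::scannedSearchPath();                           // early call, like availableGPUDevices()
    backend::setImplementationsSearchPath("/opt/app/lib");  // too late: the cache is already built
    std::cout << backend::scannedSearchPath() << '\n';      // prints "." rather than "/opt/app/lib"
}

This is presumably why the commit moves setImplementationsSearchPath() into main() before anything else runs; the "(memory leak)" note marks that the single static instance is simply never freed.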
2 changes: 1 addition & 1 deletion gpt4all-backend/llmodel.h
@@ -43,7 +43,7 @@ class LLModel {
     static const std::string& implementationsSearchPath();

 private:
-    static LLModel *constructCpuLlama();
+    static LLModel *constructDefaultLlama();

     bool (*m_magicMatch)(const char *fname);
     LLModel *(*m_construct)();
23 changes: 4 additions & 19 deletions gpt4all-chat/llm.cpp
@@ -1,15 +1,14 @@
 #include "llm.h"
 #include "../gpt4all-backend/sysinfo.h"
 #include "../gpt4all-backend/llmodel.h"
-#include "network.h"
@Rotonen commented on Jan 11, 2024:
Dropping this include breaks the build.

@draeath commented on Jan 18, 2024:
I concur; I have to add `#include "network.h"` manually in order to build this locally.

@cebtenzzre (author, member) replied on Jan 18, 2024:
This was fixed in b803d51. I didn't notice that this was needed because I was building the offline installer.

 #include <QCoreApplication>
+#include <QDesktopServices>
 #include <QDir>
 #include <QFile>
 #include <QProcess>
 #include <QResource>
 #include <QSettings>
-#include <QDesktopServices>
 #include <QUrl>
 #include <fstream>

 class MyLLM: public LLM { };
@@ -23,20 +22,6 @@ LLM::LLM()
     : QObject{nullptr}
     , m_compatHardware(true)
 {
-    QString llmodelSearchPaths = QCoreApplication::applicationDirPath();
-    const QString libDir = QCoreApplication::applicationDirPath() + "/../lib/";
-    if (directoryExists(libDir))
-        llmodelSearchPaths += ";" + libDir;
-#if defined(Q_OS_MAC)
-    const QString binDir = QCoreApplication::applicationDirPath() + "/../../../";
-    if (directoryExists(binDir))
-        llmodelSearchPaths += ";" + binDir;
-    const QString frameworksDir = QCoreApplication::applicationDirPath() + "/../Frameworks/";
-    if (directoryExists(frameworksDir))
-        llmodelSearchPaths += ";" + frameworksDir;
-#endif
-    LLModel::Implementation::setImplementationsSearchPath(llmodelSearchPaths.toStdString());
-
 #if defined(__x86_64__)
 #ifndef _MSC_VER
     const bool minimal(__builtin_cpu_supports("avx"));
@@ -86,15 +71,15 @@ bool LLM::checkForUpdates() const
 #endif
 }

-bool LLM::directoryExists(const QString &path) const
+bool LLM::directoryExists(const QString &path)
 {
     const QUrl url(path);
     const QString localFilePath = url.isLocalFile() ? url.toLocalFile() : path;
     const QFileInfo info(localFilePath);
     return info.exists() && info.isDir();
 }

-bool LLM::fileExists(const QString &path) const
+bool LLM::fileExists(const QString &path)
 {
     const QUrl url(path);
     const QString localFilePath = url.isLocalFile() ? url.toLocalFile() : path;
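One detail of the helper above: it normalizes file:// URLs via QUrl::toLocalFile() before checking the filesystem, so callers can pass either form. A quick usage sketch (fragment only, with illustrative paths):

// Both calls check the same directory after QUrl normalization; the second
// form is what a QML FileDialog typically hands over.
bool asPath = LLM::directoryExists(QStringLiteral("/opt/gpt4all/lib"));
bool asUrl  = LLM::directoryExists(QStringLiteral("file:///opt/gpt4all/lib"));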
4 changes: 2 additions & 2 deletions gpt4all-chat/llm.h
@@ -13,8 +13,8 @@ class LLM : public QObject
     Q_INVOKABLE bool compatHardware() const { return m_compatHardware; }

     Q_INVOKABLE bool checkForUpdates() const;
-    Q_INVOKABLE bool directoryExists(const QString &path) const;
-    Q_INVOKABLE bool fileExists(const QString &path) const;
+    Q_INVOKABLE static bool directoryExists(const QString &path);
+    Q_INVOKABLE static bool fileExists(const QString &path);
     Q_INVOKABLE qint64 systemTotalRAMInGB() const;
     Q_INVOKABLE QString systemTotalRAMInGBString() const;
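The signature change above is what lets main() call these helpers before the LLM singleton exists. A sketch of the pattern with a hypothetical class (not from the commit): a member that is both Q_INVOKABLE and static stays visible to QML through the registered singleton's meta-object while becoming callable from plain C++ with no instance.

#include <QObject>
#include <QString>

class PathChecks : public QObject {
    Q_OBJECT
public:
    // Q_INVOKABLE keeps this reachable from QML via the singleton;
    // static lets C++ use it before any instance is constructed.
    Q_INVOKABLE static bool looksAbsolute(const QString &path) {
        return path.startsWith(QLatin1Char('/'));  // illustrative check only
    }
};

// C++ call site, valid before the QML engine or any PathChecks object exists:
//     if (PathChecks::looksAbsolute(dir)) { ... }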
19 changes: 17 additions & 2 deletions gpt4all-chat/main.cpp
@@ -1,9 +1,8 @@
+#include <QDirIterator>
 #include <QGuiApplication>
 #include <QQmlApplicationEngine>
 #include <QQmlContext>

-#include <QDirIterator>
-
 #include "llm.h"
 #include "modellist.h"
 #include "chatlistmodel.h"
@@ -13,6 +12,7 @@
 #include "mysettings.h"
 #include "config.h"
 #include "logger.h"
+#include "../gpt4all-backend/llmodel.h"

 int main(int argc, char *argv[])
 {
@@ -25,6 +25,21 @@ int main(int argc, char *argv[])

     QGuiApplication app(argc, argv);
     QQmlApplicationEngine engine;
+
+    QString llmodelSearchPaths = QCoreApplication::applicationDirPath();
+    const QString libDir = QCoreApplication::applicationDirPath() + "/../lib/";
+    if (LLM::directoryExists(libDir))
+        llmodelSearchPaths += ";" + libDir;
+#if defined(Q_OS_MAC)
+    const QString binDir = QCoreApplication::applicationDirPath() + "/../../../";
+    if (LLM::directoryExists(binDir))
+        llmodelSearchPaths += ";" + binDir;
+    const QString frameworksDir = QCoreApplication::applicationDirPath() + "/../Frameworks/";
+    if (LLM::directoryExists(frameworksDir))
+        llmodelSearchPaths += ";" + frameworksDir;
+#endif
+    LLModel::Implementation::setImplementationsSearchPath(llmodelSearchPaths.toStdString());
+
     qmlRegisterSingletonInstance("mysettings", 1, 0, "MySettings", MySettings::globalInstance());
     qmlRegisterSingletonInstance("modellist", 1, 0, "ModelList", ModelList::globalInstance());
     qmlRegisterSingletonInstance("chatlistmodel", 1, 0, "ChatListModel", ChatListModel::globalInstance());
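The block added to main() is the old LLM constructor logic with directoryExists() now called statically. A condensed sketch (hypothetical helper, not part of the commit) of the same path assembly:

#include <QCoreApplication>
#include <QDir>
#include <QString>
#include <QStringList>

static QString buildLlmodelSearchPaths() {
    const QString appDir = QCoreApplication::applicationDirPath();

    QStringList candidates{appDir + "/../lib/"};
#if defined(Q_OS_MAC)
    candidates << appDir + "/../../../"          // the app bundle's bin directory
               << appDir + "/../Frameworks/";
#endif

    QStringList paths{appDir};
    for (const QString &dir : candidates)
        if (QDir(dir).exists())                  // stands in for LLM::directoryExists()
            paths << dir;
    return paths.join(QLatin1Char(';'));         // the backend parses a ';'-separated list
}

For a hypothetical Linux install under /opt/gpt4all/bin this yields "/opt/gpt4all/bin;/opt/gpt4all/bin/../lib/"; the behavior matches the removed constructor code, but it now runs before the first backend call.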
