This repository has been archived by the owner on Oct 30, 2024. It is now read-only.

Commit
llama-cpp now in generateObject
AlephNotation committed Aug 27, 2024
1 parent ba61528 commit b0fef86
Showing 8 changed files with 276 additions and 164 deletions.
302 changes: 242 additions & 60 deletions npm-shrinkwrap.json

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions package.json
@@ -34,7 +34,7 @@
"@types/inquirer": "^9.0.7",
"@types/lodash": "^4.14.202",
"@types/turndown": "^5.0.4",
"ai": "^3.3.7",
"ai": "^3.3.19",
"chrome-paths": "^1.0.1",
"debug": "^4.3.4",
"dotenv": "^16.4.4",
@@ -46,7 +46,7 @@
"lodash": "^4.17.21",
"node-llama-cpp": "^2.8.12",
"ollama-ai-provider": "^0.12.0",
"openai": "^4.56.0",
"openai": "^4.56.1",
"out-url": "^1.2.2",
"puppeteer": "^22.0.0",
"puppeteer-extra": "^3.3.6",
6 changes: 2 additions & 4 deletions src/agent/config.ts
@@ -1,6 +1,3 @@
import { createLLMClient } from "llm-polyglot";
import { ollama } from "ollama-ai-provider";

import {
ObjectGeneratorOptions,
DefaultObjectGeneratorOptions,
@@ -12,13 +9,14 @@ export const MinimumResponseTokens = 200;
export const MaximumResponseTokens = 8_000;

export type ThirdPartyProviders = "openai" | "anthropic";
export type LocalProviders = "ollama";
export type LocalProviders = "ollama" | "local";
export type Providers = ThirdPartyProviders | LocalProviders;

export type ProviderConfig = {
provider: Providers;
apiKey: string;
model: string;
path?: string;
};

export type AgentConfig = {
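With this change, the "local" provider can point the agent at a GGUF model on disk via the new optional path field. A minimal sketch of such a config, assuming an illustrative import path and placeholder values for the fields the local code path does not use:

import { ProviderConfig } from "./src/agent/config";

// Sketch of a config for the new "local" provider.
// The path is a placeholder; "model" and "apiKey" are still required by the
// type, but generateObject only forwards config.path for the local case.
const localConfig: ProviderConfig = {
  provider: "local",
  model: "null",
  apiKey: "null",
  path: "/path/to/capybarahermes-2.5-mistral-7b.Q2_K.gguf",
};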
7 changes: 7 additions & 0 deletions src/agent/generators/generateObject.ts
@@ -6,6 +6,7 @@ import { ProviderConfig } from "../config";

import { generateObjectOllama } from "./generateObjectOllama";
import { generateObjectInstructor } from "./generateObjectInstructor";
import { generateObjectLocal } from "./generateObjectLocal";

// import { generateObjectOpenAI } from "./generateObjectOpenAi";

@@ -38,6 +39,12 @@ export async function generateObject<T extends z.ZodSchema<any>>(
case "ollama": // Ollama has its own function to generate objects because of error handling reasons
response = await generateObjectOllama(config.model, messages, options);
break;
case "local":
if (!config.path) {
throw new Error("Local provider requires a path to the model");
}
response = await generateObjectLocal(config.path, messages, options);
break;
case "openai":
case "anthropic":
response = await generateObjectInstructor(config, messages, options);
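The generateObjectLocal generator imported above is added elsewhere in this commit and is not shown in this diff. As a rough, hypothetical sketch only (not the actual implementation), a generator with a compatible signature could be built on the node-llama-cpp v2 dependency already in package.json, using its JSON-schema grammar support to constrain output; the prompt flattening, option shape, and schema handling below are assumptions:

import {
  LlamaModel,
  LlamaContext,
  LlamaChatSession,
  LlamaJsonSchemaGrammar,
} from "node-llama-cpp";
import { CoreMessage } from "ai";

// Hypothetical sketch -- the real generateObjectLocal is not in this diff.
// It takes a plain JSON Schema here; the actual generator receives a zod
// schema via its options and would need to convert it first.
export async function generateObjectLocalSketch(
  modelPath: string,
  messages: CoreMessage[],
  options: { jsonSchema: object; maxTokens?: number }
) {
  // Load the GGUF weights from the path supplied in ProviderConfig.path.
  const model = new LlamaModel({ modelPath });
  const context = new LlamaContext({ model });
  const session = new LlamaChatSession({ context });

  // Constrain decoding so the completion must match the requested schema.
  const grammar = new LlamaJsonSchemaGrammar(options.jsonSchema as any);

  // Flatten the chat messages into a single prompt string.
  const prompt = messages
    .map((m) =>
      `${m.role}: ${typeof m.content === "string" ? m.content : JSON.stringify(m.content)}`
    )
    .join("\n");

  const answer = await session.prompt(prompt, {
    grammar,
    maxTokens: options.maxTokens ?? 8_000,
  });

  // parse() returns the object described by the schema.
  return grammar.parse(answer);
}

In that arrangement, the "local" branch of the switch above would pass config.path as modelPath, mirroring the call shown in the diff.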
45 changes: 0 additions & 45 deletions src/agent/generators/generateObjectProvider.ts

This file was deleted.

1 change: 0 additions & 1 deletion src/agent/messages.ts
@@ -2,7 +2,6 @@ import { Inventory } from "../inventory";
import { ObjectiveState, StateType } from "../types/browser";
import { Memory } from "../types/memory.types";
import { CoreMessage } from "ai";

/**
* Stringify an array of objects
* @param obj - The array of objects to stringify
24 changes: 23 additions & 1 deletion tests/agent/generators/generateObject.test.ts
@@ -1,8 +1,9 @@
import { describe, expect, it } from "@jest/globals";
import { objectiveStateExample1 } from "../../../src/collectiveMemory/examples";
import { ModelResponseSchema } from "../../../src/types/browser/actionStep.types";
import { commandPrompt, generateObject } from "../../../src/agent";
import { commandPrompt, generateObject, getPrompt } from "../../../src/agent";
import { ProviderConfig } from "../../../src/agent/config";
import { z } from "zod";

describe("generateObjectLocal", () => {
it("Should generate an object using the top level function", async () => {
@@ -23,4 +24,25 @@

expect(parsedRes.command[0].kind).toBeDefined();
});

it.skip("Should generate an object using the top level function with local provider", async () => {
const ariaTreeExample =
'"[0,"RootWebArea","About - High Dimensional Research",[[1,"link","HIGH DIMENSIONAL RESEARCH"],"⚲",[2,"link","NOLITA↗"],[3,"link","MEMORY INDEX"],[4,"link","BLOG"],[5,"link","DOCUMENTATION"],[6,"link","COMPANY"],[7,"heading","We *connect and empower++ new intelligence. "],"WE WANT THE WORLD...","+===+==========================================+\n| 1 | to have a diversity of models, |\n+---+------------------------------------------+\n| 2 | working as closely together as possible, |\n+---+------------------------------------------+\n| 3 | as productively as possible, |\n+---+------------------------------------------+\n| 4 | for the betterment of mankind. |\n+---+------------------------------------------+","We build software for and between models, with a goal of leveraging and coordinating models for autonomous tasks.","",[8,"heading","### TEAM"],[9,"heading","Gates Torrey"],"finance, operations",[10,"link","gates.torrey@hdr.is"],[11,"heading","Tynan Daly"],"product, back-end",[12,"link","tynan.daly@hdr.is"],[13,"heading","Matilde Park"],"product, front-end",[14,"link","matilde.park@hdr.is"],[15,"link","CAREERS →"],[16,"link","JOIN THE MAILING LIST"],[17,"link","DASHBOARD "],"HIGH DIMENSIONAL RESEARCH",[18,"heading","CORE"],[19,"link","NOLITA"],[20,"link","MEMORY INDEX"],[21,"link","BLOG"],[22,"heading","TECHNICAL"],[23,"link","MEMORY API"],[24,"link","GITHUB"],[25,"heading","COMPANY"],[26,"link","PEOPLE"],[27,"link","CAREERS"],[28,"link","TWITTER"],[29,"heading","SUPPORT"],[30,"link","DISCORD"],"SUPPORT@HDR.IS",[31,"link","PRIVACY POLICY"],[32,"link","TERMS OF SERVICE"]]]"';
const messages = getPrompt(ariaTreeExample);
const providerConfig: ProviderConfig = {
provider: "local",
model: "null",
apiKey: "null",
path: "/Users/tynandaly/basin/hdr/browser/capybarahermes-2.5-mistral-7b.Q2_K.gguf",
};

const res = await generateObject(providerConfig, messages, {
schema: z.object({ emails: z.array(z.string()) }),
name: "ModelResponseSchema",
model: "gpt-4-turbo",
objectMode: "TOOLS",
});

expect(res.emails).toContain("matilde.park@hdr.is");
});
});
51 changes: 0 additions & 51 deletions tests/agent/generators/generateObjectProvider.test.ts

This file was deleted.
