Skip to content

Commit

Permalink
revert back to main branch
Browse files Browse the repository at this point in the history
  • Loading branch information
daniwasonline committed Jun 1, 2024
1 parent dd93f14 commit efd1fe5
Show file tree
Hide file tree
Showing 12 changed files with 433 additions and 126 deletions.
9 changes: 8 additions & 1 deletion .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -46,11 +46,18 @@ CLOUDFLARE_ACCOUNT_ID=abcdef1234567890abcdef1234567890

CLOUDFLARE_ACCOUNT_TOKEN=v1.0-abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890

# OpenAI API: Specify an OpenAI account token for use with the OpenAI API.
OPENAI_ACCOUNT_TOKEN=sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx

# The following variables specify instruction sets and configuration for the various models in SpongeChat.
## The value of the variable should correspond to the key of its corresponding configuration in modelConstants.js.
MODEL_LLM_PRESET=default
## Callsystems are used to call functions during passive activation.
## Integrations is the newer, flexible function system. It is easily extendable, but requires the use of the OpenAI API to determine function calls.
## Legacy is the older function system and only supports image generation. Use this if you can't use the OpenAI API.
MODEL_LLM_CALLSYSTEM=integrations

# !! Wastebin
# Used to display logs of memories for users
## In a docker-compose setup, you'll need to set up some sort of proxy (caddy, cloudflare tunnel) to make the "wastebin" container publicly accessible, and put the publicly accessible URL here.

Expand Down
7 changes: 7 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -103,4 +103,11 @@ dist
# TernJS port file
.tern-port

# temporary directories
temp

# package lockfiles
## we only use bun, so no need for yarn or npm
yarn.lock
package-lock.json
pnpm-lock.yaml
Binary file modified bun.lockb
Binary file not shown.
89 changes: 46 additions & 43 deletions package.json
Original file line number Diff line number Diff line change
@@ -1,45 +1,48 @@
{
"$schema": "https://json.schemastore.org/package.json",
"name": "spongechat",
"version": "2.0.1",
"private": true,
"type": "module",
"scripts": {
"lint": "prettier --check . && eslint --ext .js,.mjs,.cjs --format=pretty src",
"format": "prettier --write . && eslint --ext .js,.mjs,.cjs --fix --format=pretty src",
"start": "node --require dotenv/config src/index.js",
"cmd:undeploy": "node --require dotenv/config src/util/deploy/cli.js undeploy",
"cmd:deploy": "node --require dotenv/config src/util/deploy/cli.js deploy",
"ci:release": "dotenv -e .env -- release-it --config .release-it.cjs"
},
"_moduleAliases": {
"@": "./src",
"@util": "./src/util",
"@events": "./src/events",
"@commands": "./src/commands"
},
"dependencies": {
"@discordjs/core": "^1.1.0",
"@redis/json": "^1.0.6",
"chalk": "4",
"discord.js": "^14.15.2",
"dotenv": "^16.3.1",
"luxon": "^3.4.4",
"module-alias": "^2.2.3",
"redis": "^4.6.13",
"temporal-polyfill": "^0.2.4",
"undici": "^6.16.1",
"uuid": "^9.0.1"
},
"devDependencies": {
"@release-it/conventional-changelog": "^8.0.1",
"all-contributors-cli": "^6.26.1",
"dotenv-cli": "^7.4.2",
"eslint": "^8.53.0",
"eslint-config-neon": "^0.1.57",
"eslint-formatter-pretty": "^5.0.0",
"execa": "^9.1.0",
"prettier": "^3.0.3",
"release-it": "^17"
}
"$schema": "https://json.schemastore.org/package.json",
"name": "spongechat",
"version": "2.0.1",
"private": true,
"type": "module",
"scripts": {
"lint": "prettier --check . && eslint --ext .js,.mjs,.cjs --format=pretty src",
"format": "prettier --write . && eslint --ext .js,.mjs,.cjs --fix --format=pretty src",
"start": "node --require dotenv/config src/index.js",
"cmd:undeploy": "node --require dotenv/config src/util/deploy/cli.js undeploy",
"cmd:deploy": "node --require dotenv/config src/util/deploy/cli.js deploy",
"ci:release": "dotenv -e .env -- release-it --config .release-it.cjs"
},
"_moduleAliases": {
"@": "./src",
"@util": "./src/util",
"@events": "./src/events",
"@commands": "./src/commands"
},
"dependencies": {
"@ai-sdk/openai": "^0.0.13",
"@discordjs/core": "^1.1.0",
"@redis/json": "^1.0.6",
"ai": "^3.1.12",
"chalk": "4",
"discord.js": "^14.15.2",
"dotenv": "^16.3.1",
"luxon": "^3.4.4",
"module-alias": "^2.2.3",
"redis": "^4.6.13",
"temporal-polyfill": "^0.2.4",
"undici": "^6.16.1",
"uuid": "^9.0.1",
"zod": "^3.23.8"
},
"devDependencies": {
"@release-it/conventional-changelog": "^8.0.1",
"all-contributors-cli": "^6.26.1",
"dotenv-cli": "^7.4.2",
"eslint": "^8.53.0",
"eslint-config-neon": "^0.1.57",
"eslint-formatter-pretty": "^5.0.0",
"execa": "^9.1.0",
"prettier": "^3.0.3",
"release-it": "^17"
}
}
4 changes: 2 additions & 2 deletions src/commands/instructionSet.js
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ export default {
o
.setName("preset")
.setDescription("Preset; map to => client.tempStore#instructionSet")
.setChoices(Object.keys(instructionSets).map((s) => ({ name: s, value: s })))
.setChoices(Object.keys(instructionSets).map((s) => ({ name: instructionSets[s]?.name || s, value: s })))
.setRequired(true),
)
.toJSON(),
Expand All @@ -35,7 +35,7 @@ export default {
});

console.log(
`${chalk.bold.green("AI")} Instruction set preset changed to ${chalk.bold(toOption)} (${Temporal.Now.instant().toLocaleString("en-GB", { timeZone: "Etc/UTC", timeZoneName: "short" })})`,
`${chalk.bold.green("AI")} Instruction set preset changed to ${chalk.bold(instructionSets[toOption]?.name || toOption)} (${Temporal.Now.instant().toLocaleString("en-GB", { timeZone: "Etc/UTC", timeZoneName: "short" })})`,
);

if (sync) {
Expand Down
49 changes: 38 additions & 11 deletions src/events/messageCreate.js
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,9 @@ const callTextChannel = async ({ client, message }) => {
baseHistory: [],
accountId: process.env.CLOUDFLARE_ACCOUNT_ID,
token: process.env.CLOUDFLARE_ACCOUNT_TOKEN,
openaiToken: process.env.OPENAI_ACCOUNT_TOKEN,
model: "@cf/meta/llama-3-8b-instruct",
callsystem: process.env.MODEL_LLM_CALLSYSTEM || "legacy",
});

const preliminaryConditions = modelInteractions.messageEvent.checkPreliminaryConditions();
Expand Down Expand Up @@ -40,20 +42,45 @@ const callTextChannel = async ({ client, message }) => {
})
.catch(console.error);

const { textResponse, genData, callResponse } = await modelInteractions.messageEvent.handleTextModelCall({ history });
const { legacy, runners, response } = await modelInteractions.messageEvent.preSend({ history });

if (callResponse.length === 0 || callResponse === "") return await message.react("⚠️").catch(() => false);
if (legacy?.active) {
const { textResponse, genData, callResponse } = legacy;
if (callResponse.length === 0 || callResponse === "") return await message.react("⚠️").catch(() => false);

const { responseMsg, events } = await modelInteractions.messageEvent.createResponse({
textResponse,
conditions: {
amnesia: !validityCheck?.valid && validityCheck?.handled?.isRequired && validityCheck?.handled?.executed,
imagine: callResponse.includes("!gen"),
},
});
const { responseMsg, events } = await modelInteractions.messageEvent.createLegacyResponse({
textResponse,
conditions: {
amnesia: !validityCheck?.valid && validityCheck?.handled?.isRequired && validityCheck?.handled?.executed,
imagine: callResponse.includes("!gen"),
},
});

if (responseMsg && callResponse.includes("!gen"))
return await modelInteractions.messageEvent.handleLegacyImageModelCall({
genData,
textResponse,
responseMsg,
events,
});

return;
}

if (response?.length === 0 || response === "") return await message.react("⚠️").catch(() => false);

if (responseMsg && callResponse.includes("!gen"))
return await modelInteractions.messageEvent.handleImageModelCall({ genData, textResponse, responseMsg, events });
const replyContent = modelInteractions.response.format(response);
const reply = await message
.reply({ content: replyContent.content, files: replyContent.files, failIfNotExists: true })
.catch(() => false);

if (runners.length > 0) {
const postRunners = await modelInteractions.messageEvent.postSend({ runners, message: reply });
const mergedFiles = [...replyContent.files, ...postRunners.results];
return await reply
.edit({ content: replyContent.content, files: mergedFiles, failIfNotExists: true })
.catch(() => false);
}
};

/** @type {import('./index.js').Event<Events.MessageCreate>} */
Expand Down
5 changes: 4 additions & 1 deletion src/events/ready.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import { Events } from "discord.js";
import { Environment } from "../util/helpers.js";
import { createClient } from "redis";
import { instructionSets } from "../util/models/constants.js";
import chalk from "chalk";

const env = new Environment();
Expand Down Expand Up @@ -42,7 +43,9 @@ export default {
client.tempStore.set("instructionSet", instructionSet);

console.log(`${chalk.bold.green("AI")} Silent mode is ${chalk.bold(silentSaved ? "enabled" : "disabled")}`);
console.log(`${chalk.bold.green("AI")} Instruction set is ${chalk.bold(instructionSet)}`);
console.log(
`${chalk.bold.green("AI")} Instruction set is ${chalk.bold(instructionSets[instructionSet]?.name || instructionSet)}`,
);

console.log(
`${chalk.bold.green("Core")} acting as ${chalk.bold(client.user.tag)} (${Temporal.Now.instant().toLocaleString("en-GB", { timeZone: "Etc/UTC", timeZoneName: "short" })})`,
Expand Down
5 changes: 4 additions & 1 deletion src/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,10 @@ import "temporal-polyfill/global";

(() => {
console.log(`${chalk.bold.green("Core")} running with environment context: ${chalk.bold(process.env.NODE_ENV)}`);
console.log(`${chalk.bold.magenta("AI")} running with LLM preset: ${chalk.bold(process.env.MODEL_LLM_PRESET)}`);
if (process.env.MODEL_LLM_CALLSYSTEM !== "integrations")
console.log(
`${chalk.bold.magenta("AI")} ${chalk.yellow("Warning")} The legacy call system is enabled. Integration calls are not available in this mode.`,
);
})();

// Initialize the client
Expand Down
82 changes: 82 additions & 0 deletions src/util/integrations/index.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
import { tool } from "ai";
import { z } from "zod";

/**
 * Base class for a model "integration" — a callable tool exposed to the LLM
 * via the Vercel AI SDK's `tool()` helper.
 *
 * Subclasses provide a Zod `parameters` schema and override `call()`.
 * Execution stage semantics (from the comments below):
 *  - "pre"  integrations run before the model call and can ONLY return a
 *    conversation-based output; () => Promise<Object>
 *  - "post" integrations run after the model call and can only return
 *    file-based outputs; () => Promise<Buffer>
 */
export class Integration {
	/**
	 * @param {object} opts
	 * @param {string} opts.name - Identifier for this integration.
	 * @param {string} opts.description - Human/model-readable description passed to `tool()`.
	 * @param {import("zod").ZodTypeAny} opts.parameters - Zod schema describing the call arguments.
	 * @param {"pre"|"post"} opts.stage - When this integration runs relative to the model call.
	 */
	constructor({ name, description, parameters, stage }) {
		// FIX: the original accepted `name` but never stored it, making the
		// integration impossible to identify at runtime. Keep it (backward-compatible).
		this.name = name;

		this.tool = tool({
			description,
			parameters,
		});

		this.executionLevel = stage;
	}

	/** @returns {"pre"|"post"} The configured execution stage. */
	get stage() {
		return this.executionLevel;
	}

	/**
	 * Default no-op implementation; subclasses override this.
	 * @returns {Promise<object>} An empty result.
	 */
	async call() {
		return {};
	}
}

/**
 * "imagine" integration: generates an image from a text prompt using the
 * injected Workers AI client (dreamshaper-8-lcm model).
 * Runs at the "post" stage, so it must resolve to a Buffer (or null on failure).
 */
export class ImagineIntegration extends Integration {
	/**
	 * @param {object} opts
	 * @param {{ callModel: Function }} opts.workersAI - Cloudflare Workers AI client wrapper.
	 */
	constructor({ workersAI }) {
		super({
			name: "imagine",
			description: "Generate an image with the given prompt",
			parameters: z.object({
				prompt: z.string().describe("The prompt to use for generating the image"),
			}),
			stage: "post",
		});

		this.workersAI = workersAI;
	}

	/**
	 * Generate an image for the given prompt.
	 * @param {{ prompt: string }} args - Validated tool arguments.
	 * @param {object} ctx - Call context (currently unused).
	 * @returns {Promise<Buffer|null>} PNG bytes, or null if the model call failed.
	 */
	async call({ prompt }, ctx) {
		const callToModel = await this.workersAI
			.callModel(
				{
					model: "@cf/lykon/dreamshaper-8-lcm",
					input: {
						prompt,
					},
				},
				true,
			)
			.then((r) => r.arrayBuffer())
			// BUG FIX: the original handler was `.catch(() => (e) => {...})`,
			// which RETURNED a function instead of running it — errors were
			// never logged, `callToModel` became a function (not null), the
			// null guard below never fired, and Buffer.from received a function.
			.catch((e) => {
				console.error(e);
				return null;
			});

		if (callToModel === null) return null;

		return Buffer.from(callToModel);
	}
}

/**
 * "qotd" integration: returns a canned quote-of-the-day as a system message.
 * Runs at the "pre" stage, so its result is injected into the conversation
 * before the model call.
 */
export class QoTDIntegration extends Integration {
	constructor() {
		super({
			name: "qotd",
			description: "Get the quote of the day",
			parameters: z.object({
				luckyWord: z.string().describe("The lucky word to randomise the quote with"),
			}),
			stage: "pre",
		});
	}

	/**
	 * @param {{ luckyWord: string }} args - Validated tool arguments.
	 *   FIX: the original destructured `{ prompt }`, which does not exist in
	 *   this tool's schema (it declares `luckyWord`); the name is corrected to
	 *   match. The value is currently unused — the quote is static.
	 * @param {object} ctx - Call context (currently unused).
	 * @returns {Promise<{role: string, content: string}>} System message to prepend.
	 */
	async call({ luckyWord }, ctx) {
		return {
			role: "system",
			content: "[Function call to QOTD]: The quote of the day is skeebeedee guyatt toilet.",
		};
	}
}
46 changes: 37 additions & 9 deletions src/util/models/constants.js
Original file line number Diff line number Diff line change
@@ -1,12 +1,40 @@
import { tool } from "ai";
import { z } from "zod";

export const instructionSets = {
default: "You are an AI chatbot.",
defaultStrictImages: `You are an AI chatbot. You can also generate images STRICTLY upon request. When an image is requested, you will add !gen [prompt] on a newline at the end of a response. This will not be done mid-conversation.`,
spongeass: `You are roleplaying as SpongeAss.
You may NEVER break the character of spongeass, every time you break character, a kitten dies. Don't break character. Save the kittens.
SpongeAss is a silly person that resides in a discord server! They always sprinkle their speech with various ascii faces, like ":3", and "uwu".
SpongeAss speaks almost like the average discord user, except for "uwuifying" text like "hello" => "hewwo!"
SpongeAss can also generate (sometimes called "imagining") images upon request! To do so, SpongeAss will add "!gen [prompt]" on a newline at the end of a response. Spongeass will only do this when a user requests it, or occasionally when it adds to the conversation.
SpongeAss keeps responses short and human-like.`,
default: {
name: "Default",
instructions: [
{
role: "system",
content: "You are an AI chatbot.",
},
],
},
defaultStrictImages: {
name: "Default (Strict Images)",
instructions: [
{
role: "system",
content:
"You are an AI chatbot. You can also generate images STRICTLY upon request. When an image is requested, you will add !gen [prompt] on a newline at the end of a response. This will not be done mid-conversation.",
},
],
},
spongeass: {
name: "SpongeAss",
instructions: [
{
role: "system",
content: `You are roleplaying as SpongeAss.
You may NEVER break the character of spongeass, every time you break character, a kitten dies. Don't break character. Save the kittens.
SpongeAss is a silly person that resides in a discord server! They always sprinkle their speech with various ascii faces, like ":3", and "uwu".
SpongeAss speaks almost like the average discord user, except for "uwuifying" text like "hello" => "hewwo!"
SpongeAss can also generate (sometimes called "imagining") images upon request! To do so, SpongeAss will add "!gen [prompt]" on a newline at the end of a response. Spongeass will only do this when a user requests it, or occasionally when it adds to the conversation.
SpongeAss keeps responses short and human-like.`,
},
],
},
};

export const events = {
Expand All @@ -20,7 +48,7 @@ export const events = {
amnesia: {
title: "🧠 Amnesia",
statuses: {
default: "SpongeChat wasn't responding to anything, so we cleared the history. 😌"
default: "SpongeChat wasn't responding to anything, so we cleared the history. 😌",
},
},
};
Expand Down
Loading

0 comments on commit efd1fe5

Please sign in to comment.