
Commit

refactor: ReferenceCurrentNote
longy2k committed Feb 5, 2024
1 parent 3d33a38 commit 020bf98
Showing 8 changed files with 99 additions and 96 deletions.
44 changes: 31 additions & 13 deletions src/components/FetchModel.ts
@@ -9,11 +9,12 @@ import { addMessage, addParagraphBreaks } from "./chat/Message";
import { codeBlockCopyButton } from "./chat/Buttons";
import { getPrompt } from "./chat/Prompt";
import { displayLoadingBotMessage } from "./chat/BotMessage";
+ import { getActiveFileContent, getCurrentNoteContent } from "./editor/ReferenceCurrentNote";

let abortController = new AbortController();

// Fetch OpenAI API Chat
- export async function fetchOpenAIAPI(settings: BMOSettings, referenceCurrentNoteContent: string, index: number) {
+ export async function fetchOpenAIAPI(settings: BMOSettings, index: number) {
const openai = new OpenAI({
apiKey: settings.apiKey,
baseURL: settings.openAIBaseUrl,
@@ -42,6 +43,9 @@ export async function fetchOpenAIAPI(settings: BMOSettings, referenceCurrentNote
messageContainerEl?.insertBefore(botMessageDiv, messageContainerElDivs[index+1]);
botMessageDiv.scrollIntoView({ behavior: 'smooth', block: 'start' });

+ await getActiveFileContent(settings);
+ const referenceCurrentNoteContent = getCurrentNoteContent();

try {
const stream = await openai.chat.completions.create({
model: settings.model,
@@ -122,7 +126,7 @@ export async function fetchOpenAIAPI(settings: BMOSettings, referenceCurrentNote
}

// Fetch OpenAI-Based API
- export async function fetchOpenAIBaseAPI(settings: BMOSettings, referenceCurrentNote: string, index: number) {
+ export async function fetchOpenAIBaseAPI(settings: BMOSettings, index: number) {
const openai = new OpenAI({
apiKey: settings.apiKey,
baseURL: settings.openAIBaseUrl,
@@ -146,12 +150,15 @@ export async function fetchOpenAIBaseAPI(settings: BMOSettings, referenceCurrent
messageContainerEl?.insertBefore(botMessageDiv, messageContainerElDivs[index+1]);
botMessageDiv.scrollIntoView({ behavior: 'smooth', block: 'start' });

+ await getActiveFileContent(settings);
+ const referenceCurrentNoteContent = getCurrentNoteContent();

try {
const completion = await openai.chat.completions.create({
model: settings.model,
max_tokens: parseInt(settings.max_tokens),
messages: [
- { role: 'system', content: referenceCurrentNote + settings.system_role + prompt},
+ { role: 'system', content: referenceCurrentNoteContent + settings.system_role + prompt},
...messageHistoryAtIndex as ChatCompletionMessageParam[]
],
});
@@ -193,7 +200,7 @@ export async function fetchOpenAIBaseAPI(settings: BMOSettings, referenceCurrent

// Request response from Ollama
// NOTE: Abort does not work for requestUrl
- export async function ollamaFetchData(settings: BMOSettings, referenceCurrentNoteContent: string, index: number) {
+ export async function ollamaFetchData(settings: BMOSettings, index: number) {
const ollamaRestAPIUrl = settings.ollamaRestAPIUrl;

if (!ollamaRestAPIUrl) {
@@ -217,6 +224,9 @@ export async function ollamaFetchData(settings: BMOSettings, referenceCurrentNot
messageContainerEl?.insertBefore(botMessageDiv, messageContainerElDivs[index+1]);
botMessageDiv.scrollIntoView({ behavior: 'smooth', block: 'start' });

+ await getActiveFileContent(settings);
+ const referenceCurrentNoteContent = getCurrentNoteContent();

try {
const response = await requestUrl({
url: ollamaRestAPIUrl + '/api/chat',
@@ -236,8 +246,6 @@ export async function ollamaFetchData(settings: BMOSettings, referenceCurrentNot
}),
});

- // console.log(ollamaParametersOptions(settings));

const message = response.json.message.content;

const messageContainerEl = document.querySelector('#messageContainer');
@@ -273,7 +281,7 @@ export async function ollamaFetchData(settings: BMOSettings, referenceCurrentNot
}

// Fetch Ollama API via stream
- export async function ollamaFetchDataStream(settings: BMOSettings, referenceCurrentNoteContent: string, index: number) {
+ export async function ollamaFetchDataStream(settings: BMOSettings, index: number) {
const ollamaRestAPIUrl = settings.ollamaRestAPIUrl;

if (!ollamaRestAPIUrl) {
@@ -305,6 +313,9 @@ export async function ollamaFetchDataStream(settings: BMOSettings, referenceCurr
messageContainerEl?.insertBefore(botMessageDiv, messageContainerElDivs[index+1]);
botMessageDiv.scrollIntoView({ behavior: 'smooth', block: 'start' });

+ await getActiveFileContent(settings);
+ const referenceCurrentNoteContent = getCurrentNoteContent();

try {
const response = await fetch(url, {
method: 'POST',
@@ -323,8 +334,6 @@ export async function ollamaFetchDataStream(settings: BMOSettings, referenceCurr
}),
signal: abortController.signal
})

- // console.log(ollamaParametersOptions(settings));

if (!response.ok) {
new Notice(`HTTP error! Status: ${response.status}`);
@@ -409,7 +418,7 @@ export async function ollamaFetchDataStream(settings: BMOSettings, referenceCurr
}

// Request response from openai-based rest api url
- export async function openAIRestAPIFetchData(settings: BMOSettings, referenceCurrentNote: string, index: number) {
+ export async function openAIRestAPIFetchData(settings: BMOSettings, index: number) {
let prompt = await getPrompt(settings);

if (prompt == undefined) {
@@ -426,6 +435,9 @@ export async function openAIRestAPIFetchData(settings: BMOSettings, referenceCur

messageContainerEl?.insertBefore(botMessageDiv, messageContainerElDivs[index+1]);
botMessageDiv.scrollIntoView({ behavior: 'smooth', block: 'start' });

+ await getActiveFileContent(settings);
+ const referenceCurrentNoteContent = getCurrentNoteContent();

const urls = [
settings.openAIRestAPIUrl + '/v1/chat/completions',
@@ -446,7 +458,7 @@ export async function openAIRestAPIFetchData(settings: BMOSettings, referenceCur
body: JSON.stringify({
model: settings.model,
messages: [
- { role: 'system', content: referenceCurrentNote + settings.system_role + prompt},
+ { role: 'system', content: referenceCurrentNoteContent + settings.system_role + prompt},
...messageHistoryAtIndex
],
max_tokens: parseInt(settings.max_tokens),
@@ -497,7 +509,7 @@ export async function openAIRestAPIFetchData(settings: BMOSettings, referenceCur
}

// Fetch Ollama API via stream
- export async function openAIRestAPIFetchDataStream(settings: BMOSettings, referenceCurrentNoteContent: string, index: number) {
+ export async function openAIRestAPIFetchDataStream(settings: BMOSettings, index: number) {
const openAIRestAPIUrl = settings.openAIRestAPIUrl;

if (!openAIRestAPIUrl) {
@@ -529,6 +541,9 @@ export async function openAIRestAPIFetchDataStream(settings: BMOSettings, refere
messageContainerEl?.insertBefore(botMessageDiv, messageContainerElDivs[index+1]);
botMessageDiv.scrollIntoView({ behavior: 'smooth', block: 'start' });

+ await getActiveFileContent(settings);
+ const referenceCurrentNoteContent = getCurrentNoteContent();

try {
const response = await fetch(url, {
method: 'POST',
@@ -641,7 +656,7 @@ export async function openAIRestAPIFetchDataStream(settings: BMOSettings, refere
}

// Request response from Anthropic
- export async function requestUrlAnthropicAPI(settings: BMOSettings, referenceCurrentNoteContent: string, index: number) {
+ export async function requestUrlAnthropicAPI(settings: BMOSettings, index: number) {
const headers = {
'anthropic-version': '2023-06-01',
'content-type': 'application/json',
@@ -667,6 +682,9 @@ export async function requestUrlAnthropicAPI(settings: BMOSettings, referenceCur
messageContainerEl?.insertBefore(botMessageDiv, messageContainerElDivs[index+1]);
botMessageDiv.scrollIntoView({ behavior: 'smooth', block: 'start' });

+ await getActiveFileContent(settings);
+ const referenceCurrentNoteContent = getCurrentNoteContent();

const requestBody = {
model: settings.model,
prompt: `\n\nHuman: ${referenceCurrentNoteContent}\n\n${settings.system_role}\n\n${prompt}\n\n${messageHistoryAtIndexString}\n\nAssistant:`,
21 changes: 0 additions & 21 deletions src/components/ReferenceCurrentNoteIndicator.ts

This file was deleted.

32 changes: 16 additions & 16 deletions src/components/chat/Buttons.ts
@@ -3,7 +3,7 @@ import { BMOSettings, checkActiveFile } from "src/main";
import { ANTHROPIC_MODELS, OPENAI_MODELS, activeEditor, filenameMessageHistoryJSON, lastCursorPosition, lastCursorPositionFile, messageHistory } from "src/view";
import { fetchOpenAIAPI, fetchOpenAIBaseAPI, ollamaFetchData, ollamaFetchDataStream, requestUrlAnthropicAPI, openAIRestAPIFetchData, openAIRestAPIFetchDataStream } from "../FetchModel";

- export function regenerateUserButton(settings: BMOSettings, referenceCurrentNote: string) {
+ export function regenerateUserButton(settings: BMOSettings) {
const regenerateButton = document.createElement("button");
regenerateButton.textContent = "regenerate";
setIcon(regenerateButton, "refresh-ccw");
@@ -31,7 +31,7 @@ export function regenerateUserButton(settings: BMOSettings, referenceCurrentNote
deleteMessage(index+1);
if (OPENAI_MODELS.includes(settings.model)) {
try {
- await fetchOpenAIAPI(settings, referenceCurrentNote, index);
+ await fetchOpenAIAPI(settings, index);
}
catch (error) {
new Notice('Error occurred while fetching completion: ' + error.message);
@@ -40,7 +40,7 @@ export function regenerateUserButton(settings: BMOSettings, referenceCurrentNote
}
else if (settings.openAIBaseModels.includes(settings.model)) {
try {
- await fetchOpenAIBaseAPI(settings, referenceCurrentNote, index);
+ await fetchOpenAIBaseAPI(settings, index);
}
catch (error) {
new Notice('Error occurred while fetching completion: ' + error.message);
@@ -49,26 +49,26 @@ export function regenerateUserButton(settings: BMOSettings, referenceCurrentNote
}
else if (ANTHROPIC_MODELS.includes(settings.model)) {
try {
- await requestUrlAnthropicAPI(settings, referenceCurrentNote, index);
+ await requestUrlAnthropicAPI(settings, index);
}
catch (error) {
console.error('Error:', error);
}
}
else if (settings.ollamaRestAPIUrl && settings.ollamaModels.includes(settings.model)) {
if (settings.allowOllamaStream) {
- await ollamaFetchDataStream(settings, referenceCurrentNote, index);
+ await ollamaFetchDataStream(settings, index);
}
else {
- await ollamaFetchData(settings, referenceCurrentNote, index);
+ await ollamaFetchData(settings, index);
}
}
else if (settings.openAIRestAPIUrl && settings.openAIRestAPIModels.includes(settings.model)){
if (settings.allowOpenAIRestAPIStream) {
- await openAIRestAPIFetchDataStream(settings, referenceCurrentNote, index);
+ await openAIRestAPIFetchDataStream(settings, index);
}
else {
- await openAIRestAPIFetchData(settings, referenceCurrentNote, index);
+ await openAIRestAPIFetchData(settings, index);
}
}
}
@@ -79,7 +79,7 @@ export function regenerateUserButton(settings: BMOSettings, referenceCurrentNote
return regenerateButton;
}

- export function displayEditButton (settings: BMOSettings, referenceCurrentNoteContent: string, userP: HTMLParagraphElement) {
+ export function displayEditButton (settings: BMOSettings, userP: HTMLParagraphElement) {
export function displayEditButton (settings: BMOSettings, userP: HTMLParagraphElement) {
const editButton = document.createElement("button");
editButton.textContent = "edit";
setIcon(editButton, "edit"); // Assuming setIcon is defined elsewhere
@@ -129,7 +129,7 @@ export function displayEditButton (settings: BMOSettings, referenceCurrentNoteCo
// Fetch OpenAI API
if (OPENAI_MODELS.includes(settings.model)) {
try {
- await fetchOpenAIAPI(settings, referenceCurrentNoteContent, index);
+ await fetchOpenAIAPI(settings, index);
}
catch (error) {
new Notice('Error occurred while fetching completion: ' + error.message);
@@ -138,7 +138,7 @@ export function displayEditButton (settings: BMOSettings, referenceCurrentNoteCo
}
else if (settings.openAIBaseModels.includes(settings.model)) {
try {
- await fetchOpenAIBaseAPI(settings, referenceCurrentNoteContent, index);
+ await fetchOpenAIBaseAPI(settings, index);
}
catch (error) {
new Notice('Error occurred while fetching completion: ' + error.message);
@@ -147,26 +147,26 @@ export function displayEditButton (settings: BMOSettings, referenceCurrentNoteCo
}
else if (ANTHROPIC_MODELS.includes(settings.model)) {
try {
- await requestUrlAnthropicAPI(settings, referenceCurrentNoteContent, index);
+ await requestUrlAnthropicAPI(settings, index);
}
catch (error) {
console.error('Error:', error);
}
}
else if (settings.ollamaRestAPIUrl && settings.ollamaModels.includes(settings.model)) {
if (settings.allowOllamaStream) {
- await ollamaFetchDataStream(settings, referenceCurrentNoteContent, index);
+ await ollamaFetchDataStream(settings, index);
}
else {
- await ollamaFetchData(settings, referenceCurrentNoteContent, index);
+ await ollamaFetchData(settings, index);
}
}
else if (settings.openAIRestAPIUrl && settings.openAIRestAPIModels.includes(settings.model)){
if (settings.allowOpenAIRestAPIStream) {
- await openAIRestAPIFetchDataStream(settings, referenceCurrentNoteContent, index);
+ await openAIRestAPIFetchDataStream(settings, index);
}
else {
- await openAIRestAPIFetchData(settings, referenceCurrentNoteContent, index);
+ await openAIRestAPIFetchData(settings, index);
}
}
}
4 changes: 2 additions & 2 deletions src/components/chat/Prompt.ts
@@ -12,8 +12,8 @@ export async function getPrompt(settings: BMOSettings) {
// Await the reading of the file and return its content
const content = await app.vault.adapter.read(promptFilePath);
// Remove YAML front matter if present
- const cleanedContent = content.replace(/---[\s\S]+?---/, '').trim();
- return cleanedContent;
+ const clearYamlContent = content.replace(/---[\s\S]+?---/, '').trim();
+ return clearYamlContent;
} catch (error) {
console.error(`Error reading file ${promptFilePath}:`, error);
return null;
6 changes: 3 additions & 3 deletions src/components/chat/UserMessage.ts
@@ -4,7 +4,7 @@ import { displayEditButton, displayTrashButton, displayUserCopyButton, regenerat
import { ANTHROPIC_MODELS } from "src/view";
import { marked } from "marked";

- export function displayUserMessage(settings: BMOSettings, referenceCurrentNoteContent: string, message: string) {
+ export function displayUserMessage(settings: BMOSettings, message: string) {
const userMessageDiv = document.createElement("div");
userMessageDiv.className = "userMessage";
userMessageDiv.style.backgroundColor = colorToHex(settings.userMessageBackgroundColor ||
@@ -21,8 +21,8 @@ export function displayUserMessage(settings: BMOSettings, referenceCurrentNoteCo
userNameSpan.textContent = settings.userName || DEFAULT_SETTINGS.userName;
const userP = document.createElement("p");

- const regenerateButton = regenerateUserButton(settings, referenceCurrentNoteContent);
- const editButton = displayEditButton(settings, referenceCurrentNoteContent, userP);
+ const regenerateButton = regenerateUserButton(settings);
+ const editButton = displayEditButton(settings, userP);
const copyUserButton = displayUserCopyButton(userP);
const trashButton = displayTrashButton();

5 changes: 4 additions & 1 deletion src/components/editor/FetchRenameNoteTitle.ts
@@ -2,9 +2,12 @@ import { Notice, requestUrl } from "obsidian";
import OpenAI from "openai";
import { BMOSettings } from "src/main";
import { ANTHROPIC_MODELS, OPENAI_MODELS } from "src/view";
+ import { getActiveFileContent, getCurrentNoteContent } from "./ReferenceCurrentNote";

// Rename note title based on specified model
- export async function fetchModelRenameTitle(settings: BMOSettings, referenceCurrentNoteContent: string) {
+ export async function fetchModelRenameTitle(settings: BMOSettings) {
+ await getActiveFileContent(settings);
+ const referenceCurrentNoteContent = getCurrentNoteContent();

const prompt = `You are a title generator. You will give succinct titles that does not contain backslashes,
forward slashes, or colons. Please generate one title as your response.\n\n`;
29 changes: 29 additions & 0 deletions src/components/editor/ReferenceCurrentNote.ts
@@ -0,0 +1,29 @@
import { BMOSettings } from "src/main";

let referenceCurrentNoteContent = '';

// Reference Current Note Indicator
export async function getActiveFileContent(settings: BMOSettings) {
const dotElement = document.querySelector('.dotIndicator');
if (settings.allowReferenceCurrentNote === true) {
if (dotElement) {
(dotElement as HTMLElement).style.backgroundColor = '#da2c2c';
referenceCurrentNoteContent = '';
}
const activeFile = app.workspace.getActiveFile();
if (activeFile?.extension === 'md') {
if (dotElement) {
(dotElement as HTMLElement).style.backgroundColor = 'green';
}
const content = await app.vault.read(activeFile);
const clearYamlContent = content.replace(/---[\s\S]+?---/, '').trim();
referenceCurrentNoteContent = 'Reference Note:' +
'\n\n' + clearYamlContent + '\n\n';
}
}
return referenceCurrentNoteContent;
}

export function getCurrentNoteContent() {
return referenceCurrentNoteContent;
}
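
The new module replaces the referenceCurrentNoteContent string that was previously threaded as a parameter through every fetch function: callers now refresh the module-level cache and read it back. A minimal sketch of the new call pattern used across FetchModel.ts above; buildSystemRole is a hypothetical helper added here for illustration only and is not part of this commit:

import { BMOSettings } from "src/main";
import { getActiveFileContent, getCurrentNoteContent } from "./editor/ReferenceCurrentNote";

// Hypothetical helper (not in this commit) showing how the refactored module is consumed.
async function buildSystemRole(settings: BMOSettings, prompt: string): Promise<string> {
    // Re-read the active markdown note (and update the dot indicator) before each request.
    await getActiveFileContent(settings);
    // Read back the module-level cache populated by getActiveFileContent.
    const referenceCurrentNoteContent = getCurrentNoteContent();
    return referenceCurrentNoteContent + settings.system_role + prompt;
}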
