feat: summary before doc query for better context
glorat committed Aug 29, 2023
1 parent 976d9ba commit d20690e
Showing 4 changed files with 29 additions and 13 deletions.
21 changes: 13 additions & 8 deletions src/lib/ai/openaiWrapper.ts
@@ -76,21 +76,26 @@ const completionConfig = {
 }
 
 export async function answerMeDirect(arg: {context: string, userPrompt: string, initPrompt?:string}): Promise<string> {
-  const defaultPrompt = "Answer the question as truthfully as possible using the provided text, and if the answer is not contained within the text below, say \"I don't know\"\n\n"
+  // const defaultPrompt = "Answer the question as truthfully as possible using the provided text, and if the answer is not contained within the text below, say \"I don't know\"\n\n"
+  const defaultPrompt = `Use the following pieces of context to answer the users question.
+If you don't know the answer, just say that you don't know, don't try to make up an answer.
+----------------
+`
   const initPrompt = arg.initPrompt ?? defaultPrompt
-  const {context, userPrompt} = arg
-  const prompt = initPrompt
-    + 'Context:\n' + context + '\n\n'
-    + 'Q: ' + userPrompt + '\nA: ';
-
-  logger.debug(prompt)
+  const {context, userPrompt} = arg
+  // const prompt = initPrompt
+  //   + 'Context:\n' + context + '\n\n'
+  //   + 'Q: ' + userPrompt + '\nA: ';
+  //
+  // logger.debug(prompt)
 
   const response = await callWithRetry(() => getOpenAIAPI(Config.chatModel).createChatCompletion({
     ...completionConfig,
     model: Config.chatModel,
     messages:[
-      {role: 'system', content: context},
-      {role: 'user', content: initPrompt},
+      {role: 'system', content: initPrompt + context},
+      // {role: 'user', content: context},
       {role: 'user', content: userPrompt}
     ],
     // user: TODO: for tracking purposes
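
With this change the retrieved context moves into the system message, prefixed by the instruction prompt, instead of being spliced into a Q/A template. A minimal usage sketch of the new call shape, assuming a caller that already holds a summary and some retrieved chunks (the variable names here are illustrative, not from the repo):

// Hypothetical caller: prepend the document summary to the retrieved
// excerpts so both end up in the single system message that
// answerMeDirect now builds as initPrompt + context.
const context = `Summary:\n${summary}\n\n` + chunks.join('\n---\n')
const answer = await answerMeDirect({context, userPrompt: question})
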
7 changes: 5 additions & 2 deletions src/pages/DocumentQuery.vue
@@ -47,7 +47,7 @@
 
 <script setup lang="ts">
 import {ref, nextTick, onMounted, Ref, computed} from 'vue'
-import {performQna2} from 'src/lib/ai/answer'
+import {performQna2, performQna3, performSummarisation} from 'src/lib/ai/answer'
 import {createVectorStoreFromLargeContent} from 'src/lib/ai/largeDocQna'
 import {exportFile, Notify} from 'quasar'
 import {matCloudUpload} from '@quasar/extras/material-icons'
@@ -85,10 +85,13 @@ async function doit() {
   const vectorStore = await createVectorStoreFromLargeContent(text.value, (p)=>embedProgress.value=p)
+  const summary = await performSummarisation(text.value)
+  console.log(`SUMMARY ${summary}`)
   let idx = 0
   for (const question of questionStore.questions) {
     console.log(`QUESTION ${idx}: ${question}`)
-    const response = await performQna2(question, vectorStore)
+    const response = await performQna3(question, summary, vectorStore)
     answers.value[idx] = response ?? 'cannot answer'
     answerLoading.value[idx] = false
     console.log(`ANSWER ${idx}: ${response}`)
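
performSummarisation itself lives in src/lib/ai/answer, which this commit does not touch. A plausible sketch of its shape, assuming langchain's map-reduce summarisation chain (the chunk sizes, model wiring, and everything else here are assumptions, not the repo's actual implementation):

import {ChatOpenAI} from 'langchain/chat_models/openai'
import {loadSummarizationChain} from 'langchain/chains'
import {RecursiveCharacterTextSplitter} from 'langchain/text_splitter'

// Hypothetical implementation: split the document, then map-reduce
// summarise the chunks into a single short summary string.
export async function performSummarisation(text: string): Promise<string> {
  const splitter = new RecursiveCharacterTextSplitter({chunkSize: 4000, chunkOverlap: 200})
  const docs = await splitter.createDocuments([text])
  const model = new ChatOpenAI({temperature: 0})
  const chain = loadSummarizationChain(model, {type: 'map_reduce'})
  const res = await chain.call({input_documents: docs})
  return res.text as string
}
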
4 changes: 2 additions & 2 deletions src/pages/MultiFilePage.vue
@@ -10,7 +10,7 @@
 import MultiFileManager from 'components/MultiFileManager.vue'
 import QuestionInputs from 'components/QuestionInputs.vue'
 import {computed, ref} from 'vue'
-import {performQna2} from 'src/lib/ai/answer'
+import {performQna2, performQna3, performSummarisation} from 'src/lib/ai/answer'
 import {exportFile, Notify} from 'quasar'
 import {useQuestionStore} from 'stores/questionStore'
 import {useMultiFileStore} from 'stores/multiFileStore'
 
@@ -34,7 +34,7 @@ async function doit() {
   for (const [fileIdx, file] of multiFileStore.documentInfo.entries()) {
     console.log(`QUESTION ${idx}: ${question}`)
     const vectorStore = useMultiFileStore().vectorStore
-    const response = await performQna2(question, vectorStore, d=>d.metadata['name'] === file.file.name)
+    const response = await performQna3(question, file.summary, vectorStore, d=>d.metadata['name'] === file.file.name)
     answers[idx][fileIdx] = response ?? 'cannot answer'
     console.log(`ANSWER ${idx}: ${response}`)
     console.log()
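
The new performQna3 is imported from src/lib/ai/answer, whose definition is outside this diff. From the two call sites above, a sketch of the assumed signature: the question, the document summary, the vector store, and an optional per-document filter (everything below is inferred, not the actual implementation):

import {Document} from 'langchain/document'
import {MemoryVectorStore} from 'langchain/vectorstores/memory'
import {answerMeDirect} from 'src/lib/ai/openaiWrapper'

// Inferred shape: retrieve relevant chunks, prepend the summary, and
// delegate to answerMeDirect so the summary rides along as extra context.
export async function performQna3(
  question: string,
  summary: string | undefined,
  vectorStore: MemoryVectorStore,
  filter?: (doc: Document) => boolean
): Promise<string | undefined> {
  const docs = await vectorStore.similaritySearch(question, 4, filter)
  const context = (summary ? `Document summary:\n${summary}\n\n` : '')
    + docs.map(d => d.pageContent).join('\n---\n')
  return answerMeDirect({context, userPrompt: question})
}
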
10 changes: 9 additions & 1 deletion src/stores/multiFileStore.ts
@@ -7,13 +7,15 @@ import {getLangchainConfig} from 'src/lib/ai/config';
 import {embedsCache} from 'src/lib/ai/openaiWrapper';
 import {anyBufferToText, fileToText} from 'src/lib/ai/unstructured';
 import {RecursiveCharacterTextSplitter} from 'langchain/text_splitter';
+import {performSummarisation} from "src/lib/ai/answer";
 
 export interface DocumentInfo {
   name: string
   file?: File
   buffer?: Buffer
-  status: 'pending' | 'parsing' | 'processing' | 'ready' | 'error'
+  status: 'pending' | 'parsing' | 'processing' | 'ready' | 'error' | string
   progress?: number,
+  summary?: string
   // vectors?: MemoryVectorStore
 }
 
@@ -58,6 +60,12 @@ export const useMultiFileStore = defineStore('multiFile', {
       const vectorStore = this.vectorStore
       await vectorStore.addDocuments(docs) // TODO: deduplicate based on metadata?
 
+      // We also want a summary
+      // TODO: This could be in parallel of above?
+      pendingDocument.status = 'summarising'
+      const summary = await performSummarisation(text)
+      pendingDocument.summary = summary
+
       // Important to markRaw to avoid proxying the insides
       // pendingDocument.vectors = markRaw(vectorStore)
       // Update the status to 'ready' on successful processing
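
The TODO in the added block notes that embedding and summarisation are independent. One way to act on it, sketched against the code above (an assumption, not part of this commit):

// Hypothetical refactor: run vector indexing and summarisation concurrently,
// since neither awaits the other's result.
pendingDocument.status = 'summarising'
const [, summary] = await Promise.all([
  vectorStore.addDocuments(docs), // TODO: deduplicate based on metadata?
  performSummarisation(text),
])
pendingDocument.summary = summary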
