From e444d0fa517b805378521fb9c6a9fd5a04dd9f83 Mon Sep 17 00:00:00 2001 From: Casper Bollen Date: Thu, 7 Mar 2024 08:54:35 +0100 Subject: [PATCH] fix: recalculating order and dose type selection --- src/Client/Views/Prescribe.fs | 3 +- src/Informedica.GenOrder.Lib/Order.fs | 18 +- src/Informedica.Utils.Lib/Scripts/AI.fsx | 255 ++++++++++++++++++----- 3 files changed, 220 insertions(+), 56 deletions(-) diff --git a/src/Client/Views/Prescribe.fs b/src/Client/Views/Prescribe.fs index 3e03848..8918db1 100644 --- a/src/Client/Views/Prescribe.fs +++ b/src/Client/Views/Prescribe.fs @@ -449,8 +449,7 @@ module Prescribe = match props.scenarios with | Resolved scrs when scrs.Indication.IsSome && scrs.Medication.IsSome && - scrs.Route.IsSome && - scrs.DoseTypes |> Array.length > 1 -> + scrs.Route.IsSome -> (false, scrs.DoseType, scrs.DoseTypes) |> fun (isLoading, sel, items) -> if isMobile then diff --git a/src/Informedica.GenOrder.Lib/Order.fs b/src/Informedica.GenOrder.Lib/Order.fs index 56710d3..3eb73b8 100644 --- a/src/Informedica.GenOrder.Lib/Order.fs +++ b/src/Informedica.GenOrder.Lib/Order.fs @@ -1711,9 +1711,15 @@ module Order = let isSolved (ord: Order) = - ord.Orderable.Dose - |> Dose.toOrdVars - |> List.forall OrderVariable.isSolved + let qty = + ord.Orderable.Dose.Quantity + |> Quantity.toOrdVar + |> OrderVariable.isSolved + let rte = + ord.Orderable.Dose.Rate + |> Rate.toOrdVar + |> OrderVariable.isSolved + qty || rte /// @@ -2316,7 +2322,7 @@ module Order = let pres = $"""{sn |> String.concat " + "} {dr}""" let prep = ord |> compQtyToStr - let adm = $"""{sn |> String.concat " + "} {it} in {oq}, {rt}""" + let adm = $"""{sn |> String.concat " + "} {it} in {oq} stand {rt}""" pres, prep, adm @@ -2341,7 +2347,7 @@ module Order = let pres = $"{fr} {dq} {dt}" let prep = ord |> compQtyToStr - let adm = $"{fr} {ord |> orbDoseQtyToStr} in {tme} = {rt}" + let adm = $"{fr} {ord |> orbDoseQtyToStr} in {tme} stand {rt}" pres |> String.replace "()" "", prep, @@ -2367,7 +2373,7 
@@ module Order = let pres = $"{dq} {dqa}" let prep = ord |> compQtyToStr - let adm = $"{ord |> orbDoseQtyToStr} in {tme} = {rt}" + let adm = $"{ord |> orbDoseQtyToStr} in {tme} stand {rt}" pres |> String.replace "()" "", prep, diff --git a/src/Informedica.Utils.Lib/Scripts/AI.fsx b/src/Informedica.Utils.Lib/Scripts/AI.fsx index d266275..582ac17 100644 --- a/src/Informedica.Utils.Lib/Scripts/AI.fsx +++ b/src/Informedica.Utils.Lib/Scripts/AI.fsx @@ -303,23 +303,50 @@ module Ollama = type Message = { - role : string - content : string + Role : string + Content : string + Validator : string -> Result } + type Conversation = + { + Model : string + Messages : QuestionAnswer list + } + and QuestionAnswer = + { + Question : Message + Answer : Message + } + + + module Conversation = + + let print (conversation : Conversation) = + for qAndA in conversation.Messages do + printfn $""" +## Question: +{qAndA.Question.Content.Trim()} + +## Answer: +{qAndA.Answer.Content.Trim()} + +""" + + module Message = - let create role content = + let create validator role content = { - role = role - content = content + Role = role + Content = content + Validator = validator } - let user = create Roles.user - - let system = create Roles.system + let user = create Result.Ok Roles.user + let system = create Result.Ok Roles.system type Response = @@ -460,13 +487,18 @@ module Ollama = let chat model messages (message : Message) = + let map msg = + {| + role = msg.Role + content = msg.Content + |} let messages = {| model = model messages = - [ message ] - |> List.append messages + [ message |> map ] + |> List.append (messages |> List.map map) options = options stream = false |} @@ -507,6 +539,8 @@ module Ollama = return models } + |> Async.RunSynchronously + let showModel model = let prompt = @@ -558,9 +592,9 @@ module Ollama = - let run llm messages message = + let run (model : string) messages message = message - |> chat llm messages + |> chat model messages |> Async.RunSynchronously |> 
function | Success response -> @@ -585,6 +619,8 @@ module Ollama = let ``mistral:7b-instruct`` = "mistral:7b-instruct" + let ``openchat:7b`` = "openchat:7b" + let runLlama2 = run Models.llama2 @@ -605,46 +641,84 @@ module Ollama = let runMistral_7b_instruct = run Models.``mistral:7b-instruct`` - module Operators = + open Newtonsoft.Json + - let mutable runModel = runLlama2 + let init model msg = + printfn $"""Starting conversation with {model} +Options: +{options |> JsonConvert.SerializeObject} +""" - let (>>?) msgs msg = - printfn $"### PROMPT:{msg}" + let msg = msg |> Message.system msg - |> Message.user - |> runModel msgs + |> run model [] |> fun msgs -> - let answer = - (msgs |> List.last).content.Trim() - - printfn $"### ANSWER:\n{answer}\n" - msgs + printfn $"Got an answer" + + { + + Model = model + Messages = + [{ + Question = msg + Answer = msgs |> List.last + }] + } + + + let (>>?) (conversation : Conversation) msg = + + let rec loop tryAgain conversation msg = + let msg = msg |> Message.user + + msg + |> run conversation.Model (conversation.Messages |> List.map (_.Question)) + |> fun msgs -> + let answer = msgs |> List.last + + match answer.Content |> msg.Validator with + | Ok _ -> + { conversation with + Messages = + [{ + Question = msg + Answer = answer + }] + |> List.append conversation.Messages + } + | Result.Error err -> + if not tryAgain then + { conversation with + Messages = + [{ + Question = msg + Answer = answer + }] + |> List.append conversation.Messages + } + + else + $""" +It seems the answer was not correct because: {err} +Can you try again answering? +{msg.Content} +""" + |> loop false conversation + loop true conversation msg open Ollama.Operators - -let testModel name modelToRun = - - - printfn $"\n\n# Running: {name}\n\n" - - - runModel <- modelToRun - - - for text in Texts.testTexts do - +let extractDoseQuantities model text = Texts.systemDoseQuantityExpert - |> Ollama.Message.system - |> runModel [] + |> init model >>? 
$""" The text between the ''' describes dose quantities for a substance: @@ -690,28 +764,113 @@ Summarize the previous answers as: - Frequency: ? """ - |> ignore + + +let testModel model = + + printfn $"\n\n# Running: {model}\n\n" + for text in Texts.testTexts do + extractDoseQuantities model text + |> Ollama.Conversation.print let testAll () = [ - Ollama.Models.gemma, Ollama.runGemma - Ollama.Models.``gemma:7b-instruct``, Ollama.runGemma_7b_instruct - Ollama.Models.llama2, Ollama.runLlama2 - Ollama.Models.``llama2:13b-chat``, Ollama.runLlama2_13b_chat - Ollama.Models.mistral, Ollama.runMistral - Ollama.Models.``mistral:7b-instruct``, Ollama.runMistral_7b_instruct + Ollama.Models.gemma + Ollama.Models.``gemma:7b-instruct`` + Ollama.Models.llama2 + Ollama.Models.``llama2:13b-chat`` + Ollama.Models.mistral + Ollama.Models.``mistral:7b-instruct`` ] - |> List.iter (fun (n, m) -> testModel n m) + |> List.iter testModel + + + +Ollama.options.penalize_newline <- true +Ollama.options.top_k <- 10 +Ollama.options.top_p <- 0.95 + + +Ollama.Models.``openchat:7b`` +|> testModel + + +module BNFC = + + let paracetamolPO = + [ + """ +paracetamol +Neonate 28 weeks to 32 weeks corrected gestational age +20 mg/ kg for 1 dose, then 10–15 mg/kg every 8–12 hours as required, maximum daily dose to be given in divided doses; maximum 30 mg/kg per day. +""" + """ +Child 1–2 months +30–60 mg every 8 hours as required, maximum daily dose to be given in divided doses; maximum 60 mg/kg per day. +""" + """ +paracetamol +Child 3–5 months +60 mg every 4–6 hours; maximum 4 doses per day. +""" + """ +paracetamol +Child 6–23 months +120 mg every 4–6 hours; maximum 4 doses per day. +""" + ] + +"You are a helpful assistant" +|> init Ollama.Models.``openchat:7b`` +>>? "Why is the sky blue?" 
+|> Ollama.Conversation.print + +Ollama.options.temperature <- 0 +Ollama.options.seed <- 101 Ollama.options.penalize_newline <- true Ollama.options.top_k <- 10 Ollama.options.top_p <- 0.95 -// The winner is: -Ollama.runMistral_7b_instruct -|> testModel Ollama.Models.``mistral:7b-instruct`` +BNFC.paracetamolPO[0] +|> extractDoseQuantities Ollama.Models.``openchat:7b`` +|> Ollama.Conversation.print + + +Ollama.options.temperature <- 0.5 +Ollama.options.seed <- 101 +Ollama.options.penalize_newline <- false +Ollama.options.top_k <- 50 +Ollama.options.top_p <- 0.5 + + +""" +You are an empathic medical professional and translate medical topics to parents +that have a child admitted to a pediatric critical care unit. +""" +|> init Ollama.Models.``openchat:7b`` +>>? """ +Explain to the parents that their child has to be put on a ventilator and has to +be intubated. +""" +>>? "translate the previous message to Dutch" +|> Ollama.Conversation.print + + +""" +Je bent een empathische zorgverlener die ouders uitleg moet geven over hun kind +dat op de kinder IC ligt. + +Je geeft alle uitleg en antwoorden in het Nederlands. +""" +|> init Ollama.Models.``openchat:7b`` +>>? """ +Leg aan ouders uit dat hun kind aan de beademing moet worden gelegd en daarvoor +geintubeerd moet worden. +""" +|> Ollama.Conversation.print