diff --git a/.gitignore b/.gitignore
index 1eedcbe4..c8681171 100644
--- a/.gitignore
+++ b/.gitignore
@@ -144,10 +144,10 @@
!/src/Informedica.Utils.Lib/BCL/
!/src/Informedica.Utils.Lib/Scripts/
!/src/Informedica.Utils.Lib/Notebooks/
-# Informedica.Ollama.Lib
-!/src/Informedica.Ollama.Lib/
-!/src/Informedica.Ollama.Lib/Scripts/
-!/src/Informedica.Ollama.Lib/Notebooks/
+# Informedica.OpenAI.Lib
+!/src/Informedica.OpenAI.Lib/
+!/src/Informedica.OpenAI.Lib/Scripts/
+!/src/Informedica.OpenAI.Lib/Notebooks/
# Informedica.GenCore.Lib
!/src/Informedica.GenCore.Lib/
!/src/Informedica.GenCore.Lib/Scripts/
diff --git a/GenPres.sln b/GenPres.sln
index a2074bfe..67c1b945 100644
--- a/GenPres.sln
+++ b/GenPres.sln
@@ -55,7 +55,7 @@ Project("{F2A71F9B-5D33-465A-A702-920D77279786}") = "Informedica.KinderFormulari
EndProject
Project("{F2A71F9B-5D33-465A-A702-920D77279786}") = "Informedica.GenForm.Tests", "tests\Informedica.GenForm.Tests\Informedica.GenForm.Tests.fsproj", "{73C084AF-54F4-4B39-A7F9-F0A2A3AB9DD5}"
EndProject
-Project("{F2A71F9B-5D33-465A-A702-920D77279786}") = "Informedica.Ollama.Lib", "src\Informedica.Ollama.Lib\Informedica.Ollama.Lib.fsproj", "{5B13678B-1260-4B21-B5FF-7F8C8A9B1BD2}"
+Project("{F2A71F9B-5D33-465A-A702-920D77279786}") = "Informedica.OpenAI.Lib", "src\Informedica.OpenAI.Lib\Informedica.OpenAI.Lib.fsproj", "{5B13678B-1260-4B21-B5FF-7F8C8A9B1BD2}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
diff --git a/paket.dependencies b/paket.dependencies
index 5ce0cfd2..ca4eeb21 100644
--- a/paket.dependencies
+++ b/paket.dependencies
@@ -22,6 +22,7 @@ nuget Feliz.UseElmish
nuget Feliz ~> 2
nuget Feliz.Markdown
nuget Feliz.Router
+nuget NJsonSchema
nuget Saturn ~> 0
@@ -47,4 +48,3 @@ nuget Expecto.Hopac
# have to add because of issue: https://github.com/fsprojects/Paket/issues/4197
nuget Microsoft.AspNetCore.Authentication.JwtBearer ~> 8
-
diff --git a/paket.lock b/paket.lock
index 7fd6c337..f5f2b5ae 100644
--- a/paket.lock
+++ b/paket.lock
@@ -273,7 +273,14 @@ NUGET
Microsoft.TestPlatform.ObjectModel (>= 17.8)
Newtonsoft.Json (>= 13.0.1)
Mono.Cecil (0.11.5)
+ Namotion.Reflection (3.1.1)
+ Microsoft.CSharp (>= 4.3)
Newtonsoft.Json (13.0.3)
+ NJsonSchema (11.0)
+ Namotion.Reflection (>= 3.1.1)
+ Newtonsoft.Json (>= 13.0.3)
+ NJsonSchema.Annotations (>= 11.0)
+ NJsonSchema.Annotations (11.0)
NuGet.Frameworks (6.8)
Saturn (0.16.1)
FSharp.Control.Websockets (>= 0.2.2)
diff --git a/src/Informedica.Ollama.Lib/paket.references b/src/Informedica.Ollama.Lib/paket.references
deleted file mode 100644
index a65178e5..00000000
--- a/src/Informedica.Ollama.Lib/paket.references
+++ /dev/null
@@ -1,2 +0,0 @@
-Newtonsoft.Json
-FSharp.Core
\ No newline at end of file
diff --git a/src/Informedica.Ollama.Lib/Informedica.Ollama.Lib.fsproj b/src/Informedica.OpenAI.Lib/Informedica.OpenAI.Lib.fsproj
similarity index 82%
rename from src/Informedica.Ollama.Lib/Informedica.Ollama.Lib.fsproj
rename to src/Informedica.OpenAI.Lib/Informedica.OpenAI.Lib.fsproj
index 2e3495b2..f00fb672 100644
--- a/src/Informedica.Ollama.Lib/Informedica.Ollama.Lib.fsproj
+++ b/src/Informedica.OpenAI.Lib/Informedica.OpenAI.Lib.fsproj
@@ -3,8 +3,11 @@
net8.0
true
+ Informedica.Ollama.Lib
+
+
diff --git a/src/Informedica.Ollama.Lib/Notebooks/Examples.ipynb b/src/Informedica.OpenAI.Lib/Notebooks/Examples.ipynb
similarity index 100%
rename from src/Informedica.Ollama.Lib/Notebooks/Examples.ipynb
rename to src/Informedica.OpenAI.Lib/Notebooks/Examples.ipynb
diff --git a/src/Informedica.Ollama.Lib/Notebooks/Functions.ipynb b/src/Informedica.OpenAI.Lib/Notebooks/Functions.ipynb
similarity index 100%
rename from src/Informedica.Ollama.Lib/Notebooks/Functions.ipynb
rename to src/Informedica.OpenAI.Lib/Notebooks/Functions.ipynb
diff --git a/src/Informedica.Ollama.Lib/Notebooks/Prompts.ipynb b/src/Informedica.OpenAI.Lib/Notebooks/Prompts.ipynb
similarity index 100%
rename from src/Informedica.Ollama.Lib/Notebooks/Prompts.ipynb
rename to src/Informedica.OpenAI.Lib/Notebooks/Prompts.ipynb
diff --git a/src/Informedica.Ollama.Lib/Notebooks/Validation.ipynb b/src/Informedica.OpenAI.Lib/Notebooks/Validation.ipynb
similarity index 100%
rename from src/Informedica.Ollama.Lib/Notebooks/Validation.ipynb
rename to src/Informedica.OpenAI.Lib/Notebooks/Validation.ipynb
diff --git a/src/Informedica.Ollama.Lib/Notebooks/load.fsx b/src/Informedica.OpenAI.Lib/Notebooks/load.fsx
similarity index 100%
rename from src/Informedica.Ollama.Lib/Notebooks/load.fsx
rename to src/Informedica.OpenAI.Lib/Notebooks/load.fsx
diff --git a/src/Informedica.Ollama.Lib/Ollama.fs b/src/Informedica.OpenAI.Lib/Ollama.fs
similarity index 86%
rename from src/Informedica.Ollama.Lib/Ollama.fs
rename to src/Informedica.OpenAI.Lib/Ollama.fs
index 9cbaf889..3f09c343 100644
--- a/src/Informedica.Ollama.Lib/Ollama.fs
+++ b/src/Informedica.OpenAI.Lib/Ollama.fs
@@ -1,4 +1,4 @@
-namespace Informedica.Ollama.Lib
+namespace Informedica.OpenAI.Lib
/// Utility methods to use ollama
@@ -8,6 +8,7 @@ module Ollama =
open System
open System.Net.Http
open System.Text
+ open NJsonSchema
open Newtonsoft.Json
@@ -230,6 +231,9 @@ module Ollama =
    [<Literal>]
let show = "http://localhost:11434/api/show"
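+    /// Ollama's OpenAI-compatible chat completions endpoint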
+    [<Literal>]
+ let openAI = "http://localhost:11434/v1/chat/completions"
+
// Create an HTTP client
@@ -344,6 +348,59 @@ module Ollama =
}
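+
+    /// Chat with `model` in JSON mode: `messages` is the prior conversation and
+    /// `message` the new prompt. A JSON schema generated from 'ReturnType with
+    /// NJsonSchema is sent as the requested response format; the reply is
+    /// returned as Success or Error.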
+ let json<'ReturnType> model messages (message : Message) =
+ let map msg =
+ {|
+ role = msg.Role
+ content = msg.Content
+ |}
+
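+        // Generate a JSON schema for 'ReturnType using NJsonSchema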
+ let schema = JsonSchema.FromType<'ReturnType>()
+ let content =
+ {|
+ model = model
+ format = "json"
+ response_format = {|
+ ``type`` = "json_object"
+ schema = schema
+ |}
+ messages =
+ [ message |> map ]
+ |> List.append (messages |> List.map map)
+ options = options
+ stream = false
+ |}
+ |> JsonConvert.SerializeObject
+
+ let content = new StringContent(content, Encoding.UTF8, "application/json")
+ // Asynchronous API call
+ async {
+ let! response = client.PostAsync(EndPoints.chat, content) |> Async.AwaitTask
+ let! responseBody = response.Content.ReadAsStringAsync() |> Async.AwaitTask
+
+ printfn $"responseBody: {responseBody}"
+
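+            // A non-empty "error" field in the response counts as a failure;
+            // otherwise the body is deserialized and wrapped in Success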
+ let modelResponse =
+ try
+ let resp =
+ responseBody
+ |> JsonConvert.DeserializeObject
+
+ match resp.error with
+ | s when s |> String.IsNullOrEmpty ->
+ responseBody
+ |> JsonConvert.DeserializeObject
+ |> Success
+ | s ->
+ s |> Error
+ with
+ | e ->
+ e.ToString() |> Error
+
+ return modelResponse
+ }
+
+
let extract tools model messages (message : Message) =
let map msg =
diff --git a/src/Informedica.Ollama.Lib/Prompts.fs b/src/Informedica.OpenAI.Lib/Prompts.fs
similarity index 79%
rename from src/Informedica.Ollama.Lib/Prompts.fs
rename to src/Informedica.OpenAI.Lib/Prompts.fs
index 74ef92fb..1d9790d0 100644
--- a/src/Informedica.Ollama.Lib/Prompts.fs
+++ b/src/Informedica.OpenAI.Lib/Prompts.fs
@@ -1,13 +1,13 @@
-namespace Informedica.Ollama.Lib
+namespace Informedica.OpenAI.Lib
module Prompts =
let tasks = """
-You are a world-class prompt engineering assistant. Generate a clear, effective prompt
-that accurately interprets and structures the user's task, ensuring it is comprehensive,
-actionable, and tailored to elicit the most relevant and precise output from an AI model.
-When appropriate enhance the prompt with the required persona, format, style, and
+You are a world-class prompt engineering assistant. Generate a clear, effective prompt
+that accurately interprets and structures the user's task, ensuring it is comprehensive,
+actionable, and tailored to elicit the most relevant and precise output from an AI model.
+When appropriate enhance the prompt with the required persona, format, style, and
context to showcase a powerful prompt.
"""
@@ -19,7 +19,7 @@ context to showcase a powerful prompt.
let assistentAsk = """
-You are a world-class AI assistant. Your communication is brief and concise.
+You are a world-class AI assistant. Your communication is brief and concise.
You're precise and answer only when you're confident in the high quality of your answer.
"""
@@ -31,11 +31,11 @@ You're precise and answer only when you're confident in the high quality of your
let extractData = """
-You are a world-class expert for function-calling and data extraction.
-Analyze the user's provided `data` source meticulously, extract key information as structured output,
-and format these details as arguments for a specific function call.
-Ensure strict adherence to user instructions, particularly those regarding argument style and formatting
-as outlined in the function's docstrings, prioritizing detail orientation and accuracy in alignment
+You are a world-class expert for function-calling and data extraction.
+Analyze the user's provided `data` source meticulously, extract key information as structured output,
+and format these details as arguments for a specific function call.
+Ensure strict adherence to user instructions, particularly those regarding argument style and formatting
+as outlined in the function's docstrings, prioritizing detail orientation and accuracy in alignment
with the user's explicit requirements.
"""
diff --git a/src/Informedica.OpenAI.Lib/Scripts/AI.fsx b/src/Informedica.OpenAI.Lib/Scripts/AI.fsx
new file mode 100644
index 00000000..e12f0b7c
--- /dev/null
+++ b/src/Informedica.OpenAI.Lib/Scripts/AI.fsx
@@ -0,0 +1,239 @@
+
+
+#r "nuget: Newtonsoft.Json"
+#r "nuget: NJsonSchema"
+
+#load "../Texts.fs"
+#load "../Prompts.fs"
+#load "../Ollama.fs"
+
+
+open Newtonsoft.Json
+open Informedica.OpenAI.Lib
+open Ollama.Operators
+
+
+
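+// A single "get_weather" tool definition used to try out function calling / extraction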
+let tools =
+ {|
+ city = {|
+ ``type`` = "string"
+ description = "The city to get the weather for"
+ |}
+ state = {|
+ ``type`` = "string"
+ description = "The state to get the weather for"
+ |}
+ |}
+ |> Ollama.Tool.create
+ "get_weather"
+ "Get the weather."
+ ["city"; "state"]
+ |> List.singleton
+
+"What is the weather in Seattle?"
+|> Ollama.Message.user
+|> Ollama.extract
+ tools
+ "joefamous/firefunction-v1:q3_k"
+ []
+|> Async.RunSynchronously
+|> function
+ | Ollama.Response.Success resp ->
+ resp.message.content
+ |> printfn "%s"
+ | _ -> ()
+
+
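+// Walk a model through dose-quantity extraction, chaining follow-up questions with >>?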
+let extractDoseQuantities model text =
+ Texts.systemDoseQuantityExpert
+ |> init model
+ >>? $"""
+The text between the ''' describes dose quantities for a
+substance:
+
+'''{text}'''
+
+For which substance?
+Give the answer as Substance : ?
+"""
+ >>? """
+What is the unit used for the substance, the substance unit?
+Give the answer as SubstanceUnit : ?
+"""
+ >>? """
+What is the unit to adjust the dose for?
+Give the answer as AdjustUnit : ?
+"""
+ >>? """
+What is the time unit for the dose frequency?
+Give the answer as TimeUnit : ?
+"""
+ >>? """
+What is the maximum dose per time in SubstanceUnit/TimeUnit?
+Give the answer as MaximumDosePerTime: ?
+"""
+ >>? """
+What is the dose, adjusted for weight in SubstanceUnit/AdjustUnit/TimeUnit?
+Give the answer as AdjustedDosePerTime: ?
+"""
+ >>? """
+What is the number of doses per TimeUnit?
+Give the answer as Frequency: ?
+"""
+ >>? """
+Summarize the previous answers as:
+
+- Substance: ?
+- SubstanceUnit: ?
+- AdjustUnit: ?
+- TimeUnit: ?
+- MaximumDosePerTime: ?
+- AdjustedDosePerTime: ?
+- Frequency: ?
+
+"""
+
+
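+// Run the dose extraction over all test texts for a given model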
+let testModel model =
+
+ printfn $"\n\n# Running: {model}\n\n"
+ for text in Texts.testTexts do
+ extractDoseQuantities model text
+ |> Ollama.Conversation.print
+
+
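+// Run the dose extraction test against a list of local models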
+let testAll () =
+
+ [
+ Ollama.Models.gemma
+ Ollama.Models.``gemma:7b-instruct``
+ Ollama.Models.llama2
+ Ollama.Models.``llama2:13b-chat``
+ Ollama.Models.mistral
+ Ollama.Models.``mistral:7b-instruct``
+ ]
+ |> List.iter testModel
+
+
+
+Ollama.options.penalize_newline <- true
+Ollama.options.top_k <- 10
+Ollama.options.top_p <- 0.95
+
+
+Ollama.Models.``openchat:7b``
+|> testModel
+
+
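+// Paracetamol oral dosing texts in the style of the BNF for Children (BNFC)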
+module BNFC =
+
+ let paracetamolPO =
+ [
+ """
+paracetamol
+Neonate 28 weeks to 32 weeks corrected gestational age
+20 mg/ kg for 1 dose, then 10–15 mg/kg every 8–12 hours as required, maximum daily dose to be given in divided doses; maximum 30 mg/kg per day.
+"""
+ """
+Child 1–2 months
+30–60 mg every 8 hours as required, maximum daily dose to be given in divided doses; maximum 60 mg/kg per day.
+"""
+ """
+paracetamol
+Child 3–5 months
+60 mg every 4–6 hours; maximum 4 doses per day.
+"""
+ """
+paracetamol
+Child 6–23 months
+120 mg every 4–6 hours; maximum 4 doses per day.
+"""
+ ]
+
+
+
+"You are a helpful assistant"
+|> init Ollama.Models.``openchat:7b``
+>>? "Why is the sky blue?"
+|> Ollama.Conversation.print
+
+
+Ollama.options.temperature <- 0
+Ollama.options.seed <- 101
+Ollama.options.penalize_newline <- true
+Ollama.options.top_k <- 10
+Ollama.options.top_p <- 0.95
+
+
+BNFC.paracetamolPO[0]
+|> extractDoseQuantities Ollama.Models.``openchat:7b``
+|> Ollama.Conversation.print
+
+
+Ollama.options.temperature <- 0.5
+Ollama.options.seed <- 101
+Ollama.options.penalize_newline <- false
+Ollama.options.top_k <- 50
+Ollama.options.top_p <- 0.5
+
+
+"""
+You are an empathic medical professional who translates medical topics for parents
+who have a child admitted to a pediatric critical care unit.
+"""
+|> init Ollama.Models.``openchat:7b``
+>>? """
+Explain to the parents that their child has to be put on a ventilator and has to
+be intubated.
+"""
+//>>? "translate the previous message to Dutch"
+|> Ollama.Conversation.print
+
+
+"""
+Je bent een empathische zorgverlener die ouders uitleg moet geven over hun kind
+dat op de kinder IC ligt.
+
+Je geeft alle uitleg en antwoorden in het Nederlands.
+"""
+|> init Ollama.Models.``openchat:7b``
+>>? """
+Leg aan ouders uit dat hun kind aan de beademing moet worden gelegd en daarvoor
+geintubeerd moet worden.
+"""
+|> Ollama.Conversation.print
+
+let x =
+ """
+Je bent een empathische zorgverlener die ouders uitleg moet geven over hun kind
+dat op de kinder IC ligt.
+
+Je geeft alle uitleg en antwoorden in het Nederlands.
+"""
+ |> init Ollama.Models.``openchat:7b``
+
+
+"""
+Je bent een empathische zorgverlener die ouders uitleg moet geven over hun kind
+dat op de kinder IC ligt.
+
+Je geeft alle uitleg en antwoorden in het Nederlands.
+"""
+|> Ollama.Message.system
+|> Ollama.chat Ollama.Models.llama2 []
+|> Async.RunSynchronously
+
+
+Ollama.listModels ()
+
+
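+// Request a structured JSON answer; the anonymous record type supplies the schema for Ollama.json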
+""""
+What is the minimal age for a neonate 28 weeks to 32 weeks corrected gestational age
+Reply just in one JSON.
+"""
+|> Ollama.Message.user
+|> Ollama.json<{| number: int; unit: string |}>
+ Ollama.Models.llama2
+ []
+|> Async.RunSynchronously
diff --git a/src/Informedica.Ollama.Lib/Scripts/AI.fsx b/src/Informedica.OpenAI.Lib/Texts.fs
similarity index 70%
rename from src/Informedica.Ollama.Lib/Scripts/AI.fsx
rename to src/Informedica.OpenAI.Lib/Texts.fs
index 8b885128..fe8f5efa 100644
--- a/src/Informedica.Ollama.Lib/Scripts/AI.fsx
+++ b/src/Informedica.OpenAI.Lib/Texts.fs
@@ -1,11 +1,4 @@
-
-
-#r "nuget: Newtonsoft.Json"
-
-
-#load "../Prompts.fs"
-#load "../Ollama.fs"
-
+namespace Informedica.OpenAI.Lib
module Texts =
@@ -243,220 +236,6 @@ You answer all questions with ONLY the shortest possible answer to the question.
}
"""
-open Newtonsoft.Json
-open Informedica.Ollama.Lib
-open Ollama.Operators
-
-
-let tools =
- {|
- city = {|
- ``type`` = "string"
- description = "The city to get the weather for"
- |}
- state = {|
- ``type`` = "string"
- description = "The state to get the weather for"
- |}
- |}
- |> Ollama.Tool.create
- "get_weather"
- "Get the weather."
- ["city"; "state"]
- |> List.singleton
-
-"What is the weather in Seattle?"
-|> Ollama.Message.user
-|> Ollama.extract
- tools
- "joefamous/firefunction-v1:q3_k"
- []
-|> Async.RunSynchronously
-|> function
- | Ollama.Response.Success resp ->
- resp.message.content
- |> printfn "%s"
- | _ -> ()
-
-
-let extractDoseQuantities model text =
- Texts.systemDoseQuantityExpert
- |> init model
- >>? $"""
-The text between the ''' describes dose quantities for a
-substance:
-
-'''{text}'''
-
-For which substance?
-Give the answer as Substance : ?
-"""
- >>? """
-What is the unit used for the substance, the substance unit?
-Give the answer as SubstanceUnit : ?
-"""
- >>? """
-What is the unit to adjust the dose for?
-Give the answer as AdjustUnit : ?
-"""
- >>? """
-What is the time unit for the dose frequency?
-Give the answer as TimeUnit : ?
-"""
- >>? """
-What is the maximum dose per time in SubstanceUnit/TimeUnit?
-Give the answer as MaximumDosePerTime: ?
-"""
- >>? """
-What is the dose, adjusted for weight in SubstanceUnit/AdjustUnit/TimeUnit?
-Give the answer as AdjustedDosePerTime: ?
-"""
- >>? """
-What is the number of doses per TimeUnit?
-Give the answer as Frequency: ?
-"""
- >>? """
-Summarize the previous answers as:
-
-- Substance: ?
-- SubstanceUnit: ?
-- AdjustUnit: ?
-- TimeUnit: ?
-- MaximumDosePerTime: ?
-- AdjustedDosePerTime: ?
-- Frequency: ?
-
-"""
-
-
-let testModel model =
-
- printfn $"\n\n# Running: {model}\n\n"
- for text in Texts.testTexts do
- extractDoseQuantities model text
- |> Ollama.Conversation.print
-
-
-let testAll () =
-
- [
- Ollama.Models.gemma
- Ollama.Models.``gemma:7b-instruct``
- Ollama.Models.llama2
- Ollama.Models.``llama2:13b-chat``
- Ollama.Models.mistral
- Ollama.Models.``mistral:7b-instruct``
- ]
- |> List.iter testModel
-
-
-
-Ollama.options.penalize_newline <- true
-Ollama.options.top_k <- 10
-Ollama.options.top_p <- 0.95
-
-
-Ollama.Models.``openchat:7b``
-|> testModel
-
-
-module BNFC =
-
- let paracetamolPO =
- [
- """
-paracetamol
-Neonate 28 weeks to 32 weeks corrected gestational age
-20 mg/ kg for 1 dose, then 10–15 mg/kg every 8–12 hours as required, maximum daily dose to be given in divided doses; maximum 30 mg/kg per day.
-"""
- """
-Child 1–2 months
-30–60 mg every 8 hours as required, maximum daily dose to be given in divided doses; maximum 60 mg/kg per day.
-"""
- """
-paracetamol
-Child 3–5 months
-60 mg every 4–6 hours; maximum 4 doses per day.
-"""
- """
-paracetamol
-Child 6–23 months
-120 mg every 4–6 hours; maximum 4 doses per day.
-"""
- ]
-
-
-
-"You are a helpful assistant"
-|> init Ollama.Models.``openchat:7b``
->>? "Why is the sky blue?"
-|> Ollama.Conversation.print
-
-
-Ollama.options.temperature <- 0
-Ollama.options.seed <- 101
-Ollama.options.penalize_newline <- true
-Ollama.options.top_k <- 10
-Ollama.options.top_p <- 0.95
-
-
-BNFC.paracetamolPO[0]
-|> extractDoseQuantities Ollama.Models.``openchat:7b``
-|> Ollama.Conversation.print
-
-
-Ollama.options.temperature <- 0.5
-Ollama.options.seed <- 101
-Ollama.options.penalize_newline <- false
-Ollama.options.top_k <- 50
-Ollama.options.top_p <- 0.5
-
-
-"""
-You are an empathic medical professional and translate medical topics to parents
-that have a child admitted to a pediatric critical care unit.
-"""
-|> init Ollama.Models.``openchat:7b``
->>? """
-Explain to the parents that there child as to be put on a ventilator and has to
-be intubated.
-"""
-//>>? "translate the previous message to Dutch"
-|> Ollama.Conversation.print
-
-
-"""
-Je bent een empathische zorgverlener die ouders uitleg moet geven over hun kind
-dat op de kinder IC ligt.
-
-Je geeft alle uitleg en antwoorden in het Nederlands.
-"""
-|> init Ollama.Models.``openchat:7b``
->>? """
-Leg aan ouders uit dat hun kind aan de beademing moet worden gelegd en daarvoor
-geintubeerd moet worden.
-"""
-|> Ollama.Conversation.print
-
-let x =
- """
-Je bent een empathische zorgverlener die ouders uitleg moet geven over hun kind
-dat op de kinder IC ligt.
-
-Je geeft alle uitleg en antwoorden in het Nederlands.
-"""
- |> init Ollama.Models.``openchat:7b``
-"""
-Je bent een empathische zorgverlener die ouders uitleg moet geven over hun kind
-dat op de kinder IC ligt.
-
-Je geeft alle uitleg en antwoorden in het Nederlands.
-"""
-|> Ollama.Message.system
-|> Ollama.chat Ollama.Models.llama2 []
-|> Async.RunSynchronously
-
-Ollama.listModels ()
\ No newline at end of file
diff --git a/src/Informedica.OpenAI.Lib/Utils.fs b/src/Informedica.OpenAI.Lib/Utils.fs
new file mode 100644
index 00000000..7274d4ce
--- /dev/null
+++ b/src/Informedica.OpenAI.Lib/Utils.fs
@@ -0,0 +1,4 @@
+namespace Informedica.OpenAI.Lib
+
+module Utils
+
diff --git a/src/Informedica.OpenAI.Lib/paket.references b/src/Informedica.OpenAI.Lib/paket.references
new file mode 100644
index 00000000..4ab0bbef
--- /dev/null
+++ b/src/Informedica.OpenAI.Lib/paket.references
@@ -0,0 +1,3 @@
+Newtonsoft.Json
+FSharp.Core
+NJsonSchema
\ No newline at end of file
diff --git a/tests/Informedica.Utils.Tests/paket.references b/tests/Informedica.Utils.Tests/paket.references
index dca623c0..efdb4cd1 100644
--- a/tests/Informedica.Utils.Tests/paket.references
+++ b/tests/Informedica.Utils.Tests/paket.references
@@ -4,4 +4,5 @@ Expecto.FsCheck
Microsoft.Net.Test.Sdk
YoloDev.Expecto.TestSdk
Unquote
-FSharp.Core
\ No newline at end of file
+FSharp.Core
+NJsonSchema
\ No newline at end of file