-
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathopenai-consult.el
49 lines (47 loc) · 1.42 KB
/
openai-consult.el
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
;; capf is not so great because the latency is not there yet
;; but an async consult one that sounds great
(defun openai-api-capf ()
  "Completion-at-point function backed by the OpenAI completions API.
The prompt sent to the model is the buffer text from the nearest of
`backward-paragraph' or `beginning-of-defun' (whichever yields the
earlier position) up to point.  Returns a capf triple whose start and
end are both point, so candidates are inserted at point rather than
replacing existing text.
Note: `openai-api-retrieve-sync' blocks, so expect latency while the
request is in flight."
  (let* ((beg (min (save-excursion
                     (backward-paragraph 1)
                     (point))
                   (save-excursion
                     (beginning-of-defun)
                     (point))))
         (end (point))
         ;; Capture the prompt now; the lambda below may run after
         ;; point has moved.
         (prompt (buffer-substring beg end)))
    (list
     end
     end
     (completion-table-dynamic
      (lambda (_)
        ;; `openai-api-retrieve-sync' returns the response buffer;
        ;; `openai-api-choices' reads the candidates out of it.
        (with-current-buffer
            (openai-api-retrieve-sync
             `((model . "code-cushman-001")
               (max_tokens . 14)
               (temperature . 0)
               (prompt . ,prompt)))
          ;; Fixed typo: was `opanai-api-split-words', which would
          ;; signal `void-function' on first completion attempt.
          (mapcan
           #'openai-api-split-words
           (openai-api-choices))))))))
;; make a consult one
;; with a -- paradigm for temperature
;; or feed async stuff