
Commit 56ec34e

Added codex API
1 parent 6cee58c commit 56ec34e

3 files changed: +67 -19 lines

gpt3.go (+5)

@@ -36,6 +36,11 @@
 // 2 => This text is unsafe. This means that the text contains profane language, prejudiced or hateful language,
 // something that could be NSFW, or text that portrays certain groups/people in a harmful manner.
 //
+// Code Generation:
+//
+// Added to the completions API are the codex engines for code generation.
+// The Codex model series is a descendant of our base GPT-3 series that’s been trained on both
+// natural language and billions of lines of code.

 package gpt3
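
For context on what "added to the completions API" means in practice: a codex engine is selected on the same completions route as the other engines. The sketch below calls the engines-based completions endpoint directly with davinci-codex, independent of this wrapper; the endpoint URL, JSON field names, and the OPENAI_API_KEY environment variable are assumptions about the public v1 API, not something this commit defines.

// Minimal sketch, outside this wrapper: a code-generation completion against
// the assumed engines-based completions endpoint with the davinci-codex engine.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"os"
)

func main() {
	// Request body for a code-generation completion; values mirror the
	// completionCodexCall example added in this commit.
	payload, err := json.Marshal(map[string]interface{}{
		"prompt":      "// Write a Go function that reverses a string",
		"max_tokens":  300,
		"temperature": 0.5,
	})
	if err != nil {
		panic(err)
	}

	// Assumed endpoint: engines-based completions with the davinci-codex
	// engine (the same engine name as the gpt3.DAVINCI_CODEX constant).
	req, err := http.NewRequest(http.MethodPost,
		"https://api.openai.com/v1/engines/davinci-codex/completions",
		bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+os.Getenv("OPENAI_API_KEY"))

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	raw, err := io.ReadAll(resp.Body)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(raw)) // raw JSON; generated code arrives in the choices
}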

internal/examples/main.go (+32 -1)

@@ -9,7 +9,8 @@ import (
 )
 
 func main() {
-	contentFilterCall()
+	completionCodexCall()
+	//contentFilterCall()
 	//completionCall()
 	//answersCall()
 	//SearchCall()
@@ -80,6 +81,36 @@ func completionCall(){
 	}
 }
 
+func completionCodexCall(){
+	query, err := ioutil.ReadFile("prompts.txt")
+	if err != nil {
+		panic(err)
+	}
+	req := gpt3.CompletionRequest{
+		Prompt:           string(query),
+		MaxTokens:        300,
+		TopP:             1,
+		Temperature:      0.5,
+		FrequencyPenalty: 0.5,
+		PresencePenalty:  0,
+	}
+
+	cl := gpt3.ApiClient{}
+	cl.Setup(gpt3.DAVINCI_CODEX)
+
+	response, err := cl.Call(&req)
+	if err != nil {
+		log.Fatalln(err)
+	}
+
+	data := *response
+	results, _ := data.(*gpt3.CompletionResponse)
+
+	for _, t := range results.Choices {
+		fmt.Println(t)
+	}
+}
+
 func SearchCall(){
 	req := gpt3.SearchRequest{
 		Documents: []string{"White House","hospital","school","City"},
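
completionCodexCall reads its prompt from prompts.txt, which is not included in this commit. As a purely hypothetical illustration (the prompt text is invented), the helper below seeds that file with the comment-plus-signature style of prompt a code-generation engine is meant to complete:

// Hypothetical helper, not part of this commit: writes a code-generation
// prompt to prompts.txt for completionCodexCall to read.
package main

import "os"

func main() {
	// The prompt ends mid-function so the model continues the body.
	prompt := "// Return the largest value in the slice.\nfunc Max(values []int) int {\n"
	if err := os.WriteFile("prompts.txt", []byte(prompt), 0644); err != nil {
		panic(err)
	}
}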

models.go (+30 -18)

@@ -6,16 +6,28 @@ import (
 )
 
 const (
-	DAVINCI = "davinci"
-	CURIE = "curie"
-	BABBAGE = "babbage"
-	ADA = "ada"
-	CURIE_INSTRUCT_BETA = "curie-instruct-beta"
-	DAVINCI_INSTRUCT_BETA = "davinci-instruct-beta"
+	DAVINCI               = "davinci"
+	CURIE                 = "curie"
+	BABBAGE               = "babbage"
+	ADA                   = "ada"
+	CURIE_INSTRUCT_BETA   = "curie-instruct-beta"
+	DAVINCI_INSTRUCT_BETA = "davinci-instruct-beta"
+
+	// CURSING_FILTER_V6 Content filters moderate output and input to the api to
+	//avoid negative content generation
 	CURSING_FILTER_V6       = "cursing-filter-v6"
 	CONTENT_FILTER_DEV      = "content-filter-dev"
 	CONTENT_FILTER_ALPHA_C4 = "content-filter-alpha-c4"
+
+	// DAVINCI_CODEX Codex engines for code generation.
+	//Davinci Codex is more capable, particularly for translating natural language to code
+	DAVINCI_CODEX = "davinci-codex"
+
+	// CUSHMAN_CODEX Cushman Codex is almost as capable, but slightly faster.
+	//This speed advantage may be preferable for real-time applications.
+	CUSHMAN_CODEX = "cushman-codex"
 )
+
 //
 const (
 	getRequest = "GET"
@@ -111,9 +123,9 @@ func (r *FilesRequest) getRequestMeta(config RequestConfig) (string, string) {
 }
 
 // File models
-type FileRequest struct{
-	File os.File `json:"file"`
-	Purpose string `json:"purpose"`
+type FileRequest struct {
+	File    os.File `json:"file"`
+	Purpose string  `json:"purpose"`
 }
 
 type FileResponse struct {
@@ -172,15 +184,15 @@ func (r *CompletionResponse) GetBody() Response {
 }
 
 //ContentFilterRequest Content filter model structures
-type ContentFilterRequest struct{
-	Prompt string `json:"prompt"`
-	MaxTokens int `json:"max_tokens"`
-	Temperature float32 `json:"temperature,omitempty"`
-	TopP float32 `json:"top_p,omitempty"`
-	N int `json:"n,omitempty"`
-	Logprobs int `json:"logprobs,omitempty"`
-	PresencePenalty float32 `json:"presence_penalty,omitempty"`
-	FrequencyPenalty float32 `json:"frequency_penalty,omitempty"`
+type ContentFilterRequest struct {
+	Prompt           string  `json:"prompt"`
+	MaxTokens        int     `json:"max_tokens"`
+	Temperature      float32 `json:"temperature,omitempty"`
+	TopP             float32 `json:"top_p,omitempty"`
+	N                int     `json:"n,omitempty"`
+	Logprobs         int     `json:"logprobs,omitempty"`
+	PresencePenalty  float32 `json:"presence_penalty,omitempty"`
+	FrequencyPenalty float32 `json:"frequency_penalty,omitempty"`
 }
 
 func (r *ContentFilterRequest) attachResponse() Response {
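
The new constants distinguish the more capable DAVINCI_CODEX from the faster CUSHMAN_CODEX, so a caller switches engines simply by passing a different constant to Setup. Below is a minimal sketch mirroring the completionCodexCall example in this commit, assuming the same gpt3 package API and imports as internal/examples/main.go; the function name and inline prompt are hypothetical:

// completionCushmanCall is a hypothetical variant of completionCodexCall that
// picks the faster CUSHMAN_CODEX engine for a latency-sensitive call.
func completionCushmanCall() {
	req := gpt3.CompletionRequest{
		Prompt:      "// print hello world in Go",
		MaxTokens:   64,
		Temperature: 0.2,
	}

	cl := gpt3.ApiClient{}
	cl.Setup(gpt3.CUSHMAN_CODEX) // slightly less capable than DAVINCI_CODEX, but faster

	response, err := cl.Call(&req)
	if err != nil {
		log.Fatalln(err)
	}

	// Same response handling as completionCodexCall, with the type assertion checked.
	data := *response
	if results, ok := data.(*gpt3.CompletionResponse); ok {
		for _, t := range results.Choices {
			fmt.Println(t)
		}
	}
}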
