Book a Demo!
CoCalc Logo Icon
Store · Features · Docs · Share · Support · News · About · Policies · Sign Up · Sign In
kardolus
GitHub Repository: kardolus/chatgpt-cli
Path: blob/main/api/http/http_test.go
2649 views
1
package http_test
2
3
import (
4
"bytes"
5
"strings"
6
"testing"
7
8
"github.com/kardolus/chatgpt-cli/api/http"
9
. "github.com/onsi/gomega"
10
"github.com/sclevine/spec"
11
"github.com/sclevine/spec/report"
12
)
13
14
func TestUnitHTTP(t *testing.T) {
15
spec.Run(t, "Testing the HTTP Client", testHTTP, spec.Report(report.Terminal{}))
16
}
17
18
// testHTTP defines the specs for RestCaller.ProcessResponse, covering the
// legacy chat-completions stream, the GPT-5 /v1/responses SSE stream, and
// the malformed-JSON error path.
func testHTTP(t *testing.T, when spec.G, it spec.S) {
	var subject http.RestCaller

	const responsesPath = "/v1/responses" // use http.ResponsesPath if you export it

	it.Before(func() {
		RegisterTestingT(t)
		subject = http.RestCaller{}
	})

	when("ProcessResponse()", func() {
		it("parses a legacy stream as expected (any endpoint)", func() {
			var sink bytes.Buffer
			// legacy works via both branches; use a non-responses endpoint to
			// ensure we exercise the original/legacy code path.
			subject.ProcessResponse(strings.NewReader(legacyStream), &sink, "/v1/chat/completions")
			Expect(sink.String()).To(Equal("a b c\n"))
		})

		it("parses a GPT-5 SSE stream when endpoint is /v1/responses", func() {
			var sink bytes.Buffer
			subject.ProcessResponse(strings.NewReader(gpt5Stream), &sink, responsesPath)
			// deltas are "a", " b", " c" then response.completed -> newline
			Expect(sink.String()).To(Equal("a b c\n"))
		})

		it("throws an error when the legacy json is invalid", func() {
			payload := `data: {"invalid":"json"` // missing closing brace
			want := "Error: unexpected end of JSON input\n"

			sink := &bytes.Buffer{}
			subject.ProcessResponse(strings.NewReader(payload), sink, "/v1/chat/completions")
			Expect(sink.String()).To(Equal(want))
		})
	})
}
57
58
// legacyStream is a canned legacy chat-completions SSE body: one JSON chunk
// per "data:" line (role chunk, three content deltas "a", " b", " c", a
// finish_reason chunk) terminated by "data: [DONE]". The deltas concatenate
// to "a b c", matching the expectation in the specs above.
const legacyStream = `
data: {"id":"id-1","object":"chat.completion.chunk","created":1,"model":"model-1","choices":[{"delta":{"role":"assistant"},"index":0,"finish_reason":null}]}

data: {"id":"id-2","object":"chat.completion.chunk","created":2,"model":"model-1","choices":[{"delta":{"content":"a"},"index":0,"finish_reason":null}]}

data: {"id":"id-3","object":"chat.completion.chunk","created":3,"model":"model-1","choices":[{"delta":{"content":" b"},"index":0,"finish_reason":null}]}

data: {"id":"id-4","object":"chat.completion.chunk","created":4,"model":"model-1","choices":[{"delta":{"content":" c"},"index":0,"finish_reason":null}]}

data: {"id":"id-5","object":"chat.completion.chunk","created":5,"model":"model-1","choices":[{"delta":{},"index":0,"finish_reason":"stop"}]}

data: [DONE]
`
71
72
// gpt5Stream is a minimal GPT-5 /v1/responses SSE body that the new parser
// should handle: response.created, output_item/content_part setup events,
// three response.output_text.delta events ("a", " b", " c"), and a final
// response.completed event. The deltas concatenate to "a b c".
const gpt5Stream = `
event: response.created
data: {"type":"response.created"}

event: response.output_item.added
data: {"type":"response.output_item.added","output_index":0,"item":{"id":"msg_1","type":"message","status":"in_progress","content":[],"role":"assistant"}}

event: response.content_part.added
data: {"type":"response.content_part.added","item_id":"msg_1","output_index":0,"content_index":0,"part":{"type":"output_text","annotations":[],"logprobs":[],"text":""}}

event: response.output_text.delta
data: {"type":"response.output_text.delta","item_id":"msg_1","output_index":0,"content_index":0,"delta":"a"}

event: response.output_text.delta
data: {"type":"response.output_text.delta","item_id":"msg_1","output_index":0,"content_index":0,"delta":" b"}

event: response.output_text.delta
data: {"type":"response.output_text.delta","item_id":"msg_1","output_index":0,"content_index":0,"delta":" c"}

event: response.completed
data: {"type":"response.completed","response":{"status":"completed"}}
`
95
96