import OpenAI from "openai";

export default {
  async fetch(request, env, ctx) {
    // Read the API key from a Worker secret (e.g. `wrangler secret put OPENAI_API_KEY`).
    const openai = new OpenAI({
      apiKey: env.OPENAI_API_KEY
    });

    // Ask for a streamed chat completion instead of waiting for the full reply.
    const stream = await openai.chat.completions.create({
      model: "gpt-3.5-turbo",
      messages: [{ role: "user", content: "Tell me a story using 900 chars." }],
      stream: true
    });

    // Pipe tokens through a TransformStream so the client starts receiving
    // text as soon as the first chunks arrive.
    const { readable, writable } = new TransformStream();
    const writer = writable.getWriter();
    const textEncoder = new TextEncoder();

    // Keep writing in the background after the Response has been returned;
    // returning the readable side first is what makes the endpoint stream.
    ctx.waitUntil((async () => {
      for await (const part of stream) {
        const text = part.choices[0]?.delta?.content || "";
        console.log(text);
        await writer.write(textEncoder.encode(text));
      }
      await writer.close();
    })());

    return new Response(readable, {
      status: 200,
      headers: {
        'Access-Control-Allow-Origin': '*',
        'Access-Control-Allow-Headers': 'Origin, X-Requested-With, Content-Type, Accept',
        'Content-Type': 'text/plain; charset=utf-8'
      }
    });
  }
};
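On the client side, the response body can be read incrementally with a ReadableStream reader, so the story appears as the model generates it. Here is a minimal browser-side sketch, assuming the Worker above is reachable at /stream (the path is illustrative, not part of the Worker code):

const response = await fetch("/stream"); // hypothetical route where the Worker is deployed
const reader = response.body.getReader();
const decoder = new TextDecoder();
while (true) {
  const { done, value } = await reader.read();
  if (done) break;
  // Each chunk is a UTF-8 slice of the story, appended as it arrives.
  document.body.textContent += decoder.decode(value, { stream: true });
}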