export default {
  async fetch(req, env) {
    // Text-generation model to run on Workers AI.
    const model = "@cf/meta/llama-2-7b-chat-int8";

    // With `stream: true`, the AI binding returns a ReadableStream of
    // server-sent events instead of a single JSON object.
    const stream = await env.DOCTALK_AI.run(model, {
      prompt: "What is a tomato?",
      stream: true,
    });

    // Pass the stream straight through to the client as an event stream.
    return new Response(stream, {
      headers: {
        "Content-Type": "text/event-stream",
      },
    });
  },
};
This code is correct: the Worker streams the model's output back to the client as server-sent events rather than waiting for the full response. Note that DOCTALK_AI must match the name of the AI binding configured for the Worker.
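Because the response is sent with Content-Type: text/event-stream, a browser client can read it incrementally as chunks arrive. Below is a minimal sketch of consuming the stream, assuming the Worker is deployed at a hypothetical /api/chat route; the route name and the readChatStream helper are illustrative and not part of the original code.

async function readChatStream() {
  // Hypothetical route; adjust to wherever the Worker above is deployed.
  const response = await fetch("/api/chat");
  const reader = response.body.getReader();
  const decoder = new TextDecoder();

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    // Each chunk contains one or more server-sent event lines,
    // e.g. data: {"response":"..."}
    console.log(decoder.decode(value, { stream: true }));
  }
}

Each decoded chunk contains raw SSE lines, so a real client would typically parse out the data: payloads and append the partial text to the page as it arrives.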