Response promise resolves before the endpoint has finished processing

I want to interact with OpenAI's API in my Next.js + tRPC app. It seems that my frontend is not waiting for the backend to finish the API call before using the response, which leads to an error because the response is still undefined. This is my endpoint:
export const llmRouter = createTRPCRouter({
  searchBarInputToQuery: publicProcedure
    .input(z.object({ text: z.string().nullish() }))
    .mutation(async ({ input }) => {
      if (!input.text) {
        console.log("Empty input, returning empty response");
        return { role: "", content: "" };
      }
      console.log("Calling OpenAI API with input:", input.text);
      const response = await openai.createChatCompletion({
        model: "gpt-3.5-turbo",
        messages: [
          { role: "system", content: SYSTEM_MESSAGE_ENGLISH },
          { role: "user", content: input.text },
        ],
      });
      const message = response.data.choices[0]!.message!;
      return message;
    }),
});
I am calling from the frontend like this:
const [userInput, setUserInput] = useState("");
const searchBarInputToQuery = api.llm.searchBarInputToQuery.useMutation();

const handleSubmit = async () => {
  props.setIsLoading(true);
  props.setCurrentPage(Pages.RESULTS);
  props.setMessages([{ role: "user", content: userInput }]);
  const response = await searchBarInputToQuery.mutate({ text: userInput });
  console.log(response);
  props.setMessages([...props.messages, response.data!]);
  props.setIsLoading(false);
};
I am getting the error:
TypeError: Cannot read properties of undefined (reading 'data')
  > 28 | props.setMessages([...props.messages, response.data!]);
       |                                                 ^
...
I have also tried changing the endpoint to a query and calling it with useQuery, but that caused a different set of problems: the endpoint was either called on every change to userInput, or never at all when I routed the text through an intermediate variable that was only updated in handleSubmit. Roughly, that attempt looked like the sketch below.
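This is only a reconstruction from memory, not the exact code; the names match the snippets above:

// Endpoint changed from .mutation(...) to .query(...) with the same body as above
searchBarInputToQuery: publicProcedure
  .input(z.object({ text: z.string().nullish() }))
  .query(async ({ input }) => { /* ...unchanged... */ }),

// Frontend: useQuery takes the input directly, so every keystroke that
// updates userInput changes the query input and re-runs the request
const searchBarInputToQuery = api.llm.searchBarInputToQuery.useQuery({
  text: userInput,
});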
Note: when I log the result of the OpenAI API call on the backend, the frontend error appears before the result is logged correctly.