// Query a Cloudflare Workers AI chat model with a system + user prompt.
// NOTE(review): the original file contained this entire script twice, which is a
// SyntaxError (const redeclaration) — the duplicate copy has been removed.
const API_BASE_URL = "https://api.cloudflare.com/client/v4/accounts/{myID}/ai/run/";
// Read the token from the environment (e.g. a GitHub Actions secret);
// never hard-code credentials in source control.
const API_AUTH_TOKEN = process.env.API_AUTH_TOKEN;
const model = "@cf/meta/llama-2-7b-chat-int8";

if (!API_BASE_URL || !API_AUTH_TOKEN) {
  throw new Error('API credential is wrong or not configured from Github Action');
}

// Request headers only. The Access-Control-Allow-* entries were removed:
// those are CORS *response* headers a server sends — they have no effect
// on an outgoing request. Content-Type is required so the API parses the
// JSON body.
const headers = {
  'Authorization': `Bearer ${API_AUTH_TOKEN}`,
  'Content-Type': 'application/json',
};

// assumes systemPrompt and userPrompt are defined elsewhere in this file — confirm.
const inputs = [
  { role: 'system', content: systemPrompt },
  { role: 'user', content: userPrompt },
];
// Cloudflare's chat endpoint expects a `messages` array; the original key
// `message` (singular) is not recognized, so the prompts were silently ignored.
const payload = {
  messages: inputs,
};

try {
  console.log("Requesting to LLM...");
  // `mode: 'no-cors'` was removed: it is a browser-only fetch option that
  // strips the Authorization header and makes the response opaque
  // (response.ok / response.json() would be unusable).
  const response = await fetch(`${API_BASE_URL}${model}`, {
    method: 'POST',
    headers,
    body: JSON.stringify(payload),
  });
  if (!response.ok) {
    throw new Error(`Error request from LLM: ${response.status}`);
  }
  console.log("Requesting completed. Waiting for output...");
  const output = await response.json();
  console.log(output);
}
catch (error) {
  // Log to stderr and rethrow so CI (e.g. the GitHub Action) fails visibly.
  console.error("API Error", error);
  throw error;
}