We intend to make queues available under the free plan (with limits, of course) in the future, although we don't have a timeline yet.






A leading option for it was `caches.default`, but the requests seemed to hit before the cache write and subsequent read had completed, so we went with `await env.QUEUE.send(message)`.
the `cloudflare:test` module.
With `ctx.waitUntil(promise)` followed by `msg.ack()`, the consumer does not receive other batches until all messages in this batch finish processing (e.g. via `Promise.allSettled`).
cloudflare:testctx.waitUntil(promisse)msg.ack{}promise.allSettledasync queue(batch, env, ctx) {
for (const message of batch.messages) {
try {
ctx.waitUntil(processBody(message.body))
message.ack()
} catch (e) {
message.retry()
}
}
}async queue(batch, env, ctx) {
for (const message of batch.messages) {
try {
//Lots of seconds because call AI APIs and other stuff
await processBody(message.body)
message.ack()
} catch (e) {
message.retry()
}
}
}async queue(batch, env, ctx) {
await Promise.allSettled(
batch.messages.map((message) => processBody(message.body)
.then(() => {
// maybe `res?.ok` if its a fetch or something
message.ack()
})
.catch(() => {
message.retry()
})
)).finally(() => {
console.log('batch completed')
})
}[[queues.consumers]]
# Consumer settings for the "my-webhook" queue.
queue = "my-webhook"
# Batch delivery: up to 100 messages per batch, or whatever has
# accumulated once the 3-second window elapses.
max_batch_size = 100
max_batch_timeout = 3 #seconds
# Retry policy: after 3 failed attempts a message is routed to the
# dead-letter queue configured below; retries are delayed 30 seconds.
max_retries = 3
retry_delay = 30 #seconds
dead_letter_queue = "my-webhook-dlq"
# NOTE(review): presumably caps concurrent consumer invocations at 20 —
# confirm against the Cloudflare Queues consumer documentation.
max_concurrency = 20