ai queue
src/utils/rateLimitedQueue.ts | 92 (new file)
@@ -0,0 +1,92 @@
// src/utils/rateLimitedQueue.ts
// ------------------------------------------------------------
// A tiny FIFO, single‑instance queue that spaces API requests by
// a configurable delay. Import `enqueueApiCall()` wherever you
// call the AI API and the queue will make sure calls are sent
// one after another with the defined pause in‑between.
// ------------------------------------------------------------

import dotenv from "dotenv";

dotenv.config();

/**
 * Delay (in **milliseconds**) between two consecutive API calls.
 *
 * Configure it in your `.env` file, e.g.
 *   AI_RATE_LIMIT_DELAY_MS=2000
 * Defaults to **1000 ms** (≈ 1 request / second) when not set or invalid.
 */
const RATE_LIMIT_DELAY_MS = Number.parseInt(process.env.AI_RATE_LIMIT_DELAY_MS ?? "1000", 10) || 1000;

/**
 * Internal task type. Every task returns a Promise so callers get the
 * real API response transparently.
 */
export type Task<T = unknown> = () => Promise<T>;

class RateLimitedQueue {
  private queue: Task[] = [];
  private processing = false;
  private delayMs = RATE_LIMIT_DELAY_MS;

  /**
   * Schedule a task. Returns a Promise that resolves/rejects with the
   * task result once the queue reaches it.
   */
  add<T>(task: Task<T>): Promise<T> {
    return new Promise<T>((resolve, reject) => {
      this.queue.push(async () => {
        try {
          const result = await task();
          resolve(result);
        } catch (err) {
          reject(err);
        }
      });
      this.process();
    });
  }

  /**
   * Change the delay at runtime – e.g. if you reload env vars without
   * restarting the server.
   */
  setDelay(ms: number): void {
    if (!Number.isFinite(ms) || ms < 0) return;
    this.delayMs = ms;
  }
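
  // Example (illustrative only – assumes AI_RATE_LIMIT_DELAY_MS was re-read
  // from the environment elsewhere before calling this):
  //   queue.setDelay(Number.parseInt(process.env.AI_RATE_LIMIT_DELAY_MS ?? "1000", 10) || 1000);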

  // ---------------------------------------
  // 🌐 Internal helpers
  // ---------------------------------------
  private async process(): Promise<void> {
    if (this.processing) return;
    this.processing = true;

    while (this.queue.length > 0) {
      const next = this.queue.shift();
      if (!next) continue;
      await next();
      // Wait before the next one
      await new Promise((r) => setTimeout(r, this.delayMs));
    }

    this.processing = false;
  }
}

// ------------------------------------------------------------
// Export a **singleton** instance so every import shares the
// same queue. That way the rate‑limit is enforced globally.
// ------------------------------------------------------------
const queue = new RateLimitedQueue();

/**
 * Helper for convenience: `enqueueApiCall(() => fetch(...))`.
 */
export function enqueueApiCall<T>(task: Task<T>): Promise<T> {
  return queue.add(task);
}

export default queue;
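
// ------------------------------------------------------------
// Usage sketch (illustrative, not part of the module): the
// `aiClient.generate()` call below is a hypothetical helper –
// substitute whatever function actually performs the AI request.
//
//   import { enqueueApiCall } from "./utils/rateLimitedQueue";
//
//   const reply = await enqueueApiCall(() => aiClient.generate(prompt));
//
// Calls are dispatched FIFO, one at a time, spaced by
// RATE_LIMIT_DELAY_MS milliseconds; each caller receives the resolved
// (or rejected) result of its own task.
// ------------------------------------------------------------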