add queue support for archive delivery
This commit is contained in:
182
src/common/queue.js
Normal file
182
src/common/queue.js
Normal file
@@ -0,0 +1,182 @@
|
||||
import { getPool } from "../db/index.js";
|
||||
import { sleep } from "./sleep.js";
|
||||
import axios from 'axios';
|
||||
|
||||
// In-memory map of queue_url_id -> Date until which that URL is backed off.
// Shared by updateQueue/processBatch; entries are never removed, so it grows
// with the number of distinct failing URLs (process lifetime only).
const backOffUrls = {};
|
||||
|
||||
/**
 * Load up to `batchSize` queue entries of the given type that are due to run.
 *
 * @param {object} client - node-postgres client/pool exposing query().
 * @param {string} type - queue_type.value to match.
 * @param {number} batchSize - maximum number of rows to return.
 * @returns {Promise<Array<object>>} due rows, oldest next_run_ts first.
 */
async function getQueue(client, type, batchSize) {
  // A row is "due" when its URL's next_run_ts is now or in the past.
  const { rows } = await client.query(
    `
    SELECT
      q.id,
      qu.id AS queue_url_id,
      qu.value AS url,
      q.headers,
      q.data,
      q.run_count
    FROM queue q
    INNER JOIN queue_type qt ON q.queue_type_id = qt.id
    INNER JOIN queue_url qu ON q.queue_url_id = qu.id
    WHERE qt.value = $1
      AND qu.next_run_ts <= $2
    ORDER BY qu.next_run_ts ASC, q.id ASC
    LIMIT $3;
    `,
    [type, new Date(), batchSize],
  );
  return rows;
}
|
||||
|
||||
/**
 * Enqueue a delivery: ensure the URL row exists, then insert the queue entry.
 *
 * @param {object} client - node-postgres client/pool exposing query().
 * @param {string} type - queue_type.value the entry belongs to.
 * @param {string} url - destination URL (deduplicated in queue_url).
 * @param {object} headers - HTTP headers to send with the delivery.
 * @param {object} data - payload to POST.
 */
export async function putQueue(client, type, url, headers, data) {
  const insertUrlSql = `
    INSERT INTO queue_url (
      value
    ) VALUES (
      $1
    ) ON CONFLICT DO NOTHING;
  `;
  const insertEntrySql = `
    INSERT INTO queue (
      queue_type_id,
      queue_url_id,
      headers,
      data
    ) VALUES (
      (
        SELECT id
        FROM queue_type
        WHERE value = $1
      ),
      (
        SELECT id
        FROM queue_url
        WHERE value = $2
      ),
      $3,
      $4
    );
  `;
  // The conflict clause makes the URL insert idempotent; the entry insert then
  // resolves type and URL ids via subselects.
  await client.query(insertUrlSql, [url]);
  await client.query(insertEntrySql, [type, url, headers, data]);
}
|
||||
|
||||
/**
 * Record a failed delivery attempt: push the URL's next_run_ts out with
 * exponential back-off and bump the entry's run_count / last_fail.
 *
 * Back-off schedule: start at 30 seconds, double on every run, cap at
 * 120 minutes.
 *
 * @param {object} client - node-postgres client/pool exposing query().
 * @param {object} item - row from getQueue() (id, queue_url_id, run_count).
 * @param {string} lastFail - human-readable description of the failure.
 */
async function updateQueue(client, item, lastFail) {
  const baseDelaySeconds = 30;
  const maxDelaySeconds = 120 * 60;
  // Math.max guards a run_count of 0, which would otherwise halve the base delay.
  const delaySeconds = Math.min(
    baseDelaySeconds * Math.pow(2, Math.max(item.run_count - 1, 0)),
    maxDelaySeconds,
  );
  const now = new Date();
  // BUG FIX: nextRunTs was declared `const` but reassigned below, which threw
  // a TypeError whenever this URL was already backed off further into the future.
  let nextRunTs = new Date(now.getTime() + delaySeconds * 1000);
  const existing = backOffUrls[item.queue_url_id];
  if (existing && existing >= nextRunTs) {
    // Another entry already pushed this URL further out; keep the later deadline.
    nextRunTs = existing;
  } else {
    backOffUrls[item.queue_url_id] = nextRunTs;
  }
  await client.query(`
    UPDATE queue_url SET
      next_run_ts = $1,
      updated_ts = $2
    WHERE id = $3
  `, [
    nextRunTs.toISOString(),
    now.toISOString(),
    // BUG FIX: was item.id (the queue row's id) — queue_url must be keyed by
    // its own id, or the back-off lands on an unrelated URL row.
    item.queue_url_id,
  ]);
  await client.query(`
    UPDATE queue SET
      run_count = $1,
      last_fail = $2,
      updated_ts = $3
    WHERE id = $4
  `, [
    // run_count may arrive as a string from the driver; parse with explicit radix.
    Number.parseInt(item.run_count, 10) + 1,
    lastFail,
    now.toISOString(),
    item.id,
  ]);
}
|
||||
|
||||
/**
 * Remove a delivered (or otherwise finished) entry from the queue table.
 *
 * @param {object} client - node-postgres client/pool exposing query().
 * @param {number} id - queue.id of the row to delete.
 */
async function deleteQueue(client, id) {
  const deleteSql = `
    DELETE FROM queue
    WHERE id = $1
  `;
  await client.query(deleteSql, [id]);
}
|
||||
|
||||
/**
 * Fetch one batch of due queue entries and attempt delivery of each.
 *
 * Each item's payload is POSTed to its URL; a 2xx response deletes the entry,
 * anything else (including network-level failures) records the failure and
 * backs the URL off. Per-item errors are logged and do not abort the batch.
 *
 * @param {object} client - node-postgres client/pool exposing query().
 * @param {string} type - queue_type.value to process.
 * @returns {Promise<number>} number of rows fetched for this batch.
 */
async function processBatch(client, type) {
  const batchSize = 100;
  const queueList = await getQueue(client, type, batchSize);
  const now = new Date();
  // queueList is a plain array — for...of, not the original's misused for await...of.
  for (const item of queueList) {
    // Skip URLs that an earlier item in this run already backed off.
    if (backOffUrls[item.queue_url_id] && backOffUrls[item.queue_url_id] > now) {
      continue;
    }
    try {
      let entry = 'unknown';
      if (item.data.audit) {
        const uuid = item.data.audit.eventId ?? item.data.audit.agreementId;
        const auditType = item.data.audit.eventId ? 'event' : 'agreement';
        entry = `${auditType}:${uuid}`;
      }
      console.log(`${type.toUpperCase()} - ${entry} (${item.run_count})`);
      let result;
      try {
        result = await axios.post(
          item.url,
          item.data,
          {
            headers: item.headers,
          },
        );
      } catch (e) {
        // AxiosError; e.response is undefined for DNS/connection failures.
        result = e;
      }
      // BUG FIX: accept any 2xx, not only 200 — a 204 previously fell into the
      // failure path and crashed dereferencing result.response on a success object.
      if (result?.status >= 200 && result?.status < 300) {
        await deleteQueue(client, item.id);
      } else {
        // BUG FIX: the HTTP status of a failed request lives at
        // result.response.status (result.status was undefined on errors), and
        // result.response may be absent entirely — chain optionally instead of
        // throwing a TypeError here.
        let lastFail = `${result?.response?.status ?? result?.status ?? result?.message ?? 'unknown'}`;
        const detail = result?.response?.data?.error;
        if (detail) {
          lastFail += ` - ${detail}`;
        }
        await updateQueue(client, item, lastFail);
      }
    } catch (e) {
      // Keep processing the rest of the batch on unexpected per-item errors.
      console.error(e);
    }
  }
  return queueList.length;
}
|
||||
|
||||
/**
 * Poll the queue forever: process batches back-to-back while work exists,
 * pausing between polls only once a batch comes back empty. Never returns.
 *
 * @param {object} client - node-postgres client/pool exposing query().
 * @param {string} type - queue_type.value to watch.
 */
async function watchQueue(client, type) {
  const idleDelayMs = 30 * 1000; // milliseconds (original comment said "seconds")
  for (;;) {
    const processed = await processBatch(client, type);
    // Only sleep when the queue was drained; otherwise loop immediately.
    if (processed === 0) {
      await sleep(idleDelayMs);
    }
  }
}
|
||||
|
||||
/**
 * Entry point: acquire a DB handle and watch the 'audit' queue forever.
 *
 * NOTE(review): watchQueue loops indefinitely, so the release() below only
 * runs if it throws — the try/finally guarantees the handle is not leaked in
 * that case (the original skipped release() on error).
 */
export async function watchAudits() {
  const client = await getPool();
  try {
    await watchQueue(client, 'audit');
  } finally {
    await client.release();
  }
}
|
||||
Reference in New Issue
Block a user