Initial commit
This commit is contained in:
32
shared/auth.js
Normal file
32
shared/auth.js
Normal file
@@ -0,0 +1,32 @@
|
||||
const path = require('path');
|
||||
const { authNodesDir } = require('./paths');
|
||||
const { readJsonIfExists, writeJson } = require('./fs');
|
||||
|
||||
function nodeTokenPath(nodeId) {
|
||||
return path.join(authNodesDir, `${nodeId}.json`);
|
||||
}
|
||||
|
||||
async function getNodeToken(nodeId) {
|
||||
return readJsonIfExists(nodeTokenPath(nodeId));
|
||||
}
|
||||
|
||||
async function verifyNodeToken(nodeId, token) {
|
||||
const record = await getNodeToken(nodeId);
|
||||
return Boolean(record && record.node_id === nodeId && record.token === token);
|
||||
}
|
||||
|
||||
async function saveNodeToken(nodeId, token) {
|
||||
await writeJson(nodeTokenPath(nodeId), {
|
||||
node_id: nodeId,
|
||||
token,
|
||||
schema_version: 'v1',
|
||||
updated_at: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getNodeToken,
|
||||
verifyNodeToken,
|
||||
saveNodeToken,
|
||||
};
|
||||
|
||||
38
shared/bootstrap.js
vendored
Normal file
38
shared/bootstrap.js
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
const {
|
||||
dataDir,
|
||||
resourcesDir,
|
||||
resourceTypeToDirName,
|
||||
workOrdersPendingDir,
|
||||
workOrdersRunningDir,
|
||||
workOrdersFinishedDir,
|
||||
eventsDir,
|
||||
logsDir,
|
||||
snapshotsSystemDir,
|
||||
snapshotsTenantsDir,
|
||||
idempotencyDir,
|
||||
authNodesDir,
|
||||
} = require('./paths');
|
||||
const { ensureDir } = require('./fs');
|
||||
|
||||
async function bootstrapDataLayout() {
|
||||
await ensureDir(dataDir);
|
||||
await ensureDir(resourcesDir);
|
||||
|
||||
for (const dirName of Object.values(resourceTypeToDirName)) {
|
||||
await ensureDir(`${resourcesDir}/${dirName}`);
|
||||
}
|
||||
|
||||
await ensureDir(workOrdersPendingDir);
|
||||
await ensureDir(workOrdersRunningDir);
|
||||
await ensureDir(workOrdersFinishedDir);
|
||||
await ensureDir(eventsDir);
|
||||
await ensureDir(logsDir);
|
||||
await ensureDir(snapshotsSystemDir);
|
||||
await ensureDir(snapshotsTenantsDir);
|
||||
await ensureDir(idempotencyDir);
|
||||
await ensureDir(authNodesDir);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
bootstrapDataLayout,
|
||||
};
|
||||
17
shared/context.js
Normal file
17
shared/context.js
Normal file
@@ -0,0 +1,17 @@
|
||||
const { randomUUID } = require('crypto');
|
||||
|
||||
// Returns the trimmed string when `value` is a non-blank string, otherwise
// null. NOTE(review): despite the name, this does not validate UUID syntax —
// any non-blank string passes through.
function normalizeUuid(value) {
  if (typeof value !== 'string') {
    return null;
  }

  const trimmed = value.trim();
  return trimmed === '' ? null : trimmed;
}
|
||||
|
||||
function createContext(input) {
|
||||
return {
|
||||
request_id: normalizeUuid(input && input.request_id) || randomUUID(),
|
||||
correlation_id: normalizeUuid(input && input.correlation_id) || randomUUID(),
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
createContext,
|
||||
};
|
||||
|
||||
14
shared/errors.js
Normal file
14
shared/errors.js
Normal file
@@ -0,0 +1,14 @@
|
||||
/**
 * Application-level error carrying an HTTP status code, a stable
 * machine-readable error code, and optional structured details.
 */
class AppError extends Error {
  /**
   * @param {number} statusCode - HTTP status to respond with.
   * @param {string} code - machine-readable error identifier.
   * @param {string} message - human-readable description.
   * @param {object} [details] - extra structured context; defaults to {}.
   */
  constructor(statusCode, code, message, details) {
    super(message);

    this.name = 'AppError';
    this.statusCode = statusCode;
    this.code = code;
    this.details = details ? details : {};
  }
}
|
||||
|
||||
// Shared domain error type used by all services.
module.exports = {
  AppError,
};
|
||||
|
||||
42
shared/events.js
Normal file
42
shared/events.js
Normal file
@@ -0,0 +1,42 @@
|
||||
const path = require('path');
|
||||
const { randomUUID } = require('crypto');
|
||||
const { eventsDir } = require('./paths');
|
||||
const { writeJson } = require('./fs');
|
||||
|
||||
// Extracts the YYYY-MM-DD day component from an ISO-8601 timestamp string.
function dayString(timestamp) {
  return timestamp.substring(0, 10);
}
|
||||
|
||||
async function emitEvent({
|
||||
type,
|
||||
resource_type,
|
||||
resource_id,
|
||||
request_id,
|
||||
correlation_id,
|
||||
payload,
|
||||
}) {
|
||||
const timestamp = new Date().toISOString();
|
||||
const event = {
|
||||
event_id: randomUUID(),
|
||||
schema_version: 'v1',
|
||||
type,
|
||||
resource_type,
|
||||
resource_id,
|
||||
timestamp,
|
||||
request_id: request_id || null,
|
||||
correlation_id: correlation_id || null,
|
||||
payload: payload || {},
|
||||
};
|
||||
|
||||
await writeJson(
|
||||
path.join(eventsDir, dayString(timestamp), `${timestamp.replace(/[:.]/g, '-')}-${event.event_id}.json`),
|
||||
event
|
||||
);
|
||||
|
||||
return event;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
emitEvent,
|
||||
};
|
||||
|
||||
72
shared/fs.js
Normal file
72
shared/fs.js
Normal file
@@ -0,0 +1,72 @@
|
||||
const fs = require('fs/promises');
|
||||
const path = require('path');
|
||||
const { randomUUID } = require('crypto');
|
||||
|
||||
// Creates dirPath and any missing parents; a no-op when it already exists.
async function ensureDir(dirPath) {
  await fs.mkdir(dirPath, { recursive: true });
}
|
||||
|
||||
/**
 * Reads and parses a JSON file. Throws when the file is missing or its
 * contents are not valid JSON.
 */
async function readJson(filePath) {
  const contents = await fs.readFile(filePath, 'utf8');
  return JSON.parse(contents);
}
|
||||
|
||||
/**
 * Like readJson, but returns null instead of throwing when the file does
 * not exist. Parse errors still propagate.
 */
async function readJsonIfExists(filePath) {
  const exists = await fileExists(filePath);
  return exists ? readJson(filePath) : null;
}
|
||||
|
||||
/**
 * Atomically writes `value` as pretty-printed JSON: the content goes to a
 * unique temp file first, then an atomic rename replaces the target, so
 * readers never observe a partially written document.
 *
 * Fix: the temp file is removed when the write or rename fails, so failed
 * writes no longer leak `.tmp` files next to the target.
 */
async function writeJson(filePath, value) {
  await ensureDir(path.dirname(filePath));
  const tempPath = `${filePath}.${randomUUID()}.tmp`;

  try {
    await fs.writeFile(tempPath, JSON.stringify(value, null, 2));
    await fs.rename(tempPath, filePath);
  } catch (error) {
    // Best-effort cleanup of the orphaned temp file, then rethrow.
    await fs.rm(tempPath, { force: true });
    throw error;
  }
}
|
||||
|
||||
// True when filePath is accessible; false on any access error (missing
// file, permission denied, …).
async function fileExists(filePath) {
  try {
    await fs.access(filePath);
  } catch {
    return false;
  }

  return true;
}
|
||||
|
||||
/**
 * Lists the absolute paths of all .json files directly inside dirPath,
 * sorted lexicographically. Creates the directory first when it is missing,
 * so the result for a new directory is an empty array rather than an error.
 */
async function listJsonFiles(dirPath) {
  await ensureDir(dirPath);

  const entries = await fs.readdir(dirPath, { withFileTypes: true });
  const jsonFiles = [];

  for (const entry of entries) {
    if (entry.isFile() && entry.name.endsWith('.json')) {
      jsonFiles.push(path.join(dirPath, entry.name));
    }
  }

  return jsonFiles.sort();
}
|
||||
|
||||
/**
 * Appends `value` as a single NDJSON line, creating parent directories as
 * needed.
 */
async function appendJsonLine(filePath, value) {
  await ensureDir(path.dirname(filePath));

  const line = `${JSON.stringify(value)}\n`;
  await fs.appendFile(filePath, line);
}
|
||||
|
||||
async function moveFile(fromPath, toPath) {
|
||||
await ensureDir(path.dirname(toPath));
|
||||
await fs.rename(fromPath, toPath);
|
||||
}
|
||||
|
||||
async function removeFile(filePath) {
|
||||
await fs.rm(filePath, { force: true });
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
ensureDir,
|
||||
readJson,
|
||||
readJsonIfExists,
|
||||
writeJson,
|
||||
fileExists,
|
||||
listJsonFiles,
|
||||
appendJsonLine,
|
||||
moveFile,
|
||||
removeFile,
|
||||
};
|
||||
29
shared/idempotency.js
Normal file
29
shared/idempotency.js
Normal file
@@ -0,0 +1,29 @@
|
||||
const path = require('path');
|
||||
const { createHash } = require('crypto');
|
||||
const { idempotencyDir } = require('./paths');
|
||||
const { readJsonIfExists, writeJson } = require('./fs');
|
||||
|
||||
// Deterministic record location: hashing `scope:key` with sha256 keeps
// caller-provided keys from injecting unsafe characters or unbounded
// filename lengths into the path.
function recordPath(scope, key) {
  const digest = createHash('sha256').update(`${scope}:${key}`).digest('hex');
  return path.join(idempotencyDir, scope, `${digest}.json`);
}
|
||||
|
||||
async function getIdempotencyRecord(scope, key) {
|
||||
return readJsonIfExists(recordPath(scope, key));
|
||||
}
|
||||
|
||||
async function saveIdempotencyRecord(scope, key, value) {
|
||||
await writeJson(recordPath(scope, key), {
|
||||
schema_version: 'v1',
|
||||
scope,
|
||||
key,
|
||||
value,
|
||||
created_at: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getIdempotencyRecord,
|
||||
saveIdempotencyRecord,
|
||||
};
|
||||
|
||||
71
shared/logs.js
Normal file
71
shared/logs.js
Normal file
@@ -0,0 +1,71 @@
|
||||
const path = require('path');
|
||||
const { logsDir } = require('./paths');
|
||||
const { appendJsonLine } = require('./fs');
|
||||
|
||||
// Key substrings whose values must never reach the logs.
const SECRET_PATTERNS = ['token', 'secret', 'password', 'authorization'];

/**
 * Recursively copies `value`, replacing any object entry whose key contains
 * a secret-looking substring (case-insensitive) with '[redacted]'. Arrays
 * are sanitized element-wise; primitives pass through unchanged.
 */
function sanitizeValue(value) {
  if (Array.isArray(value)) {
    return value.map((item) => sanitizeValue(item));
  }

  if (!value || typeof value !== 'object') {
    return value;
  }

  const sanitized = {};

  for (const [key, entryValue] of Object.entries(value)) {
    const lowerKey = key.toLowerCase();
    const isSecret = SECRET_PATTERNS.some((pattern) => lowerKey.includes(pattern));

    sanitized[key] = isSecret ? '[redacted]' : sanitizeValue(entryValue);
  }

  return sanitized;
}
|
||||
|
||||
// Extracts the YYYY-MM-DD day component from an ISO-8601 timestamp.
// NOTE(review): duplicated in shared/events.js — consider a shared helper.
function dayString(timestamp) {
  return timestamp.substring(0, 10);
}
|
||||
|
||||
/**
 * Writes one structured log entry to the per-day, per-service NDJSON file
 * and echoes it to stdout. Metadata passes through sanitizeValue so
 * secret-looking keys never reach the logs. Returns the entry written.
 */
async function log(entry) {
  const timestamp = new Date().toISOString();

  const logEntry = {
    timestamp,
    level: entry.level || 'info',
    service: entry.service,
    node_id: entry.node_id || null,
    tenant_id: entry.tenant_id || null,
    request_id: entry.request_id || null,
    correlation_id: entry.correlation_id || null,
    action: entry.action || 'unknown',
    result: entry.result || 'unknown',
    metadata: sanitizeValue(entry.metadata || {}),
  };

  const logFile = path.join(logsDir, dayString(timestamp), `${logEntry.service}.ndjson`);
  await appendJsonLine(logFile, logEntry);

  process.stdout.write(`${JSON.stringify(logEntry)}\n`);
  return logEntry;
}
|
||||
|
||||
function createLogger(baseFields) {
|
||||
return {
|
||||
info(action, result, metadata) {
|
||||
return log({ ...baseFields, level: 'info', action, result, metadata });
|
||||
},
|
||||
warn(action, result, metadata) {
|
||||
return log({ ...baseFields, level: 'warn', action, result, metadata });
|
||||
},
|
||||
error(action, result, metadata) {
|
||||
return log({ ...baseFields, level: 'error', action, result, metadata });
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
log,
|
||||
createLogger,
|
||||
};
|
||||
|
||||
43
shared/paths.js
Normal file
43
shared/paths.js
Normal file
@@ -0,0 +1,43 @@
|
||||
const path = require('path');
|
||||
|
||||
const rootDir = path.resolve(__dirname, '..');
|
||||
const dataDir = process.env.DATA_DIR
|
||||
? path.resolve(process.env.DATA_DIR)
|
||||
: path.join(rootDir, 'data');
|
||||
|
||||
const resourceTypeToDirName = {
|
||||
tenant: 'tenants',
|
||||
node: 'nodes',
|
||||
service: 'services',
|
||||
deployment: 'deployments',
|
||||
resource_limits: 'resource-limits',
|
||||
network: 'networks',
|
||||
volume: 'volumes',
|
||||
};
|
||||
|
||||
function getResourceDir(resourceType) {
|
||||
const dirName = resourceTypeToDirName[resourceType];
|
||||
|
||||
if (!dirName) {
|
||||
throw new Error(`Unknown resource type: ${resourceType}`);
|
||||
}
|
||||
|
||||
return path.join(dataDir, 'resources', dirName);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
rootDir,
|
||||
dataDir,
|
||||
resourceTypeToDirName,
|
||||
getResourceDir,
|
||||
resourcesDir: path.join(dataDir, 'resources'),
|
||||
workOrdersPendingDir: path.join(dataDir, 'work-orders', 'pending'),
|
||||
workOrdersRunningDir: path.join(dataDir, 'work-orders', 'running'),
|
||||
workOrdersFinishedDir: path.join(dataDir, 'work-orders', 'finished'),
|
||||
eventsDir: path.join(dataDir, 'events'),
|
||||
logsDir: path.join(dataDir, 'logs'),
|
||||
snapshotsSystemDir: path.join(dataDir, 'snapshots', 'system'),
|
||||
snapshotsTenantsDir: path.join(dataDir, 'snapshots', 'tenants'),
|
||||
idempotencyDir: path.join(dataDir, 'idempotency'),
|
||||
authNodesDir: path.join(dataDir, 'auth', 'nodes'),
|
||||
};
|
||||
94
shared/resources.js
Normal file
94
shared/resources.js
Normal file
@@ -0,0 +1,94 @@
|
||||
const path = require('path');
|
||||
const { getResourceDir, resourceTypeToDirName } = require('./paths');
|
||||
const { ensureDir, listJsonFiles, readJsonIfExists, writeJson } = require('./fs');
|
||||
|
||||
// Absolute path of the JSON document backing one resource.
function resourcePath(resourceType, resourceId) {
  return path.join(getResourceDir(resourceType), `${resourceId}.json`);
}

// Current time as an ISO-8601 string.
function nowIso() {
  return new Date().toISOString();
}

/**
 * Normalizes raw fields into a complete v1 resource document, supplying
 * empty-object defaults for missing state/metadata and the current time
 * for missing timestamps.
 */
function createResourceDocument({
  id,
  resource_type,
  desired_state,
  current_state,
  last_applied_state,
  metadata,
  created_at,
  updated_at,
}) {
  const fallbackTimestamp = nowIso();

  return {
    id,
    resource_type,
    schema_version: 'v1',
    desired_state: desired_state || {},
    current_state: current_state || {},
    last_applied_state: last_applied_state || {},
    metadata: metadata || {},
    created_at: created_at || fallbackTimestamp,
    updated_at: updated_at || fallbackTimestamp,
  };
}
|
||||
|
||||
// Loads a resource document, or null when it does not exist.
async function getResource(resourceType, resourceId) {
  return readJsonIfExists(resourcePath(resourceType, resourceId));
}

/**
 * Persists a resource document, normalizing it through
 * createResourceDocument. created_at is preserved from the existing
 * document when present; updated_at is always refreshed.
 * @returns {Promise<object>} the document as written.
 */
async function saveResource(document) {
  const existing = await getResource(document.resource_type, document.id);
  const createdAt = existing ? existing.created_at : document.created_at;

  const next = createResourceDocument({
    ...document,
    created_at: createdAt,
    updated_at: nowIso(),
  });

  await writeJson(resourcePath(next.resource_type, next.id), next);
  return next;
}
|
||||
|
||||
/**
 * Applies a partial state update to an existing resource and returns the
 * saved document, or null when the resource does not exist.
 * NOTE(review): falsy patch fields keep the existing value, so a field
 * cannot be cleared by passing null — confirm this is intended.
 */
async function patchResourceState(resourceType, resourceId, patch) {
  const existing = await getResource(resourceType, resourceId);

  if (!existing) {
    return null;
  }

  const merged = {
    ...existing,
    desired_state: patch.desired_state || existing.desired_state,
    current_state: patch.current_state || existing.current_state,
    last_applied_state: patch.last_applied_state || existing.last_applied_state,
    metadata: patch.metadata || existing.metadata,
  };

  return saveResource(merged);
}
|
||||
|
||||
async function listResources(resourceType) {
|
||||
const files = await listJsonFiles(getResourceDir(resourceType));
|
||||
return Promise.all(files.map((filePath) => readJsonIfExists(filePath)));
|
||||
}
|
||||
|
||||
async function listAllResources() {
|
||||
const result = {};
|
||||
|
||||
for (const resourceType of Object.keys(resourceTypeToDirName)) {
|
||||
await ensureDir(getResourceDir(resourceType));
|
||||
result[resourceType] = await listResources(resourceType);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
createResourceDocument,
|
||||
getResource,
|
||||
saveResource,
|
||||
patchResourceState,
|
||||
listResources,
|
||||
listAllResources,
|
||||
};
|
||||
|
||||
80
shared/snapshots.js
Normal file
80
shared/snapshots.js
Normal file
@@ -0,0 +1,80 @@
|
||||
const path = require('path');
|
||||
const { randomUUID } = require('crypto');
|
||||
const { snapshotsSystemDir, snapshotsTenantsDir } = require('./paths');
|
||||
const { readJsonIfExists, writeJson } = require('./fs');
|
||||
const { listAllResources } = require('./resources');
|
||||
|
||||
/**
 * Compares desired vs current state for one resource document.
 * NOTE(review): equality is by serialized JSON, so states that are deep
 * equal but serialize keys in a different order report out-of-sync.
 */
function buildResourceDiff(resource) {
  const desired = resource.desired_state || {};
  const current = resource.current_state || {};

  return {
    in_sync: JSON.stringify(desired) === JSON.stringify(current),
    desired_state: desired,
    current_state: current,
  };
}
|
||||
|
||||
/**
 * Restricts a resources map (resource type -> documents) to entries owned
 * by the given tenant. Tenant documents match on their own id; all other
 * types match on desired_state.tenant_id. Null entries are dropped.
 */
function filterTenantResources(resources, tenantId) {
  const entries = Object.entries(resources).map(([resourceType, items]) => {
    const matching = items.filter((item) => {
      if (!item) {
        return false;
      }

      if (resourceType === 'tenant') {
        return item.id === tenantId;
      }

      return Boolean(item.desired_state) && item.desired_state.tenant_id === tenantId;
    });

    return [resourceType, matching];
  });

  return Object.fromEntries(entries);
}
|
||||
|
||||
/**
 * Captures the current resource inventory plus desired/current diffs and
 * writes it to the snapshot store.
 *
 * @param {string} scope - 'system' or 'tenant:<tenantId>'.
 * @param {{request_id: string, correlation_id: string}} context
 * @returns {Promise<object>} the snapshot document as written.
 */
async function createSnapshot(scope, context) {
  const allResources = await listAllResources();
  const isSystemScope = scope === 'system';
  const tenantId = isSystemScope ? null : scope.replace('tenant:', '');
  const selected = isSystemScope ? allResources : filterTenantResources(allResources, tenantId);

  const diffs = {};

  for (const [resourceType, items] of Object.entries(selected)) {
    diffs[resourceType] = items.map((item) => ({
      resource_id: item.id,
      diff: buildResourceDiff(item),
    }));
  }

  const snapshot = {
    snapshot_id: randomUUID(),
    schema_version: 'v1',
    scope,
    created_at: new Date().toISOString(),
    request_id: context.request_id,
    correlation_id: context.correlation_id,
    resources: selected,
    diffs,
  };

  // System snapshots overwrite a single latest.json; tenant snapshots are
  // keyed by tenant id.
  const filePath = isSystemScope
    ? path.join(snapshotsSystemDir, 'latest.json')
    : path.join(snapshotsTenantsDir, `${tenantId}.json`);

  await writeJson(filePath, snapshot);
  return snapshot;
}
|
||||
|
||||
async function getLatestSystemSnapshot() {
|
||||
return readJsonIfExists(path.join(snapshotsSystemDir, 'latest.json'));
|
||||
}
|
||||
|
||||
async function getLatestTenantSnapshot(tenantId) {
|
||||
return readJsonIfExists(path.join(snapshotsTenantsDir, `${tenantId}.json`));
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
createSnapshot,
|
||||
getLatestSystemSnapshot,
|
||||
getLatestTenantSnapshot,
|
||||
};
|
||||
167
shared/work-orders.js
Normal file
167
shared/work-orders.js
Normal file
@@ -0,0 +1,167 @@
|
||||
const fs = require('fs/promises');
|
||||
const path = require('path');
|
||||
const { randomUUID } = require('crypto');
|
||||
const { workOrdersPendingDir, workOrdersRunningDir, workOrdersFinishedDir } = require('./paths');
|
||||
const { ensureDir, fileExists, listJsonFiles, readJson, readJsonIfExists, writeJson } = require('./fs');
|
||||
|
||||
// Document locations for a work order in each lifecycle state.
const pendingPath = (workOrderId) => path.join(workOrdersPendingDir, `${workOrderId}.json`);

const runningPath = (workOrderId) => path.join(workOrdersRunningDir, `${workOrderId}.json`);

const finishedPath = (workOrderId) => path.join(workOrdersFinishedDir, `${workOrderId}.json`);

// Lock directory used by withLock; lives alongside the pending documents.
const lockPath = (workOrderId) => path.join(workOrdersPendingDir, `${workOrderId}.lock`);
|
||||
|
||||
/**
 * Runs `callback` while holding an exclusive, directory-based lock for the
 * given work order. `fs.mkdir` without `recursive` fails with EEXIST when
 * the directory already exists, which makes the creation an atomic
 * test-and-set. Returns the callback's result, or null when the lock is
 * already held (the callback is then NOT invoked — callers treat null as
 * "lost the race").
 * NOTE(review): the lock has no owner or expiry, so a crash between mkdir
 * and the finally-cleanup leaves it held until removed manually — confirm
 * this is operationally acceptable.
 */
async function withLock(workOrderId, callback) {
  const targetPath = lockPath(workOrderId);
  // The parent must exist before the lock directory can be created.
  await ensureDir(workOrdersPendingDir);

  try {
    await fs.mkdir(targetPath);
  } catch (error) {
    if (error.code === 'EEXIST') {
      // Contention: another worker holds the lock; signal "not acquired".
      return null;
    }

    throw error;
  }

  try {
    return await callback();
  } finally {
    // Always release, even when the callback throws.
    await fs.rm(targetPath, { recursive: true, force: true });
  }
}
|
||||
|
||||
/**
 * Creates a new work order document in the pending queue and returns it.
 * Status starts as 'pending'; started_at/finished_at/result are filled in
 * later by the claim/finish lifecycle.
 */
async function createWorkOrder({ type, target, desired_state, request_id, correlation_id, metadata }) {
  const createdAt = new Date().toISOString();

  const workOrder = {
    id: randomUUID(),
    resource_type: 'work_order',
    schema_version: 'v1',
    type,
    target,
    desired_state: desired_state || {},
    status: 'pending',
    result: null,
    request_id: request_id || null,
    correlation_id: correlation_id || null,
    created_at: createdAt,
    started_at: null,
    finished_at: null,
    metadata: metadata || {},
  };

  await writeJson(pendingPath(workOrder.id), workOrder);
  return workOrder;
}
|
||||
|
||||
/**
 * Finds a work order by id in any lifecycle state, checking pending, then
 * running, then finished. Returns null when it exists nowhere.
 */
async function getWorkOrder(workOrderId) {
  const fromPending = await readJsonIfExists(pendingPath(workOrderId));
  if (fromPending) {
    return fromPending;
  }

  const fromRunning = await readJsonIfExists(runningPath(workOrderId));
  if (fromRunning) {
    return fromRunning;
  }

  return readJsonIfExists(finishedPath(workOrderId));
}

/**
 * Lists every work order across all three state directories.
 */
async function listWorkOrders() {
  const files = [];

  for (const dir of [workOrdersPendingDir, workOrdersRunningDir, workOrdersFinishedDir]) {
    files.push(...(await listJsonFiles(dir)));
  }

  return Promise.all(files.map((filePath) => readJson(filePath)));
}
|
||||
|
||||
/**
 * Claims the next pending work order targeted at `nodeId` and moves it to
 * the running state, or returns null when nothing is claimable.
 *
 * Claim protocol (order matters): scan the pending directory, pre-filter by
 * target node without a lock, then take the per-order lock and re-check the
 * file still exists and is still a pending order for this node (another
 * worker may have claimed it between the scan and the lock). Only then is
 * the running document written and the pending file removed — in that
 * order, so a crash in between leaves both copies rather than neither.
 */
async function claimNextWorkOrder(nodeId, context) {
  const files = await listJsonFiles(workOrdersPendingDir);

  for (const filePath of files) {
    const current = await readJson(filePath);

    // Cheap unlocked pre-filter; re-validated under the lock below.
    if (!current.target || current.target.node_id !== nodeId) {
      continue;
    }

    const claimed = await withLock(current.id, async () => {
      // The file may have been claimed and removed while we waited.
      if (!(await fileExists(filePath))) {
        return null;
      }

      const latest = await readJson(filePath);

      // Re-check target and status under the lock to avoid double claims.
      if (!latest.target || latest.target.node_id !== nodeId || latest.status !== 'pending') {
        return null;
      }

      const running = {
        ...latest,
        status: 'running',
        started_at: new Date().toISOString(),
        request_id: context.request_id,
        correlation_id: context.correlation_id,
      };

      // Write the running copy before deleting the pending one so the
      // order is never lost mid-claim.
      await writeJson(runningPath(latest.id), running);
      await fs.rm(filePath, { force: true });
      return running;
    });

    // withLock returns null both on contention and on a failed re-check;
    // either way, move on to the next candidate.
    if (claimed) {
      return claimed;
    }
  }

  return null;
}
|
||||
|
||||
/**
 * Moves a running work order to the finished state, recording its status
 * and result. Idempotent: when the order is already finished, the existing
 * finished document is returned unchanged. Returns null when the order is
 * not currently running (e.g. never claimed, or unknown id).
 * NOTE(review): null is also returned when the lock is contended (withLock
 * declines to run the callback) — callers cannot distinguish that from
 * "not running"; confirm retries handle this.
 */
async function finishWorkOrder(workOrderId, status, result) {
  const existingFinished = await readJsonIfExists(finishedPath(workOrderId));

  if (existingFinished) {
    return existingFinished;
  }

  const activePath = runningPath(workOrderId);

  if (!(await fileExists(activePath))) {
    return null;
  }

  return withLock(workOrderId, async () => {
    // Re-read under the lock; a concurrent finisher may have moved it.
    const running = await readJsonIfExists(activePath);

    if (!running) {
      return readJsonIfExists(finishedPath(workOrderId));
    }

    const finished = {
      ...running,
      status,
      result,
      finished_at: new Date().toISOString(),
    };

    // Write the finished copy before removing the running one so the order
    // is never lost mid-transition.
    await writeJson(finishedPath(workOrderId), finished);
    await fs.rm(activePath, { force: true });
    return finished;
  });
}
|
||||
|
||||
// Work-order lifecycle API: create -> claim (per node) -> finish.
module.exports = {
  createWorkOrder,
  getWorkOrder,
  listWorkOrders,
  claimNextWorkOrder,
  finishWorkOrder,
};
|
||||
|
||||
Reference in New Issue
Block a user