commit 3398982ca8
2026-05-10 19:12:02 +02:00
19 changed files with 2152 additions and 0 deletions

7
.dockerignore Normal file

@@ -0,0 +1,7 @@
node_modules
npm-debug.log*
.env
.git
.idea
coverage
dist

5
.env.example Normal file

@@ -0,0 +1,5 @@
PORT=3030
API_HOST_PORT=8030
MONGODB_URI=mongodb://root:change-me@37.60.245.70:27017
MONGODB_DB=duneawa
MAX_PAGES=

9
.gitignore vendored Normal file

@@ -0,0 +1,9 @@
node_modules/
npm-debug.log*
.env
.env.*
!.env.example
coverage/
dist/
.DS_Store
.idea

22
Dockerfile Normal file

@@ -0,0 +1,22 @@
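# Stage 1: install production-only dependencies using the -dev image, which ships npm.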
FROM cgr.dev/chainguard/node:latest-dev AS dependencies
WORKDIR /app
ENV NODE_ENV=production
COPY package*.json ./
RUN npm ci --omit=dev
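# Stage 2: minimal runtime image without npm or a shell; copy in only deps and sources.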
FROM cgr.dev/chainguard/node:latest AS runtime
WORKDIR /app
ENV NODE_ENV=production
COPY --from=dependencies /app/node_modules ./node_modules
COPY src ./src
COPY scripts ./scripts
EXPOSE 3030
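# The Chainguard node image's entrypoint is node, so CMD supplies only the script path.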
CMD ["src/server.js"]

54
README.md Normal file

@@ -0,0 +1,54 @@
# Dune API
Node.js API for Dune: Awakening Questlog data. It imports items, skills, recipes, placeables, and NPCs in English and German into MongoDB, then exposes them through REST endpoints and Swagger UI.
## Quick Start
```powershell
npm install
npm run import:smoke
npm start
```
Open Swagger UI at `http://localhost:3030/docs`.
## Docker
```powershell
docker compose up --build
```
The API listens on `http://localhost:8030` by default when run through Docker Compose. Set `API_HOST_PORT` to a different value (for example `3031`) if port 8030 is already in use on your machine.
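For a one-off override without editing `.env`, Compose also picks the variable up from the shell; a PowerShell example (any free port works in place of `3031`):
```powershell
$env:API_HOST_PORT = "3031"
docker compose up --build
```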
## Import All Data
```powershell
npm run import
```
You can also call the script directly with filters:
```powershell
node scripts/import.js --datasets=items,skills --languages=en,de --max-pages=1
```
Alternatively, call `POST /api/import` from Swagger UI with an empty JSON body:
```json
{}
```
The importer pulls every page for all configured datasets and both supported languages by default.
## Endpoints
- `GET /health`
- `GET /docs`
- `GET /openapi.json`
- `GET /api/{dataset}`
- `GET /api/{dataset}/{id}`
- `GET /api/search?q=...`
- `POST /api/import`
- `GET /api/import/status`
Datasets: `items`, `skills`, `recipes`, `placeables`, `npcs`.
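As a quick sanity check after an import, the list and search endpoints can be exercised from PowerShell; `stillsuit` below is just an illustrative search term:
```powershell
Invoke-RestMethod "http://localhost:3030/api/items?language=en&limit=5"
Invoke-RestMethod "http://localhost:3030/api/search?q=stillsuit&datasets=items,recipes"
```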

12
docker-compose.yml Normal file

@@ -0,0 +1,12 @@
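# Compose substitutes ${VAR} values from the shell environment or a .env file
# next to this file; see .env.example for the available settings.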
services:
api:
build: .
container_name: dune-api
restart: unless-stopped
ports:
- "${API_HOST_PORT:-8030}:${PORT:-3030}"
environment:
PORT: ${PORT:-3030}
MONGODB_URI: ${MONGODB_URI}
MONGODB_DB: ${MONGODB_DB:-duneawa}
MAX_PAGES: ${MAX_PAGES:-}

1063
package-lock.json generated Normal file

File diff suppressed because it is too large

28
package.json Normal file

@@ -0,0 +1,28 @@
{
"name": "dune-api",
"version": "1.0.0",
"description": "Node.js API for Dune: Awakening Questlog data with MongoDB and Swagger UI.",
"main": "src/server.js",
"type": "commonjs",
"scripts": {
"start": "node src/server.js",
"dev": "node --watch src/server.js",
"import": "node scripts/import.js",
"import:smoke": "node scripts/import.js --max-pages=1",
"check": "node --check src/server.js && node --check src/app.js && node --check src/config.js && node --check src/db/client.js && node --check src/db/indexes.js && node --check src/datasets.js && node --check src/importer/importer.js && node --check src/importer/questlogClient.js && node --check src/routes/api.js && node --check src/swagger/openapi.js && node --check scripts/import.js"
},
"keywords": [
"dune-awakening",
"api",
"mongodb",
"swagger"
],
"license": "UNLICENSED",
"dependencies": {
"cors": "^2.8.5",
"dotenv": "^16.4.7",
"express": "^4.21.2",
"mongodb": "^6.12.0",
"swagger-ui-express": "^5.0.1"
}
}

43
scripts/import.js Normal file

@@ -0,0 +1,43 @@
const { closeMongo } = require("../src/db/client");
const { runImport } = require("../src/importer/importer");
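// CLI entry point for the importer. Optional flags:
// --datasets=a,b --languages=en,de --max-pages=N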
function parseArgs(argv) {
const options = {};
for (const arg of argv) {
if (arg.startsWith("--datasets=")) {
options.datasets = arg
.slice("--datasets=".length)
.split(",")
.filter(Boolean);
}
if (arg.startsWith("--languages=")) {
options.languages = arg
.slice("--languages=".length)
.split(",")
.filter(Boolean);
}
if (arg.startsWith("--max-pages=")) {
options.maxPages = Number(arg.slice("--max-pages=".length));
}
}
return options;
}
async function main() {
const options = parseArgs(process.argv.slice(2));
const status = await runImport(options);
console.log(JSON.stringify(status, null, 2));
}
main()
.catch((error) => {
console.error(error);
process.exitCode = 1;
})
.finally(async () => {
await closeMongo();
});

50
src/app.js Normal file

@@ -0,0 +1,50 @@
const cors = require("cors");
const express = require("express");
const swaggerUi = require("swagger-ui-express");
const { pingMongo } = require("./db/client");
const { router: apiRouter } = require("./routes/api");
const { openApiDocument } = require("./swagger/openapi");
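// Assembles the Express app. Registration order matters: CORS and body parsing
// first, then docs/health/API routes, then the 404 catch-all and error handler.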
function createApp() {
const app = express();
app.use(cors());
app.use(express.json({ limit: "2mb" }));
app.get("/", (request, response) => {
response.redirect("/docs");
});
app.get("/health", async (request, response) => {
try {
await pingMongo();
response.json({ ok: true, mongo: "connected" });
} catch (error) {
response
.status(503)
.json({ ok: false, mongo: "unavailable", error: error.message });
}
});
app.get("/openapi.json", (request, response) => {
response.json(openApiDocument);
});
app.use("/docs", swaggerUi.serve, swaggerUi.setup(openApiDocument));
app.use("/api", apiRouter);
app.use((request, response) => {
response.status(404).json({ error: "Not found" });
});
app.use((error, request, response, next) => {
const status = error.status || 500;
response
.status(status)
.json({ error: error.message || "Internal server error" });
});
return app;
}
module.exports = { createApp };

35
src/config.js Normal file

@@ -0,0 +1,35 @@
const dotenv = require("dotenv");
dotenv.config();
function parseOptionalPositiveInteger(value, name) {
if (value === undefined || value === null || value === "") {
return undefined;
}
const parsed = Number(value);
if (!Number.isInteger(parsed) || parsed <= 0) {
throw new Error(`${name} must be a positive integer when provided`);
}
return parsed;
}
const config = {
port: parseOptionalPositiveInteger(process.env.PORT, "PORT") || 3030,
mongodb: {
// Do not hard-code credentials here; supply MONGODB_URI via the environment
// (see .env.example). The fallback targets a local, unauthenticated instance.
uri: process.env.MONGODB_URI || "mongodb://localhost:27017",
dbName: process.env.MONGODB_DB || "duneawa",
},
importer: {
maxPages: parseOptionalPositiveInteger(process.env.MAX_PAGES, "MAX_PAGES"),
},
questlog: {
baseUrl:
process.env.QUESTLOG_BASE_URL ||
"https://questlog.gg/dune-awakening/api/trpc",
},
};
module.exports = { config, parseOptionalPositiveInteger };

90
src/datasets.js Normal file

@@ -0,0 +1,90 @@
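// Registry of supported datasets: each entry maps a URL key to its MongoDB
// collection and the Questlog tRPC method that serves it.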
const DATASETS = {
items: {
key: "items",
collection: "items",
method: "database.getItems",
singular: "item",
},
skills: {
key: "skills",
collection: "skills",
method: "database.getSkills",
singular: "skill",
},
recipes: {
key: "recipes",
collection: "recipes",
method: "database.getRecipes",
singular: "recipe",
},
placeables: {
key: "placeables",
collection: "placeables",
method: "database.getPlaceables",
singular: "placeable",
},
npcs: {
key: "npcs",
collection: "npcs",
method: "database.getNpcs",
singular: "npc",
},
};
const LANGUAGES = ["en", "de"];
function getDataset(key) {
return DATASETS[key];
}
function assertDataset(key) {
const dataset = getDataset(key);
if (!dataset) {
const allowed = Object.keys(DATASETS).join(", ");
const error = new Error(
`Unknown dataset "${key}". Allowed datasets: ${allowed}`,
);
error.status = 400;
throw error;
}
return dataset;
}
function normalizeDatasetList(values) {
if (!values || values.length === 0) {
return Object.keys(DATASETS);
}
const list = Array.isArray(values) ? values : String(values).split(",");
return list.map((value) => assertDataset(String(value).trim()).key);
}
function normalizeLanguageList(values) {
if (!values || values.length === 0) {
return LANGUAGES;
}
const list = Array.isArray(values) ? values : String(values).split(",");
const normalized = list.map((value) => String(value).trim().toLowerCase());
const invalid = normalized.filter(
(language) => !LANGUAGES.includes(language),
);
if (invalid.length > 0) {
const error = new Error(
`Unsupported language(s): ${invalid.join(", ")}. Allowed languages: ${LANGUAGES.join(", ")}`,
);
error.status = 400;
throw error;
}
return normalized;
}
module.exports = {
DATASETS,
LANGUAGES,
assertDataset,
normalizeDatasetList,
normalizeLanguageList,
};

52
src/db/client.js Normal file

@@ -0,0 +1,52 @@
const { MongoClient } = require("mongodb");
const { config } = require("../config");
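// Module-level singleton: the handles below are cached so repeated
// connectToMongo() calls reuse one connection pool.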
let client;
let db;
async function connectToMongo() {
if (db) {
return db;
}
client = new MongoClient(config.mongodb.uri, {
serverSelectionTimeoutMS: 5000,
connectTimeoutMS: 10000,
socketTimeoutMS: 30000,
maxIdleTimeMS: 300000,
});
await client.connect();
db = client.db(config.mongodb.dbName);
return db;
}
function getDb() {
if (!db) {
throw new Error("MongoDB is not connected yet");
}
return db;
}
async function closeMongo() {
if (client) {
await client.close();
}
client = undefined;
db = undefined;
}
async function pingMongo() {
const database = await connectToMongo();
await database.command({ ping: 1 });
return true;
}
module.exports = {
closeMongo,
connectToMongo,
getDb,
pingMongo,
};

18
src/db/indexes.js Normal file

@@ -0,0 +1,18 @@
const { DATASETS } = require("../datasets");
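// The unique (language, sourceId) index makes importer upserts idempotent;
// the text index on name/searchText backs GET /api/search.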
async function ensureIndexes(db) {
await Promise.all(
Object.values(DATASETS).map(async (dataset) => {
const collection = db.collection(dataset.collection);
await collection.createIndex(
{ language: 1, sourceId: 1 },
{ unique: true },
);
await collection.createIndex({ language: 1, name: 1 });
await collection.createIndex({ dataset: 1, language: 1 });
await collection.createIndex({ name: "text", searchText: "text" });
}),
);
}
module.exports = { ensureIndexes };

202
src/importer/importer.js Normal file

@@ -0,0 +1,202 @@
const crypto = require("crypto");
const { config } = require("../config");
const {
DATASETS,
normalizeDatasetList,
normalizeLanguageList,
} = require("../datasets");
const { connectToMongo } = require("../db/client");
const { ensureIndexes } = require("../db/indexes");
const { fetchQuestlogPage } = require("./questlogClient");
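// In-memory status shared across requests; reset when a run starts and
// exposed via GET /api/import/status.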
const importStatus = {
running: false,
startedAt: null,
finishedAt: null,
error: null,
current: null,
totals: {},
};
function stableJsonHash(value) {
return crypto.createHash("sha1").update(JSON.stringify(value)).digest("hex");
}
function extractSourceId(record) {
return String(
record?.compoundId ||
record?.id ||
record?._id ||
record?.slug ||
record?.name ||
record?.title ||
stableJsonHash(record),
);
}
function extractName(record) {
const value =
record?.name ||
record?.title ||
record?.displayName ||
record?.id ||
record?.compoundId ||
"";
return String(value).replace(/\s+/g, " ").trim();
}
function buildStoredDocument(dataset, language, record) {
const sourceId = extractSourceId(record);
const name = extractName(record);
const rawText = JSON.stringify(record);
return {
dataset: dataset.key,
language,
source: "questlog.gg",
sourceMethod: dataset.method,
sourceId,
name,
searchText: `${name} ${rawText}`,
raw: record,
importedAt: new Date(),
};
}
async function upsertRecords(db, dataset, language, records) {
if (records.length === 0) {
return { matched: 0, modified: 0, upserted: 0 };
}
const collection = db.collection(dataset.collection);
const operations = records.map((record) => {
const document = buildStoredDocument(dataset, language, record);
return {
updateOne: {
filter: { language, sourceId: document.sourceId },
update: { $set: document },
upsert: true,
},
};
});
const result = await collection.bulkWrite(operations, { ordered: false });
return {
matched: result.matchedCount || 0,
modified: result.modifiedCount || 0,
upserted: result.upsertedCount || 0,
};
}
function resetStatus() {
importStatus.running = true;
importStatus.startedAt = new Date().toISOString();
importStatus.finishedAt = null;
importStatus.error = null;
importStatus.current = null;
importStatus.totals = {};
}
function recordTotals(datasetKey, language, pageResult, recordsCount) {
const key = `${datasetKey}:${language}`;
const existing = importStatus.totals[key] || {
pages: 0,
records: 0,
matched: 0,
modified: 0,
upserted: 0,
};
existing.pages += 1;
existing.records += recordsCount;
existing.matched += pageResult.matched;
existing.modified += pageResult.modified;
existing.upserted += pageResult.upserted;
importStatus.totals[key] = existing;
}
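// Fetches pages sequentially until an empty page, the reported pageCount,
// or the configured maxPages limit ends the loop.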
async function importDatasetLanguage(db, dataset, language, maxPages) {
let page = 1;
while (true) {
importStatus.current = { dataset: dataset.key, language, page };
const payload = await fetchQuestlogPage(dataset, language, page);
if (payload.records.length === 0) {
break;
}
const pageResult = await upsertRecords(
db,
dataset,
language,
payload.records,
);
recordTotals(dataset.key, language, pageResult, payload.records.length);
const reachedKnownEnd = payload.pageCount && page >= payload.pageCount;
const reachedConfiguredLimit = maxPages && page >= maxPages;
if (reachedKnownEnd || reachedConfiguredLimit) {
break;
}
page += 1;
}
}
async function runImport(options = {}) {
if (importStatus.running) {
const error = new Error("An import is already running");
error.status = 409;
throw error;
}
const datasetKeys = normalizeDatasetList(options.datasets);
const languages = normalizeLanguageList(options.languages);
const maxPages = options.maxPages || config.importer.maxPages;
resetStatus();
try {
const db = await connectToMongo();
await ensureIndexes(db);
for (const datasetKey of datasetKeys) {
const dataset = DATASETS[datasetKey];
for (const language of languages) {
await importDatasetLanguage(db, dataset, language, maxPages);
}
}
importStatus.finishedAt = new Date().toISOString();
importStatus.running = false;
importStatus.current = null;
return getImportStatus();
} catch (error) {
importStatus.finishedAt = new Date().toISOString();
importStatus.running = false;
importStatus.error = error.message;
throw error;
}
}
function getImportStatus() {
return {
running: importStatus.running,
startedAt: importStatus.startedAt,
finishedAt: importStatus.finishedAt,
error: importStatus.error,
current: importStatus.current,
totals: importStatus.totals,
};
}
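// Fire-and-forget wrapper used by POST /api/import: rejections are swallowed
// here because failures are already recorded in importStatus.error.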
function startImport(options = {}) {
const promise = runImport(options).catch(() => undefined);
return promise;
}
module.exports = {
getImportStatus,
runImport,
startImport,
};

95
src/importer/questlogClient.js Normal file

@@ -0,0 +1,95 @@
const { config } = require("../config");
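// questlog.gg exposes the data through a tRPC endpoint; each query method
// takes a single URL-encoded JSON "input" parameter (language, page, filters).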
function buildQuestlogUrl(method, language, page) {
const input = JSON.stringify({
language,
page,
mainCategory: "",
subCategory: "",
});
return `${config.questlog.baseUrl}/${method}?input=${encodeURIComponent(input)}`;
}
function findFirstArray(value) {
if (Array.isArray(value)) {
return value;
}
if (!value || typeof value !== "object") {
return undefined;
}
const preferredKeys = [
"pageData",
"items",
"data",
"results",
"records",
"rows",
];
for (const key of preferredKeys) {
if (Array.isArray(value[key])) {
return value[key];
}
}
for (const key of Object.keys(value)) {
const nested = findFirstArray(value[key]);
if (nested) {
return nested;
}
}
return undefined;
}
function extractPagePayload(payload) {
// tRPC may wrap the payload as result.data.json (superjson) or plain
// result.data; the deeper path is checked first, otherwise the || chain
// could never reach it.
const data =
payload?.result?.data?.json ||
payload?.result?.data ||
payload?.data ||
payload;
const records = findFirstArray(data);
const pageCount =
Number(data?.pageCount || payload?.result?.data?.pageCount || 0) ||
undefined;
const currentPage =
Number(data?.currentPage || payload?.result?.data?.currentPage || 0) ||
undefined;
if (!records) {
const topLevelKeys =
payload && typeof payload === "object" ? Object.keys(payload) : [];
throw new Error(
`Could not find records array in Questlog response. Top-level keys: ${topLevelKeys.join(", ")}`,
);
}
return { records, pageCount, currentPage };
}
async function fetchQuestlogPage(dataset, language, page) {
const url = buildQuestlogUrl(dataset.method, language, page);
const response = await fetch(url, {
headers: {
accept: "application/json",
"user-agent": "dune-api-importer/1.0",
},
});
if (!response.ok) {
throw new Error(
`Questlog request failed for ${dataset.key}/${language}/page ${page}: ${response.status} ${response.statusText}`,
);
}
const payload = await response.json();
return extractPagePayload(payload);
}
module.exports = {
buildQuestlogUrl,
extractPagePayload,
fetchQuestlogPage,
};

170
src/routes/api.js Normal file

@@ -0,0 +1,170 @@
const express = require("express");
const { ObjectId } = require("mongodb");
const {
assertDataset,
DATASETS,
normalizeDatasetList,
normalizeLanguageList,
} = require("../datasets");
const { getDb } = require("../db/client");
const { getImportStatus, startImport } = require("../importer/importer");
const router = express.Router();
function parsePage(value) {
const page = Number(value || 1);
return Number.isInteger(page) && page > 0 ? page : 1;
}
function parseLimit(value, fallback, max) {
const limit = Number(value || fallback);
if (!Number.isInteger(limit) || limit <= 0) {
return fallback;
}
return Math.min(limit, max);
}
function parseMaxPages(value) {
if (value === undefined || value === null || value === "") {
return undefined;
}
const maxPages = Number(value);
if (!Number.isInteger(maxPages) || maxPages <= 0) {
const error = new Error("maxPages must be a positive integer");
error.status = 400;
throw error;
}
return maxPages;
}
function buildLanguageFilter(language) {
if (!language) {
return {};
}
const [normalized] = normalizeLanguageList([language]);
return { language: normalized };
}
function buildSearchFilter(query) {
if (!query) {
return {};
}
return { $text: { $search: String(query) } };
}
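// Fixed paths (/datasets, /import, /search) must be registered before the
// "/:dataset" parameter routes, or Express would match them as dataset keys.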
router.get("/datasets", (request, response) => {
response.json({ datasets: Object.values(DATASETS) });
});
router.get("/import/status", (request, response) => {
response.json(getImportStatus());
});
router.post("/import", (request, response, next) => {
try {
const body = request.body || {};
const options = {
datasets: normalizeDatasetList(body.datasets),
languages: normalizeLanguageList(body.languages),
maxPages: parseMaxPages(body.maxPages),
};
startImport(options);
response
.status(202)
.json({ message: "Import started", status: getImportStatus() });
} catch (error) {
next(error);
}
});
router.get("/search", async (request, response, next) => {
try {
const query = String(request.query.q || "").trim();
if (!query) {
const error = new Error("q is required");
error.status = 400;
throw error;
}
const datasetKeys = normalizeDatasetList(request.query.datasets);
const languageFilter = buildLanguageFilter(request.query.language);
const limit = parseLimit(request.query.limit, 10, 50);
const db = getDb();
const results = {};
for (const datasetKey of datasetKeys) {
const dataset = DATASETS[datasetKey];
results[datasetKey] = await db
.collection(dataset.collection)
.find({ ...languageFilter, ...buildSearchFilter(query) })
.project({ searchText: 0 })
.limit(limit)
.toArray();
}
response.json({ query, results });
} catch (error) {
next(error);
}
});
router.get("/:dataset", async (request, response, next) => {
try {
const dataset = assertDataset(request.params.dataset);
const page = parsePage(request.query.page);
const limit = parseLimit(request.query.limit, 25, 100);
const skip = (page - 1) * limit;
const languageFilter = buildLanguageFilter(request.query.language);
const searchFilter = buildSearchFilter(request.query.q);
const filter = { ...languageFilter, ...searchFilter };
const collection = getDb().collection(dataset.collection);
const [items, total] = await Promise.all([
collection
.find(filter)
.project({ searchText: 0 })
.skip(skip)
.limit(limit)
.toArray(),
collection.countDocuments(filter),
]);
response.json({ dataset: dataset.key, page, limit, total, items });
} catch (error) {
next(error);
}
});
router.get("/:dataset/:id", async (request, response, next) => {
try {
const dataset = assertDataset(request.params.dataset);
const id = request.params.id;
const languageFilter = buildLanguageFilter(request.query.language);
const idFilter = ObjectId.isValid(id)
? { $or: [{ _id: new ObjectId(id) }, { sourceId: id }] }
: { sourceId: id };
const document = await getDb()
.collection(dataset.collection)
.findOne(
{ ...languageFilter, ...idFilter },
{ projection: { searchText: 0 } },
);
if (!document) {
response.status(404).json({ error: "Not found" });
return;
}
response.json(document);
} catch (error) {
next(error);
}
});
module.exports = { router };

31
src/server.js Normal file

@@ -0,0 +1,31 @@
const { createApp } = require("./app");
const { config } = require("./config");
const { closeMongo, connectToMongo } = require("./db/client");
const { ensureIndexes } = require("./db/indexes");
async function start() {
const db = await connectToMongo();
await ensureIndexes(db);
const app = createApp();
const server = app.listen(config.port, () => {
console.log(`Dune API listening on http://localhost:${config.port}`);
console.log(`Swagger UI available at http://localhost:${config.port}/docs`);
});
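// Graceful shutdown: stop accepting new connections, then close the Mongo
// client before exiting.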
async function shutdown(signal) {
console.log(`Received ${signal}, shutting down`);
server.close(async () => {
await closeMongo();
process.exit(0);
});
}
process.on("SIGINT", shutdown);
process.on("SIGTERM", shutdown);
}
start().catch((error) => {
console.error(error);
process.exit(1);
});

166
src/swagger/openapi.js Normal file

@@ -0,0 +1,166 @@
const { DATASETS, LANGUAGES } = require("../datasets");
const datasetKeys = Object.keys(DATASETS);
const openApiDocument = {
openapi: "3.0.3",
info: {
title: "Dune Awakening API",
version: "1.0.0",
description: "API for Dune: Awakening Questlog data stored in MongoDB.",
},
servers: [{ url: "/" }],
tags: [{ name: "Health" }, { name: "Data" }, { name: "Import" }],
paths: {
"/health": {
get: {
tags: ["Health"],
summary: "Check API and MongoDB status",
responses: {
200: { description: "Service is healthy" },
503: { description: "MongoDB is unavailable" },
},
},
},
"/api/datasets": {
get: {
tags: ["Data"],
summary: "List supported datasets",
responses: { 200: { description: "Supported datasets" } },
},
},
"/api/{dataset}": {
get: {
tags: ["Data"],
summary: "List records for a dataset",
parameters: [
{
name: "dataset",
in: "path",
required: true,
schema: { type: "string", enum: datasetKeys },
},
{
name: "language",
in: "query",
schema: { type: "string", enum: LANGUAGES },
},
{ name: "q", in: "query", schema: { type: "string" } },
{
name: "page",
in: "query",
schema: { type: "integer", minimum: 1, default: 1 },
},
{
name: "limit",
in: "query",
schema: { type: "integer", minimum: 1, maximum: 100, default: 25 },
},
],
responses: { 200: { description: "Paged dataset records" } },
},
},
"/api/{dataset}/{id}": {
get: {
tags: ["Data"],
summary: "Get one record by MongoDB id or Questlog source id",
parameters: [
{
name: "dataset",
in: "path",
required: true,
schema: { type: "string", enum: datasetKeys },
},
{
name: "id",
in: "path",
required: true,
schema: { type: "string" },
},
{
name: "language",
in: "query",
schema: { type: "string", enum: LANGUAGES },
},
],
responses: {
200: { description: "Record" },
404: { description: "Record was not found" },
},
},
},
"/api/search": {
get: {
tags: ["Data"],
summary: "Search across datasets",
parameters: [
{
name: "q",
in: "query",
required: true,
schema: { type: "string" },
},
{
name: "language",
in: "query",
schema: { type: "string", enum: LANGUAGES },
},
{
name: "datasets",
in: "query",
schema: { type: "string", example: "items,skills" },
},
{
name: "limit",
in: "query",
schema: { type: "integer", minimum: 1, maximum: 50, default: 10 },
},
],
responses: {
200: { description: "Search results grouped by dataset" },
},
},
},
"/api/import": {
post: {
tags: ["Import"],
summary: "Start Questlog import",
requestBody: {
required: false,
content: {
"application/json": {
schema: {
type: "object",
properties: {
datasets: {
type: "array",
items: { type: "string", enum: datasetKeys },
},
languages: {
type: "array",
items: { type: "string", enum: LANGUAGES },
},
maxPages: { type: "integer", minimum: 1 },
},
},
example: { datasets: ["items"], languages: ["en"], maxPages: 1 },
},
},
},
responses: {
202: { description: "Import started" },
409: { description: "Import is already running" },
},
},
},
"/api/import/status": {
get: {
tags: ["Import"],
summary: "Get current or latest import status",
responses: { 200: { description: "Import status" } },
},
},
},
};
module.exports = { openApiDocument };