Some checks are pending
Deploy Frontend / build-and-deploy (push) Waiting to run
Infrastructure changes to enable automatic frontend deployment when schemas change: - Add .forgejo/workflows/deploy-frontend.yml workflow triggered by: - Changes to frontend/** or schemas/20251121/linkml/** - Manual workflow dispatch - Rewrite generate-schema-manifest.cjs to properly scan all schema directories - Recursively scans classes, enums, slots, modules directories - Uses singular category names (class, enum, slot) matching TypeScript types - Includes all 4 main schemas at root level - Skips archive directories and backup files - Update schema-loader.ts to match new manifest format - Add SchemaCategory interface - Update SchemaManifest to use categories as array - Add flattenCategories() helper function - Add getSchemaCategories() and getSchemaCategoriesSync() functions The workflow builds frontend with updated manifest and deploys to bronhouder.nl
193 lines
4.8 KiB
JavaScript
193 lines
4.8 KiB
JavaScript
#!/usr/bin/env node
|
|
/**
|
|
* LinkML Schema Manifest Generator
|
|
*
|
|
* Generates a manifest.json file for the frontend to dynamically load
|
|
* schema files from the public/schemas directory.
|
|
*
|
|
* This script is run as part of the build process:
|
|
* pnpm run generate-manifest
|
|
*
|
|
* It scans the synced schema files (after sync-schemas) and creates
|
|
* a structured manifest that the LinkML viewer can consume.
|
|
*/
|
|
|
|
const fs = require('fs');
const path = require('path');

// Root of the synced LinkML schema tree (populated by the sync-schemas step).
const SCHEMAS_DIR = path.join(__dirname, '../public/schemas/20251121/linkml');
// The manifest is written alongside the schema files it describes.
const OUTPUT_FILE = path.join(SCHEMAS_DIR, 'manifest.json');
|
|
|
|
// Category configuration
|
|
// Category configuration.
//
// Each entry describes one manifest category:
//   name            - singular category id, must match the frontend's
//                     TypeScript types ('main', 'class', 'enum', ...)
//   displayName     - human-readable label shown in the viewer
//   scan            - whether to recursively scan a subdirectory
//   scanPath        - directory (relative to SCHEMAS_DIR) to scan
//   excludeSubdirs  - immediate subdirectory names to skip while scanning
//
// Frozen (array and entries) so shared config cannot be mutated at runtime.
const CATEGORIES = Object.freeze([
  Object.freeze({
    name: 'main',
    displayName: 'Main Schemas',
    scan: false, // Main schemas live at the root level (see scanMainSchemas)
    scanPath: null
  }),
  Object.freeze({
    name: 'class',
    displayName: 'Classes',
    scan: true,
    scanPath: 'modules/classes'
  }),
  Object.freeze({
    name: 'enum',
    displayName: 'Enumerations',
    scan: true,
    scanPath: 'modules/enums'
  }),
  Object.freeze({
    name: 'slot',
    displayName: 'Slots',
    scan: true,
    scanPath: 'modules/slots'
  }),
  Object.freeze({
    name: 'module',
    displayName: 'Modules',
    scan: true,
    scanPath: 'modules',
    // These are covered by their own categories above (archive is always skipped).
    excludeSubdirs: ['classes', 'enums', 'slots', 'archive']
  })
]);
|
|
|
|
/**
|
|
* Recursively scan a directory for YAML files
|
|
*/
|
|
/**
 * Recursively scan a directory for YAML schema files.
 *
 * Skips `archive` directories, `archive_*` entries, and any file whose
 * name contains `.bak`.
 *
 * @param {string} dir - Absolute path of the directory to scan.
 * @param {string} category - Category name stamped onto each file entry.
 * @param {string} [relativePath] - Path prefix (relative to the schema root)
 *   prepended to each entry's `path`.
 * @param {string[]} [excludeSubdirs] - Immediate subdirectory names to skip;
 *   deliberately not propagated into recursive calls.
 * @returns {{name: string, path: string, category: string}[]} Entries sorted
 *   by `name` (locale-aware).
 */
function scanDirectory(dir, category, relativePath = '', excludeSubdirs = []) {
  const files = [];

  if (!fs.existsSync(dir)) {
    console.warn(`Directory not found: ${dir}`);
    return files;
  }

  const entries = fs.readdirSync(dir, { withFileTypes: true });

  for (const entry of entries) {
    // Skip archive directories and archive_* backup copies.
    if (entry.name === 'archive' || entry.name.startsWith('archive_')) {
      continue;
    }

    if (entry.isDirectory()) {
      // Skip excluded subdirectories (only at this level).
      if (excludeSubdirs.includes(entry.name)) {
        continue;
      }
      // Recurse into subdirectories.
      const subPath = relativePath ? `${relativePath}/${entry.name}` : entry.name;
      files.push(...scanDirectory(
        path.join(dir, entry.name),
        category,
        subPath,
        [] // Don't propagate excludeSubdirs to recursive calls
      ));
    } else if (entry.isFile() && entry.name.endsWith('.yaml') && !entry.name.includes('.bak')) {
      // Strip only the trailing '.yaml' extension. The previous
      // `replace('.yaml', '')` removed the FIRST occurrence, mangling
      // names such as 'foo.yaml.v2.yaml' into 'foo.v2.yaml'.
      const name = path.basename(entry.name, '.yaml');
      const filePath = relativePath ? `${relativePath}/${entry.name}` : entry.name;
      files.push({
        name,
        path: filePath,
        category
      });
    }
  }

  return files.sort((a, b) => a.name.localeCompare(b.name));
}
|
|
|
|
/**
|
|
* Scan root directory for main schema files
|
|
*/
|
|
/**
 * Scan the schema root directory (non-recursively) for main schema files.
 *
 * Picks up `*.yaml` files directly under SCHEMAS_DIR, skipping anything
 * containing `.bak`, and tags them with category `'main'`.
 *
 * @returns {{name: string, path: string, category: string}[]} Entries sorted
 *   by `name` (locale-aware); empty if SCHEMAS_DIR does not exist.
 */
function scanMainSchemas() {
  const files = [];

  if (!fs.existsSync(SCHEMAS_DIR)) {
    console.warn(`Schema directory not found: ${SCHEMAS_DIR}`);
    return files;
  }

  const entries = fs.readdirSync(SCHEMAS_DIR, { withFileTypes: true });

  for (const entry of entries) {
    if (entry.isFile() && entry.name.endsWith('.yaml') && !entry.name.includes('.bak')) {
      // Strip only the trailing '.yaml' extension. The previous
      // `replace('.yaml', '')` removed the FIRST occurrence, mangling
      // names such as 'foo.yaml.v2.yaml'.
      const name = path.basename(entry.name, '.yaml');
      files.push({
        name,
        path: entry.name,
        category: 'main'
      });
    }
  }

  return files.sort((a, b) => a.name.localeCompare(b.name));
}
|
|
|
|
/**
|
|
* Generate the manifest
|
|
*/
|
|
/**
 * Generate the manifest.
 *
 * Resolves the file list for every configured category, drops empty
 * categories, and writes a manifest.json (with per-category counts and a
 * total) into the schema directory. Prints a summary to stdout.
 */
function generateManifest() {
  console.log(`Scanning LinkML schemas in: ${SCHEMAS_DIR}`);

  // Resolve the file list for a single category configuration.
  const resolveFiles = (cfg) => {
    if (cfg.name === 'main') {
      // Main schemas sit at the root level and get special handling.
      return scanMainSchemas();
    }
    if (cfg.scan && cfg.scanPath) {
      return scanDirectory(
        path.join(SCHEMAS_DIR, cfg.scanPath),
        cfg.name,
        cfg.scanPath,
        cfg.excludeSubdirs || []
      );
    }
    return [];
  };

  // Only categories that actually contain files make it into the manifest.
  const categories = CATEGORIES
    .map((cfg) => ({
      name: cfg.name,
      displayName: cfg.displayName,
      files: resolveFiles(cfg)
    }))
    .filter((cat) => cat.files.length > 0);

  const categoryCounts = Object.fromEntries(
    categories.map((cat) => [cat.name, cat.files.length])
  );
  const totalFiles = categories.reduce((sum, cat) => sum + cat.files.length, 0);

  const manifest = {
    generated: new Date().toISOString(),
    schemaRoot: '/schemas/20251121/linkml',
    totalFiles,
    categoryCounts,
    categories
  };

  // Ensure output directory exists
  const outputDir = path.dirname(OUTPUT_FILE);
  if (!fs.existsSync(outputDir)) {
    fs.mkdirSync(outputDir, { recursive: true });
  }

  // Write manifest
  fs.writeFileSync(OUTPUT_FILE, JSON.stringify(manifest, null, 2));

  // Print summary
  console.log(`Generated manifest with ${totalFiles} schema files`);
  for (const cat of categories) {
    console.log(`  - ${cat.displayName}: ${cat.files.length}`);
  }
  console.log(`Output: ${OUTPUT_FILE}`);
}
|
|
|
|
// Run immediately when executed as a script.
generateManifest();