import type { LoaderContext } from '@rspack/core';
import { parse } from 'csv-parse/sync';
import { parseSchema, createValidator, parseValue } from '../index.js';
import type { Schema } from '../types.js';
import * as path from 'path';
import * as fs from 'fs';
/**
 * Options accepted by the CSV loader.
 *
 * The first six options are forwarded to `csv-parse`; the remaining
 * three control TypeScript declaration (.d.ts) generation.
 */
export interface CsvLoaderOptions {
  /** Field delimiter. Loader default: ','. */
  delimiter?: string;
  /** Quote character. Loader default: '"'. */
  quote?: string;
  /** Escape character. Loader default: '\\'. */
  escape?: string;
  /** Strip a leading byte-order mark. Loader default: true. */
  bom?: boolean;
  /** Comment prefix, or `false` to disable comment stripping. Loader default: '#'. */
  comment?: string | false;
  /** Trim whitespace around fields. Loader default: true. */
  trim?: boolean;
  /** Generate TypeScript declaration file (.d.ts). Loader default: true. */
  emitTypes?: boolean;
  /** Output directory for generated type files (relative to output path) */
  typesOutputDir?: string;
  /** Write .d.ts files to disk (useful for dev server) */
  writeToDisk?: boolean;
}
/**
 * Per-column configuration compiled once from the CSV's header row and
 * schema row, then reused for every data row.
 */
interface PropertyConfig {
  /** Property name taken from the header row. */
  name: string;
  // NOTE(review): typed `any`; looks like this should be `Schema` from
  // '../types.js' (it is produced by parseSchema and consumed by
  // schemaToTypeString) — confirm and tighten.
  schema: any;
  /** Returns true when a parsed value conforms to `schema`. */
  validator: (value: unknown) => boolean;
  /** Parses a raw CSV cell string according to `schema`. */
  parser: (valueString: string) => unknown;
}
/**
|
|
|
|
|
* Convert a schema to TypeScript type string
|
|
|
|
|
*/
|
|
|
|
|
function schemaToTypeString(schema: Schema): string {
|
|
|
|
|
switch (schema.type) {
|
|
|
|
|
case 'string':
|
|
|
|
|
return 'string';
|
|
|
|
|
case 'number':
|
|
|
|
|
return 'number';
|
|
|
|
|
case 'boolean':
|
|
|
|
|
return 'boolean';
|
|
|
|
|
case 'array':
|
|
|
|
|
if (schema.element.type === 'tuple') {
|
2026-03-31 16:57:52 +08:00
|
|
|
const tupleElements = schema.element.elements.map((el) => {
|
|
|
|
|
const typeStr = schemaToTypeString(el.schema);
|
|
|
|
|
return el.name ? `${el.name}: ${typeStr}` : typeStr;
|
|
|
|
|
});
|
2026-03-31 15:19:03 +08:00
|
|
|
return `[${tupleElements.join(', ')}]`;
|
|
|
|
|
}
|
|
|
|
|
return `${schemaToTypeString(schema.element)}[]`;
|
|
|
|
|
case 'tuple':
|
2026-03-31 16:57:52 +08:00
|
|
|
const tupleElements = schema.elements.map((el) => {
|
|
|
|
|
const typeStr = schemaToTypeString(el.schema);
|
|
|
|
|
return el.name ? `${el.name}: ${typeStr}` : typeStr;
|
|
|
|
|
});
|
2026-03-31 15:19:03 +08:00
|
|
|
return `[${tupleElements.join(', ')}]`;
|
|
|
|
|
default:
|
|
|
|
|
return 'unknown';
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Generate TypeScript interface for the CSV data
|
|
|
|
|
*/
|
|
|
|
|
function generateTypeDefinition(
|
|
|
|
|
resourceName: string,
|
2026-03-31 16:12:17 +08:00
|
|
|
propertyConfigs: PropertyConfig[]
|
2026-03-31 15:19:03 +08:00
|
|
|
): string {
|
|
|
|
|
const properties = propertyConfigs
|
2026-03-31 16:12:17 +08:00
|
|
|
.map((config) => ` ${config.name}: ${schemaToTypeString(config.schema)};`)
|
2026-03-31 15:19:03 +08:00
|
|
|
.join('\n');
|
|
|
|
|
|
2026-03-31 16:12:17 +08:00
|
|
|
return `type Table = {
|
2026-03-31 15:19:03 +08:00
|
|
|
${properties}
|
2026-03-31 16:12:17 +08:00
|
|
|
}[];
|
2026-03-31 15:19:03 +08:00
|
|
|
|
2026-03-31 16:12:17 +08:00
|
|
|
declare const data: Table;
|
|
|
|
|
export default data;
|
2026-03-31 15:19:03 +08:00
|
|
|
`;
|
|
|
|
|
}
|
|
|
|
|
|
2026-03-31 13:02:29 +08:00
|
|
|
export default function csvLoader(
|
|
|
|
|
this: LoaderContext<CsvLoaderOptions>,
|
|
|
|
|
content: string
|
2026-03-31 14:25:38 +08:00
|
|
|
): string | Buffer {
|
2026-03-31 14:45:02 +08:00
|
|
|
const options = this.getOptions() as CsvLoaderOptions | undefined;
|
|
|
|
|
const delimiter = options?.delimiter ?? ',';
|
|
|
|
|
const quote = options?.quote ?? '"';
|
|
|
|
|
const escape = options?.escape ?? '\\';
|
|
|
|
|
const bom = options?.bom ?? true;
|
|
|
|
|
const comment = options?.comment === false ? undefined : (options?.comment ?? '#');
|
|
|
|
|
const trim = options?.trim ?? true;
|
2026-03-31 15:19:03 +08:00
|
|
|
const emitTypes = options?.emitTypes ?? true;
|
|
|
|
|
const typesOutputDir = options?.typesOutputDir ?? '';
|
2026-03-31 15:54:38 +08:00
|
|
|
const writeToDisk = options?.writeToDisk ?? false;
|
2026-03-31 13:02:29 +08:00
|
|
|
|
|
|
|
|
const records = parse(content, {
|
|
|
|
|
delimiter,
|
|
|
|
|
quote,
|
|
|
|
|
escape,
|
2026-03-31 14:45:02 +08:00
|
|
|
bom,
|
|
|
|
|
comment,
|
|
|
|
|
trim,
|
2026-03-31 13:02:29 +08:00
|
|
|
relax_column_count: true,
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
if (records.length < 2) {
|
|
|
|
|
throw new Error('CSV must have at least 2 rows: headers and schemas');
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
const headers = records[0];
|
|
|
|
|
const schemas = records[1];
|
|
|
|
|
|
|
|
|
|
if (headers.length !== schemas.length) {
|
|
|
|
|
throw new Error(
|
|
|
|
|
`Header count (${headers.length}) does not match schema count (${schemas.length})`
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
const propertyConfigs: PropertyConfig[] = headers.map((header: string, index: number) => {
|
|
|
|
|
const schemaString = schemas[index];
|
|
|
|
|
const schema = parseSchema(schemaString);
|
|
|
|
|
return {
|
|
|
|
|
name: header,
|
|
|
|
|
schema,
|
|
|
|
|
validator: createValidator(schema),
|
|
|
|
|
parser: (valueString: string) => parseValue(schema, valueString),
|
|
|
|
|
};
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const dataRows = records.slice(2);
|
|
|
|
|
const objects = dataRows.map((row: string[], rowIndex: number) => {
|
|
|
|
|
const obj: Record<string, unknown> = {};
|
|
|
|
|
propertyConfigs.forEach((config, colIndex) => {
|
|
|
|
|
const rawValue = row[colIndex] ?? '';
|
|
|
|
|
try {
|
|
|
|
|
const parsed = config.parser(rawValue);
|
|
|
|
|
if (!config.validator(parsed)) {
|
|
|
|
|
throw new Error(
|
|
|
|
|
`Validation failed for property "${config.name}" at row ${rowIndex + 3}: ${rawValue}`
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
obj[config.name] = parsed;
|
|
|
|
|
} catch (error) {
|
|
|
|
|
if (error instanceof Error) {
|
|
|
|
|
throw new Error(
|
|
|
|
|
`Failed to parse property "${config.name}" at row ${rowIndex + 3}, column ${colIndex + 1}: ${error.message}`
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
throw error;
|
|
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
return obj;
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const json = JSON.stringify(objects, null, 2);
|
2026-03-31 15:49:05 +08:00
|
|
|
|
2026-03-31 15:19:03 +08:00
|
|
|
// Emit type definition file if enabled
|
|
|
|
|
if (emitTypes) {
|
2026-03-31 15:49:05 +08:00
|
|
|
const context = this.context || '';
|
|
|
|
|
// Get relative path from context, normalize to forward slashes
|
|
|
|
|
let relativePath = this.resourcePath.replace(context, '');
|
|
|
|
|
if (relativePath.startsWith('\\') || relativePath.startsWith('/')) {
|
|
|
|
|
relativePath = relativePath.substring(1);
|
|
|
|
|
}
|
|
|
|
|
relativePath = relativePath.replace(/\\/g, '/');
|
2026-03-31 15:54:38 +08:00
|
|
|
|
2026-03-31 16:12:17 +08:00
|
|
|
// Replace .csv with .csv.d.ts for the output filename
|
|
|
|
|
const dtsFileName = `${relativePath}.d.ts`;
|
2026-03-31 15:54:38 +08:00
|
|
|
const outputPath = typesOutputDir
|
2026-03-31 15:49:05 +08:00
|
|
|
? path.join(typesOutputDir, dtsFileName)
|
|
|
|
|
: dtsFileName;
|
2026-03-31 16:12:17 +08:00
|
|
|
const dtsContent = generateTypeDefinition(this.resourcePath, propertyConfigs);
|
2026-03-31 15:49:05 +08:00
|
|
|
|
2026-03-31 15:54:38 +08:00
|
|
|
if (writeToDisk) {
|
|
|
|
|
// Write directly to disk (useful for dev server)
|
|
|
|
|
const absolutePath = path.join(this.context || process.cwd(), typesOutputDir || '', dtsFileName);
|
|
|
|
|
fs.mkdirSync(path.dirname(absolutePath), { recursive: true });
|
|
|
|
|
fs.writeFileSync(absolutePath, dtsContent);
|
|
|
|
|
} else {
|
|
|
|
|
// Emit to in-memory filesystem (for production build)
|
|
|
|
|
this.emitFile?.(outputPath, dtsContent);
|
|
|
|
|
}
|
2026-03-31 15:19:03 +08:00
|
|
|
}
|
2026-03-31 15:49:05 +08:00
|
|
|
|
2026-03-31 13:02:29 +08:00
|
|
|
return `export default ${json};`;
|
|
|
|
|
}
|