add TorqueScript transpiler and runtime

Brian Beck 2025-11-30 11:44:47 -08:00
parent c8391a1056
commit 7d10fb7dee
49 changed files with 12324 additions and 2075 deletions

View file

@@ -1,47 +1,107 @@
import fs from "node:fs/promises";
import path from "node:path";
import { parseArgs } from "node:util";
import ignore from "ignore";
import unzipper from "unzipper";
import { normalizePath } from "@/src/stringUtils";
import manifest from "@/public/manifest.json";
import path from "node:path";
import { walkDirectory } from "@/src/fileUtils";
const inputBaseDir = process.env.BASE_DIR || "GameData/base";
const outputBaseDir = "docs/base";
const archives = new Map<string, unzipper.CentralDirectory>();
// NOTE! Yes, the files below will be ignored. But this also expects `inputBaseDir`
// to largely have already been pruned of files that are indistinguishable from
// useful files - like player skins, voice binds, and anything else that will
// not be used by the map tool. So, remove `voice.vl2` before running this, for
// example. Random scripts are typically fine, since they're small (and other
// scripts may expect them to be available).
const ignoreList = ignore().add(`
fonts/
lighting/
prefs/
.DS_Store
*.dso
*.gui
*.ico
*.ml
*.txt
`);
async function buildExtractedGameDataFolder() {
await fs.mkdir(outputBaseDir, { recursive: true });
const filePaths = Object.keys(manifest).sort();
for (const filePath of filePaths) {
const sources = manifest[filePath];
for (const source of sources) {
if (source) {
let archive = archives.get(source);
if (!archive) {
const archivePath = `${inputBaseDir}/${source}`;
archive = await unzipper.Open.file(archivePath);
archives.set(source, archive);
async function extractAssets({ clean }: { clean: boolean }) {
const vl2Files: string[] = [];
const looseFiles: string[] = [];
// Discover all files
await walkDirectory(inputBaseDir, {
onFile: ({ entry }) => {
const filePath = path.join(entry.parentPath, entry.name);
const resourcePath = normalizePath(path.relative(inputBaseDir, filePath));
if (!ignoreList.ignores(resourcePath)) {
if (/\.vl2$/i.test(entry.name)) {
vl2Files.push(filePath);
} else {
looseFiles.push(filePath);
}
const entry = archive.files.find(
(entry) => normalizePath(entry.path) === filePath,
);
const inFile = `${inputBaseDir}/${source}:${filePath}`;
if (!entry) {
throw new Error(`File not found in archive: ${inFile}`);
}
const outFile = `${outputBaseDir}/@vl2/${source}/${filePath}`;
const outDir = path.dirname(outFile);
console.log(`${inFile} -> ${outFile}`);
await fs.mkdir(outDir, { recursive: true });
await fs.writeFile(outFile, entry.stream());
} else {
const inFile = `${inputBaseDir}/${filePath}`;
const outFile = `${outputBaseDir}/${filePath}`;
console.log(`${inFile} -> ${outFile}`);
await fs.cp(inFile, outFile);
}
},
onDir: ({ entry }) => {
const dirPath = path.join(entry.parentPath, entry.name);
const resourcePath =
normalizePath(path.relative(inputBaseDir, dirPath)) + "/";
const shouldRecurse = !ignoreList.ignores(resourcePath);
return shouldRecurse;
},
});
if (clean) {
console.log(`Cleaning ${outputBaseDir}`);
await fs.rm(outputBaseDir, { recursive: true, force: true });
}
await fs.mkdir(outputBaseDir, { recursive: true });
for (const filePath of looseFiles) {
const relativePath = path.relative(inputBaseDir, filePath);
const outFile = path.join(outputBaseDir, relativePath);
const outDir = path.dirname(outFile);
console.log(outFile);
await fs.mkdir(outDir, { recursive: true });
await fs.copyFile(filePath, outFile);
}
// Extract .vl2 files
for (const archivePath of vl2Files) {
const relativePath = path.relative(inputBaseDir, archivePath);
const archive = await unzipper.Open.file(archivePath);
const outputArchiveDir = path.join(outputBaseDir, "@vl2", relativePath);
for (const entry of archive.files) {
if (entry.type === "Directory") continue;
const resourcePath = normalizePath(entry.path);
if (ignoreList.ignores(resourcePath)) continue;
const outFile = path.join(outputArchiveDir, resourcePath);
const outDir = path.dirname(outFile);
console.log(outFile);
await fs.mkdir(outDir, { recursive: true });
const content = await entry.buffer();
await fs.writeFile(outFile, content);
}
}
console.log("Done.");
}
buildExtractedGameDataFolder();
const { values } = parseArgs({
options: {
clean: {
type: "boolean",
default: false,
},
},
});
extractAssets({ clean: values.clean });

View file

@@ -1,99 +1,104 @@
import fs from "node:fs/promises";
import path from "node:path";
import { parseArgs } from "node:util";
import { Dirent } from "node:fs";
import orderBy from "lodash.orderby";
import ignore from "ignore";
import { normalizePath } from "@/src/stringUtils";
import { walkDirectory } from "@/src/fileUtils";
import { parseMissionScript } from "@/src/mission";
const baseDir = process.env.BASE_DIR || "docs/base";
async function walkDirectory(
dir: string,
{
onFile,
onDir = () => true,
}: {
onFile: (fileInfo: {
dir: string;
entry: Dirent<string>;
fullPath: string;
}) => void | Promise<void>;
onDir?: (dirInfo: {
dir: string;
entry: Dirent<string>;
fullPath: string;
}) => boolean | Promise<boolean>;
},
): Promise<void> {
const entries = await fs.readdir(dir, { withFileTypes: true });
// Most files we're not interested in would have already been ignored by the
// `extract-assets` script - but some extra files still may have popped up from
// the host system.
const ignoreList = ignore().add(`
.DS_Store
`);
for (const entry of entries) {
const fullPath = path.join(dir, entry.name);
type SourceTuple =
// If casing of the path within this source is the same as "first seen" casing
| [sourcePath: string]
// If casing of the path within this source is different
| [sourceName: string, actualPath: string];
if (entry.isDirectory()) {
const shouldRecurse = await onDir({ dir, entry, fullPath });
if (shouldRecurse) {
await walkDirectory(fullPath, { onFile, onDir });
}
} else if (entry.isFile()) {
await onFile({ dir, entry, fullPath });
}
}
}
// Resource entry: [firstSeenActualPath, ...sourceTuples]
type ResourceEntry = [firstSeenActualPath: string, ...SourceTuple[]];
/**
* Log and return the manifest of files for the given game asset directory.
* The assets used to build the mapper are a filtered set of relevant files
* (map related assets) from the `Tribes2/GameData/base` folder. The manifest
* consists of the set of unique paths (case sensitive!) represented by the file
* tree AND the vl2 files as if they had been unzipped. Thus, each file in the
* manifest can have one or more "sources". If the file appears outside of a vl2,
* it will have a blank source (the empty string) first. Each vl2 containing the
* file will then be listed in order. To resolve an asset, the engine uses a
* layering approach where paths inside lexicographically-higher vl2 files win
* over the same path outside of a vl2 or in a lexicographically-lower vl2 file.
* So, to choose the same final asset as the engine, choose the last source in
* the list for any given path.
* consists of the set of unique paths represented by the file tree AND the vl2
* files as if they had been unzipped. Keys are normalized (lowercased) paths
* for case-insensitive lookup.
*
* Values are arrays where the first element is the first-seen casing of the
* path, followed by source tuples. Each source tuple is either:
* - [sourcePath] if the file has the same casing as firstSeenPath
* - [sourcePath, actualPath] if the file has different casing in that source
*
* If the file appears outside of a vl2, the source path will be the empty
* string. Each vl2 containing the file will then be listed in order. To resolve
* an asset, the engine uses a layering approach where paths inside
* lexicographically-higher vl2 files win over the same path outside of a vl2
* or in a lexicographically-lower vl2 file. So, to choose the same final asset
* as the engine, choose the last source in the list for any given path.
*
* Example:
*
* ```
* {
* "textures/terrainTiles/green.png": ["textures.vl2"],
* "textures/lava/ds_iwal01a.png": [
* "lava.vl2",
* "yHDTextures2.0.vl2",
* "zAddOnsVL2s/zDiscord-Map-Pack-4.7.1.vl2"
* "textures/terraintiles/green.png": [
* "textures/terrainTiles/green.png",
* ["textures.vl2"],
* ["otherTextures.vl2", "Textures/TerrainTiles/Green.PNG"]
* ]
* }
* ```
*/
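The layering rule spelled out in this comment is the same one the script applies below when it resolves `.mis` files: take the last source tuple for a path, prefer that tuple's recorded casing, and treat an empty-string source as a loose file outside any vl2. A minimal sketch of that lookup, assuming the entry shape documented here (`resolveResource` is illustrative, not part of this commit):

```
// Shape of a manifest value as documented above; the second tuple element is
// only present when the file's casing differs from the first-seen casing.
type SourceTuple = [sourcePath: string, actualPath?: string];
type ResourceEntry = [firstSeenActualPath: string, ...SourceTuple[]];

// Resolve a manifest entry to a path under the extracted base dir: the last
// (lexicographically highest) source wins, its recorded casing is preferred,
// and an empty-string source means the file lives outside any vl2.
function resolveResource(baseDir: string, entry: ResourceEntry): string {
  const [firstSeenPath, ...sourceTuples] = entry;
  const last = sourceTuples[sourceTuples.length - 1];
  const [source, actualPath = firstSeenPath] = last;
  return source
    ? `${baseDir}/@vl2/${source}/${actualPath}`
    : `${baseDir}/${actualPath}`;
}

// With the example entry above:
// resolveResource("docs/base", [
//   "textures/terrainTiles/green.png",
//   ["textures.vl2"],
//   ["otherTextures.vl2", "Textures/TerrainTiles/Green.PNG"],
// ])
// -> "docs/base/@vl2/otherTextures.vl2/Textures/TerrainTiles/Green.PNG"
```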
async function buildManifest() {
const fileSources = new Map<string, string[]>();
// Map from normalized (lowercased) path to [firstSeenActualPath, ...sourceTuples]
const fileSources = new Map<string, ResourceEntry>();
const looseFiles: string[] = [];
await walkDirectory(baseDir, {
onFile: ({ fullPath }) => {
looseFiles.push(normalizePath(fullPath));
onFile: ({ entry }) => {
const resourcePath = normalizePath(
path.relative(baseDir, path.join(entry.parentPath, entry.name)),
);
if (!ignoreList.ignores(resourcePath)) {
looseFiles.push(resourcePath);
}
},
onDir: ({ dir, entry, fullPath }) => {
onDir: ({ entry }) => {
return entry.name !== "@vl2";
},
});
for (const filePath of looseFiles) {
const relativePath = normalizePath(path.relative(baseDir, filePath));
fileSources.set(relativePath, [""]);
for (const resourcePath of looseFiles) {
const normalizedKey = resourcePath.toLowerCase();
const existing = fileSources.get(normalizedKey);
if (existing) {
const [firstSeenPath] = existing;
if (resourcePath === firstSeenPath) {
existing.push([""]);
} else {
existing.push(["", resourcePath]);
}
} else {
fileSources.set(normalizedKey, [resourcePath, [""]]);
}
}
let archiveDirs: string[] = [];
await walkDirectory(`${baseDir}/@vl2`, {
onFile: () => {},
onDir: ({ dir, entry, fullPath }) => {
if (entry.name.endsWith(".vl2")) {
archiveDirs.push(fullPath);
onDir: ({ entry }) => {
if (/\.vl2$/i.test(entry.name)) {
const archivePath = path.join(entry.parentPath, entry.name);
archiveDirs.push(archivePath);
}
return true;
},
@@ -101,7 +106,7 @@ async function buildManifest() {
archiveDirs = orderBy(
archiveDirs,
[(fullPath) => path.basename(fullPath).toLowerCase()],
[(archivePath) => path.basename(archivePath).toLowerCase()],
["asc"],
);
@@ -110,49 +115,67 @@ async function buildManifest() {
path.relative(`${baseDir}/@vl2`, archivePath),
);
await walkDirectory(archivePath, {
onFile: ({ dir, entry, fullPath }) => {
const filePath = normalizePath(path.relative(archivePath, fullPath));
const sources = fileSources.get(filePath) ?? [];
sources.push(relativeArchivePath);
fileSources.set(filePath, sources);
onFile: ({ entry }) => {
const resourcePath = normalizePath(
path.relative(archivePath, path.join(entry.parentPath, entry.name)),
);
if (ignoreList.ignores(resourcePath)) {
return;
}
const normalizedKey = resourcePath.toLowerCase();
const existing = fileSources.get(normalizedKey);
if (existing) {
const [firstSeenPath] = existing;
if (resourcePath === firstSeenPath) {
existing.push([relativeArchivePath]);
} else {
existing.push([relativeArchivePath, resourcePath]);
}
} else {
fileSources.set(normalizedKey, [resourcePath, [relativeArchivePath]]);
}
},
});
}
const resources: Record<string, string[]> = {};
const resources: Record<string, ResourceEntry> = {};
const missions: Record<
string,
{ resourcePath: string; displayName: string | null; missionTypes: string[] }
> = {};
const orderedFiles = Array.from(fileSources.keys()).sort();
for (const filePath of orderedFiles) {
const sources = fileSources.get(filePath);
resources[filePath] = sources;
const lastSource = sources[sources.length - 1];
const sortedResourceKeys = Array.from(fileSources.keys()).sort();
for (const resourceKey of sortedResourceKeys) {
const entry = fileSources.get(resourceKey)!;
resources[resourceKey] = entry;
const [firstSeenPath, ...sourceTuples] = entry;
const lastSourceTuple = sourceTuples[sourceTuples.length - 1];
const lastSource = lastSourceTuple[0];
const lastActualPath = lastSourceTuple[1] ?? firstSeenPath;
console.log(
`${filePath}${sources[0] ? ` 📦 ${sources[0]}` : ""}${
sources.length > 1
? sources
`${firstSeenPath}${sourceTuples[0][0] ? ` 📦 ${sourceTuples[0][0]}` : ""}${
sourceTuples.length > 1
? sourceTuples
.slice(1)
.map((source) => ` ❗️ ${source}`)
.map((tuple) => ` ❗️ ${tuple[0]}`)
.join("")
: ""
}`,
);
const resolvedPath = lastSource
? path.join(baseDir, "@vl2", lastSource, filePath)
: path.join(baseDir, filePath);
? path.join(baseDir, "@vl2", lastSource, lastActualPath)
: path.join(baseDir, lastActualPath);
if (filePath.endsWith(".mis")) {
if (resourceKey.endsWith(".mis")) {
const missionScript = await fs.readFile(resolvedPath, "utf8");
const mission = parseMissionScript(missionScript);
const baseName = path.basename(filePath, ".mis");
const baseName = path.basename(firstSeenPath, ".mis");
missions[baseName] = {
resourcePath: filePath,
resourcePath: resourceKey,
displayName: mission.displayName,
missionTypes: mission.missionTypes,
};

View file

@@ -1,6 +1,6 @@
import fs from "node:fs";
import { inspect, parseArgs } from "node:util";
import { parseImageFrameList } from "@/src/ifl";
import { parseImageFileList } from "@/src/imageFileList";
import { getFilePath } from "@/src/manifest";
async function run() {
@@ -50,7 +50,7 @@ async function run() {
}
const missionScript = fs.readFileSync(framesFile, "utf8");
console.log(
inspect(parseImageFrameList(missionScript), {
inspect(parseImageFileList(missionScript), {
colors: true,
depth: Infinity,
}),

View file

@@ -1,7 +1,8 @@
import fs from "node:fs/promises";
import { iterObjects, parseMissionScript } from "@/src/mission";
import path from "node:path";
import { parseArgs } from "node:util";
import { basename } from "node:path";
import { parse } from "@/src/torqueScript";
import type * as AST from "@/src/torqueScript/ast";
/**
* For all missions, log all the property values matching the given filters.
@@ -14,7 +15,7 @@ import { basename } from "node:path";
*
* tsx scripts/mission-properties.ts -t TerrainBlock -p position
*/
const { values, positionals } = parseArgs({
const { values } = parseArgs({
allowPositionals: true,
options: {
types: {
@@ -41,6 +42,115 @@ const propertyList =
? null
: new Set(values.properties.split(","));
function getClassName(node: AST.Identifier | AST.Expression): string | null {
if (node.type === "Identifier") {
return node.name;
}
return null;
}
function getPropertyName(
target: AST.Identifier | AST.IndexExpression,
): string | null {
if (target.type === "Identifier") {
return target.name;
}
// IndexExpression like foo[0] - get the base name
if (
target.type === "IndexExpression" &&
target.object.type === "Identifier"
) {
return target.object.name;
}
return null;
}
function expressionToString(expr: AST.Expression): string {
switch (expr.type) {
case "StringLiteral":
return expr.value;
case "NumberLiteral":
return String(expr.value);
case "BooleanLiteral":
return String(expr.value);
case "Identifier":
return expr.name;
case "Variable":
return `${expr.scope === "global" ? "$" : "%"}${expr.name}`;
case "BinaryExpression":
return `${expressionToString(expr.left)} ${expr.operator} ${expressionToString(expr.right)}`;
case "UnaryExpression":
return `${expr.operator}${expressionToString(expr.argument)}`;
default:
return `[${expr.type}]`;
}
}
interface ObjectInfo {
className: string;
properties: Array<{ name: string; value: string }>;
children: ObjectInfo[];
}
function extractObject(node: AST.ObjectDeclaration): ObjectInfo | null {
const className = getClassName(node.className);
if (!className) return null;
const properties: Array<{ name: string; value: string }> = [];
const children: ObjectInfo[] = [];
for (const item of node.body) {
if (item.type === "Assignment") {
const name = getPropertyName(item.target);
if (name) {
properties.push({
name,
value: expressionToString(item.value),
});
}
} else if (item.type === "ObjectDeclaration") {
const child = extractObject(item);
if (child) {
children.push(child);
}
}
}
return { className, properties, children };
}
function* walkObjects(ast: AST.Program): Generator<ObjectInfo> {
function* walkStatements(statements: AST.Statement[]): Generator<ObjectInfo> {
for (const stmt of statements) {
if (stmt.type === "ObjectDeclaration") {
const obj = extractObject(stmt);
if (obj) {
yield obj;
yield* walkObjectTree(obj.children);
}
} else if (stmt.type === "ExpressionStatement") {
// Check if expression is an ObjectDeclaration
if (stmt.expression.type === "ObjectDeclaration") {
const obj = extractObject(stmt.expression);
if (obj) {
yield obj;
yield* walkObjectTree(obj.children);
}
}
}
}
}
function* walkObjectTree(objects: ObjectInfo[]): Generator<ObjectInfo> {
for (const obj of objects) {
yield obj;
yield* walkObjectTree(obj.children);
}
}
yield* walkStatements(ast.body);
}
async function run({
typeList,
propertyList,
@@ -51,18 +161,26 @@ async function run({
valuesOnly: boolean;
}) {
for await (const inFile of fs.glob("docs/base/**/*.mis")) {
const baseName = basename(inFile);
const baseName = path.basename(inFile);
const missionScript = await fs.readFile(inFile, "utf8");
const mission = parseMissionScript(missionScript);
for (const consoleObject of iterObjects(mission.objects)) {
if (!typeList || typeList.has(consoleObject.className)) {
for (const property of consoleObject.properties) {
if (!propertyList || propertyList.has(property.target.name)) {
let ast: AST.Program;
try {
ast = parse(missionScript);
} catch (err) {
console.error(`Failed to parse ${baseName}:`, err);
continue;
}
for (const obj of walkObjects(ast)) {
if (!typeList || typeList.has(obj.className)) {
for (const property of obj.properties) {
if (!propertyList || propertyList.has(property.name)) {
if (valuesOnly) {
console.log(property.value);
} else {
console.log(
`${baseName} > ${consoleObject.className} > ${property.target.name} = ${property.value}`,
`${baseName} > ${obj.className} > ${property.name} = ${property.value}`,
);
}
}

View file

@@ -0,0 +1,13 @@
import fs from "node:fs/promises";
import { inspect } from "node:util";
import { parse } from "@/src/torqueScript";
async function run(scriptPath: string) {
const script = await fs.readFile(scriptPath, "utf8");
const ast = parse(script);
console.log(inspect(ast, { colors: true, depth: Infinity }));
}
const scriptPath = process.argv[2];
await run(scriptPath);

View file

@@ -0,0 +1,11 @@
import fs from "node:fs/promises";
import { transpile } from "@/src/torqueScript";
async function run(scriptPath: string) {
const script = await fs.readFile(scriptPath, "utf8");
const { code } = transpile(script);
console.log(code);
}
const scriptPath = process.argv[2];
await run(scriptPath);