Skip to content

Commit

Permalink
Initial commit
Browse files Browse the repository at this point in the history
  • Loading branch information
Kattoor committed Aug 25, 2021
0 parents commit 803f80d
Show file tree
Hide file tree
Showing 13 changed files with 871 additions and 0 deletions.
95 changes: 95 additions & 0 deletions datasheets/datasheet-converter.mjs
Original file line number Diff line number Diff line change
@@ -0,0 +1,95 @@
import {promises as fs} from 'fs';
import {globby} from 'globby';

// Binary .datasheet layout constants (all offsets/sizes in bytes).
// Column and row counts are int32 LE values at fixed file offsets.
const amountOfColumnsOffset = 0x44;
const amountOfRowsOffset = 0x48;
// Column headers start here; each 12-byte header holds a 4-byte hash,
// a 4-byte string-table offset, and a 4-byte type tag (see readStringValue).
const headersOffset = 0x5c;
const amountOfBytesInHeader = 12;
// Each cell is 8 bytes; only the trailing 4 bytes carry the value (see readCell).
const amountOfBytesInCell = 8;

/**
 * Converts every *.datasheet file under `path` to a semicolon-separated CSV
 * file next to it, then deletes the original binary file.
 */
export async function convertDatasheets(path) {
    const start = Date.now();

    const filePaths = await globby(path + '**/*.datasheet');

    process.stdout.write('Converting datasheets..\r');

    for (const filePath of filePaths) {
        const data = await fs.readFile(filePath);

        const amountOfColumns = data.readInt32LE(amountOfColumnsOffset);
        const amountOfRows = data.readInt32LE(amountOfRowsOffset);

        const cellsOffset = headersOffset + amountOfColumns * amountOfBytesInHeader;
        const amountOfBytesInRow = amountOfBytesInCell * amountOfColumns;
        const stringsOffset = cellsOffset + amountOfRows * amountOfBytesInRow;

        // One {stringValue, type} descriptor per column.
        const headers = Array.from({length: amountOfColumns}, (_, columnIndex) => {
            const headerOffset = headersOffset + columnIndex * amountOfBytesInHeader;
            return {
                stringValue: readStringValue(data, headerOffset),
                type: data.readInt32LE(headerOffset + 8)
            };
        });

        // First CSV line: the column names resolved from the string table.
        const headerLine = headers
            .map((header) => readCString(data, stringsOffset, header.stringValue.stringOffset))
            .join(';');

        // One CSV line per row, each cell decoded according to its column type.
        const rowLines = [];
        for (let rowIndex = 0; rowIndex < amountOfRows; rowIndex++) {
            const rowStart = cellsOffset + rowIndex * amountOfBytesInRow;
            const cells = headers.map((header, columnIndex) => {
                const cellValue = readCell(data, rowStart + columnIndex * amountOfBytesInCell);
                return parseCellValueToType(data, stringsOffset, cellValue, header.type);
            });
            rowLines.push(cells.join(';'));
        }

        const csv = headerLine + '\n' + rowLines.join('\n');

        // '.datasheet' is 9 chars + dot; replace the extension with 'csv'.
        await saveFile(filePath.slice(0, -9) + 'csv', csv);
        await fs.unlink(filePath);
    }

    console.log('Converting datasheets.. finished in ' + (Date.now() - start) + 'ms');
}

/**
 * Resolves a NUL-terminated string from the file's string table.
 *
 * `value` is a 4-byte buffer holding the string's offset (int32 LE) relative
 * to the string table beginning at `stringsOffset`.
 */
function readCString(data, stringsOffset, value) {
    const offset = stringsOffset + value.readInt32LE(0);
    // Locate the NUL terminator with Buffer#indexOf instead of the original
    // byte-by-byte loop. If no terminator exists, read to the end of the
    // buffer — the original would have thrown a RangeError reading past it.
    let end = data.indexOf(0, offset);
    if (end === -1) {
        end = data.length;
    }
    return data.toString('utf8', offset, end);
}

/**
 * Decodes a 4-byte cell value according to its column type tag:
 * 1 = string (cell holds a string-table offset), 2 = float32 LE,
 * 3 = int32 LE interpreted as a boolean.
 */
function parseCellValueToType(data, stringsOffset, cellValue, type) {
    if (type === 1) {
        return readCString(data, stringsOffset, cellValue);
    }
    if (type === 2) {
        return cellValue.readFloatLE(0);
    }
    if (type === 3) {
        return cellValue.readInt32LE(0) !== 0;
    }
    // Unknown type tag: yield undefined, matching the original switch fall-through.
    return undefined;
}

/**
 * Returns the 4-byte value portion of an 8-byte cell as a buffer view.
 *
 * Only the trailing 4 bytes carry the value (int32/float32, or a string-table
 * offset for string columns — see parseCellValueToType). The original also
 * read the leading int32 into an unused local; that dead code is dropped.
 * NOTE(review): the first 4 bytes are presumably a hash — confirm against the format spec.
 */
function readCell(data, offset) {
    return data.slice(offset + 4, offset + 8);
}

/**
 * Reads the first 8 bytes of a 12-byte column header: a 4-byte hash followed
 * by a 4-byte string-table offset, both returned as raw buffer views sharing
 * the underlying memory.
 */
function readStringValue(data, offset) {
    return {
        hash: data.subarray(offset, offset + 4),
        stringOffset: data.subarray(offset + 4, offset + 8)
    };
}

/** Writes `out` to `path`, creating the parent directory first if needed. */
async function saveFile(path, out) {
    const separatorIndex = path.lastIndexOf('/');
    const directory = path.slice(0, separatorIndex);
    await fs.mkdir(directory, {recursive: true});
    await fs.writeFile(path, out);
}
69 changes: 69 additions & 0 deletions datasheets/datasheet-extractor.mjs
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
import {open} from 'yauzl';
import workerpool from 'workerpool';
import {fileURLToPath} from 'url';
import {dirname} from 'path';

// ES modules have no __dirname; reconstruct it from the module URL.
const __dirname = dirname(fileURLToPath(import.meta.url));

// Pool of extract-worker.js instances, one OS process per worker.
const pool = workerpool.pool(__dirname + '/extract-worker.js', {workerType: 'process'});

// Count of completed pool tasks across one extractDatasheets run.
let resultCount = 0;

/**
 * Scans the given .pak (zip) files for .datasheet/.xml entries and extracts
 * them to `outPath` using the worker pool, one task per pak file.
 *
 * Fixes over the original: with zero matching pak groups the completion
 * counter could never reach its target, so the returned promise hung forever;
 * and pool.exec rejections were silently unhandled. Promise.all resolves
 * immediately for empty input and propagates worker failures.
 */
export async function extractDatasheets(pakFilePaths, outPath) {
    const start = Date.now();

    process.stdout.write('Extracting datasheets..\r');

    const recordsPromises = pakFilePaths.map((pakFilePath) => extractRelevantRecords(pakFilePath));
    const records = (await Promise.all(recordsPromises)).flat();

    // Group matched entries by their containing pak file so each worker task
    // opens a single archive.
    const groupedByPakFile = Object.entries(
        records.reduce((acc, entry) => {
            if (acc[entry.pakFile] == null) {
                acc[entry.pakFile] = [];
            }
            acc[entry.pakFile].push(entry);
            return acc;
        }, {}));

    // Nothing to extract: finish without touching the pool.
    if (groupedByPakFile.length === 0) {
        console.log('Extracting datasheets.. finished in ' + (Date.now() - start) + 'ms');
        return;
    }

    await Promise.all(groupedByPakFile.map(([pakFilePath, fileEntries]) =>
        pool.exec('extractFromPak', [JSON.stringify({pakFilePath, fileEntries, outPath})])));

    console.log('Extracting datasheets.. finished in ' + (Date.now() - start) + 'ms');
    await pool.terminate();
}

/**
 * Opens one .pak (zip) file lazily and collects metadata for every
 * .datasheet/.xml entry: its local-header offset and (un)compressed sizes.
 *
 * Fix over the original: the `err` argument of yauzl's open callback was
 * ignored, so a bad/missing pak crashed with an unhandled exception on
 * `zipFile.readEntry()`; it now rejects the returned promise instead.
 * The pointless /g and /m regex flags are also dropped — filenames contain
 * no newlines, and /g makes .test stateful when a regex object is reused.
 */
async function extractRelevantRecords(filePath) {
    return new Promise((resolve, reject) => {
        const entries = [];

        open(filePath, {lazyEntries: true}, (err, zipFile) => {
            if (err) {
                reject(err);
                return;
            }

            zipFile.on('entry', (entry) => {
                if (/\.(datasheet|xml)$/.test(entry.fileName)) {
                    entries.push({
                        pakFile: filePath,
                        offset: entry.relativeOffsetOfLocalHeader,
                        fileName: entry.fileName,
                        compressedSize: entry.compressedSize,
                        uncompressedSize: entry.uncompressedSize
                    });
                }
                zipFile.readEntry();
            });

            zipFile.once('end', () => resolve(entries));

            zipFile.readEntry();
        });
    });
}
10 changes: 10 additions & 0 deletions datasheets/extract-and-convert.mjs
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
import {extractDatasheets} from './datasheet-extractor.mjs';
import {convertDatasheets} from './datasheet-converter.mjs';

/**
 * Extracts the relevant entries from the given pak files into outPath.
 * Resolves once extraction has finished.
 */
export async function extract(pakFilePaths, outPath) {
    return extractDatasheets(pakFilePaths, outPath);
}

/** Converts every extracted .datasheet under outPath to CSV. */
export async function convert(outPath) {
    return convertDatasheets(outPath);
}
42 changes: 42 additions & 0 deletions datasheets/extract-worker.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
import ffi from 'ffi-napi';
import {promises as fs} from 'fs';
import workerpool from 'workerpool';

// Oodle decompression entry point loaded from the game's bundled DLL via FFI.
// NOTE(review): the argument list is assumed to mirror
// OodleLZ_Decompress(src, srcLen, dst, dstLen, ...) — verify against the
// Oodle SDK headers before changing it.
const lib = ffi.Library('oo2core_8_win64.dll', {
    'OodleLZ_Decompress': ['void', ['char *', 'int', 'char *', 'int', 'int', 'int', 'int', 'void *', 'void *', 'void *', 'void *', 'void *', 'void *', 'int']]
});

/**
 * Worker task: reads each listed entry from a pak (zip) file, decompresses it
 * with Oodle, and writes the result under outPath.
 *
 * Fix over the original: the file handle leaked when a read, decompression,
 * or write threw — it is now closed in a finally block.
 */
async function extractFromPak(serializedParameters) {
    const {pakFilePath, fileEntries, outPath} = JSON.parse(serializedParameters);
    const fileHandle = await fs.open(pakFilePath, 'r');

    try {
        for (const fileEntry of fileEntries) {
            // Zip local file header: name length at +26, extra-field length at +28;
            // the compressed payload starts after the 30-byte header plus both fields.
            const localHeader = Buffer.alloc(4);
            await fileHandle.read({buffer: localHeader, position: fileEntry.offset + 26});
            const fileNameLength = localHeader.readUInt16LE(0);
            const extraFieldLength = localHeader.readUInt16LE(2);

            const compressedData = Buffer.alloc(fileEntry.compressedSize);
            await fileHandle.read({
                buffer: compressedData,
                position: fileEntry.offset + 30 + fileNameLength + extraFieldLength
            });

            const uncompressedData = Buffer.alloc(fileEntry.uncompressedSize);
            lib.OodleLZ_Decompress(compressedData, fileEntry.compressedSize, uncompressedData, fileEntry.uncompressedSize, 0, 0, 0, null, null, null, null, null, null, 3);

            await saveFile(outPath + fileEntry.fileName, uncompressedData);
        }
    } finally {
        // Close even on failure — the original leaked the handle on error.
        await fileHandle.close();
    }
}

/** Writes `out` to `path`, creating the parent directory first if needed. */
async function saveFile(path, out) {
    const separatorIndex = path.lastIndexOf('/');
    const directory = path.slice(0, separatorIndex);
    await fs.mkdir(directory, {recursive: true});
    await fs.writeFile(path, out);
}

// Register the extraction task with the workerpool runtime.
workerpool.worker({extractFromPak});
94 changes: 94 additions & 0 deletions models/3d/3d-converter.mjs
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
import workerpool from 'workerpool';
import {dirname} from 'path';
import {fileURLToPath} from 'url';
import {promises as fs} from 'fs';

// ES modules have no __dirname; reconstruct it from the module URL.
const __dirname = dirname(fileURLToPath(import.meta.url));
// Pool of converter-worker.js processes used for both Collada and glTF conversion.
const pool = workerpool.pool(__dirname + '/converter-worker.js', {workerType: 'process'});

/**
 * Full model pipeline: extract/convert skins to Collada, patch the Collada
 * files, convert them to glTF, then shut the worker pool down.
 */
export async function convertModels(records, outPath) {
    // BUG FIX: outPath was not forwarded, so every destination path built in
    // convertToColladaFiles started with the string "undefined".
    const colladaFilePaths = await convertToColladaFiles(records, outPath);
    await fixColladaFiles(colladaFilePaths);
    // NOTE(review): convertToGltfFiles declares only one parameter, so the
    // '/gltf/' destination argument is currently ignored — confirm intent.
    await convertToGltfFiles(colladaFilePaths, outPath + '/gltf/');
    await pool.terminate();
}

/**
 * Queues one worker task per non-empty skin model and resolves with the
 * Collada file paths the workers produced.
 *
 * Fix over the original: with zero models the completion counter could never
 * reach its target and the promise hung forever; Promise.all resolves
 * immediately for empty input and propagates worker failures.
 */
async function convertToColladaFiles(records, outPath) {
    // Destination path per model: <outPath><itemId>/<model file name>.
    const toExtract = [];
    for (const record of records) {
        const itemId = record.itemId;
        const models = [record.skin1.model.toLocaleLowerCase(), record.skin2.model.toLocaleLowerCase()];
        for (const model of models) {
            if (model !== '') {
                const modelName = model.slice(model.lastIndexOf('/') + 1);
                toExtract.push(outPath + itemId + '/' + modelName);
            }
        }
    }

    const start = Date.now();

    if (toExtract.length === 0) {
        console.log('Converting to Collada files.. finished in ' + (Date.now() - start) + 'ms');
        return [];
    }

    let finishedTasks = 0;
    const colladaFilePaths = await Promise.all(toExtract.map((modelPath) =>
        pool.exec('runColladaConverter', [modelPath])
            .then((createdColladaFilePath) => {
                finishedTasks += 1;
                process.stdout.write('Converting to Collada files.. ' + Math.round(finishedTasks * 100 / toExtract.length) + '%\r');
                return createdColladaFilePath;
            })));

    console.log('Converting to Collada files.. finished in ' + (Date.now() - start) + 'ms');
    return colladaFilePaths;
}

/**
 * Rewrites each Collada file in place: removes every
 * `<input semantic="NORMAL" ...>` line and retargets all texture references
 * to `textures/<name>.png`.
 */
async function fixColladaFiles(colladaFilePaths) {
    const start = Date.now();

    let processed = 0;
    for (const colladaFilePath of colladaFilePaths) {
        process.stdout.write('Fixing Collada files.. ' + Math.round(processed * 100 / colladaFilePaths.length) + '%\r');
        processed += 1;

        const content = await fs.readFile(colladaFilePath, 'utf-8');

        const withoutNormals = content
            .split('\n')
            .filter((line) => !line.trim().startsWith('<input semantic="NORMAL"'))
            .join('\n');

        const fixed = withoutNormals.replace(
            /<init_from>.*\/(.*)\.(png|dds|tif)<\/init_from>/gm,
            '<init_from>textures/$1.png</init_from>');

        await fs.writeFile(colladaFilePath, fixed);
    }

    console.log('Fixing Collada files.. finished in ' + (Date.now() - start) + 'ms');
}

/**
 * Queues one worker task per Collada file and resolves with the same list of
 * paths once every glTF conversion has finished.
 *
 * Fix over the original: with an empty input list the completion counter
 * could never reach its target and the promise hung forever; Promise.all
 * resolves immediately for empty input and propagates worker failures.
 */
export async function convertToGltfFiles(colladaFilePaths) {
    const start = Date.now();

    if (colladaFilePaths.length === 0) {
        console.log('Converting to Gltf files.. finished in ' + (Date.now() - start) + 'ms');
        return colladaFilePaths;
    }

    let finishedTasks = 0;
    await Promise.all(colladaFilePaths.map((colladaFilePath) =>
        pool.exec('runGltfConverter', [colladaFilePath])
            .then(() => {
                finishedTasks += 1;
                process.stdout.write('Converting to Gltf files.. ' + Math.round(finishedTasks * 100 / colladaFilePaths.length) + '%\r');
            })));

    console.log('Converting to Gltf files.. finished in ' + (Date.now() - start) + 'ms');
    return colladaFilePaths;
}
37 changes: 37 additions & 0 deletions models/3d/converter-worker.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
import {execSync} from 'child_process';
import workerpool from 'workerpool';
import {promises as fs} from 'fs';

/**
 * Runs cgf-converter.exe on the model at `modelPath` (from the model's own
 * directory) and deletes the source model on success. Failures are logged,
 * not rethrown (best-effort by design). Always returns the path of the .dae
 * file the converter is expected to produce.
 */
async function runColladaConverter(modelPath) {
    const separatorIndex = modelPath.lastIndexOf('/');
    const directory = modelPath.slice(0, separatorIndex);
    const fileName = modelPath.slice(separatorIndex + 1);

    try {
        execSync('cgf-converter.exe "' + fileName + '"', {env: process.env, cwd: directory});
        await fs.unlink(modelPath);
    } catch (e) {
        console.log('ColladaConverter: error for ' + modelPath);
    }

    const baseName = fileName.slice(0, fileName.lastIndexOf('.'));
    return directory + '/' + baseName + '.dae';
}

/**
 * Converts one Collada file to glTF with COLLADA2GLTF-bin.exe and deletes the
 * Collada source on success. Failures are logged, not rethrown — a known
 * failure mode is a malformed `<triangles count="..." material="` entry in
 * the Collada file.
 */
async function runGltfConverter(colladaFilePath) {
    const extensionIndex = colladaFilePath.lastIndexOf('.');
    const gltfFilePath = colladaFilePath.slice(0, extensionIndex) + '.gltf';

    try {
        execSync('COLLADA2GLTF-bin.exe "' + colladaFilePath + '" --doubleSided -o "' + gltfFilePath + '"');
        await fs.unlink(colladaFilePath);
    } catch (e) {
        console.log('GltfConverter: error for ' + colladaFilePath);
    }
}


// Register both conversion tasks with the workerpool runtime.
workerpool.worker({runColladaConverter, runGltfConverter});
Loading

0 comments on commit 803f80d

Please sign in to comment.