1 bug away from working skeletal anims!!!

skillbert
2022-04-01 01:46:38 +02:00
parent 64e84b40ea
commit 13a88a8192
32 changed files with 4123 additions and 256 deletions


@@ -14,7 +14,14 @@ export type framemaps = {
} & {
data: number[],
})[],
footchunks: number[][][],
skeleton: ({
parentbone: number,
nonskinboneid: number,
bonematrix: number[],
dataq: Uint8Array,
} & {
skinid: number,
})[],
bool0: boolean,
always0: number,
};
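A minimal sketch of mounting these skeleton entries as a three.js bone hierarchy, mirroring what parseSkeletalAnimation later in this commit does (the 65535 root sentinel and linking by nonskinboneid rather than parentbone are taken from that code; the handedness premultiply is omitted and the import paths are assumptions):

import { Bone, Matrix4 } from "three";
import { framemaps } from "../generated/framemaps";

function buildBones(skeleton: NonNullable<framemaps["skeleton"]>) {
	let bones: Bone[] = [];
	let roots: Bone[] = [];
	for (let [id, entry] of skeleton.entries()) {
		let bone = new Bone();
		bone.name = "bone_" + id;
		//bonematrix is the bone's local transform as 16 floats, read column-major by fromArray
		new Matrix4().fromArray(entry.bonematrix).decompose(bone.position, bone.quaternion, bone.scale);
		if (entry.nonskinboneid == 65535) { roots.push(bone); }
		else { bones[entry.nonskinboneid].add(bone); }
		bones[id] = bone;
	}
	return { bones, roots };
}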


@@ -5,7 +5,6 @@
export type frames = {
header_always_2: number,
probably_framemap_id: number,
header3: number,
flags: number[],
animdata: Uint8Array,
};


@@ -21,7 +21,7 @@ export type models = {
hasFaceBones: number,
hasBoneIds: number,
isHidden: number,
hasFlag20: number,
hasSkin: number,
colourBuffer: Uint16Array | null,
alphaBuffer: Uint8Array | null,
faceboneidBuffer: Uint16Array | null,
@@ -32,7 +32,11 @@ export type models = {
tagentBuffer: Uint16Array | null,
uvBuffer: Uint16Array | null,
boneidBuffer: Uint16Array | null,
flag20Buffer: Uint8Array | null,
skin: {
skinVertexCount: number,
skinBoneBuffer: Uint8Array,
skinWeightBuffer: Uint8Array,
} | null,
}[],
unk1Buffer: Uint8Array[],
unk2Buffer: Uint8Array[],

generated/skeletalanim.d.ts vendored Normal file

@@ -0,0 +1,22 @@
// GENERATED DO NOT EDIT
// This source data is located at '..\src\opcodes\skeletalanim.json'
// run `npm run filetypes` to rebuild
export type skeletalanim = {
header: number,
framebase: number,
endtime: number,
unk_always0: number,
tracks: {
unk_1to4: number,
boneid: number,
type_0to9: number,
bonetype_01or3: number,
always0: number,
flag2: boolean,
chunks: {
time: number,
value: number[],
}[],
}[],
};
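A quick usage sketch for the new file type: read one skeletal animation and list its tracks (a hypothetical helper; parseSkeletalAnim and cacheMajors.skeletalAnims are added elsewhere in this commit, and the import paths assume the file sits next to the other src modules):

import { parseSkeletalAnim } from "../opdecoder";
import { cacheMajors } from "../constants";
import { CacheFileSource } from "../cache";

async function dumpSkeletalAnim(source: CacheFileSource, animid: number) {
	let anim = parseSkeletalAnim.read(await source.getFileById(cacheMajors.skeletalAnims, animid));
	console.log("framebase", anim.framebase, "endtime", anim.endtime, "tracks", anim.tracks.length);
	for (let track of anim.tracks) {
		//type_0to9: 1-3 rotate x/y/z, 4-6 translate, 7-9 scale (see actiontypemap in animation.ts)
		console.log("bone", track.boneid, "type", track.type_0to9, "keyframes", track.chunks.length);
	}
}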

package-lock.json generated

File diff suppressed because it is too large.


@@ -13,7 +13,7 @@
"buildnative": "electron-rebuild -f & node node_modules/sharp/install/dll-copy",
"ts": "tsc",
"webpack": "webpack --watch",
"web": "webpack --watch --config webpack.config.web.js"
"web": "webpack serve --config webpack.config.web.js"
},
"author": "Skillbert",
"license": "GPL-4",
@@ -26,6 +26,7 @@
"@types/sharp": "^0.29.2",
"@types/sqlite3": "^3.1.8",
"@types/three": "^0.133.1",
"@types/webpack-env": "^1.16.3",
"assert": "^2.0.0",
"autobind-decorator": "^2.4.0",
"browserify-zlib": "^0.2.0",
@@ -43,7 +44,8 @@
"typescript": "^4.6.2",
"util": "^0.12.4",
"webpack": "^5.70.0",
"webpack-cli": "^4.9.2"
"webpack-cli": "^4.9.2",
"webpack-dev-server": "^4.7.4"
},
"dependencies": {
"@types/sql.js": "^1.4.3",


@@ -1,8 +1,17 @@
import { Stream, packedHSL2HSL, HSL2RGB } from "./utils";
import { cacheMajors } from "../constants";
import { CacheFileSource } from "../cache";
import { parseFrames, parseFramemaps, parseSequences } from "../opdecoder";
import { AnimationClip, Bone, Euler, KeyframeTrack, Matrix4, Object3D, Quaternion, QuaternionKeyframeTrack, Skeleton, Vector3, VectorKeyframeTrack } from "three";
import { parseFrames, parseFramemaps, parseSequences, parseSkeletalAnim } from "../opdecoder";
import { AnimationClip, Bone, Euler, KeyframeTrack, Matrix3, Matrix4, Object3D, Quaternion, QuaternionKeyframeTrack, Skeleton, Vector3, VectorKeyframeTrack } from "three";
import { skeletalanim } from "../../generated/skeletalanim";
import { framemaps } from "../../generated/framemaps";
import { ThreejsSceneCache } from "./ob3tothree";
import { sequences } from "../../generated/sequences";
//TODO remove
import * as THREE from "three";
(window as any).THREE = THREE;
//test anim ids
//3577 falling plank
@@ -18,6 +27,15 @@ import { AnimationClip, Bone, Euler, KeyframeTrack, Matrix4, Object3D, Quaternio
//114132 weird bugged balloon
//43 fishing spot
//new anims
//115416 obelisk
//114652 pof totem
//117253 dramatic doors
//npc new anims
//27111 butterfly
const framemapCache = new Map<number, ReturnType<typeof parseFramemaps["read"]>>();
let loaded = false;
async function getFramemap(loader: CacheFileSource, id: number) {
@@ -43,6 +61,12 @@ type TransformRotate = TransformBase & { type: "rotate", data: Float32Array }
type TransformScale = TransformBase & { type: "scale", data: Float32Array }
type Transform = TransformTranslateConst | TransformTranslate | TransformRotate | TransformScale;
export type MountableAnimation = {
skeleton: Skeleton,
clip: AnimationClip,
rootbones: Bone[]
};
export type BoneInit = {
translateconst: TransformTranslateConst[],
translate: TransformTranslate[],
@@ -54,7 +78,6 @@ export type BoneInit = {
export type ParsedAnimation = {
rootboneinits: BoneInit[],
keyframetimes: Float32Array,
animid: number,
endtime: number
}
@@ -131,28 +154,223 @@ function findSharedPivot(bones: TransformStack[]) {
}
}
export async function parseAnimationSequence3(loader: CacheFileSource, id: number): Promise<ParsedAnimation | null> {
let seqfile = await loader.getFileById(cacheMajors.sequences, id);
let seq = parseSequences.read(seqfile);
let sequenceframes = seq.frames;
if (!sequenceframes) {
return null;
export async function parseSkeletalAnimation(cache: ThreejsSceneCache, animid: number): Promise<MountableAnimation> {
let anim = parseSkeletalAnim.read(await cache.getFileById(cacheMajors.skeletalAnims, animid));
let base = parseFramemaps.read(await cache.getFileById(cacheMajors.framemaps, anim.framebase));
if (!base.skeleton) {
throw new Error("framebase does not have skeleton");
}
let convertedtracks: KeyframeTrack[] = [];
let animtracks = anim.tracks.sort((a, b) => {
if (a.boneid != b.boneid) { return a.boneid - b.boneid; }
return a.type_0to9 - b.type_0to9;
});
//TODO remove
// animtracks = animtracks.filter(q => q.chunks.length > 4);
// console.log(base.skeleton.map(bone => ([
// "skin,parent,old", bone.skinid, bone.parentbone, bone.nonskinboneid,
// "pivot", ...bone.bonematrix.slice(12, 15).map(q => +q.toFixed(2)),
// bone
// ])));
// console.log(base.skeleton.map(bone => `mats{length(mats)+1}=reshape([${bone.bonematrix.map(q => +q.toFixed(3)).join(",")}],[4,4]);`).join("\n"));
// let boneactions = {};
// animtracks.forEach(q => { boneactions[q.boneid - 64] = [...(boneactions[q.boneid - 64] ?? []), q] });
// console.log(boneactions);
let sc = (a: number) => +(a).toFixed(2);
let logmat = (m: Matrix4) => {
let str = "";
for (let i = 0; i < 4; i++) {
for (let j = 0; j < 4; j++) {
str += m.elements[i + j * 4].toFixed(2).padStart(7) + (j < 3 ? "," : "");
}
str += (i < 3 ? "\n" : "")
}
console.log(str);
}
let bones: Bone[] = [];
let binds: Matrix4[] = [];
let rootbones: Bone[] = [];
let tmp = new Matrix4();
let prematrix = new Matrix4().makeScale(1, 1, -1);
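//presumably converts the game's z-mirrored coordinate system to three.js: root bones get premultiplied by this flip here and it is divided back out of the bind matrices below; the mesh itself gets the same scale(1,1,-1) in ob3ModelToThreejsNode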
for (let [id, entry] of base.skeleton.entries()) {
let bone = new Bone();
let matrix = new Matrix4().fromArray(entry.bonematrix);
bone.name = "bone_" + id;
if (entry.nonskinboneid == 65535) {
rootbones.push(bone);
matrix.premultiply(prematrix);
} else {
bones[entry.nonskinboneid].add(bone);
// matrix.multiply(binds[entry.nonskinboneid]);
}
tmp.copy(matrix).decompose(bone.position, bone.quaternion, bone.scale);
// bone.matrixAutoUpdate = true;
let angle = new Euler().setFromQuaternion(bone.quaternion);
// console.log(id,
// "TRS", +bone.position.x.toFixed(2), +bone.position.y.toFixed(2), +bone.position.z.toFixed(2),
// "", sc(angle.x), sc(angle.y), sc(angle.z),
// "", +bone.scale.x.toFixed(2), +bone.scale.y.toFixed(2), +bone.scale.z.toFixed(2));
bone.updateMatrixWorld();
// console.log(id, entry.nonskinboneid);
// logmat(matrix);
bones[id] = bone;
binds[id] = matrix;
}
prematrix.invert();
binds.forEach(q => q.multiply(prematrix));
let skeleton = new Skeleton(bones);
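//type_0to9 encodes both the transform kind and the axis ('a' below): 1-3 rotate x/y/z, 4-6 translate x/y/z, 7-9 scale x/y/z; 0 and 10-16 are still unknown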
let actiontypemap: { t: "unknown" | "rotate" | "translate" | "scale", a: number }[] = [
{ t: "unknown", a: 0 },
//1-9
{ t: "rotate", a: 0 },
{ t: "rotate", a: 1 },
{ t: "rotate", a: 2 },
{ t: "translate", a: 0 },
{ t: "translate", a: 1 },
{ t: "translate", a: 2 },
{ t: "scale", a: 0 },
{ t: "scale", a: 1 },
{ t: "scale", a: 2 },
//10-16 unknown
{ t: "unknown", a: 0 },
{ t: "unknown", a: 0 },
{ t: "unknown", a: 0 },
{ t: "unknown", a: 0 },
{ t: "unknown", a: 0 },
{ t: "unknown", a: 0 },
{ t: "unknown", a: 0 },
]
for (let index = 0; index < animtracks.length;) {
let track = animtracks[index];
let xvalues: skeletalanim["tracks"][number]["chunks"] | null = null;
let yvalues: skeletalanim["tracks"][number]["chunks"] | null = null;
let zvalues: skeletalanim["tracks"][number]["chunks"] | null = null;
let tracktype = actiontypemap[track.type_0to9];
let boneid = track.boneid - 64;
while (index < animtracks.length) {
let track2 = animtracks[index];
let t2 = actiontypemap[track2.type_0to9];
if (track2.boneid - 64 != boneid || t2.t != tracktype.t) { break; }
if (t2.a == 0) { xvalues = track2.chunks; }
if (t2.a == 1) { yvalues = track2.chunks; }
if (t2.a == 2) { zvalues = track2.chunks; }
index++;
}
let bone = bones[track.boneid - 64];//not sure where the 64 comes from
if (!bone) {
console.log("animation track without bone", track.boneid - 64);
continue;
}
let bonename = bone.name;
let defaultvalue = (tracktype.t == "scale" ? 1 : 9);
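//sample channel v by linear interpolation between chunks i and i+1 at time t, falling back to defaultvalue when the channel is missing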
let intp = (v: { time: number, value: number[] }[] | null, i: number, t: number) => {
let v1 = v?.[i]?.value[0] ?? defaultvalue;
let v2 = v?.[i + 1]?.value[0] ?? defaultvalue;
let t1 = v?.[i]?.time ?? 0;
let t2 = v?.[i + 1]?.time ?? t1;
let a = (t1 == t2 ? 0 : (t - t1) / (t2 - t1));
return v1 * (1 - a) + v2 * a;
}
let timearray: number[] = [];
let data: number[] = [];
let euler = new Euler();
let quat = new Quaternion();
// let time = new Float32Array(timearray.map(q => q * 0.020));
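//merge the separately keyed x/y/z channels onto one shared timeline: each step jumps to the earliest pending keyframe time among the three channels and interpolates the other two at that time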
for (let ix = 0, iy = 0, iz = 0, idata = 0; ;) {
let tx = xvalues?.[ix]?.time ?? Infinity;
let ty = yvalues?.[iy]?.time ?? Infinity;
let tz = zvalues?.[iz]?.time ?? Infinity;
let t = Math.min(tx, ty, tz);
if (!isFinite(t)) { break; }
data[idata++] = intp(xvalues, ix, t);
data[idata++] = intp(yvalues, iy, t);
data[idata++] = intp(zvalues, iz, t);
timearray.push(t);
if (tx == t && xvalues && ix + 1 < xvalues.length) { ix++; }
if (ty == t && yvalues && iy + 1 < yvalues.length) { iy++; }
if (tz == t && zvalues && iz + 1 < zvalues.length) { iz++; }
}
let times = new Float32Array(timearray.map(q => q * 0.020));
if (tracktype.t == "translate") {
if (boneid == 2) {
for (let i = 0; i < data.length; i += 3) {
data[i + 2] *= -1;
// data[i + 2] *= -1;
}
}
convertedtracks.push(new VectorKeyframeTrack(`${bonename}.position`, times as any, data));
}
if (tracktype.t == "scale") {
if (boneid == 0) {
for (let i = 0; i < data.length; i += 3) {
data[i + 0] *= -1;
// data[i + 2] *= -1;
}
}
convertedtracks.push(new VectorKeyframeTrack(`${bonename}.scale`, times as any, data));
}
if (tracktype.t == "rotate") {
let quatdata = new Float32Array(timearray.length * 4);
for (let i = 0; i * 3 < data.length; i++) {
euler.set(
data[i * 3 + 0],
data[i * 3 + 1],
data[i * 3 + 2], 'XYZ');
quat.setFromEuler(euler);
//flip the quaternion along the z axis
quat.z *= -1;
quat.w *= -1;
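//negating z and w is (up to overall quaternion sign) the same as negating x and y, ie the rotation conjugated into the z-mirrored coordinate system, matching the prematrix flip above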
quat.toArray(quatdata, i * 4);
}
convertedtracks.push(new QuaternionKeyframeTrack(`${bonename}.quaternion`, times as any, quatdata as any));
}
// console.log(bonename, tracktype.t, +data[0].toFixed(2), +data[1].toFixed(2), +data[2].toFixed(2));
}
//TODO remove
// convertedtracks = [];
let clip = new AnimationClip("anim_" + (Math.random() * 1000 | 0), undefined, convertedtracks);
return { skeleton, clip, rootbones };
}
export async function parseAnimationSequence3(loader: ThreejsSceneCache, sequenceframes: NonNullable<sequences["frames"]>): Promise<ParsedAnimation> {
let secframe0 = sequenceframes[0];
if (!secframe0) {
throw new Error("animation has no frames");
}
let frameindices = await loader.getIndexFile(cacheMajors.frames);
let frameindex = frameindices[secframe0!.frameidhi];
if (!frameindex) {
throw new Error("frame not found " + secframe0.frameidhi);
}
let framearch = await loader.getFileArchive(frameindex);
let framearch = await loader.getArchiveById(cacheMajors.frames, secframe0.frameidhi);
let frames = framearch.map(file => {
let framedata = parseFrames.read(file.buffer);
@@ -294,7 +512,7 @@ export async function parseAnimationSequence3(loader: CacheFileSource, id: numbe
//need to copy and reorder the clip since frames might be out of order or reused
let clip = new Float32Array(nfields * sequenceframes.length);
for (let i = 0; i < sequenceframes.length; i++) {
let frameid = frameindex.subindices.indexOf(sequenceframes[i].frameidlow);
let frameid = framearch.findIndex(q => q.fileid == sequenceframes![i].frameidlow);
for (let j = 0; j < nfields; j++) {
clip[i * nfields + j] = rawclip[frameid * nfields + j];
}
@@ -428,27 +646,10 @@ export async function parseAnimationSequence3(loader: CacheFileSource, id: numbe
console.log(str);
bone.children.forEach(q => logboneinit(q, indent + 1));
}
console.log(bones);
rootboneinits.forEach(q => logboneinit(q, 0));
// let skeleton = new Skeleton(resultbones);
// let clip = new AnimationClip(`sequence_${id}`, endtime, tracks);
// console.log("sequence id:", id, "framebase id:", frames[0].baseid, "framesid:", sequenceframes[0].frameidhi, "framecount:", sequenceframes.length);
// // let clip = new AnimationClip(`sequence_${id}`, endtime, tracks);
console.log(framebase.data.map(q => [q.type, "", ...q.data.map(q => q + 1)]));
// //TODO remove mockup
// let rootbone = new Bone();
// let rootbones = [rootbone];
// let skeleton = new Skeleton(rootbones);
// let clip = new AnimationClip(`sequence_${id}`, endtime, []);
return { rootboneinits, keyframetimes, animid: id, endtime };
return { rootboneinits, keyframetimes, endtime };
}
function readAnimTranslate(str: Stream) {


@@ -21,6 +21,9 @@ import { parseSprite } from "./sprite";
import * as THREE from "three";
import { mergeBufferGeometries } from "three/examples/jsm/utils/BufferGeometryUtils";
module.hot?.accept(["../3d/ob3tothree", "../3d/ob3togltf"]);
const upvector = new THREE.Vector3(0, 1, 0);
const tiledimensions = 512;
@@ -894,13 +897,11 @@ export type ChunkModelData = { floors: FloorMeshData[], models: MapsquareLocatio
type MapConfigData = typeof mapConfigData extends ((...a: any[]) => Promise<infer T>) ? T : never;
export async function mapConfigData(source: CacheFileSource) {
//TODO proper erroring on nulls
let configindex = await source.getIndexFile(cacheMajors.config);
let underlays = (await source.getFileArchive(configindex[cacheConfigPages.mapunderlays]))
let underlays = (await source.getArchiveById(cacheMajors.config, cacheConfigPages.mapunderlays))
.map(q => parseMapsquareUnderlays.read(q.buffer));
let overlays = (await source.getFileArchive(configindex[cacheConfigPages.mapoverlays]))
let overlays = (await source.getArchiveById(cacheMajors.config, cacheConfigPages.mapoverlays))
.map(q => parseMapsquareOverlays.read(q.buffer));
let mapscenes = (await source.getFileArchive(configindex[cacheConfigPages.mapscenes]))
let mapscenes = (await source.getArchiveById(cacheMajors.config, cacheConfigPages.mapscenes))
.map(q => parseMapscenes.read(q.buffer));
return { underlays, overlays, mapscenes };
@@ -971,7 +972,7 @@ export async function mapsquareModels(source: CacheFileSource, grid: TileGrid, c
let materials = new Map<number, MaterialData>();
let materialproms: Promise<any>[] = [];
for (let matid of matids) {
materialproms.push(getMaterialData(source.getFileById.bind(source), matid).then(mat => materials.set(matid, mat)));
materialproms.push(getMaterialData(source, matid).then(mat => materials.set(matid, mat)));
}
await Promise.all(materialproms);
let textures = new Map<number, ImageData>();
@@ -1022,7 +1023,7 @@ export async function mapsquareModels(source: CacheFileSource, grid: TileGrid, c
}
export async function mapsquareToThree(source: CacheFileSource, grid: TileGrid, chunks: ChunkModelData[]) {
let scene = new ThreejsSceneCache(source.getFileById.bind(source));
let scene = new ThreejsSceneCache(source);
let root = new THREE.Group();
for (let chunk of chunks) {


@@ -6,6 +6,7 @@ import { cacheMajors } from "../constants";
import { ParsedTexture } from "./textures";
import { glTypeIds, ModelAttribute, streamChunk, vartypeEnum, buildAttributeBuffer, AttributeSoure } from "./gltfutil";
import * as THREE from "three";
import { CacheFileSource } from "../cache";
export type FileGetter = (major: number, minor: number) => Promise<Buffer>;
@@ -13,20 +14,23 @@ export type FileGetter = (major: number, minor: number) => Promise<Buffer>;
//a wrapper around gltfbuilder that ensures that resources are correctly shared
export class GLTFSceneCache {
getFileById: FileGetter;
textureCache = new Map<number, number>();
gltfMaterialCache = new Map<number, Promise<number>>();
gltf = new GLTFBuilder();
source: CacheFileSource;
constructor(getfilebyid: FileGetter) {
this.getFileById = getfilebyid;
constructor(source: CacheFileSource) {
this.source = source;
}
getFileById(major: number, id: number) {
return this.source.getFileById(major, id);
}
async getTextureFile(texid: number, allowAlpha) {
let cached = this.textureCache.get(texid);
if (cached) { return cached; }
let file = await this.getFileById(cacheMajors.texturesDds, texid);
let file = await this.source.getFileById(cacheMajors.texturesDds, texid);
let parsed = new ParsedTexture(file, allowAlpha);
let texnode = this.gltf.addImage(await parsed.convertFile("png"));
this.textureCache.set(texid, texnode);
@@ -40,7 +44,7 @@ export class GLTFSceneCache {
let cached = this.gltfMaterialCache.get(matcacheid);
if (!cached) {
cached = (async () => {
let { textures, alphamode } = await getMaterialData(this.getFileById, matid);
let { textures, alphamode } = await getMaterialData(this.source, matid);
let materialdef: Material = {
//TODO check if diffuse has alpha as well
@@ -94,11 +98,11 @@ export type MaterialData = {
raw: any
}
//this one is gnarly, i have touched it as little as possible, needs a complete refactor together with JMat
export async function getMaterialData(getFile: FileGetter, matid: number) {
export async function getMaterialData(source: CacheFileSource, matid: number) {
if (matid == -1) {
return defaultMaterial();
}
var materialfile = await getFile(cacheMajors.materials, matid);
var materialfile = await source.getFileById(cacheMajors.materials, matid);
return JMat(materialfile);
}
@@ -155,7 +159,7 @@ export function parseOb3Model(modelfile: Buffer) {
for (var n = 0; n < meshCount; ++n) {
// Flag 0x10 is currently used, but doesn't appear to change the structure or data in any way
let groupFlags =model.readUInt();
let groupFlags = model.readUInt();
// Unknown, possibly related to material transparency?
let unk6 = model.readUByte();
@@ -169,7 +173,7 @@ export function parseOb3Model(modelfile: Buffer) {
let hasFaceBones = (groupFlags & 0x04) != 0;
let hasBoneids = (groupFlags & 0x08) != 0;
let isHidden = (groupFlags & 0x10) != 0;
let hasFlag20 = (groupFlags & 0x20) != 0;
let hasSkin = (groupFlags & 0x20) != 0;
// console.log(n, "mat", materialId, "faceCount", faceCount, "hasFaceBones:", hasFaceBones, "ishidden:", isHidden, "hasflag20:", hasFlag20, "unk6:", unk6);
if (groupFlags & ~0x2f) {
console.log("unknown model flags", groupFlags & ~0x2f);
@@ -181,6 +185,8 @@ export function parseOb3Model(modelfile: Buffer) {
let normalBuffer: ArrayLike<number> | null = null;
let uvBuffer: Float32Array | null = null;
let boneidBuffer: Uint16Array | null = null;
let skinIdBuffer: Uint16Array | null = null;
let skinWeightBuffer: Uint8Array | null = null;
let faceboneidBuffer: Uint16Array | null = null;
if (hasVertices) {
@@ -222,21 +228,37 @@ export function parseOb3Model(modelfile: Buffer) {
for (let i = 0; i < vertexCount * 2; i++) {
uvBuffer[i] = model.readHalf();
}
//group.uvBuffer = streamChunk(Uint16Array, model, group.vertexCount * 2);
}
if (hasBoneids) {
//TODO there can't be more than ~50 bones in the engine, what happens to the extra byte?
boneidBuffer = streamChunk(Uint16Array, model, vertexCount);
}
}
if (hasFlag20) {
//probably material related
//models from this update/area also for the first time has some sort of "skybox" material
//
if (hasSkin) {
let count = model.readUInt();
let bytes = streamChunk(Uint8Array, model, count * 3);
console.log("mesh flag20", bytes);
let a = 1;
let rawbuf = streamChunk(Uint8Array, model, count * 3);
let dataindex = 0;
let weightindex = count * 2;
skinIdBuffer = new Uint16Array(vertexCount * 4);
skinWeightBuffer = new Uint8Array(vertexCount * 4);
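//layout per the decode below: all u16 bone ids packed first, then the u8 weights; each vertex has up to 4 (boneid,weight) pairs, and a stored weight of 0 marks the last pair, standing for the leftover weight so the four weights sum to 255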
for (let i = 0; i < vertexCount; i++) {
let remainder = 255;
for (let j = 0; j < 4; j++) {
let weight = rawbuf[weightindex++];
let boneid = rawbuf[dataindex++] | (rawbuf[dataindex++] << 8);//manual 16bit building since it might not be aligned
let actualweight = (weight != 0 ? weight : remainder);
remainder -= weight;
skinIdBuffer[i * 4 + j] = boneid;
skinWeightBuffer[i * 4 + j] = actualweight;
if (weight == 0) { break; }
}
}
if (dataindex != count * 2 || weightindex != count * 3) {
console.log("model skin decode failed");
debugger;
}
}
if (isHidden) {
@@ -249,10 +271,6 @@ export function parseOb3Model(modelfile: Buffer) {
continue;
}
// if (faceboneidBuffer) {
// console.log("faceboneidBuffer", faceboneidBuffer);
// }
//TODO somehow this doesn't always work
if (materialId != -1) {
// let replacedmaterial = modifications.replaceMaterials?.find(q => q[0] == materialId)?.[1];
@@ -269,8 +287,6 @@ export function parseOb3Model(modelfile: Buffer) {
miny = positionBuffer[i + 1];
}
}
// let positionfloatbuffer = new Float32Array(positionBuffer);
//highest level of detail only
let indexbuf = indexBuffers[0];
@@ -285,8 +301,11 @@ export function parseOb3Model(modelfile: Buffer) {
};
meshes.push(meshdata);
if (boneidBuffer) {
//every modern animation system uses 4 skinned bones per vertex instead of one
if (skinIdBuffer && skinWeightBuffer) {
meshdata.attributes.skinids = new THREE.BufferAttribute(skinIdBuffer, 4);
meshdata.attributes.skinweights = new THREE.BufferAttribute(skinWeightBuffer, 4, true);
} else if (boneidBuffer) {
let quadboneids = new Uint8Array(boneidBuffer.length * 4);
let quadboneweights = new Uint8Array(boneidBuffer.length * 4);
const maxshort = (1 << 16) - 1;
@@ -303,10 +322,12 @@ export function parseOb3Model(modelfile: Buffer) {
meshdata.attributes.skinweights = new THREE.BufferAttribute(quadboneweights, 4, true);
}
if (uvBuffer) {
meshdata.attributes.texuvs = new THREE.BufferAttribute(uvBuffer, 2);
}
if (normalBuffer) {
let normalsrepacked = new Float32Array(normalBuffer.length);
//TODO threejs can probably do this for us
@@ -341,41 +362,51 @@ export function parseOb3Model(modelfile: Buffer) {
}
}
}
}
// // TODO proper toggle for this or remove
// // visualize bone ids
// materialArgument = 0;
// let vertexcolor = new Uint8Array(vertexCount * 4);
// meshdata.attributes.color = new THREE.BufferAttribute(vertexcolor, 4, true);
// let allbones = new Set<number>();
// const bonecols = [
// [255, 255, 255],//0 white no bone
// [255, 0, 0],//1 red
// [0, 255, 0],//2 green
// [0, 0, 255],//3 blue
// [90, 0, 0],//4 red--
// [0, 90, 0],//5 green--
// [0, 0, 90],//6 blue--
// [255, 255, 0],//7 yellow
// [0, 255, 255],//8 cyan
// [255, 0, 255],//9 purple
// ]
// for (let i = 0; i < vertexCount; i++) {
// let index = i * 4;
// let boneid = meshdata.attributes.skinids?.array[index]!;
// // let boneid = n;
// vertexcolor[index + 0] = (boneid < bonecols.length ? bonecols[boneid][0] : (73 + boneid * 9323) % 256);
// vertexcolor[index + 1] = (boneid < bonecols.length ? bonecols[boneid][1] : (171 + boneid * 1071) % 256);
// vertexcolor[index + 2] = (boneid < bonecols.length ? bonecols[boneid][2] : (23 + boneid * 98537) % 256);
// vertexcolor[index + 3] = 255;
// allbones.add(boneid);
// }
// TODO proper toggle for this or remove
// visualize bone ids
materialArgument = 0;
let vertexcolor = new Uint8Array(vertexCount * 4);
meshdata.attributes.color = new THREE.BufferAttribute(vertexcolor, 4, true);
const bonecols = [
// [255, 255, 255],//0 white no bone
[255, 0, 0],//1 red
[0, 255, 0],//2 green
[0, 0, 255],//3 blue
[15, 0, 0],//4 red--
[0, 15, 0],//5 green--
[0, 0, 15],//6 blue--
[255, 255, 0],//7 yellow
[0, 255, 255],//8 cyan
[255, 0, 255],//9 purple
];
let bonecomponent = (i: number, skinindex: number) => {
let boneid = meshdata.attributes.skinids?.array[i + skinindex] ?? 0;
// let weight = meshdata.attributes.skinweights?.array[i + skinindex] ?? (skinindex == 0 ? 255 : 0);
let weight = 255;
vertexcolor[i + 0] += (boneid < bonecols.length ? bonecols[boneid][0] : (73 + boneid * 9323) % 256) * weight / 255;
vertexcolor[i + 1] += (boneid < bonecols.length ? bonecols[boneid][1] : (73 + boneid * 9323) % 256) * weight / 255;
vertexcolor[i + 2] += (boneid < bonecols.length ? bonecols[boneid][2] : (171 + boneid * 1071) % 256) * weight / 255;
}
for (let i = 0; i < vertexCount; i++) {
let index = i * 4;
vertexcolor[index + 0] = 0;
vertexcolor[index + 1] = 0;
vertexcolor[index + 2] = 0;
vertexcolor[index + 3] = 255;
bonecomponent(index, 0);
// bonecomponent(index, 1);
// bonecomponent(index, 2);
// bonecomponent(index, 3);
}
}
for (let n = 0; n < unkCount1; n++) {
console.log("unk1", unkCount1);
model.skip(37);
}
for (let n = 0; n < unkCount2; n++) {
console.log("unk2", unkCount2);
model.skip(2);//material id?
for (let i = 0; i < 3; i++) {
model.skip(2); model.skip(2);//u16 flags mostly 0x0000,0x0040,0x0080, f16 position? mostly -5.0<x<5.0
@@ -385,6 +416,7 @@ export function parseOb3Model(modelfile: Buffer) {
}
}
for (let n = 0; n < unkCount3; n++) {
console.log("unk3", unkCount3);
model.skip(16);
}


@@ -1,17 +1,15 @@
import { Stream, packedHSL2HSL, HSL2RGB, ModelModifications } from "./utils";
import { GLTFBuilder } from "./gltf";
import { GlTf, MeshPrimitive, Material } from "./gltftype";
import { cacheMajors } from "../constants";
import { ParsedTexture } from "./textures";
import { glTypeIds, ModelAttribute, streamChunk, vartypeEnum, buildAttributeBuffer, AttributeSoure } from "./gltfutil";
import { GLTFSceneCache, ModelData, ModelMeshData, FileGetter, parseOb3Model, getMaterialData } from '../3d/ob3togltf';
import { boundMethod } from 'autobind-decorator';
import { materialCacheKey } from "./jmat";
import { modifyMesh } from "./mapsquare";
import * as THREE from "three";
import { BoneInit, parseAnimationSequence3, ParsedAnimation } from "./animation";
import { BoneInit, MountableAnimation, parseAnimationSequence3, ParsedAnimation, parseSkeletalAnimation } from "./animation";
import { CacheFileSource } from "../cache";
import { AnimationClip, Bone, Group, KeyframeTrack, Matrix4, Object3D, Quaternion, QuaternionKeyframeTrack, Skeleton, SkeletonHelper, SkinnedMesh, Vector3, VectorKeyframeTrack } from "three";
import { parseSequences } from "../opdecoder";
(globalThis as any).packedhsl = function (hsl: number) {
return HSL2RGB(packedHSL2HSL(hsl));
@@ -68,12 +66,19 @@ export function augmentThreeJsFloorMaterial(mat: THREE.Material) {
export class ThreejsSceneCache {
getFileById: FileGetter;
textureCache = new Map<number, THREE.Texture>();
gltfMaterialCache = new Map<number, Promise<THREE.Material>>();
source: CacheFileSource;
constructor(getfilebyid: FileGetter) {
this.getFileById = getfilebyid;
constructor(source: CacheFileSource) {
this.source = source;
}
getFileById(major: number, id: number) {
return this.source.getFileById(major, id);
}
getArchiveById(major: number, minor: number) {
return this.source.getArchiveById(major, minor);
}
async getTextureFile(texid: number, allowAlpha: boolean) {
@@ -95,7 +100,7 @@ export class ThreejsSceneCache {
let cached = this.gltfMaterialCache.get(matcacheid);
if (!cached) {
cached = (async () => {
let material = await getMaterialData(this.getFileById, matid);
let material = await getMaterialData(this.source, matid);
let mat = new THREE.MeshPhongMaterial();
mat.transparent = hasVertexAlpha || material.alphamode != "opaque";
@@ -112,6 +117,10 @@ export class ThreejsSceneCache {
mat.normalMap.wrapT = THREE.RepeatWrapping;
}
mat.vertexColors = material.vertexColors || hasVertexAlpha;
//TODO re-enable
mat.vertexColors = true;
mat.map = null;
if (!material.vertexColors && hasVertexAlpha) {
mat.customProgramCacheKey = () => "vertexalphaonly";
mat.onBeforeCompile = (shader, renderer) => {
@@ -132,25 +141,22 @@ export class ThreejsSceneCache {
export async function ob3ModelToThreejsNode(getFile: CacheFileSource, modelfiles: Buffer[], mods: ModelModifications, animids: number[]) {
let scene = new ThreejsSceneCache(getFile.getFileById.bind(getFile));
export async function ob3ModelToThreejsNode(source: CacheFileSource, modelfiles: Buffer[], mods: ModelModifications, animids: number[]) {
let scene = new ThreejsSceneCache(source);
let meshdatas = modelfiles.map(file => {
let meshdata = parseOb3Model(file);
meshdata.meshes = meshdata.meshes.map(q => modifyMesh(q, mods));
return meshdata;
});
let anims = await Promise.all(animids.map(q => parseAnimationSequence3(getFile, q)));
let mesh = await ob3ModelToThree(scene, mergeModelDatas(meshdatas), anims.filter((q): q is ParsedAnimation => !!q));
let mesh = await ob3ModelToThree(scene, mergeModelDatas(meshdatas), animids);
mesh.scale.multiply(new THREE.Vector3(1, 1, -1));
mesh.updateMatrix();
(window as any).mesh = mesh;
return mesh;
}
function mountAnimation(model: ModelData, anim: ParsedAnimation) {
function mountAnimation(model: ModelData, anim: ParsedAnimation): MountableAnimation {
let nbones = model.bonecount + 1;//TODO find out why this number is wrong
let bonecenters: { xsum: number, ysum: number, zsum: number, weightsum: number }[] = [];
@@ -301,7 +307,7 @@ function mountAnimation(model: ModelData, anim: ParsedAnimation) {
let rootbones = anim.rootboneinits.map(b => iter(b, rootangle));
let skeleton = new Skeleton(indexedbones);
let clip = new AnimationClip(`sequence_${anim.animid}`, anim.endtime, keyframetracks);
let clip = new AnimationClip(`sequence_${Math.random() * 1000 | 0}`, undefined, keyframetracks);
if (missingpivots != 0) {
console.log("missing pivots during mountanimation", missingpivots);
@@ -310,13 +316,6 @@ function mountAnimation(model: ModelData, anim: ParsedAnimation) {
return { skeleton, clip, rootbones };
}
function traverseSweep(obj: Object3D, precall: (obj: Object3D) => void, aftercall: (obj: Object3D) => void) {
precall(obj);
for (let c of obj.children) { traverseSweep(c, precall, aftercall); }
aftercall(obj);
}
function mergeModelDatas(models: ModelData[]) {
let r: ModelData = {
bonecount: Math.max(...models.map(q => q.bonecount)),
@@ -327,8 +326,28 @@ function mergeModelDatas(models: ModelData[]) {
return r;
}
export async function ob3ModelToThree(scene: ThreejsSceneCache, model: ModelData, anims: ParsedAnimation[]) {
let rootnode = (anims.length == 0 ? new Object3D() : new SkinnedMesh());
export async function ob3ModelToThree(scene: ThreejsSceneCache, model: ModelData, animids: number[]) {
let mountanim: (() => MountableAnimation) | null = null;
//bit weird since animations are not guaranteed to have compatible bones
for (let animid of animids) {
let seqfile = await scene.getFileById(cacheMajors.sequences, animid);
let seq = parseSequences.read(seqfile);
if (seq.skeletal_animation) {
let anim = await parseSkeletalAnimation(scene, seq.skeletal_animation);
mountanim = () => anim;
break;
} else if (seq.frames) {
let frameanim = await parseAnimationSequence3(scene, seq.frames);
mountanim = () => mountAnimation(model, frameanim);
break;
}
}
let rootnode = (mountanim ? new SkinnedMesh() : new Object3D());
for (let meshdata of model.meshes) {
let attrs = meshdata.attributes;
@@ -344,24 +363,20 @@ export async function ob3ModelToThree(scene: ThreejsSceneCache, model: ModelData
//@ts-ignore
// mat.wireframe = true;
let mesh: THREE.Mesh | THREE.SkinnedMesh;
if (anims.length != 0) {
mesh = new THREE.SkinnedMesh(geo, mat);
} else {
mesh = new THREE.Mesh(geo, mat);
}
if (mountanim && geo.attributes.skinIndex) { mesh = new THREE.SkinnedMesh(geo, mat); }
else { mesh = new THREE.Mesh(geo, mat); }
rootnode.add(mesh);
}
if (anims.length != 0) {
let anim = anims[0];
let mount = mountAnimation(model, anim);
if (mountanim) {
let mount = mountanim();
if (mount.rootbones) { rootnode.add(...mount.rootbones); }
rootnode.traverse(node => {
if (node instanceof SkinnedMesh) {
// node.bindMode = "detached";
node.bind(mount.skeleton);
node.bind(mount.skeleton, new Matrix4());
}
});
(rootnode as SkinnedMesh).bind(mount.skeleton);
// (rootnode as SkinnedMesh).bind(mount.skeleton);
rootnode.animations = [mount.clip];
}
return rootnode;


@@ -5,7 +5,8 @@ import { parseCacheIndex, parseRootCacheIndex } from "./opdecoder";
export type SubFile = {
offset: number,
size: number,
buffer: Buffer
buffer: Buffer,
fileid: number
}
export type CacheIndex = {
@@ -26,9 +27,9 @@ export function packSqliteBufferArchive(buffers: Buffer[]) {
return new Archive(buffers).packSqlite();
}
export function unpackSqliteBufferArchive(buffer: Buffer, length: number) {
if (length == 1) {
return [{ buffer, offset: 0, size: buffer.byteLength }];
export function unpackSqliteBufferArchive(buffer: Buffer, subids: number[]) {
if (subids.length == 1) {
return [{ buffer, offset: 0, size: buffer.byteLength, fileid: subids[0] } as SubFile];
}
let index = 0;
let unknownbyte = buffer.readUInt8(index); index++;
@@ -36,12 +37,13 @@ export function unpackSqliteBufferArchive(buffer: Buffer, length: number) {
let fileoffset = buffer.readUInt32BE(index); index += 4;
let files: SubFile[] = [];
for (let filenr = 0; filenr < length; filenr++) {
for (let filenr = 0; filenr < subids.length; filenr++) {
let endoffset = buffer.readUInt32BE(index); index += 4;
files.push({
buffer: buffer.slice(fileoffset, endoffset),
offset: fileoffset,
size: endoffset - fileoffset
size: endoffset - fileoffset,
fileid: subids[filenr]
});
fileoffset = endoffset;
}
@@ -121,21 +123,21 @@ export function packBufferArchive(buffers: Buffer[]) {
return new Archive(buffers).packNetwork();
}
export function unpackBufferArchive(buffer: Buffer, length: number) {
// if (length == 1) {
// return [{ buffer, offset: 0, size: buffer.byteLength }];
// }
export function unpackBufferArchive(buffer: Buffer, subids: number[]) {
var subbufs: SubFile[] = [];
var scan = 0x0;
//whats in our missing byte?
let endbyte = buffer.readUInt8(buffer.length - 1);
if (endbyte != 1) { console.log("unexpected archive end byte", endbyte) }
var suboffsetScan = buffer.length - 0x1 - (0x4 * length);
if (subids.length != 1) {
//TODO i think this endbyte thing is bullshit?
//whats in our missing byte?
let endbyte = buffer.readUInt8(buffer.length - 1);
if (endbyte != 1) { console.log("unexpected archive end byte", endbyte) }
}
var suboffsetScan = buffer.length - 1 - (4 * subids.length);
var lastRecordSize = 0;
for (var j = 0; j < length; ++j) {
for (var j = 0; j < subids.length; ++j) {
let size: number;
if (length == 1) {
if (subids.length == 1) {
size = buffer.byteLength;
} else {
//the field contains the difference in size from the last record?
@@ -148,7 +150,8 @@ export function unpackBufferArchive(buffer: Buffer, length: number) {
subbufs.push({
buffer: recordBuffer,
offset: scan,
size
size,
fileid: subids[j]
})
}
return subbufs;
@@ -248,6 +251,13 @@ export class CacheFileSource {
throw new Error("not implemented");
}
async getArchiveById(major: number, minor: number) {
let indexfile = await this.getIndexFile(major);
let index = indexfile[minor];
if (!index) { throw new Error(`minor id ${minor} does not exist in major ${major}.`); }
return this.getFileArchive(index);
}
async getFileById(major: number, fileid: number) {
let holderindex = fileIdToArchiveminor(major, fileid);
let indexfile = await this.getIndexFile(major);


@@ -110,7 +110,7 @@ export class GameCacheLoader extends cache.CacheFileSource {
async getFileArchive(index: cache.CacheIndex) {
let arch = await this.getFile(index.major, index.minor, index.crc);
let res = cache.unpackSqliteBufferArchive(arch, index.subindexcount);
let res = cache.unpackSqliteBufferArchive(arch, index.subindices);
return res;
}


@@ -65,7 +65,7 @@ export class WasmGameCacheLoader extends cache.CacheFileSource {
async getFileArchive(index: cache.CacheIndex) {
let arch = await this.getFile(index.major, index.minor, index.crc);
return cache.unpackSqliteBufferArchive(arch, index.subindexcount);
return cache.unpackSqliteBufferArchive(arch, index.subindices);
}
async getIndexFile(major: number) {


@@ -18,6 +18,7 @@ export const cacheMajors = {
texturesDds: 52,
texturesPng: 53,
texturesBmp: 54,
skeletalAnims: 56,
achievements: 57,


@@ -115,7 +115,7 @@ export class Downloader extends CacheFileSource {
return decompress(await this.downloadFile(major, minor, (major == 255 && minor == 255 ? undefined : crc)));
}
async getFileArchive(meta: CacheIndex) {
return unpackBufferArchive(await this.getFile(meta.major, meta.minor, meta.crc), meta.subindexcount);
return unpackBufferArchive(await this.getFile(meta.major, meta.minor, meta.crc), meta.subindices);
}
async getIndexFile(major: number) {
if (!this.indexMap.get(major)) {


@@ -170,9 +170,7 @@ let cmd = cmdts.command({
let file = await filesource.getFileById(cacheMajors.enums, 708);
let mapenum = parseEnums.read(file);
let indexmeta = await filesource.getIndexFile(cacheMajors.worldmap);
let index = indexmeta[0];
let files = await filesource.getFileArchive(index);
let files = await filesource.getArchiveById(cacheMajors.worldmap, 0);
mask = mapenum.intArrayValue2!.values
.map(q => parseMapZones.read(files[q[1]].buffer))
// .filter(q => q.show && q.name)
@@ -368,7 +366,7 @@ export class MapRenderer {
function disposeThreeTree(node: THREE.Object3D | null) {
if (!node) { return; }
const cleanMaterial = (material:Material) => {
const cleanMaterial = (material: Material) => {
count++;
material.dispose();


@@ -8,6 +8,11 @@ type PrimitiveInt = {
readmode: "fixed" | "smart" | "sumtail",
endianness: "big" | "little"
};
type PrimitiveFloat = {
primitive: "float",
bytes: number,
endianness: "big" | "little"
};
type PrimitiveBool = {
primitive: "bool"
}
@@ -25,7 +30,7 @@ export type ScanBuffer = Buffer & { scan: number };
type CompareMode = "eq" | "eqnot" | "bitflag" | "bitflagnot";
export type Primitive<T> = PrimitiveInt | PrimitiveBool | PrimitiveString | PrimitiveValue<T>;
export type Primitive<T> = PrimitiveInt | PrimitiveFloat | PrimitiveBool | PrimitiveString | PrimitiveValue<T>;
export type ChunkType<T> = Primitive<T> | string;
export type ComposedChunk = string
@@ -191,6 +196,14 @@ export function buildParser(chunkdef: ComposedChunk, typedef: TypeDef): ChunkPar
}
}
function validateFloatType(primitive: PrimitiveFloat) {
let hasBytes = "bytes" in primitive;
let hasEndianness = "endianness" in primitive;
if (!(hasBytes && hasEndianness)) throw new Error(`Invalid primitive definition '${JSON.stringify(primitive)}', 'float' variables need to specify 'bytes' and 'endianness'`);
if (typeof primitive.bytes !== "number" || primitive.bytes != 4) throw new Error(`Invalid primitive definition '${JSON.stringify(primitive)}', 'bytes' must be the number 4`);
if (primitive.endianness !== "big" && primitive.endianness !== "little") throw new Error(`Invalid primitive definition '${JSON.stringify(primitive)}', 'endianness' must be "big" or "little"`);
}
function validateIntType(primitive: PrimitiveInt) {
let hasUnsigned = "unsigned" in primitive;
let hasBytes = "bytes" in primitive;
@@ -329,9 +342,10 @@ function structParser<TUPPLE extends boolean, T extends Record<TUPPLE extends tr
getJsonSChema() {
return {
type: "object",
properties: Object.fromEntries([...Object.entries(props)].map(([key, prop]) => {
return [key, (prop as ChunkParser<any>).getJsonSChema()];
})),
properties: Object.fromEntries([...Object.entries(props)]
.filter(([key]) => !key.startsWith("$"))
.map(([key, prop]) => [key, (prop as ChunkParser<any>).getJsonSChema()])
),
required: keys
}
}
@@ -572,6 +586,38 @@ function arrayNullTerminatedParser<T>(lengthtype: ChunkParser<number>, proptype:
};
}
function floatParser(primitive: PrimitiveFloat): ChunkParser<number> {
validateFloatType(primitive);
let parser: ChunkParser<number> = {
read(buffer, ctx) {
let bytes = primitive.bytes;
if (primitive.bytes == 4) {
let r = (primitive.endianness == "big" ? buffer.readFloatBE(buffer.scan) : buffer.readFloatLE(buffer.scan));
buffer.scan += 4;
return r;
} else {
throw new Error("only 4 byte floats supported");
}
},
write(buf, v) {
if (typeof v != "number") { throw new Error("number expected"); }
if (primitive.bytes == 4) {
if (primitive.endianness == "big") { buf.writeFloatBE(v, buf.scan); }
else { buf.writeFloatLE(v, buf.scan); }
} else {
throw new Error("only 4 byte flaots supported");
}
},
getTypescriptType() {
return "number";
},
getJsonSChema() {
return { type: "number" };
}
}
return parser;
}
function intParser(primitive: PrimitiveInt): ChunkParser<number> {
validateIntType(primitive);
let parser: ChunkParser<number> = {
@@ -844,6 +890,8 @@ function primitiveParser(primitive: Primitive<any>): ChunkParser<any> {
return booleanParser();
case "int":
return intParser(primitive);
case "float":
return floatParser(primitive);
case "string":
return stringParser(primitive);
case "value":


@@ -6,7 +6,17 @@
["struct",["length","varushort"]],
["struct",["data",["array",["ref","length"],"varushort"]]]
]],
["footchunks",["array","ushort",["array",4,["array",20,"ubyte"]]]],
["skeleton",["chunkedarray","ushort",
["struct",
["parentbone","ubyte"],
["nonskinboneid","ushort"],
["bonematrix",["array",16,"float"]],
["dataq",["buffer",11,"hex"]]
],
["struct",
["skinid","ushort le"]
]
]],
["bool0","bool"],
["always0","ushort"]


@@ -20,7 +20,7 @@
["hasFaceBones",["ref","groupFlags",[2,1]]],
["hasBoneIds",["ref","groupFlags",[3,1]]],
["isHidden",["ref","groupFlags",[4,1]]],
["hasFlag20",["ref","groupFlags",[5,1]]],
["hasSkin",["ref","groupFlags",[5,1]]],
["colourBuffer",["opt",["hasVertices",1],["buffer",["ref","faceCount"],"ushort"]]],
@@ -36,7 +36,11 @@
["uvBuffer",["opt",["hasVertices",1],["buffer",["ref","vertexCount"],"ushort",2]]],
["boneidBuffer",["opt",["hasBoneIds",1],["buffer",["ref","vertexCount"],"ushort"]]],
["flag20Buffer",["opt",["hasFlag20",1],["buffer","uint le","hex",3]]]
["skin",["opt",["hasSkin",1],["struct",
["skinVertexCount","uint le"],
["skinBoneBuffer",["buffer",["ref","skinVertexCount"],"hex",2]],
["skinWeightBuffer",["buffer",["ref","skinVertexCount"],"hex",1]]
]]]
]]],
["unk1Buffer",["array",["ref","unkCount1"],["buffer",39,"hex"]]],


@@ -0,0 +1,19 @@
["struct",
["header","ubyte"],
["framebase","ushort"],
["endtime","uint"],
["unk_always0","ubyte"],
["tracks",["array","ushort",["struct",
["unk_1to4","ubyte"],
["boneid","varushort"],
["type_0to9","ubyte"],
["$packetlength","ushort"],
["bonetype_01or3","ubyte"],
["always0","ushort"],
["flag2","bool"],
["chunks",["array",["ref","$packetlength"],["struct",
["time","ushort"],
["value",["array",5,"float"]]
]]]
]]]
]


@@ -26,6 +26,8 @@
"ushort le": { "primitive": "int", "unsigned": true, "bytes": 2, "readmode": "fixed", "endianness": "little" },
"uint le": { "primitive": "int", "unsigned": true, "bytes": 4, "readmode": "fixed", "endianness": "little" },
"float": { "primitive": "float", "bytes": 4, "endianness": "big" },
"ubyte": "unsigned byte",
"ushort": "unsigned short",
"uint": "unsigned int",


@@ -32,7 +32,7 @@ export class FileParser<T> {
console.log("too many bytes left over warning, no more warnings will be logged");
}
// TODO remove this stupid condition, needed this to fail only in some situations
if (buffer.byteLength < 10000) {
if (buffer.byteLength < 100000) {
throw new Error(`bytes left over after decoding file: ${scanbuf.length - scanbuf.scan}`);
}
}
@@ -69,5 +69,6 @@ export const parseAnimgroupConfigs = new FileParser<import("../generated/animgro
export const parseModels = new FileParser<import("../generated/models").models>(require("./opcodes/models.json"));
export const parseSpotAnims = new FileParser<import("../generated/spotanims").spotanims>(require("./opcodes/spotanims.json"));
export const parseRootCacheIndex = new FileParser<import("../generated/rootcacheindex").rootcacheindex>(require("./opcodes/rootcacheindex.json"));
export const parseSkeletalAnim = new FileParser<import("../generated/skeletalanim").skeletalanim>(require("./opcodes/skeletalanim.json"));


@@ -3,7 +3,7 @@ import { run, command, number, option, string, boolean, Type, flag, oneOf, optio
import * as fs from "fs";
import * as path from "path";
import { cacheConfigPages, cacheMajors, cacheMapFiles } from "../constants";
import { parseAchievement, parseItem, parseObject, parseNpc, parseMapsquareTiles, FileParser, parseMapsquareUnderlays, parseMapsquareOverlays, parseMapZones, parseFrames, parseEnums, parseMapscenes, parseAnimgroupConfigs, parseMapsquareLocations, parseSequences, parseFramemaps, parseModels, parseRootCacheIndex, parseSpotAnims, parseCacheIndex } from "../opdecoder";
import { parseAchievement, parseItem, parseObject, parseNpc, parseMapsquareTiles, FileParser, parseMapsquareUnderlays, parseMapsquareOverlays, parseMapZones, parseFrames, parseEnums, parseMapscenes, parseAnimgroupConfigs, parseMapsquareLocations, parseSequences, parseFramemaps, parseModels, parseRootCacheIndex, parseSpotAnims, parseCacheIndex, parseSkeletalAnim } from "../opdecoder";
import { achiveToFileId, CacheFileSource, CacheIndex, fileIdToArchiveminor, SubFile } from "../cache";
import { parseSprite } from "../3d/sprite";
import sharp from "sharp";
@@ -227,6 +227,7 @@ const modes: Record<string, DecodeModeFactory> = {
frames: standardFile(parseFrames, standardIndex(cacheMajors.frames)),
models: standardFile(parseModels, standardIndex(cacheMajors.models)),
skeletons: standardFile(parseSkeletalAnim, standardIndex(cacheMajors.skeletalAnims)),
indices: standardFile(parseCacheIndex, indexfileIndex()),
rootindex: standardFile(parseRootCacheIndex, rootindexfileIndex())


@@ -29,7 +29,7 @@ async function run(cachedir: string, jsondir: string, replaceid: number) {
let rawfile = await getfile(chunk);
let archive = decompressSqlite(rawfile);
let files = cache.unpackSqliteBufferArchive(archive, chunk.subindices.length).map(f => f.buffer);
let files = cache.unpackSqliteBufferArchive(archive, chunk.subindices).map(f => f.buffer);
for (let i = 0; i < chunk.subindices.length; i++) {
let itemid = chunk.subindices[i];


@@ -3,7 +3,7 @@ import { run, command, number, option, string, boolean, Type, flag, oneOf } from
import * as fs from "fs";
import * as path from "path";
import { cacheConfigPages, cacheMajors, cacheMapFiles } from "../constants";
import { parseAchievement, parseItem, parseObject, parseNpc, parseCacheIndex, parseMapsquareTiles, FileParser, parseModels, parseMapsquareUnderlays, parseSequences, parseMapsquareOverlays, parseMapZones, parseFrames, parseEnums, parseMapscenes, parseMapsquareLocations, parseFramemaps, parseAnimgroupConfigs, parseSpotAnims, parseRootCacheIndex } from "../opdecoder";
import { parseAchievement, parseItem, parseObject, parseNpc, parseCacheIndex, parseMapsquareTiles, FileParser, parseModels, parseMapsquareUnderlays, parseSequences, parseMapsquareOverlays, parseMapZones, parseFrames, parseEnums, parseMapscenes, parseMapsquareLocations, parseFramemaps, parseAnimgroupConfigs, parseSpotAnims, parseRootCacheIndex, parseSkeletalAnim } from "../opdecoder";
import { achiveToFileId, CacheFileSource, CacheIndex, fileIdToArchiveminor, SubFile } from "../cache";
import { parseSprite } from "../3d/sprite";
import sharp from "sharp";
@@ -21,16 +21,16 @@ let cmd = command({
args: {},
handler: async (args) => {
const errdir = "./cache5/errs";
const major = cacheMajors.index;
const major = cacheMajors.skeletalAnims;
const minor = -1;
const decoder = parseCacheIndex;
const decoder = parseSkeletalAnim;
const skipMinorAfterError = false;
const skipFilesizeAfterError = true;
const memlimit = 100e6;
const orderBySize = false;
const memlimit = 200e6;
const orderBySize = true;
// let source = new GameCacheLoader();
let source = new Downloader();
let source = new GameCacheLoader();
// let source = new Downloader();
let indices = await source.getIndexFile(major);
fs.mkdirSync(errdir, { recursive: true });
let olderrfiles = fs.readdirSync(errdir);
@@ -58,6 +58,7 @@ let cmd = command({
getDebug(true);
try {
// console.log("reading ", file.major, file.minor, file.subfile);
let res = decoder.read(file.file);
nsuccess++;
} catch (e) {
@@ -135,7 +136,9 @@ let cmd = command({
allfiles.sort((a, b) => a.file.byteLength - b.file.byteLength);
console.log("starting files:", allfiles.length);
// allfiles = allfiles.filter((q, i) => i % 20 == 0);
allfiles.forEach(testFile);
for (let file of allfiles) {
if (testFile(file) == false) { break; }
}
console.log("completed files: ", nsuccess);
}


@@ -0,0 +1,97 @@
import { filesource, cliArguments } from "../cliparser";
import { run, command, number, option, string, boolean, Type, flag, oneOf } from "cmd-ts";
import * as fs from "fs";
import * as path from "path";
import { cacheConfigPages, cacheMajors, cacheMapFiles } from "../constants";
import { parseAchievement, parseItem, parseObject, parseNpc, parseCacheIndex, parseMapsquareTiles, FileParser, parseModels, parseMapsquareUnderlays, parseSequences, parseMapsquareOverlays, parseMapZones, parseFrames, parseEnums, parseMapscenes, parseMapsquareLocations, parseFramemaps, parseAnimgroupConfigs, parseSpotAnims, parseRootCacheIndex, parseSkeletalAnim } from "../opdecoder";
import { achiveToFileId, CacheFileSource, CacheIndex, fileIdToArchiveminor, SubFile } from "../cache";
import { parseSprite } from "../3d/sprite";
import sharp from "sharp";
import { FlatImageData } from "../3d/utils";
import * as cache from "../cache";
import { GameCacheLoader } from "../cacheloader";
import { crc32_backward, forge } from "../libs/crc32util";
import { getDebug } from "../opcode_reader";
import { Downloader } from "../downloader";
import prettyJson from "json-stringify-pretty-compact";
import { framemaps } from "../../generated/framemaps";
async function start() {
let cache = new GameCacheLoader();
let seqindices = await cache.getIndexFile(cacheMajors.sequences);
let skeltoseqs = new Map<number, number[]>();
for (let index of seqindices) {
if (!index) { continue; }
let arch = await cache.getFileArchive(index);
for (let file of arch) {
let seq = parseSequences.read(file.buffer)
if (seq.skeletal_animation) {
let seqarr = skeltoseqs.get(seq.skeletal_animation) ?? [];
seqarr.push(achiveToFileId(index.major, index.minor, file.fileid));
skeltoseqs.set(seq.skeletal_animation, seqarr);
}
}
}
let locindices = await cache.getIndexFile(cacheMajors.objects);
let seqtolocs = new Map<number, number[]>();
for (let index of locindices) {
if (!index) { continue; }
let arch = await cache.getFileArchive(index);
for (let file of arch) {
let seq = parseObject.read(file.buffer)
if (seq.probably_animation) {
let loc = seqtolocs.get(seq.probably_animation) ?? [];
loc.push(achiveToFileId(index.major, index.minor, file.fileid));
seqtolocs.set(seq.probably_animation, loc);
}
}
}
let animgroupfiles = await cache.getArchiveById(cacheMajors.config, cacheConfigPages.animgroups);
let seqtogroups = new Map<number, number[]>();
for (let file of animgroupfiles) {
let animgroup = parseAnimgroupConfigs.read(file.buffer);
let anim = animgroup.unknown_26 ?? animgroup.unknown_01?.[1];
if (anim) {
let animarr = seqtogroups.get(anim) ?? [];
animarr.push(file.fileid);
seqtogroups.set(anim, animarr);
}
}
let npcindices = await cache.getIndexFile(cacheMajors.npcs);
let groupstonpcs = new Map<number, number[]>();
for (let index of npcindices) {
if (!index) { continue; }
let arch = await cache.getFileArchive(index);
for (let file of arch) {
let seq = parseNpc.read(file.buffer)
if (seq.animation_group) {
let npc = groupstonpcs.get(seq.animation_group) ?? [];
npc.push(achiveToFileId(index.major, index.minor, file.fileid));
groupstonpcs.set(seq.animation_group, npc);
}
}
}
let skelindices = await cache.getIndexFile(cacheMajors.skeletalAnims);
skelindices.sort((a, b) => a.size! - b.size!);
for (let skelindex of skelindices) {
if (!skelindex) { continue; }
let seqs = skeltoseqs.get(skelindex.minor);
if (!seqs) { console.log("skeleton", skelindex.minor, "has no sequence"); continue; }
for (let seq of seqs) {
let locs = seqtolocs.get(seq) ?? [];
let npcs = (seqtogroups.get(seq) ?? []).flatMap(gr => groupstonpcs.get(gr) ?? []);
console.log("skeleton", skelindex.minor, skelindex.size, "locs", ...locs, "npcs", ...npcs);
}
}
}
start();


@@ -147,7 +147,7 @@ async function updateRecords(downloader: Downloader, index: CacheIndex & { isNew
var recordIndex = recordIndices[i];
var buffer = await downloader.getFile(recordIndex.major, recordIndex.minor, recordIndex.crc);
var subbuffers = unpackBufferArchive(buffer, recordIndex.subindices.length);
var subbuffers = unpackBufferArchive(buffer, recordIndex.subindices);
for (var j = 0; j < recordIndex.subindices.length; ++j, ++n) {
var recordSubindex = recordIndex.subindices[j];


@@ -6,18 +6,24 @@ import * as React from "react";
import * as ReactDOM from "react-dom";
import classNames from "classnames";
import { boundMethod } from "autobind-decorator";
import { ModelModifications } from "3d/utils";
import { mapsquareModels, mapsquareToThree, ParsemapOpts, parseMapsquare, resolveMorphedObject } from "../3d/mapsquare";
import { ModelModifications } from "../3d/utils";
import { WasmGameCacheLoader as GameCacheLoader } from "../cacheloaderwasm";
import { CacheFileSource, cachingFileSourceMixin } from "../cache";
import { mapsquareModels, mapsquareToThree, ParsemapOpts, parseMapsquare, resolveMorphedObject } from "../3d/mapsquare";
import { getMaterialData } from "../3d/ob3togltf";
import { ParsedTexture } from "../3d/textures";
import { CacheFileSource, cachingFileSourceMixin } from "../cache";
import * as datastore from "idb-keyval";
type LookupMode = "model" | "item" | "npc" | "object" | "material" | "map";
type RenderMode = "gltf" | "three";
if (module.hot) {
module.hot.accept("../3d/ob3togltf");
}
function start() {
window.addEventListener("keydown", e => {
if (e.key == "F5") { document.location.reload(); }
@@ -66,6 +72,12 @@ type WebkitDirectoryHandle = WebkitFsHandleBase & {
var cacheDirectoryHandle: WebkitDirectoryHandle | null = null;
var cacheDirectoryLoaded = false;
if (module.hot) {
module.hot.accept("../3d/ob3tothree.ts", () => {
console.log("notified");
})
}
async function ensureCachePermission() {
if (cacheDirectoryLoaded) { return }
if (!cacheDirectoryHandle) {
@@ -92,58 +104,48 @@ if (typeof window != "undefined") {
datastore.get("cachefilehandles").then(oldhandle => {
if (typeof FileSystemHandle != "undefined" && oldhandle instanceof FileSystemHandle && oldhandle.kind == "directory") {
cacheDirectoryHandle = oldhandle;
// document.body.addEventListener("click", async () => {
// let files: Record<string, Blob> = {};
// console.log(await oldhandle.queryPermission());
// await oldhandle.requestPermission();
// for await (let handle of oldhandle.values()) {
// if (handle.kind == "file") {
// files[handle.name] = await handle.getFile();
// }
// }
// hackyCacheFileSource.giveBlobs(files);
// }, { once: true });
}
});
// document.body.ondragover = e => e.preventDefault();
// document.body.ondrop = async e => {
// e.preventDefault();
// if (e.dataTransfer) {
// let files: Record<string, Blob> = {};
// let items: DataTransferItem[] = [];
// let folderhandles: WebkitFsHandle[] = [];
// let filehandles: WebkitFsHandle[] = [];
// for (let i = 0; i < e.dataTransfer.items.length; i++) { items.push(e.dataTransfer.items[i]); }
// //needs to start synchronously as the list is cleared after the event
// await Promise.all(items.map(async item => {
// //@ts-ignore
// if (item.getAsFileSystemHandle) {
// //@ts-ignore
// let filehandle: WebkitFsHandle = await item.getAsFileSystemHandle();
// if (filehandle.kind == "file") {
// filehandles.push(filehandle);
// files[filehandle.name] = await filehandle.getFile();
// } else {
// folderhandles.push(filehandle);
// for await (let handle of filehandle.values()) {
// if (handle.kind == "file") {
// files[handle.name] = await handle.getFile();
// }
// }
// }
// } else if (item.kind == "file") {
// let file = item.getAsFile()!;
// files[file.name] = file;
// }
// }));
// if (folderhandles.length == 1 && filehandles.length == 0) {
// datastore.set("cachefilehandles", folderhandles[0]);
// console.log("stored folder " + folderhandles[0].name);
// }
// console.log(`added ${Object.keys(files).length} files`);
// hackyCacheFileSource.giveBlobs(files);
// }
// }
document.body.ondragover = e => e.preventDefault();
document.body.ondrop = async e => {
e.preventDefault();
if (e.dataTransfer) {
let files: Record<string, Blob> = {};
let items: DataTransferItem[] = [];
let folderhandles: WebkitDirectoryHandle[] = [];
let filehandles: WebkitFsHandle[] = [];
for (let i = 0; i < e.dataTransfer.items.length; i++) { items.push(e.dataTransfer.items[i]); }
//needs to start synchronously as the list is cleared after the event
await Promise.all(items.map(async item => {
//@ts-ignore
if (item.getAsFileSystemHandle) {
//@ts-ignore
let filehandle: WebkitFsHandle = await item.getAsFileSystemHandle();
if (filehandle.kind == "file") {
filehandles.push(filehandle);
files[filehandle.name] = await filehandle.getFile();
} else {
folderhandles.push(filehandle);
for await (let handle of filehandle.values()) {
if (handle.kind == "file") {
files[handle.name] = await handle.getFile();
}
}
}
} else if (item.kind == "file") {
let file = item.getAsFile()!;
files[file.name] = file;
}
}));
if (folderhandles.length == 1 && filehandles.length == 0) {
datastore.set("cachefilehandles", folderhandles[0]);
console.log("stored folder " + folderhandles[0].name);
cacheDirectoryHandle = folderhandles[0];
}
console.log(`added ${Object.keys(files).length} files`);
hackyCacheFileSource.giveBlobs(files);
}
}
}
// const hackyCacheFileSource = new CachedHacky(path.resolve(process.env.ProgramData!, "jagex/runescape"));
@@ -418,8 +420,7 @@ export async function requestLoadModel(searchid: string, mode: LookupMode, rende
let npc = parseNpc.read(await hackyCacheFileSource.getFileById(cacheMajors.npcs, +searchid));
metatext = JSON.stringify(npc, undefined, 2)
if (npc.animation_group) {
let index = await hackyCacheFileSource.getIndexFile(cacheMajors.config);
let arch = await hackyCacheFileSource.getFileArchive(index[cacheConfigPages.animgroups]);
let arch = await hackyCacheFileSource.getArchiveById(cacheMajors.config, cacheConfigPages.animgroups);
let animgroup = parseAnimgroupConfigs.read(arch[npc.animation_group].buffer);
console.log(animgroup);
let forcedanim = (window as any).forcedanim;
@@ -453,7 +454,7 @@ export async function requestLoadModel(searchid: string, mode: LookupMode, rende
// mods.replaceMaterials = [
// [8868, +searchid]
// ];
let mat = await getMaterialData(hackyCacheFileSource.getFile, +searchid);
let mat = await getMaterialData(hackyCacheFileSource, +searchid);
let info: any = { mat };
let addtex = async (name: string, texid: number) => {
let file = await hackyCacheFileSource.getFile(cacheMajors.texturesDds, texid);


@@ -16,6 +16,23 @@ import { CacheFileSource } from '../cache';
import { ModelExtras, MeshTileInfo, ClickableMesh, resolveMorphedObject } from '../3d/mapsquare';
import { AnimationClip, AnimationMixer, Clock, Material, Mesh, SkeletonHelper } from "three";
let lastob3modelcall: { args: any[], inst: ThreeJsRenderer } | null = null;
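//replay the last setOb3Models call when the module is hot-swapped so the scene rebuilds with the new code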
if (module.hot) {
module.hot.accept("../3d/ob3tothree", () => {
console.log("accept module")
setTimeout(() => {
if (lastob3modelcall) {
//@ts-ignore
lastob3modelcall.inst.setOb3Models(...lastob3modelcall.args);
}
}, 1);
});
}
export class ThreeJsRenderer implements ModelSink {
renderer: THREE.WebGLRenderer;
canvas: HTMLCanvasElement;
@@ -307,6 +324,7 @@ export class ThreeJsRenderer implements ModelSink {
}
async setOb3Models(modelfiles: Buffer[], cache: CacheFileSource, mods: ModelModifications, metastr: string, anims: number[]) {
lastob3modelcall = { args: [...arguments] as any, inst: this };
if (this.unpackOb3WithGltf) {
//TODO
// let models = await Promise.all(modelfiles.map(file => ob3ModelToGltfFile(cache.get.bind(cache), file, mods)));


@@ -1,7 +1,7 @@
{
"compilerOptions": {
"moduleResolution": "node",
"module": "CommonJS",
"module": "ESNext",
"lib": [ "ESNext","DOM"],
"target": "ES2020",
"sourceMap": false,


@@ -13,7 +13,8 @@ module.exports = {
extract: "./src/scripts/extractfiles.ts",
indexoverview: "./src/scripts/indexoverview.ts",
testdecode: "./src/scripts/testdecode.ts",
opcode_reader: "./src/opcode_reader.ts"
opcode_reader: "./src/opcode_reader.ts",
skeleton: "./src/scripts/testskeletons.ts"
},
module: {
rules: [


@@ -1,6 +1,6 @@
const path = require('path');
const CopyWebpackPlugin = require('copy-webpack-plugin');
const { ProvidePlugin } = require('webpack');
const { ProvidePlugin ,HotModuleReplacementPlugin} = require('webpack');
/**
* @type {import("webpack").Configuration}
@@ -21,9 +21,9 @@ module.exports = {
],
},
target: "web",
externals: {
// "sharp": { commonjs: "sharp" },
// "lzma": { commonjs: "lzma" }
devServer: {
static: "./dist",
hot: true,
},
resolve: {
extensions: ['.tsx', '.ts', '.js'],
@@ -58,5 +58,6 @@ module.exports = {
Buffer: ['buffer', 'Buffer'],
process: [require.resolve('process/browser')]
}),
new HotModuleReplacementPlugin(),
]
};