basic support for classic caches (and removed obsolete modules)

This commit is contained in:
Skillbert
2023-03-16 18:49:26 +01:00
parent d44e4408d4
commit f0114bc9b6
28 changed files with 1385 additions and 943 deletions

17
generated/classicmodels.d.ts vendored Normal file
View File

@@ -0,0 +1,17 @@
// GENERATED DO NOT EDIT
// This source data is located at '..\src\opcodes\classicmodels.jsonc'
// run `npm run filetypes` to rebuild
export type classicmodels = {
vertexcount: number,
facecount: number,
xpos: number[],
ypos: number[],
zpos: number[],
faces: {
color: number,
backcolor: number,
intensity: number,
verts: (number|number)[],
}[],
};

View File

@@ -1,6 +1,5 @@
import { packedHSL2HSL, HSL2RGB, ModelModifications, posmod } from "../utils";
import { CacheFileSource, CacheIndex, CacheIndexFile, SubFile } from "../cache";
import { cacheConfigPages, cacheMajors, cacheMapFiles, lastLegacyBuildnr } from "../constants";
import { cacheConfigPages, cacheMajors, cacheMapFiles, lastClassicBuildnr, lastLegacyBuildnr } from "../constants";
import { parse } from "../opdecoder";
import { mapsquare_underlays } from "../../generated/mapsquare_underlays";
import { mapsquare_overlays } from "../../generated/mapsquare_overlays";
@@ -8,7 +7,7 @@ import { mapsquare_locations } from "../../generated/mapsquare_locations";
import { ModelMeshData, ModelData } from "./rt7model";
import { mapsquare_tiles } from "../../generated/mapsquare_tiles";
import { mapsquare_watertiles } from "../../generated/mapsquare_watertiles";
import { augmentThreeJsFloorMaterial, ThreejsSceneCache, ob3ModelToThree, EngineCache, ParsedMaterial, applyMaterial } from "./modeltothree";
import { augmentThreeJsFloorMaterial, ThreejsSceneCache, ob3ModelToThree, EngineCache, ParsedMaterial, applyMaterial, constModelsIds } from "./modeltothree";
import { BufferAttribute, DataTexture, Matrix4, MeshBasicMaterial, Object3D, Quaternion, RGBAFormat, Vector3 } from "three";
import { defaultMaterial, materialCacheKey, MaterialData } from "./jmat";
import { objects } from "../../generated/objects";
@@ -16,17 +15,19 @@ import { parseSprite } from "./sprite";
import * as THREE from "three";
import { mergeBufferGeometries } from "three/examples/jsm/utils/BufferGeometryUtils";
import { legacyMajors } from "../cache/legacycache";
import { classicIntToModelMods, getClassicMapData } from "../cache/classicloader";
import { MeshBuilder, topdown2dWallModels } from "./modelutils";
const upvector = new THREE.Vector3(0, 1, 0);
export const tiledimensions = 512;
export const squareSize = 64;
export const rs2ChunkSize = 64;
export const classicChunkSize = 48;
export const squareLevels = 4;
const heightScale = 1 / 16;
export const worldStride = 128;
const { tileshapes, defaulttileshape, defaulttileshapeflipped } = generateTileShapes();
const wallmodels = generateWallModels();
const defaultVertexProp: TileVertex = { material: -1, materialTiling: 128, color: [255, 0, 255] };
@@ -75,14 +76,13 @@ export type TileVertex = {
}
export type ChunkData = {
xoffset: number,
zoffset: number,
tilerect: MapRect,
levelcount: number,
mapsquarex: number,
mapsquarez: number,
tiles: mapsquare_tiles["tiles"],
extra: mapsquare_tiles["extra"],
locsfile: Buffer | null,
tilefile: Buffer | null,
rawlocs: mapsquare_locations["locations"],
locs: WorldLocation[]
}
@@ -170,177 +170,7 @@ type FloorMorph = {
level: number
}
function squareMesh(sizex: number, sizez: number, color: number[]): ModelMeshData {
let pos = new Float32Array([
-sizex / 2, 0, -sizez / 2,
sizex / 2, 0, -sizez / 2,
-sizex / 2, 0, sizez / 2,
sizex / 2, 0, sizez / 2
]);
let col = new Uint8Array([
color[0], color[1], color[2],
color[0], color[1], color[2],
color[0], color[1], color[2],
color[0], color[1], color[2]
]);
let uvs = new Float32Array([
0, 1,
1, 1,
0, 0,
1, 0
]);
let indexbuffer = new Uint16Array([
0, 3, 1,
0, 2, 3
]);
return {
attributes: {
pos: new THREE.BufferAttribute(pos, 3, false),
color: new THREE.BufferAttribute(col, 3, true),
texuvs: new THREE.BufferAttribute(uvs, 2, false)
},
needsNormalBlending: false,
indices: new THREE.BufferAttribute(indexbuffer, 1, false),
hasVertexAlpha: false,
materialId: -1
}
}
function extrudedPolygonMesh(points: { x: number, z: number }[], height: number, color: number[]): ModelMeshData {
let nvertices = points.length * 2;
let nfaces = 2;
if (height != 0) {
nvertices += points.length * 4;
nfaces += points.length;
}
let pos = new Float32Array(3 * nvertices);
let col = new Uint8Array(3 * nvertices);
for (let a = 0; a < col.length; a += 3) {
col[a + 0] = color[0]; col[a + 1] = color[1]; col[a + 2] = color[2];
}
let indexbuffer = new Uint16Array((nvertices - nfaces) * 3);
//side faces
let vertexindex = 0;
let index = 0;
let lastpoint = points[points.length - 1];
//side faces
if (height != 0) {
for (let a = 0; a < points.length; a++) {
let point = points[a];
let firstvertex = vertexindex / 3;
pos[vertexindex++] = lastpoint.x; pos[vertexindex++] = 0; pos[vertexindex++] = lastpoint.z;
pos[vertexindex++] = point.x; pos[vertexindex++] = 0; pos[vertexindex++] = point.z;
pos[vertexindex++] = lastpoint.x; pos[vertexindex++] = height; pos[vertexindex++] = lastpoint.z;
pos[vertexindex++] = point.x; pos[vertexindex++] = height; pos[vertexindex++] = point.z;
indexbuffer[index++] = firstvertex; indexbuffer[index++] = firstvertex + 1; indexbuffer[index++] = firstvertex + 3;
indexbuffer[index++] = firstvertex; indexbuffer[index++] = firstvertex + 3; indexbuffer[index++] = firstvertex + 2;
lastpoint = point;
}
}
//bottom polygon
let firstvertex = vertexindex / 3;
pos[vertexindex++] = points[0].x; pos[vertexindex++] = 0; pos[vertexindex++] = points[0].z;
let lastvertex = vertexindex / 3;
pos[vertexindex++] = points[points.length - 1].x; pos[vertexindex++] = 0; pos[vertexindex++] = points[points.length - 1].z;
for (let a = points.length - 2; a >= 1; a--) {
let vertex = vertexindex / 3;
pos[vertexindex++] = points[a].x; pos[vertexindex++] = 0; pos[vertexindex++] = points[a].z;
indexbuffer[index++] = firstvertex; indexbuffer[index++] = lastvertex; indexbuffer[index++] = vertex;
lastvertex = vertex
}
//top polygon
firstvertex = vertexindex / 3;
pos[vertexindex++] = points[0].x; pos[vertexindex++] = height; pos[vertexindex++] = points[0].z;
lastvertex = vertexindex / 3;
pos[vertexindex++] = points[1].x; pos[vertexindex++] = height; pos[vertexindex++] = points[1].z;
for (let a = 2; a < points.length; a++) {
let vertex = vertexindex / 3;
pos[vertexindex++] = points[a].x; pos[vertexindex++] = height; pos[vertexindex++] = points[a].z;
indexbuffer[index++] = firstvertex; indexbuffer[index++] = lastvertex; indexbuffer[index++] = vertex;
lastvertex = vertex
}
return {
attributes: {
pos: new THREE.BufferAttribute(pos, 3, false),
color: new THREE.BufferAttribute(col, 3, true)
},
needsNormalBlending: false,
indices: new THREE.BufferAttribute(indexbuffer, 1, false),
hasVertexAlpha: false,
materialId: -1
}
}
function generateWallModels() {
const thick = tiledimensions / 8;
const height = 0;//tiledimensions * 1.5;
const white = [255, 255, 255];
const red = [255, 0, 0];
const halftile = tiledimensions / 2;
return {
wall: {
maxy: height,
miny: 0,
meshes: [extrudedPolygonMesh([
{ x: -halftile, z: -halftile },
{ x: -halftile, z: halftile },
{ x: -halftile + thick, z: halftile },
{ x: -halftile + thick, z: -halftile }
], height, white)]
} as ModelData,
shortcorner: {
maxy: height,
miny: 0,
meshes: [extrudedPolygonMesh([
{ x: -halftile, z: halftile },
{ x: -halftile + thick, z: halftile },
{ x: -halftile + thick, z: halftile - thick },
{ x: -halftile, z: halftile - thick }
], height, white)]
} as ModelData,
longcorner: {
maxy: height,
miny: 0,
meshes: [extrudedPolygonMesh([
{ x: -halftile + thick, z: halftile - thick },
{ x: -halftile + thick, z: -halftile },
{ x: -halftile, z: -halftile },
{ x: -halftile, z: halftile },
{ x: halftile, z: halftile },
{ x: halftile, z: halftile - thick },
], height, white)]
} as ModelData,
pillar: {
maxy: height,
miny: 0,
meshes: [extrudedPolygonMesh([
{ x: -halftile, z: halftile },
{ x: -halftile + thick, z: halftile },
{ x: -halftile + thick, z: halftile - thick },
{ x: -halftile, z: halftile - thick }
], height, white)]
} as ModelData,
diagonal: {
maxy: height,
miny: 0,
meshes: [extrudedPolygonMesh([
{ x: -halftile, z: -halftile },
{ x: -halftile, z: -halftile + thick },
{ x: halftile - thick, z: halftile },
{ x: halftile, z: halftile },
{ x: halftile, z: halftile - thick },
{ x: -halftile + thick, z: -halftile },
], height, white)]
} as ModelData,
}
}
function generateTileShapes() {
//we have 8 possible vertices along the corners and halfway on the edges of the tile
//select these vertices to draw the tile shape
//from bottom to top: [[0,1,2],[7,<9>,3],[6,5,4]]
@@ -651,8 +481,9 @@ export class TileGrid implements TileGridSource {
engine: EngineCache;
area: MapRect;
tilemask: undefined | MapRect[];
width: number;
height: number;
xsize: number;
zsize: number;
levels = 4;
//position of this grid measured in tiles
xoffset: number;
zoffset: number;
@@ -662,14 +493,14 @@ export class TileGrid implements TileGridSource {
xstep: number;
zstep: number;
levelstep: number;
constructor(engine: EngineCache, area: MapRect, tilemask: MapRect[] | undefined) {
constructor(engine: EngineCache, area: MapRect, tilemask?: MapRect[] | undefined) {
this.area = area;
this.tilemask = tilemask && tilemask.filter(q => mapRectsIntersect(q, area));
this.tilemask = tilemask?.filter(q => mapRectsIntersect(q, area));
this.engine = engine;
this.xoffset = area.x;
this.zoffset = area.z;
this.width = area.xsize;
this.height = area.zsize;
this.xsize = area.xsize;
this.zsize = area.zsize;
this.xstep = 1;
this.zstep = this.xstep * area.xsize;
this.levelstep = this.zstep * area.zsize;
@@ -700,18 +531,18 @@ export class TileGrid implements TileGridSource {
getTile(x: number, z: number, level: number) {
x -= this.xoffset;
z -= this.zoffset;
if (x < 0 || z < 0 || x >= this.width || z >= this.height) { return undefined; }
if (x < 0 || z < 0 || x >= this.xsize || z >= this.zsize) { return undefined; }
return this.tiles[this.levelstep * level + z * this.zstep + x * this.xstep];
}
blendUnderlays(kernelRadius = 3) {
for (let z = this.zoffset; z < this.zoffset + this.height; z++) {
for (let x = this.xoffset; x < this.xoffset + this.width; x++) {
for (let z = this.zoffset; z < this.zoffset + this.zsize; z++) {
for (let x = this.xoffset; x < this.xoffset + this.xsize; x++) {
let effectiveVisualLevel = 0;
let layer1tile = this.getTile(x, z, 1);
let flag2 = ((layer1tile?.raw.settings ?? 0) & 2) != 0;
let leveloffset = (flag2 ? -1 : 0);
for (let level = 0; level < squareLevels; level++) {
for (let level = 0; level < this.levels; level++) {
let currenttile = this.getTile(x, z, level);
if (!currenttile) { continue; }
@@ -796,9 +627,9 @@ export class TileGrid implements TileGridSource {
}
}
for (let z = this.zoffset; z < this.zoffset + this.height; z++) {
for (let x = this.xoffset; x < this.xoffset + this.width; x++) {
for (let level = 0; level < squareLevels; level++) {
for (let z = this.zoffset; z < this.zoffset + this.zsize; z++) {
for (let x = this.xoffset; x < this.xoffset + this.xsize; x++) {
for (let level = 0; level < this.levels; level++) {
let currenttile = this.getTile(x, z, level);
if (!currenttile) { continue; }
//bleed overlay materials
@@ -835,7 +666,7 @@ export class TileGrid implements TileGridSource {
mats.set(id, repeat);
}
}
for (let level = 0; level < squareLevels; level++) {
for (let level = 0; level < this.levels; level++) {
for (let dz = 0; dz < zsize; dz++) {
for (let dx = 0; dx < xsize; dx++) {
let tile = this.getTile(x + dx, z + dz, level);
@@ -851,20 +682,20 @@ export class TileGrid implements TileGridSource {
}
return mats;
}
addMapsquare(chunk: ChunkData, docollision = false) {
const tiles = chunk.tiles;
if (tiles.length != squareSize * squareSize * squareLevels) { throw new Error(); }
let baseoffset = (chunk.xoffset - this.xoffset) * this.xstep + (chunk.zoffset - this.zoffset) * this.zstep;
for (let z = 0; z < squareSize; z++) {
for (let x = 0; x < squareSize; x++) {
let tilex = (chunk.xoffset + x) * tiledimensions;
let tilez = (chunk.zoffset + z) * tiledimensions;
if (!mapRectContains(this.area, chunk.xoffset + x, chunk.zoffset + z)) { continue; }
if (this.tilemask && !this.tilemask.some(q => mapRectContains(q, chunk.xoffset + x, chunk.zoffset + z))) { continue; }
let tileindex = z + x * squareSize;
addMapsquare(tiles: mapsquare_tiles["tiles"], chunkrect: MapRect, levels: number, docollision = false) {
if (tiles.length != chunkrect.xsize * chunkrect.zsize * levels) { throw new Error(); }
let baseoffset = (chunkrect.x - this.xoffset) * this.xstep + (chunkrect.z - this.zoffset) * this.zstep;
for (let z = 0; z < chunkrect.zsize; z++) {
for (let x = 0; x < chunkrect.xsize; x++) {
if (!mapRectContains(this.area, chunkrect.x + x, chunkrect.z + z)) { continue; }
if (this.tilemask && !this.tilemask.some(q => mapRectContains(q, chunkrect.x + x, chunkrect.z + z))) { continue; }
let tilex = (chunkrect.x + x) * tiledimensions;
let tilez = (chunkrect.z + z) * tiledimensions;
let tileindex = z + x * chunkrect.zsize;
let height = 0;
for (let level = 0; level < squareLevels; level++) {
let tile = tiles[tileindex];
for (let level = 0; level < this.levels; level++) {
let tile = (level < levels ? tiles[tileindex] : {} as typeof tiles[number]);
if (tile.height != undefined) {
//not sure what the 1=0 thing is about, but seems correct for trees
height += (tile.height == 1 ? 0 : tile.height);
@@ -939,7 +770,7 @@ export class TileGrid implements TileGridSource {
effectiveVisualLevel: 0
}
this.tiles[newindex] = parsedTile;
tileindex += squareSize * squareSize;
tileindex += chunkrect.xsize * chunkrect.zsize;
}
}
}
@@ -950,8 +781,8 @@ export type ParsemapOpts = { padfloor?: boolean, invisibleLayers?: boolean, coll
export type ChunkModelData = { floors: FloorMeshData[], models: MapsquareLocation[], overlays: PlacedModel[], chunk: ChunkData, grid: TileGrid };
export async function parseMapsquare(engine: EngineCache, rect: MapRect, opts?: ParsemapOpts) {
let chunkfloorpadding = (opts?.padfloor ? 20 : 0);//TODO same as max(blending kernel,max loc size), put this in a const somewhere
let squareSize = (engine.classicData ? classicChunkSize : rs2ChunkSize);
let chunkpadding = Math.ceil(chunkfloorpadding / squareSize);
let grid = new TileGrid(engine, {
x: rect.x * squareSize - chunkfloorpadding,
@@ -963,65 +794,89 @@ export async function parseMapsquare(engine: EngineCache, rect: MapRect, opts?:
for (let z = -chunkpadding; z < rect.zsize + chunkpadding; z++) {
for (let x = -chunkpadding; x < rect.xsize + chunkpadding; x++) {
let squareindex = (rect.x + x) + (rect.z + z) * worldStride;
let tilefile: Buffer;
let locsfile: Buffer | null = null;
if (engine.getBuildNr() >= 759) {
let mapunderlaymeta = await engine.getCacheIndex(cacheMajors.mapsquares);
let selfindex = mapunderlaymeta[squareindex];
if (!selfindex) {
// console.log(`skipping mapsquare ${rect.x + x} ${rect.z + z} as it does not exist`);
continue;
}
let selfarchive = (await engine.getFileArchive(selfindex));
let tileindex = selfindex.subindices.indexOf(cacheMapFiles.squares);
if (tileindex == -1) { continue; }
tilefile = selfarchive[tileindex].buffer;
let locsindex = selfindex.subindices.indexOf(cacheMapFiles.locations);
if (locsindex != -1) {
locsfile = selfarchive[locsindex].buffer;
}
} else if (engine.getBuildNr() > lastLegacyBuildnr) {
try {
let index = await engine.findFileByName(cacheMajors.mapsquares, `m${rect.x + x}_${rect.z + z}`);
if (!index) { continue; }
tilefile = await engine.getFile(index.major, index.minor, index.crc);
} catch (e) {
//missing xtea
continue;
}
try {
let index = await engine.findFileByName(cacheMajors.mapsquares, `l${rect.x + x}_${rect.z + z}`);
if (index) {
locsfile = await engine.getFile(index.major, index.minor, index.crc);
let tiles: mapsquare_tiles["tiles"];
let tilesextra: mapsquare_tiles["extra"] = {};
let locs: mapsquare_locations["locations"] = [];
let tilerect: MapRect;
let levelcount = squareLevels;
if (engine.getBuildNr() > lastClassicBuildnr) {
let tilefile: Buffer;
let locsfile: Buffer | null = null;
if (engine.getBuildNr() >= 759) {
let mapunderlaymeta = await engine.getCacheIndex(cacheMajors.mapsquares);
let selfindex = mapunderlaymeta[squareindex];
if (!selfindex) {
// console.log(`skipping mapsquare ${rect.x + x} ${rect.z + z} as it does not exist`);
continue;
}
} catch (e) {
//ignore
let selfarchive = (await engine.getFileArchive(selfindex));
let tileindex = selfindex.subindices.indexOf(cacheMapFiles.squares);
if (tileindex == -1) { continue; }
tilefile = selfarchive[tileindex].buffer;
let locsindex = selfindex.subindices.indexOf(cacheMapFiles.locations);
if (locsindex != -1) {
locsfile = selfarchive[locsindex].buffer;
}
} else if (engine.getBuildNr() > lastLegacyBuildnr) {
try {
let index = await engine.findFileByName(cacheMajors.mapsquares, `m${rect.x + x}_${rect.z + z}`);
if (!index) { continue; }
tilefile = await engine.getFile(index.major, index.minor, index.crc);
} catch (e) {
//missing xtea
continue;
}
try {
let index = await engine.findFileByName(cacheMajors.mapsquares, `l${rect.x + x}_${rect.z + z}`);
if (index) {
locsfile = await engine.getFile(index.major, index.minor, index.crc);
}
} catch (e) {
//ignore
}
} else {
let index = (rect.x + x) * 256 + (rect.z + z);
let info = engine.legacyData?.mapmeta.get(index);
if (!info) {
continue;
}
tilefile = await engine.getFile(legacyMajors.map, info.map);
locsfile = await engine.getFile(legacyMajors.map, info.loc);
}
let tiledata = parse.mapsquareTiles.read(tilefile, engine.rawsource);
tiles = tiledata.tiles;
tilesextra = tiledata.extra;
if (locsfile) {
locs = parse.mapsquareLocations.read(locsfile, engine.rawsource).locations;
}
tilerect = {
x: (rect.x + x) * squareSize,
z: (rect.z + z) * squareSize,
xsize: squareSize,
zsize: squareSize
};
} else {
let index = (rect.x + x) * 256 + (rect.z + z);
let info = engine.legacyData?.mapmeta.get(index);
if (!info) {
continue;
}
tilefile = await engine.getFile(legacyMajors.map, info.map);
locsfile = await engine.getFile(legacyMajors.map, info.loc);
let mapdata = await getClassicMapData(engine, rect.x + x, rect.z + z, 0);
if (!mapdata) { continue; }
tiles = mapdata.tiles;
tilerect = mapdata.rect;
levelcount = mapdata.levels;
locs = mapdata.locs;
}
//let watertilefile = selfarchive[tileindexwater]?.buffer;
//let watertiles = parse.mapsquareWaterTiles.read(watertilefile);
let tiledata = parse.mapsquareTiles.read(tilefile, engine.rawsource);
let chunk: ChunkData = {
xoffset: (rect.x + x) * squareSize,
zoffset: (rect.z + z) * squareSize,
tilerect,
levelcount,
mapsquarex: rect.x + x,
mapsquarez: rect.z + z,
tiles: tiledata.tiles,
extra: tiledata.extra,
locsfile,
tilefile,
tiles: tiles,
extra: tilesextra,
rawlocs: locs,
locs: []
};
grid.addMapsquare(chunk, !!opts?.collision);
grid.addMapsquare(chunk.tiles, chunk.tilerect, chunk.levelcount, !!opts?.collision);
//only add the actual ones we need to the queue
if (chunk.mapsquarex < rect.x || chunk.mapsquarex >= rect.x + rect.xsize) { continue; }
@@ -1031,7 +886,7 @@ export async function parseMapsquare(engine: EngineCache, rect: MapRect, opts?:
}
grid.blendUnderlays();
for (let chunk of chunks) {
chunk.locs = await mapsquareObjects(engine, chunk, grid, !!opts?.collision);
chunk.locs = await mapsquareObjects(engine, grid, chunk.rawlocs, chunk.tilerect.x, chunk.tilerect.z, !!opts?.collision);
}
return { grid, chunks };
@@ -1058,7 +913,7 @@ export async function mapsquareSkybox(scene: ThreejsSceneCache, mainchunk: Chunk
export async function mapsquareModels(scene: ThreejsSceneCache, grid: TileGrid, chunk: ChunkData, opts?: ParsemapOpts) {
let floors: FloorMeshData[] = [];
let matids = grid.gatherMaterials(chunk.xoffset, chunk.zoffset, squareSize + 1, squareSize + 1);
let matids = grid.gatherMaterials(chunk.tilerect.x, chunk.tilerect.z, chunk.tilerect.xsize + 1, chunk.tilerect.zsize + 1);
let textures = new Map<number, { tex: CanvasImage, repeat: number }>();
let textureproms: Promise<void>[] = [];
for (let [matid, repeat] of matids.entries()) {
@@ -1108,11 +963,11 @@ export async function mapsquareModels(scene: ThreejsSceneCache, grid: TileGrid,
export async function mapsquareToThreeSingle(scene: ThreejsSceneCache, grid: TileGrid, chunk: ChunkModelData, placedlocs: PlacedModel[]) {
let node = new THREE.Group();
node.matrixAutoUpdate = false;
node.position.set(chunk.chunk.xoffset * tiledimensions, 0, chunk.chunk.zoffset * tiledimensions);
node.position.set(chunk.chunk.tilerect.x * tiledimensions, 0, chunk.chunk.tilerect.z * tiledimensions);
node.updateMatrix();
let rootx = chunk.chunk.xoffset * tiledimensions;
let rootz = chunk.chunk.zoffset * tiledimensions;
let rootx = chunk.chunk.tilerect.x * tiledimensions;
let rootz = chunk.chunk.tilerect.z * tiledimensions;
if (placedlocs.length != 0) { node.add(...placedlocs.map(q => meshgroupsToThree(grid, q, rootx, rootz))); }
let chunkoverlays = chunk.overlays.filter(q => q.models.length != 0).map(q => meshgroupsToThree(grid, q, rootx, rootz));
if (chunkoverlays.length != 0) { node.add(...chunkoverlays); }
@@ -1266,20 +1121,36 @@ export function defaultMorphId(locmeta: objects) {
//TODO move this to a more logical location
export async function resolveMorphedObject(source: EngineCache, id: number) {
let objectfile = await source.getGameFile("objects", id);
let rawloc = parse.object.read(objectfile, source);
let morphedloc = rawloc;
if (rawloc.morphs_1 || rawloc.morphs_2) {
let newid = defaultMorphId(rawloc);
if (newid != -1) {
let newloc = await source.getGameFile("objects", newid);
morphedloc = {
...rawloc,
...parse.object.read(newloc, source)
};
if (source.classicData) {
let rawloc = source.classicData.wallobjects[id];
//TODO recolor+retexture
let rs2loc: objects = {
name: rawloc.name,
probably_morphFloor: true,
models: [
{ type: 0, values: [constModelsIds.paperWall] },
{ type: 9, values: [constModelsIds.paperWallDiag] }
],
//sets replace_colors/mats and if invisible sets models to null
...classicIntToModelMods(rawloc.frontdecor, rawloc.backdecor)
}
return { rawloc: rs2loc, morphedloc: rs2loc };
} else {
let objectfile = await source.getGameFile("objects", id);
let rawloc = parse.object.read(objectfile, source);
let morphedloc = rawloc;
if (rawloc.morphs_1 || rawloc.morphs_2) {
let newid = defaultMorphId(rawloc);
if (newid != -1) {
let newloc = await source.getGameFile("objects", newid);
morphedloc = {
...rawloc,
...parse.object.read(newloc, source)
};
}
}
return { rawloc, morphedloc };
}
return { rawloc, morphedloc };
}
async function mapsquareOverlays(engine: EngineCache, grid: TileGrid, locs: WorldLocation[]) {
@@ -1348,7 +1219,11 @@ async function mapsquareOverlays(engine: EngineCache, grid: TileGrid, locs: Worl
let tex = (group.material.mat as MeshBasicMaterial).map! as DataTexture;
const spritescale = 128;
let mesh = squareMesh(tex.image.width * spritescale, tex.image.height * spritescale, [255, 255, 255]);
let w = tex.image.width * spritescale;
let h = tex.image.height * spritescale;
let mesh = new MeshBuilder(null)
.addParallelogram([255, 255, 255], [-w / 2, 0, -h / 2], [w, 0, 0], [0, 0, h])
.convertSubmesh(0);
let translate = new THREE.Vector3((loc.x + loc.sizex / 2) * tiledimensions, 0, (loc.z + loc.sizez / 2) * tiledimensions);
group.models.push({
model: mesh,
@@ -1375,15 +1250,15 @@ async function mapsquareOverlays(engine: EngineCache, grid: TileGrid, locs: Worl
for (let loc of locs) {
if (loc.type == 0) {
addwall(wallmodels.wall, loc);
addwall(topdown2dWallModels.wall, loc);
} else if (loc.type == 1) {
addwall(wallmodels.shortcorner, loc);
addwall(topdown2dWallModels.shortcorner, loc);
} else if (loc.type == 2) {
addwall(wallmodels.longcorner, loc);
addwall(topdown2dWallModels.longcorner, loc);
} else if (loc.type == 3) {
addwall(wallmodels.pillar, loc);
addwall(topdown2dWallModels.pillar, loc);
} else if (loc.type == 9) {
addwall(wallmodels.diagonal, loc);
addwall(topdown2dWallModels.diagonal, loc);
}
if (loc.location.mapscene != undefined) {
@@ -1619,19 +1494,15 @@ export type WorldLocation = {
effectiveLevel: number
}
export async function mapsquareObjects(engine: EngineCache, chunk: ChunkData, grid: TileGrid, collision = false) {
export async function mapsquareObjects(engine: EngineCache, grid: TileGrid, locations: mapsquare_locations["locations"], originx: number, originz: number, collision = false) {
let locs: WorldLocation[] = [];
if (!chunk.locsfile) { return locs; }
let locations = parse.mapsquareLocations.read(chunk.locsfile, engine.rawsource).locations;
for (let loc of locations) {
let { morphedloc, rawloc } = await resolveMorphedObject(engine, loc.id);
if (!morphedloc) { continue; }
for (let inst of loc.uses) {
let callingtile = grid.getTile(inst.x + chunk.xoffset, inst.y + chunk.zoffset, inst.plane);
let callingtile = grid.getTile(inst.x + originx, inst.y + originz, inst.plane);
if (!callingtile) {
// console.log("callingtile not found");
continue;
@@ -1649,7 +1520,7 @@ export async function mapsquareObjects(engine: EngineCache, chunk: ChunkData, gr
let visualLevel = callingtile.effectiveVisualLevel;
for (let dz = 0; dz < sizez; dz++) {
for (let dx = 0; dx < sizex; dx++) {
let tile = grid.getTile(inst.x + chunk.xoffset + dx, inst.y + chunk.zoffset + dz, inst.plane);
let tile = grid.getTile(inst.x + originx + dx, inst.y + originz + dz, inst.plane);
if (tile && tile.effectiveVisualLevel > visualLevel) {
visualLevel = tile.effectiveVisualLevel;
}
@@ -1662,8 +1533,8 @@ export async function mapsquareObjects(engine: EngineCache, chunk: ChunkData, gr
placement: inst.extra,
sizex,
sizez,
x: inst.x + chunk.xoffset,
z: inst.y + chunk.zoffset,
x: inst.x + originx,
z: inst.y + originz,
type: inst.type,
rotation: inst.rotation,
plane: inst.plane,
@@ -1680,7 +1551,7 @@ export async function mapsquareObjects(engine: EngineCache, chunk: ChunkData, gr
if (collision && !rawloc.probably_nocollision) {
for (let dz = 0; dz < sizez; dz++) {
for (let dx = 0; dx < sizex; dx++) {
let tile = grid.getTile(inst.x + chunk.xoffset + dx, inst.y + chunk.zoffset + dz, callingtile.effectiveLevel);
let tile = grid.getTile(inst.x + originx + dx, inst.y + originz + dz, callingtile.effectiveLevel);
if (tile) {
let col = tile.effectiveCollision!;
//TODO check for other loc types
@@ -1721,7 +1592,7 @@ export async function mapsquareObjects(engine: EngineCache, chunk: ChunkData, gr
}
function mapsquareCollisionMesh(grid: TileGrid, chunk: ChunkData, level: number, rawmode = false) {
const maxtriangles = squareSize * squareSize * 5 * 6 * 2;
const maxtriangles = chunk.tilerect.xsize * chunk.tilerect.zsize * 5 * 6 * 2;
let posoffset = 0;
let coloroffset = 12;
let stride = 16;
@@ -1732,8 +1603,8 @@ function mapsquareCollisionMesh(grid: TileGrid, chunk: ChunkData, level: number,
let posbuffer = new Float32Array(buf);
let colorbuffer = new Uint8Array(buf);
let rootx = chunk.xoffset * tiledimensions;
let rootz = chunk.zoffset * tiledimensions;
let rootx = chunk.tilerect.x * tiledimensions;
let rootz = chunk.tilerect.z * tiledimensions;
let vertexindex = 0;
let indexpointer = 0;
@@ -1783,8 +1654,8 @@ function mapsquareCollisionMesh(grid: TileGrid, chunk: ChunkData, level: number,
indexbuf[indexpointer++] = v100; indexbuf[indexpointer++] = v111; indexbuf[indexpointer++] = v110;
indexbuf[indexpointer++] = v100; indexbuf[indexpointer++] = v101; indexbuf[indexpointer++] = v111;
}
for (let z = chunk.zoffset; z < chunk.zoffset + squareSize; z++) {
for (let x = chunk.xoffset; x < chunk.xoffset + squareSize; x++) {
for (let z = chunk.tilerect.z; z < chunk.tilerect.z + chunk.tilerect.zsize; z++) {
for (let x = chunk.tilerect.x; x < chunk.tilerect.x + chunk.tilerect.xsize; x++) {
let tile = grid.getTile(x, z, level);
let collision = (rawmode ? tile?.rawCollision : tile?.effectiveCollision);
if (tile && collision) {
@@ -1968,7 +1839,7 @@ function meshgroupsToThree(grid: TileGrid, meshgroup: PlacedModel, rootx: number
function mapsquareMesh(grid: TileGrid, chunk: ChunkData, level: number, atlas: SimpleTexturePacker, showhidden: boolean, keeptileinfo = false, worldmap = false) {
const maxtiles = squareSize * squareSize * squareLevels;
const maxtiles = chunk.tilerect.xsize * chunk.tilerect.zsize * grid.levels;
const maxVerticesPerTile = 8;
const posoffset = 0;// 0/4
const normaloffset = 3;// 12/4
@@ -1997,8 +1868,8 @@ function mapsquareMesh(grid: TileGrid, chunk: ChunkData, level: number, atlas: S
let vertexindex = 0;
let indexpointer = 0;
const modelx = chunk.xoffset * tiledimensions;
const modelz = chunk.zoffset * tiledimensions;
const modelx = chunk.tilerect.x * tiledimensions;
const modelz = chunk.tilerect.z * tiledimensions;
let tileinfos: MeshTileInfo[] = [];
let tileindices: number[] = [];
@@ -2070,11 +1941,11 @@ function mapsquareMesh(grid: TileGrid, chunk: ChunkData, level: number, atlas: S
return vertexindex++;
}
for (let tilelevel = level; tilelevel < squareLevels; tilelevel++) {
for (let tilelevel = level; tilelevel < chunk.levelcount; tilelevel++) {
if (showhidden && tilelevel != level) { continue; }
for (let z = 0; z < squareSize; z++) {
for (let x = 0; x < squareSize; x++) {
let tile = grid.getTile(chunk.xoffset + x, chunk.zoffset + z, tilelevel);
for (let z = 0; z < chunk.tilerect.zsize; z++) {
for (let x = 0; x < chunk.tilerect.xsize; x++) {
let tile = grid.getTile(chunk.tilerect.x + x, chunk.tilerect.z + z, tilelevel);
if (!tile) { continue; }
if (!showhidden && tile.effectiveVisualLevel != level) { continue; }

View File

@@ -1,7 +1,7 @@
import { parse } from "../opdecoder";
import { appearanceUrl, avatarStringToBytes, avatarToModel } from "./avatar";
import * as THREE from "three";
import { ThreejsSceneCache, mergeModelDatas, ob3ModelToThree, mergeNaiveBoneids } from '../3d/modeltothree';
import { ThreejsSceneCache, mergeModelDatas, ob3ModelToThree, mergeNaiveBoneids, constModelsIds } from '../3d/modeltothree';
import { ModelModifications, constrainedMap, TypedEmitter } from '../utils';
import { boundMethod } from 'autobind-decorator';
import { resolveMorphedObject, modifyMesh, MapRect, ParsemapOpts, parseMapsquare, mapsquareModels, mapsquareToThreeSingle, ChunkData, TileGrid, mapsquareSkybox, generateLocationMeshgroups, PlacedMesh } from '../3d/mapsquare';
@@ -16,6 +16,7 @@ import { animgroupconfigs } from "../../generated/animgroupconfigs";
import fetch from "node-fetch";
import { MaterialData } from "./jmat";
import { legacyMajors } from "../cache/legacycache";
import { classicGroups } from "../cache/classicloader";
export type SimpleModelDef = {
@@ -34,10 +35,13 @@ export async function modelToModel(cache: ThreejsSceneCache, id: number) {
let modeldata = await cache.getModelData(id);
//getting the same file a 2nd time to get the full json
let info: any;
if (cache.useOldModels) {
if (cache.modelType == "classic") {
let arch = await cache.engine.getArchiveById(0, classicGroups.models);
info = parse.classicmodels.read(arch[id].buffer, cache.engine.rawsource);
} else if (cache.modelType == "old") {
let major = (cache.engine.legacyData ? legacyMajors.oldmodels : cacheMajors.oldmodels);
info = parse.oldmodels.read(await cache.engine.getFileById(major, id), cache.engine.rawsource);
} else {
} else if (cache.modelType == "nxt") {
info = parse.models.read(await cache.engine.getFileById(cacheMajors.models, id), cache.engine.rawsource);
}
return { models: [{ modelid: id, mods: {} }], anims: {}, info: { modeldata, info }, id };
@@ -124,14 +128,10 @@ export async function itemToModel(cache: ThreejsSceneCache, id: number) {
}
export async function materialToModel(sceneCache: ThreejsSceneCache, modelid: number) {
let assetid = 93808;//"RuneTek_Asset" jagex test model
let assetid = constModelsIds.materialCube;
let mods: ModelModifications = {
replaceMaterials: [[4311, modelid]]
replaceMaterials: [[0, modelid]]
};
// modelids = [67768];//is a cube but has transparent vertices
// mods.replaceMaterials = [
// [8868, +searchid]
// ];
let mat = sceneCache.engine.getMaterialData(modelid);
let texs: Record<string, { texid: number, filesize: number, img0: HTMLImageElement | HTMLCanvasElement | HTMLVideoElement | ImageBitmap }> = {};
let addtex = async (type: keyof MaterialData["textures"], name: string, texid: number) => {
@@ -312,6 +312,7 @@ export class RSModel extends TypedEmitter<{ loaded: undefined, animchanged: numb
export type RSMapChunkData = {
grid: TileGrid,
chunks: ChunkData[],
chunkSize: number,
groups: Set<string>,
sky: { skybox: Object3D, fogColor: number[], skyboxModelid: number } | null,
modeldata: PlacedMesh[][],
@@ -342,8 +343,8 @@ export class RSMapChunk extends TypedEmitter<{ loaded: undefined }> implements T
}
async renderSvg(level = 0, wallsonly = false, pxpersquare = 1) {
let { chunks, grid } = await this.model;
let rect: MapRect = { x: this.rect.x * 64, z: this.rect.z * 64, xsize: this.rect.xsize * 64, zsize: this.rect.zsize * 64 };
let { chunks, grid, chunkSize } = await this.model;
let rect: MapRect = { x: this.rect.x * chunkSize, z: this.rect.z * chunkSize, xsize: this.rect.xsize * chunkSize, zsize: this.rect.zsize * chunkSize };
return svgfloor(this.cache.engine, grid, chunks.flatMap(q => q.locs), rect, level, pxpersquare, wallsonly);
}
@@ -394,6 +395,7 @@ export class RSMapChunk extends TypedEmitter<{ loaded: undefined }> implements T
}));
let sky = (extraopts?.skybox ? await mapsquareSkybox(cache, chunks[0]) : null);
let chunkSize = chunks[0].tilerect.xsize;//TODO depends on classic or rs2, must be better way
if (processedChunks.length != 0) {
this.rootnode.add(...processedChunks.map(q => q.group));
}
@@ -421,7 +423,7 @@ export class RSMapChunk extends TypedEmitter<{ loaded: undefined }> implements T
let modeldata = processedChunks.flatMap(q => q.locmeshes.byLogical);
let chunkmodels = processedChunks.map(q => q.group);
this.loaded = { grid, chunks, groups, sky, modeldata, chunkmodels };
this.loaded = { grid, chunks, groups, sky, modeldata, chunkmodels, chunkSize };
this.onModelLoaded();
return this.loaded;
})();

View File

@@ -1,4 +1,4 @@
import { cacheConfigPages, cacheMajors, lastLegacyBuildnr } from "../constants";
import { cacheConfigPages, cacheMajors, lastClassicBuildnr, lastLegacyBuildnr } from "../constants";
import { ParsedTexture } from "./textures";
import { ModelData, parseOb3Model } from '../3d/rt7model';
import { parseRT5Model } from "../3d/rt5model";
@@ -15,8 +15,25 @@ import { JSONSchema6Definition } from "json-schema";
import { models } from "../../generated/models";
import { crc32, CrcBuilder } from "../libs/crc32util";
import { makeImageData } from "../imgutils";
import { parseLegacySprite, parseSprite } from "./sprite";
import { LegacyData, legacyGroups, legacyMajors, legacyPreload } from "../cache/legacycache";
import { parseLegacySprite, parseSprite, parseTgaSprite, SubImageData } from "./sprite";
import { combineLegacyTexture, LegacyData, legacyGroups, legacyMajors, legacyPreload, parseLegacyImageFile } from "../cache/legacycache";
import { classicConfig, ClassicConfig, classicGroups, classicUnderlays } from "../cache/classicloader";
import { parseRT2Model } from "./rt2model";
import { materialPreviewCube, paperWall, paperWallDiag } from "./modelutils";
//virtual model ids at or above this offset refer to procedurally built models in
//constModels below instead of files in the game cache
const constModelOffset = 1000000;
//stable public ids for the built-in preview models
export const constModelsIds = {
	materialCube: constModelOffset + 1,
	paperWall: constModelOffset + 2,
	paperWallDiag: constModelOffset + 3
}
//lookup from virtual id to prebuilt model data, wrapped in promises so they can be
//returned through the same async path as cache-backed model loads
const constModels = new Map([
	[constModelsIds.materialCube, Promise.resolve(materialPreviewCube)],
	[constModelsIds.paperWall, Promise.resolve(paperWall)],
	[constModelsIds.paperWallDiag, Promise.resolve(paperWallDiag)]
]);
export type ParsedMaterial = {
//TODO rename
@@ -75,7 +92,6 @@ export function augmentThreeJsFloorMaterial(mat: THREE.Material) {
//basically stores all the config of the game engine
export class EngineCache extends CachingFileSource {
ready: Promise<EngineCache>;
hasOldModels: boolean;
hasNewModels: boolean;
@@ -87,15 +103,15 @@ export class EngineCache extends CachingFileSource {
jsonSearchCache = new Map<string, { files: Promise<any[]>, schema: JSONSchema6Definition }>();
legacyData: LegacyData | null = null;
classicData: ClassicConfig | null = null;
static async create(source: CacheFileSource) {
static create(source: CacheFileSource) {
let ret = new EngineCache(source);
return ret.ready;
return ret.preload();
}
private constructor(source: CacheFileSource) {
super(source);
this.ready = this.preload();
}
private async preload() {
@@ -128,13 +144,18 @@ export class EngineCache extends CachingFileSource {
let rootindex = await this.getCacheIndex(cacheMajors.index);
this.hasNewModels = !!rootindex[cacheMajors.models];
this.hasOldModels = !!rootindex[cacheMajors.oldmodels];
} else {
} else if (this.getBuildNr() >= lastClassicBuildnr) {
this.legacyData = await legacyPreload(this);
let floors = this.legacyData.overlays.map(q => parse.mapsquareOverlays.read(q, this));
this.mapOverlays = floors;
this.mapUnderlays = floors;
this.hasNewModels = false;
this.hasOldModels = true;
} else {
this.classicData = await classicConfig(this);
//TODO
this.mapUnderlays = classicUnderlays();
this.hasNewModels = false;
this.hasOldModels = true;
}
@@ -158,7 +179,12 @@ export class EngineCache extends CachingFileSource {
} else {
if (this.getBuildNr() <= lastLegacyBuildnr) {
cached = defaultMaterial();
if (id != -1) { cached.textures.diffuse = id; }
if (id != -1) {
cached.textures.diffuse = id;
cached.vertexColorWhitening = (id == 0 ? 0 : 1);
cached.texmodes = "mirror";
cached.texmodet = "mirror";
}
} else if (this.getBuildNr() <= 498) {
let file = this.materialArchive.get(id);
if (!file) { throw new Error("material " + id + " not found"); }
@@ -229,7 +255,10 @@ export async function detectTextureMode(source: CacheFileSource) {
}
let textureMode: TextureModes = "none";
if (source.getBuildNr() <= lastLegacyBuildnr) {
if (source.getBuildNr() <= lastClassicBuildnr) {
let texindex = await source.findSubfileByName(0, classicGroups.textures, "INDEX.DAT");
textureMode = (texindex ? "legacy" : "legacytga");
} else if (source.getBuildNr() <= lastLegacyBuildnr) {
textureMode = "legacy";
} else if (source.getBuildNr() <= 498) {
textureMode = "oldproc";
@@ -344,17 +373,17 @@ async function convertMaterialToThree(source: ThreejsSceneCache, material: Mater
return { mat, matmeta: material };
}
type TextureModes = "png" | "dds" | "bmp" | "ktx" | "oldpng" | "png2014" | "dds2014" | "none" | "oldproc" | "legacy";
type ModelModes = "nxt" | "old" | "classic";
type TextureModes = "png" | "dds" | "bmp" | "ktx" | "oldpng" | "png2014" | "dds2014" | "none" | "oldproc" | "legacy" | "legacytga";
type TextureTypes = keyof MaterialData["textures"];
export class ThreejsSceneCache {
private modelCache = new Map<number, CachedObject<ModelData>>();
private oldModelCache = new Map<number, CachedObject<ModelData>>();
private threejsTextureCache = new Map<number, CachedObject<ParsedTexture>>();
private threejsMaterialCache = new Map<number, CachedObject<ParsedMaterial>>();
engine: EngineCache;
textureType: TextureModes = "dds";
useOldModels: boolean;
modelType: ModelModes = "nxt";
static textureIndices: Record<TextureTypes, Record<Exclude<TextureModes, "none">, number>> = {
diffuse: {
@@ -366,7 +395,8 @@ export class ThreejsSceneCache {
dds2014: cacheMajors.textures2015Dds,
oldpng: cacheMajors.texturesOldPng,
oldproc: cacheMajors.sprites,
legacy: legacyMajors.data
legacy: legacyMajors.data,
legacytga: 0
},
normal: {
png: cacheMajors.texturesPng,
@@ -378,7 +408,8 @@ export class ThreejsSceneCache {
dds2014: cacheMajors.textures2015CompoundDds,
oldpng: cacheMajors.texturesOldCompoundPng,
oldproc: 0,
legacy: 0
legacy: 0,
legacytga: 0
},
compound: {
png: cacheMajors.texturesPng,
@@ -390,16 +421,25 @@ export class ThreejsSceneCache {
dds2014: cacheMajors.textures2015CompoundDds,
oldpng: cacheMajors.texturesOldCompoundPng,
oldproc: 0,
legacy: 0
legacy: 0,
legacytga: 0
}
}
private constructor(scenecache: EngineCache) {
private constructor(scenecache: EngineCache, modeltype: ModelModes | "auto") {
this.engine = scenecache;
this.useOldModels = scenecache.hasOldModels && !scenecache.hasNewModels;
if (modeltype != "auto") {
this.modelType = modeltype;
} else if (scenecache.getBuildNr() <= lastClassicBuildnr) {
this.modelType = "classic";
} else if (scenecache.hasOldModels && !scenecache.hasNewModels) {
this.modelType = "old";
} else {
this.modelType = "nxt";
}
}
static async create(engine: EngineCache, texturemode: TextureModes | "auto" = "auto") {
let scene = new ThreejsSceneCache(engine);
static async create(engine: EngineCache, texturemode: TextureModes | "auto" = "auto", modelmode: ModelModes | "auto" = "auto") {
let scene = new ThreejsSceneCache(engine, modelmode);
scene.textureType = (texturemode == "auto" ? await detectTextureMode(engine.rawsource) : texturemode);
return scene;
}
@@ -410,11 +450,16 @@ export class ThreejsSceneCache {
let texmode = this.textureType;
return this.engine.fetchCachedObject(this.threejsTextureCache, cachekey, async () => {
if (texmode == "legacy") {
let spritedata = await this.engine.getArchiveById(legacyMajors.data, legacyGroups.textures);
let metafile = await this.engine.findSubfileByName(legacyMajors.data, legacyGroups.textures, "INDEX.DAT");
let img = parseLegacySprite(metafile!.buffer, spritedata[texid].buffer);
return new ParsedTexture(img, stripAlpha, false);
if (texmode == "legacytga" || texmode == "legacy") {
let img: SubImageData;
if (this.engine.classicData) {
let texmeta = this.engine.classicData.textures[texid - 1];
img = await combineLegacyTexture(this.engine, texmeta.name, texmeta.subname, texmode == "legacytga");
} else {
let imgfile = await this.engine.getArchiveById(legacyMajors.data, legacyGroups.textures);
img = await parseLegacyImageFile(this.engine, imgfile[texid].buffer)
}
return new ParsedTexture(img.img, stripAlpha, false);
} else {
let file = await this.engine.getFileById(cacheindex, texid);
if (texmode == "oldproc") {
@@ -427,19 +472,28 @@ export class ThreejsSceneCache {
}, obj => obj.filesize * 2);
}
getModelData(id: number, type: "auto" | "old" | "new" = "auto") {
if (type == "old" || (type == "auto" && this.useOldModels)) {
return this.engine.fetchCachedObject(this.oldModelCache, id, () => {
let major = (this.engine.legacyData ? legacyMajors.oldmodels : cacheMajors.oldmodels);
return this.engine.getFileById(major, id)
.then(f => parseRT5Model(f, this.engine.rawsource));
}, obj => obj.meshes.reduce((a, m) => m.indices.count, 0) * 30);
} else {
return this.engine.fetchCachedObject(this.modelCache, id, () => {
return this.engine.getFileById(cacheMajors.models, id)
.then(f => parseOb3Model(f, this.engine));
}, obj => obj.meshes.reduce((a, m) => m.indices.count, 0) * 30);
getModelData(id: number) {
if (id >= constModelOffset) {
let res = constModels.get(id);
if (!res) { throw new Error(`constmodel ${id} does not exist`); }
return res;
}
return this.engine.fetchCachedObject(this.modelCache, id, async () => {
if (this.modelType == "nxt") {
let file = await this.engine.getFileById(cacheMajors.models, id);
return parseOb3Model(file, this.engine);
} else if (this.modelType == "old") {
let major = (this.engine.legacyData ? legacyMajors.oldmodels : cacheMajors.oldmodels);
let file = await this.engine.getFileById(major, id);
return parseRT5Model(file, this.engine.rawsource);
} else if (this.modelType == "classic") {
let arch = await this.engine.getArchiveById(0, classicGroups.models);
return parseRT2Model(arch[id].buffer, this.engine);
} else {
throw new Error("unexpected");
}
}, obj => obj.meshes.reduce((a, m) => m.indices.count, 0) * 30);
}
getMaterial(matid: number, hasVertexAlpha: boolean) {
@@ -626,6 +680,7 @@ export async function ob3ModelToThree(scene: ThreejsSceneCache, model: ModelData
mesh = new THREE.Mesh(geo);
}
applyMaterial(mesh, await scene.getMaterial(meshdata.materialId, meshdata.hasVertexAlpha));
mesh.geometry.computeVertexNormals();//TODO remove, only used for classic models atm
rootnode.add(mesh);
}
if (model.debugmeshes && model.debugmeshes.length != 0) {

264
src/3d/modelutils.ts Normal file
View File

@@ -0,0 +1,264 @@
import { BufferAttribute } from "three";
import { ModelData, ModelMeshData } from "./rt7model";
type rgb = [r: number, g: number, b: number];
type xyz = [x: number, y: number, z: number];
const white: rgb = [255, 255, 255];
const red: rgb = [255, 0, 0];
const tile = 512;
const halftile = 256;
/**
 * Incrementally builds the geometry for one submesh (one material).
 * Vertex data is accumulated in plain number arrays and converted to typed
 * buffer attributes by convertSubmesh. Usually created via ModelBuilder.mat().
 */
export class MeshBuilder {
	pos: number[] = [];
	color: number[] = [];
	uvs: number[] = [];
	index: number[] = [];
	normals: number[] = [];
	//next unused vertex index
	vertindex = 0;
	parent: ModelBuilder | null;
	constructor(parent: ModelBuilder | null) {
		this.parent = parent;
	}
	/**
	 * Adds a single-sided quad at corner [x,y,z] spanned by edge vectors d1 and d2.
	 * The face normal is the normalized cross product d2 x d1, uvs span the full quad.
	 */
	addParallelogram(col: rgb, [x, y, z]: xyz, [dx1, dy1, dz1]: xyz, [dx2, dy2, dz2]: xyz) {
		this.pos.push(
			x, y, z,
			x + dx1, y + dy1, z + dz1,
			x + dx1 + dx2, y + dy1 + dy2, z + dz1 + dz2,
			x + dx2, y + dy2, z + dz2
		);
		this.color.push(
			...col,
			...col,
			...col,
			...col
		);
		this.uvs.push(
			0, 0,
			1, 0,
			1, 1,
			0, 1
		);
		//cross product d2 x d1, shared by all 4 verts of the flat quad
		let normx = dy2 * dz1 - dy1 * dz2;
		let normy = dz2 * dx1 - dz1 * dx2;
		let normz = dx2 * dy1 - dx1 * dy2;
		let len = Math.hypot(normx, normy, normz);
		normx /= len;
		normy /= len;
		normz /= len;
		this.normals.push(
			normx, normy, normz,
			normx, normy, normz,
			normx, normy, normz,
			normx, normy, normz,
		)
		this.index.push(
			this.vertindex + 0, this.vertindex + 2, this.vertindex + 1,
			this.vertindex + 0, this.vertindex + 3, this.vertindex + 2,
		);
		this.vertindex += 4;
		return this;
	}
	/**
	 * Adds a two-sided quad (the same parallelogram once per winding direction).
	 */
	addDoubleParallelogram(col: rgb, [x, y, z]: xyz, [dx1, dy1, dz1]: xyz, [dx2, dy2, dz2]: xyz) {
		this.addParallelogram(col, [x, y, z], [dx1, dy1, dz1], [dx2, dy2, dz2]);
		this.addParallelogram(col, [x + dx1, y + dy1, z + dz1], [-dx1, -dy1, -dz1], [dx2, dy2, dz2]);
		return this;
	}
	/**
	 * Adds an axis-aligned box centered at [centerx,centery,centerz] built from 6 quads.
	 */
	addCube(col: rgb, [centerx, centery, centerz]: xyz, [sizex, sizey, sizez]: xyz) {
		let x0 = centerx - sizex / 2;
		let y0 = centery - sizey / 2;
		let z0 = centerz - sizez / 2;
		let x1 = x0 + sizex;
		let y1 = y0 + sizey;
		let z1 = z0 + sizez;
		this.addParallelogram(col, [x0, y0, z0], [sizex, 0, 0], [0, sizey, 0]);
		this.addParallelogram(col, [x1, y0, z0], [0, 0, sizez], [0, sizey, 0]);
		this.addParallelogram(col, [x1, y0, z1], [-sizex, 0, 0], [0, sizey, 0]);
		this.addParallelogram(col, [x0, y0, z1], [0, 0, -sizez], [0, sizey, 0]);
		this.addParallelogram(col, [x0, y0, z1], [sizex, 0, 0], [0, 0, -sizez]);
		this.addParallelogram(col, [x0, y1, z0], [sizex, 0, 0], [0, 0, sizez]);
		return this;
	}
	/**
	 * Extrudes a closed polygon outline along `vector`: one quad per outline edge
	 * plus fan-triangulated top and bottom caps. A zero-length vector skips the
	 * side quads and produces only the two caps.
	 */
	addExtrusion(color: rgb, vector: xyz, points: xyz[]) {
		//side faces
		let prevpoint = points[points.length - 1];
		if (Math.hypot(...vector) != 0) {
			for (let a = 0; a < points.length; a++) {
				let point = points[a];
				this.addParallelogram(color, prevpoint, [point[0] - prevpoint[0], point[1] - prevpoint[1], point[2] - prevpoint[2]], vector);
				prevpoint = point;
			}
		}
		if (points.length > 2) {
			//cap normal from the cross product of two polygon edges around points[1]
			//note: was previously broken, it used the x component ([0]) for all three axes
			let dx1 = points[2][0] - points[1][0], dy1 = points[2][1] - points[1][1], dz1 = points[2][2] - points[1][2];
			let dx2 = points[0][0] - points[1][0], dy2 = points[0][1] - points[1][1], dz2 = points[0][2] - points[1][2];
			let normx = dy2 * dz1 - dy1 * dz2;
			let normy = dz2 * dx1 - dz1 * dx2;
			let normz = dx2 * dy1 - dx1 * dy2;
			let len = Math.hypot(normx, normy, normz);
			normx /= len;
			normy /= len;
			normz /= len;
			//top polygon, triangle fan around the first vertex
			let zeroindex = -1;
			let previndex = -1;
			for (let a = 0; a < points.length; a++) {
				let point = points[a];
				this.pos.push(...point);
				this.color.push(...color);
				this.uvs.push(0, 0);
				this.normals.push(normx, normy, normz);
				let index = this.vertindex++;
				if (zeroindex == -1) {
					zeroindex = index;
				} else if (previndex == -1) {
					previndex = index
				} else {
					this.index.push(zeroindex, previndex, index);
					previndex = index;
				}
			}
			//bottom polygon, reversed winding and flipped normal
			zeroindex = -1;
			previndex = -1;
			for (let a = points.length - 1; a >= 0; a--) {
				let point = points[a];
				this.pos.push(...point);
				this.color.push(...color);
				this.uvs.push(0, 0);
				this.normals.push(-normx, -normy, -normz);
				let index = this.vertindex++;
				if (zeroindex == -1) {
					zeroindex = index;
				} else if (previndex == -1) {
					previndex = index
				} else {
					this.index.push(zeroindex, previndex, index);
					previndex = index;
				}
			}
		}
		return this;
	}
	/**
	 * Converts the accumulated arrays into a ModelData submesh for material `matid`.
	 */
	convertSubmesh(matid: number): ModelData["meshes"][number] {
		return {
			attributes: {
				pos: new BufferAttribute(new Float32Array(this.pos), 3),
				color: new BufferAttribute(new Uint8Array(this.color), 3, true),
				texuvs: new BufferAttribute(new Float32Array(this.uvs), 2),
				normals: new BufferAttribute(new Float32Array(this.normals), 3)
			},
			indices: new BufferAttribute(new Uint16Array(this.index), 1),
			hasVertexAlpha: false,
			materialId: matid,
			needsNormalBlending: false,
		}
	}
	//switch to (or create) the builder for a different material on the parent model
	mat(mat: number) {
		return this.parent!.mat(mat);
	}
	//finish the parent model
	convert() {
		return this.parent!.convert();
	}
}
//combines the per-material MeshBuilders into one ModelData, deriving the vertical
//bounds (miny/maxy, both starting at 0) from every vertex position
function meshBuildersToModel(builders: Map<number, MeshBuilder>): ModelData {
	let lowest = 0;
	let highest = 0;
	const meshes: ModelMeshData[] = [];
	for (const [matid, builder] of builders) {
		const submesh = builder.convertSubmesh(matid);
		meshes.push(submesh);
		const positions = submesh.attributes.pos;
		for (let vert = 0; vert < positions.count; vert++) {
			const height = positions.getY(vert);
			if (height < lowest) { lowest = height; }
			if (height > highest) { highest = height; }
		}
	}
	return {
		miny: lowest,
		maxy: highest,
		bonecount: 0,
		skincount: 0,
		meshes
	};
}
/**
 * Collects one MeshBuilder per material id and merges them into a single
 * ModelData via convert().
 */
export class ModelBuilder {
	meshes = new Map<number, MeshBuilder>();
	//returns the builder for the given material, creating it on first use
	mat(mat: number) {
		const existing = this.meshes.get(mat);
		if (existing) { return existing; }
		const created = new MeshBuilder(this);
		this.meshes.set(mat, created);
		return created;
	}
	//merge all material submeshes into one model
	convert() {
		return meshBuildersToModel(this.meshes);
	}
}
//600-unit white cube resting above the origin, used to preview a material on all 6 faces
export const materialPreviewCube = new ModelBuilder()
	.mat(0).addCube(white, [0, 300, 0], [600, 600, 600])
	.convert();

//zero-thickness wall spanning one tile: a white quad (mat 0) on one side and a
//red quad (mat 1) facing the opposite way
export const paperWall = new ModelBuilder()
	.mat(0).addParallelogram(white, [-halftile, 0, halftile], [0, 2 * tile, 0], [0, 0, -tile])
	.mat(1).addParallelogram(red, [-halftile, 0, -halftile], [0, 2 * tile, 0], [0, 0, tile])
	.convert();

//zero-thickness diagonal wall, two white quads with opposite windings
export const paperWallDiag = new ModelBuilder()
	.mat(0).addParallelogram(white, [halftile, 0, halftile], [0, 2 * tile, 0], [-tile, 0, -tile])
	.mat(0).addParallelogram(white, [-halftile, 0, -halftile], [0, 2 * tile, 0], [tile, 0, tile],)
	.convert();
//flat wall outline models used for 2d top-down map rendering
export const topdown2dWallModels = generateTopdown2dWallModels();

function generateTopdown2dWallModels() {
	const thick = tile / 8;
	//height 0 makes the extrusion vector zero-length, so addExtrusion emits only the
	//flat top/bottom polygons and no side faces
	const height = 0;
	const wallvec: xyz = [0, height, 0];
	//NOTE(review): "shortcorner" and "diagonal" use identical outlines, as do
	//"longcorner" and "pillar" -- confirm whether these are intentional placeholders
	return {
		//thin strip along the west edge of the tile
		wall: new ModelBuilder().mat(0).addExtrusion(white, wallvec, [
			[-halftile, 0, -halftile],
			[-halftile, 0, halftile],
			[-halftile + thick, 0, halftile],
			[-halftile + thick, 0, -halftile]
		]).convert(),
		//small square in the northwest corner
		shortcorner: new ModelBuilder().mat(0).addExtrusion(white, wallvec, [
			[-halftile, 0, halftile],
			[-halftile + thick, 0, halftile],
			[-halftile + thick, 0, halftile - thick],
			[-halftile, 0, halftile - thick]
		]).convert(),
		//L-shape covering the west and north edges
		longcorner: new ModelBuilder().mat(0).addExtrusion(white, wallvec, [
			[-halftile + thick, 0, halftile - thick],
			[-halftile + thick, 0, -halftile],
			[-halftile, 0, -halftile],
			[-halftile, 0, halftile],
			[halftile, 0, halftile],
			[halftile, 0, halftile - thick],
		]).convert(),
		pillar: new ModelBuilder().mat(0).addExtrusion(white, wallvec, [
			[-halftile + thick, 0, halftile - thick],
			[-halftile + thick, 0, -halftile],
			[-halftile, 0, -halftile],
			[-halftile, 0, halftile],
			[halftile, 0, halftile],
			[halftile, 0, halftile - thick],
		]).convert(),
		diagonal: new ModelBuilder().mat(0).addExtrusion(white, wallvec, [
			[-halftile, 0, halftile],
			[-halftile + thick, 0, halftile],
			[-halftile + thick, 0, halftile - thick],
			[-halftile, 0, halftile - thick]
		]).convert(),
	}
}

122
src/3d/rt2model.ts Normal file
View File

@@ -0,0 +1,122 @@
import { BufferAttribute } from "three";
import { CacheFileSource } from "../cache";
import { parse } from "../opdecoder";
import { WorkingSubmesh } from "./rt5model";
import { ModelData } from "./rt7model";
/**
 * Parses an RS Classic (RT2) model file into generic ModelData.
 * Classic faces are n-gons with a separate front and back color; each visible
 * side is fan-triangulated into the submesh of its material.
 */
export function parseRT2Model(modelfile: Buffer, source: CacheFileSource) {
	let parsed = parse.classicmodels.read(modelfile, source);

	//count verts/tris per output material so the buffers can be preallocated exactly
	let matusecount = new Map<number, { verts: number, tris: number }>();
	let allocmat = (colorid: number, nverts: number) => {
		//0x7fff marks an invisible face side
		if (colorid == 0x7fff) { return; }
		if (nverts < 3) { return; }
		//high bit set means a plain rgb color, those all share material 0
		let matid = (colorid & 0x8000 ? 0 : colorid);
		let count = matusecount.get(matid);
		if (!count) {
			count = { tris: 0, verts: 0 };
			matusecount.set(matid, count);
		}
		count.verts += nverts;
		count.tris += nverts - 2;
	}
	for (let face of parsed.faces) {
		allocmat(face.color, face.verts.length);
		allocmat(face.backcolor, face.verts.length);
	}

	let matmeshes = new Map<number, WorkingSubmesh & { currentindex: number }>();
	for (let [matid, count] of matusecount.entries()) {
		matmeshes.set(matid, {
			pos: new BufferAttribute(new Float32Array(count.verts * 3), 3),
			normals: new BufferAttribute(new Float32Array(count.verts * 3), 3),
			color: new BufferAttribute(new Uint8Array(count.verts * 3), 3, true),
			texuvs: new BufferAttribute(new Float32Array(count.verts * 2), 2),
			index: new Uint16Array(count.tris * 3),
			currentface: 0,
			currentindex: 0,
			matid: matid
		});
	}

	//appends one vertex of a face to the submesh and returns its vertex index
	let addvert = (group: WorkingSubmesh, verts: number[], polyindex: number, color: number) => {
		let posindex = verts[polyindex];
		if (group.matid == 0) {
			//decode the inverted 5-bit-per-channel color into 0..1 floats
			group.color.setXYZ(
				group.currentface,
				((~color >> 10) & 0x1f) / 31,
				((~color >> 5) & 0x1f) / 31,
				((~color >> 0) & 0x1f) / 31
			);
		} else {
			//textured faces get white vertex color
			group.color.setXYZ(group.currentface, 1, 1, 1);
		}
		//y axis is flipped relative to classic coordinates
		group.pos.setXYZ(
			group.currentface,
			parsed.xpos[posindex],
			-parsed.ypos[posindex],
			parsed.zpos[posindex]
		);
		group.texuvs.setXY(
			group.currentface,
			(polyindex <= 1 ? 0 : 1),
			(polyindex % 2 == 0 ? 0 : 1)
		)
		return group.currentface++;
	}

	for (let face of parsed.faces) {
		if (face.verts.length < 3) { continue; }
		if (face.color != 0x7fff) {
			//front side: fan-triangulate the n-gon, iterating in reverse for winding
			let group = matmeshes.get(face.color & 0x8000 ? 0 : face.color)!;
			let firstvert = addvert(group, face.verts, face.verts.length - 1, face.color);
			let lastvert = addvert(group, face.verts, face.verts.length - 2, face.color);
			for (let i = face.verts.length - 3; i >= 0; i--) {
				let newvert = addvert(group, face.verts, i, face.color);
				group.index[group.currentindex++] = firstvert;
				group.index[group.currentindex++] = lastvert;
				group.index[group.currentindex++] = newvert;
				lastvert = newvert;
			}
		}
		if (face.backcolor != 0x7fff) {
			//back side: same fan in forward order (opposite winding)
			let group = matmeshes.get(face.backcolor & 0x8000 ? 0 : face.backcolor)!;
			let firstvert = addvert(group, face.verts, 0, face.backcolor);
			let lastvert = addvert(group, face.verts, 1, face.backcolor);
			//start at 2, verts 0 and 1 are already consumed as the fan seed
			//(starting at 0 re-added them and overflowed the preallocated buffers)
			for (let i = 2; i < face.verts.length; i++) {
				let newvert = addvert(group, face.verts, i, face.backcolor);
				group.index[group.currentindex++] = firstvert;
				group.index[group.currentindex++] = lastvert;
				group.index[group.currentindex++] = newvert;
				lastvert = newvert;
			}
		}
	}
	//NOTE(review): bounds are left at 0 instead of being computed from the verts -- confirm
	let maxy = 0;
	let miny = 0;
	let r: ModelData = {
		bonecount: 0,
		miny, maxy,
		skincount: 0,
		meshes: [...matmeshes.values()].map(q => ({
			attributes: {
				pos: q.pos,
				color: q.color,
				texuvs: q.texuvs
			},
			hasVertexAlpha: false,
			indices: new BufferAttribute(q.index, 1),
			materialId: q.matid,
			needsNormalBlending: false
		}))
	}
	return r;
}
globalThis.parseRT2Model = parseRT2Model;

View File

@@ -22,13 +22,12 @@ type OldTextureMapping = {
args: oldmodels["texflags"][number]
}
type WorkingSubmesh = {
export type WorkingSubmesh = {
pos: BufferAttribute,
texuvs: BufferAttribute,
color: BufferAttribute,
normals: BufferAttribute,
index: Uint16Array,
originalface: Uint16Array,
currentface: number,
matid: number
}
@@ -104,7 +103,7 @@ function jagexOldNormalSpace(normal_x: number, normal_y: number, normal_z: numbe
}
export function parseRT5Model(modelfile: Buffer, source: CacheFileSource) {
const enabletextures = false;
const enabletextures = false;//TODO fix this flag
let modeldata = parse.oldmodels.read(modelfile, source);
let maxy = 0;
@@ -196,7 +195,6 @@ export function parseRT5Model(modelfile: Buffer, source: CacheFileSource) {
color: new BufferAttribute(new Uint8Array(finalvertcount * colstride), colstride, true),
texuvs: new BufferAttribute(new Float32Array(finalvertcount * 2), 2),
index: new Uint16Array(facecount * 3),
originalface: new Uint16Array(facecount),
currentface: 0,
matid: flipEndian16(matid) - 1//TODO fix endianness elsewhere
};

View File

@@ -1,6 +1,12 @@
import { makeImageData } from "../imgutils";
import { Stream } from "../utils";
//one sub-image of a sprite, positioned at x,y within the full sprite bounds
export type SubImageData = {
	x: number,
	y: number,
	img: ImageData
}
export function parseSubsprite(buf: Buffer, palette: Buffer, width: number, height: number, alpha: boolean, transposed: boolean) {
let imgsize = width * height;
let offset = 0;
@@ -48,7 +54,7 @@ export function parseLegacySprite(metafile: Buffer, buf: Buffer) {
let palettecount = meta.readUByte() - 1;
let palette = meta.readBuffer(palettecount * 3);
let imgs: ImageData[] = [];
let imgs: SubImageData[] = [];
while (!file.eof()) {
let offsetx = meta.readUByte();
let offsety = meta.readUByte();
@@ -57,7 +63,11 @@ export function parseLegacySprite(metafile: Buffer, buf: Buffer) {
let transpose = meta.readUByte() != 0;
let imgbytes = file.readBuffer(width * height);
imgs.push(parseSubsprite(imgbytes, palette, width, height, false, transpose).img);
imgs.push({
x: offsetx,
y: offsety,
img: parseSubsprite(imgbytes, palette, width, height, false, transpose).img
});
}
if (imgs.length != 1) {
@@ -73,7 +83,7 @@ export function parseSprite(buf: Buffer) {
let format = data >> 15;
let count = (data & 0x7FFF);
let spriteimgs: { x: number, y: number, img: ImageData }[] = [];
let spriteimgs: SubImageData[] = [];
if (format == 0) {
let footsize = 7 + 8 * count;
@@ -135,4 +145,58 @@ export function parseSprite(buf: Buffer) {
spriteimgs.push({ x: 0, y: 0, img: makeImageData(imgdata, width, height) });
}
return spriteimgs;
}
/**
 * Parses an uncompressed, 8-bit palette-based TGA file (24bpp rgb palette) into
 * a SubImageData. Pixels matching jagex's magic color 255,0,255 become fully
 * transparent black.
 * @throws if the TGA uses any unsupported format feature
 */
export function parseTgaSprite(file: Buffer) {
	let str = new Stream(file);
	let idlength = str.readUByte();
	let colormaptype = str.readUByte();
	//NOTE(review): datatypecode is read but never validated -- confirm inputs are always type 1
	let datatypecode = str.readUByte();
	//NOTE(review): colormapoffset is ignored when indexing the palette below -- confirm it is always 0
	let colormapoffset = str.readUShort(false);
	let colormaplen = str.readUShort(false);
	let colormapdepth = str.readUByte();
	let originx = str.readUShort(false);
	let originy = str.readUShort(false);
	let width = str.readUShort(false);
	let height = str.readUShort(false);
	let bpp = str.readUByte();
	let imgdescr = str.readUByte();
	str.skip(idlength);//possible text content
	if (colormaptype != 1 || bpp != 8) { throw new Error("only palette based uncompressed TGA supported"); }
	if (colormapdepth != 24) { throw new Error("only 24bpp rgb TGA supported"); }
	if (imgdescr != 0) { throw new Error("no fancy TGA's allowed"); }

	let palette = str.readBuffer(colormaplen * 3);

	let imgdata = new Uint8ClampedArray(width * height * 4);
	for (let y = 0; y < height; y++) {
		for (let x = 0; x < width; x++) {
			let outoffset = x * 4 + y * width * 4;
			let pxindex = str.readUByte();
			let paletteoffset = pxindex * 3;
			//bgr->rgb flip!!
			imgdata[outoffset + 0] = palette[paletteoffset + 2];
			imgdata[outoffset + 1] = palette[paletteoffset + 1];
			imgdata[outoffset + 2] = palette[paletteoffset + 0];
			imgdata[outoffset + 3] = 255;
			//jagex treats 255,0,255 as transparent
			if (imgdata[outoffset + 0] == 255 && imgdata[outoffset + 1] == 0 && imgdata[outoffset + 2] == 255) {
				imgdata[outoffset + 0] = 0;
				imgdata[outoffset + 1] = 0;
				imgdata[outoffset + 2] = 0;
				imgdata[outoffset + 3] = 0;
			}
		}
	}
	//eof is a method, calling it was missing before so the warning could never fire
	if (!str.eof()) {
		console.warn("didn't parse TGA sprite to completion");
	}
	let r: SubImageData = {
		x: originx,
		y: originy,
		img: makeImageData(imgdata, width, height)
	};
	return r;
}

47
src/cache/autocache.ts vendored Normal file
View File

@@ -0,0 +1,47 @@
import { GameCacheLoader } from "./sqlite";
import { WasmGameCacheLoader } from "./sqlitewasm";
import { ClassicFileSource } from "./classicloader";
import { CLIScriptFS, ScriptFS, UIScriptFS } from "../viewer/scriptsui";
import { CacheOpts } from "../cliparser";
//TODO .dat / .dat2
/**
 * Detects which cache format a directory contains by counting file extensions
 * and returns a matching cache file source.
 * Detection precedence on ties: jcache, then dat, then dat2, then jag.
 * @throws when no known cache files are present or no loader matches
 */
export async function selectFsCache(fs: ScriptFS, opts?: CacheOpts) {
	const names = await fs.readDir(".");
	const counts = { jcache: 0, dat: 0, dat2: 0, jag: 0 };
	for (const name of names) {
		const ext = name.match(/\.(\w+)$/)?.[1];
		if (ext == "jcache" || ext == "dat" || ext == "dat2" || ext == "jag") {
			counts[ext]++;
		}
	}
	const best = Math.max(counts.jcache, counts.dat, counts.dat2, counts.jag);
	if (best == 0) { throw new Error("no cache files found in selected directory"); }
	if (best == counts.jcache) {
		//jcache needs either a real disk path or a directory handle
		if (fs instanceof CLIScriptFS) {
			return new GameCacheLoader(fs.dir, !!opts?.writable);
		} else if (fs instanceof UIScriptFS) {
			if (!fs.rootdirhandle) { throw new Error("need fs with hard disk backing"); }
			const cache = new WasmGameCacheLoader();
			await cache.giveFsDirectory(fs.rootdirhandle);
			return cache;
		}
	}
	if (best == counts.dat) {
		//TODO
	}
	if (best == counts.dat2) {
		//TODO
	}
	if (best == counts.jag) {
		const cache = new ClassicFileSource();
		await cache.loadFiles(fs);
		return cache;
	}
	throw new Error("couldn't detect cache type");
}

417
src/cache/classicloader.ts vendored Normal file
View File

@@ -0,0 +1,417 @@
import { CacheFileSource, DirectCacheFileSource } from ".";
import { mapsquare_locations } from "../../generated/mapsquare_locations";
import { mapsquare_tiles } from "../../generated/mapsquare_tiles";
import { mapsquare_underlays } from "../../generated/mapsquare_underlays";
import { objects } from "../../generated/objects";
import { ChunkData, classicChunkSize, MapRect, PlacedMesh, TileGrid, TileGridSource, TileProps } from "../3d/mapsquare";
import { EngineCache } from "../3d/modeltothree";
import { HSL2packHSL, ModelModifications, RGB2HSL, Stream } from "../utils";
import { ScriptFS } from "../viewer/scriptsui";
//group ids that each named classic .jag archive is exposed under (all inside virtual major 0)
export const classicGroups = {
	//same as early rs2
	textures: 6,
	//classic only
	models: 101,
	entity: 102,
	maps: 103,
	land: 104,
	filter: 105,
	jagex: 106,
	media: 107,
	sounds: 108,
	config: 110
} as const;

//reverse lookup: group id -> archive name
const classicGroupNames: Record<number, string> = {};
for (const [groupname, groupid] of Object.entries(classicGroups)) {
	classicGroupNames[groupid] = groupname;
}
/**
 * Cache source backed by the RS Classic <name><version>.jag archive files
 * found in a directory. Files are addressed as major 0 with the group id from
 * classicGroups.
 */
export class ClassicFileSource extends DirectCacheFileSource {
	//per archive basename (eg "config"), every found version sorted best-first
	files: Record<string, { file: Buffer, version: number, mem: boolean }[]> = {};

	constructor() {
		super(false);
	}

	/**
	 * Scans the directory and indexes all matching .jag archives.
	 * .mem (members) archives are currently skipped.
	 */
	async loadFiles(files: ScriptFS) {
		this.files = {};
		let filenames = await files.readDir(".");
		for (let filename of filenames) {
			let namematch = filename.match(/^(?<name>[a-zA-Z]+)(?<version>\d+)\.(?<type>jag|mem)$/);
			if (!namematch) { continue; }
			//TODO support members stuff
			if (namematch.groups!.type == "mem") { continue; }
			let file = await files.readFileBuffer(filename);
			let group = this.files[namematch.groups!.name] ??= [];
			group.push({
				file,
				mem: namematch.groups!.type == "mem",
				version: +namematch.groups!.version
			});
		}
		for (let group of Object.values(this.files)) {
			//sort highest number+members first
			group.sort((a, b) => a.version == b.version ? +a.mem - +b.mem : b.version - a.version);
		}
		//removed leftover debug statement that parsed and dumped the entire
		//classic config to the console on every load
	}

	//returns the best (highest version) archive for the given basename
	getNamedFile(name: string) {
		let group = this.files[name];
		if (!group) { throw new Error(`no cache files for group ${name}`); }
		console.log("loading", name, group[0].version);
		return group[0].file;
	}

	getBuildNr() {
		return 200;//somewhat high rsc build nr
	}

	async getFile(major: number, minor: number) {
		if (major != 0) {
			throw new Error("all files are placed in index 0 for classic caches");
		}
		let name = classicGroupNames[minor];
		if (!name) { throw new Error(`no file for ${major}.${minor}`); }
		return this.getNamedFile(name);
	}
}
/**
 * Builds `count` objects whose properties are produced by the callbacks in
 * `template`. Callbacks are invoked property-by-property (all rows for one key
 * before the next key), which matters because callers read sequentially from
 * shared stream cursors.
 */
function mapprops<T extends Record<string, any>>(count: number, template: { [key in keyof T]: () => T[key] }) {
	const result: T[] = Array.from({ length: count }, () => ({} as T));
	for (const key of Object.keys(template) as (keyof T)[]) {
		const produce = template[key];
		for (const row of result) {
			row[key] = produce();
		}
	}
	return result;
}
export type ClassicConfig = Awaited<ReturnType<typeof classicConfig>>;
// Decodes the classic client's config archive into plain JS tables.
// All strings live in STRING.DAT (NUL-terminated, sequential) and all numbers
// in INTEGER.DAT (big-endian, sequential). Entries are read column-major via
// mapprops, so the order of properties in each template below is significant.
export async function classicConfig(source: CacheFileSource) {
	let stringsbuf = (await source.findSubfileByName(0, classicGroups.config, "STRING.DAT"))!.buffer;
	let intbuf = (await source.findSubfileByName(0, classicGroups.config, "INTEGER.DAT"))!.buffer;
	// Sequential cursor over the string blob; each call consumes one
	// NUL-terminated latin1 string (the terminator itself is not returned).
	let stringcursor = 0;
	let getstring = () => {
		let start = stringcursor;
		while (stringcursor < stringsbuf.length && stringsbuf[stringcursor++] != 0);
		return stringsbuf.toString("latin1", start, stringcursor - 1);
	}
	// Sequential readers over the integer blob (all big-endian).
	let intcursor = 0;
	let getuint = () => { let r = intbuf.readUint32BE(intcursor); intcursor += 4; return r; }
	let getint = () => { let r = intbuf.readInt32BE(intcursor); intcursor += 4; return r; }
	let getushort = () => { let r = intbuf.readUint16BE(intcursor); intcursor += 2; return r; }
	let getubyte = () => intbuf.readUint8(intcursor++);
	let getbool = () => !!getubyte();
	// Each table starts with a ushort entry count followed by its columns.
	let items = mapprops(getushort(), {
		name: getstring,
		examine: getstring,
		command: getstring,
		sprite: getushort,
		price: getuint,
		stackable: getbool,
		special: getbool,
		equip: getushort,
		color: getuint,
		untradeable: getbool,
		member: getbool
	});
	let npcs = mapprops(getushort(), {
		name: getstring,
		examine: getstring,
		command: getstring,
		attack: getubyte,
		strength: getubyte,
		hits: getubyte,
		defence: getubyte,
		hostility: getubyte,
		// 12 animation/body-part ids per npc
		anims: () => new Array(12).fill(null).map(getubyte),
		haircolor: getuint,
		topcolor: getuint,
		bottomcolor: getuint,
		skincolor: getuint,
		width: getushort,
		height: getushort,
		walkmodel: getubyte,
		combatmodel: getubyte,
		combatanim: getubyte
	});
	let textures = mapprops(getushort(), {
		name: getstring,
		subname: getstring
	});
	let anims = mapprops(getushort(), {
		name: getstring,
		color: getuint,
		gendermodel: getubyte,
		has_a: getbool,
		has_f: getbool,
		unk: getubyte
	});
	let objects = mapprops(getushort(), {
		name: getstring,
		examine: getstring,
		command_0: getstring,
		command_1: getstring,
		model: getstring,
		xsize: getubyte,
		zsize: getubyte,
		type: getubyte,
		item_height: getubyte
	});
	let wallobjects = mapprops(getushort(), {
		name: getstring,
		examine: getstring,
		command_0: getstring,
		command_1: getstring,
		height: getushort,
		frontdecor: getint,
		backdecor: getint,
		blocked: getbool,
		invisible: getbool
	});
	let roofs = mapprops(getushort(), {
		height: getubyte,
		texture: getubyte
	});
	let tiles = mapprops(getushort(), {
		decor: getuint,
		type: getubyte,
		blocked: getbool
	});
	// Projectile table has a count but no columns; still must be consumed.
	let projectile = mapprops(getushort(), {
		//empty
	});
	let spells = mapprops(getushort(), {
		name: getstring,
		examine: getstring,
		level: getubyte,
		num_runes: getubyte,
		type: getubyte,
		// variable-length rune lists, each prefixed with its own ubyte count
		runetypes: () => new Array(getubyte()).fill(null).map(getushort),
		runeamounts: () => new Array(getubyte()).fill(null).map(getubyte)
	});
	let prayers = mapprops(getushort(), {
		name: getstring,
		examine: getstring,
		level: getubyte,
		drain: getubyte
	});
	// Sanity log: both cursors should have consumed their buffers completely.
	console.log(`decoded rsc config, ints ${intcursor}/${intbuf.length}, strings ${stringcursor}/${stringsbuf.length}`);
	return { items, npcs, textures, anims, objects, wallobjects, roofs, tiles, projectile, spells, prayers }
}
//classic map chunks are 48x48 tiles (rs2 mapsquares are 64x64)
const chunkSize = 48;
const chunkTileCount = chunkSize * chunkSize;
/**
 * Converts one classic map chunk into rs2-style map data (tiles + wall locs).
 * @param engine cache engine for a classic cache (all files live under major 0)
 * @param rs2x chunk x coordinate in rs2-like world space
 * @param rs2z chunk z coordinate in rs2-like world space
 * @param level floor level, encoded into the chunk file name
 * @returns tile/loc data for the chunk, or null if the chunk doesn't exist
 */
export async function getClassicMapData(engine: EngineCache, rs2x: number, rs2z: number, level: number) {
	//classic chunk file names count down from 100 in both axes
	let chunkx = 100 - rs2x;
	let chunkz = 100 - rs2z;
	let chunknum = `${level}${chunkx.toString().padStart(2, "0")}${chunkz.toString().padStart(2, "0")}`;
	let datfile = await engine.findSubfileByName(0, classicGroups.maps, `M${chunknum}.DAT`);
	//TODO(review): .LOC file is fetched but not parsed yet
	let locfile = await engine.findSubfileByName(0, classicGroups.maps, `M${chunknum}.LOC`);
	let heifile = await engine.findSubfileByName(0, classicGroups.land, `M${chunknum}.HEI`);
	//a missing height file means this chunk doesn't exist in the cache
	if (!heifile) { return null; }
	let mappedtiles: mapsquare_tiles["tiles"] = new Array(chunkTileCount);
	//classic tiles are stored mirrored in both axes compared to rs2 ordering
	let convertTileIndex = (i: number) => {
		const last = classicChunkSize - 1;
		let x = last - (i / classicChunkSize | 0);
		let z = last - i % classicChunkSize;
		return { index: x * classicChunkSize + z, x, z };
	}
	let hei = new Stream(heifile.buffer);
	//based on https://github.com/2003scape/rsc-landscape/blob/master/src/sector.js#L138
	//RLE decode: a byte <128 is a literal (and becomes the new repeat value),
	//a byte >=128 repeats the previous literal (val-128) times
	let lastVal = 0;
	let terrainHeight: number[] = [];
	let terrainColor: number[] = [];
	for (let tile = 0; tile < chunkTileCount;) {
		let val = hei.readUByte();
		if (val < 128) {
			terrainHeight[tile++] = val & 0xff;
			lastVal = val;
		}
		if (val >= 128) {
			for (let i = 0; i < val - 128; i++) {
				terrainHeight[tile++] = lastVal & 0xff;
			}
		}
	}
	for (let tile = 0; tile < chunkTileCount;) {
		let val = hei.readUByte();
		if (val < 128) {
			terrainColor[tile++] = val & 0xff;
			lastVal = val;
		}
		if (val >= 128) {
			for (let i = 0; i < val - 128; i++) {
				terrainColor[tile++] = lastVal & 0xff;
			}
		}
	}
	//second pass: the decoded values are deltas accumulated along the x axis
	let lastHeight = 64;
	let lastColor = 35;
	for (let tileY = 0; tileY < chunkSize; tileY++) {
		for (let tileX = 0; tileX < chunkSize; tileX++) {
			const index = tileX * chunkSize + tileY;
			lastHeight = terrainHeight[index] + (lastHeight & 0x7f);
			let height = (lastHeight * 2) & 0xff;
			//NOTE(review): due to operator precedence this masks the whole sum,
			//unlike the height line above which masks only the accumulator — confirm intended
			lastColor = terrainColor[index] + lastColor & 0x7f;
			terrainColor[index] = (lastColor * 2) & 0xff;
			mappedtiles[convertTileIndex(index).index] = {
				flags: 0,
				height: height / 4,
				overlay: null,
				settings: null,
				shape: null,
				underlay: lastColor + 1 //1 offset as per rs2 spec
			}
		}
	}
	if (!hei.eof()) {
		throw new Error("unexpected height file length");
	}
	//the .DAT file holds 4 wall layers per tile: horizontal, vertical and two diagonals
	let locs: mapsquare_locations["locations"] = [];
	if (datfile) {
		let dat = new Stream(datfile.buffer);
		let walls = dat.readBuffer(chunkTileCount * 4);
		for (let i = 0; i < chunkTileCount; i++) {
			let hor = walls[chunkTileCount * 0 + i];
			let ver = walls[chunkTileCount * 1 + i];
			let diag1 = walls[chunkTileCount * 2 + i];
			let diag2 = walls[chunkTileCount * 3 + i];
			let pos = convertTileIndex(i);
			//wall ids are 1-based in the file, 0 means no wall
			if (hor) {
				locs.push({
					id: hor - 1,
					uses: [{ x: pos.x - 1, y: pos.z - 1, plane: level, rotation: 2, type: 0, extra: null }]
				});
			}
			if (ver) {
				locs.push({
					id: ver - 1,
					uses: [{ x: pos.x - 1, y: pos.z - 1, plane: level, rotation: 1, type: 0, extra: null }]
				});
			}
			if (diag1) {
				locs.push({
					id: diag1 - 1,
					uses: [{ x: pos.x - 1, y: pos.z - 1, plane: level, rotation: 0, type: 9, extra: null }]
				});
			}
			if (diag2) {
				locs.push({
					id: diag2 - 1,
					uses: [{ x: pos.x - 1, y: pos.z - 1, plane: level, rotation: 1, type: 9, extra: null }]
				});
			}
		}
	}
	let rect: MapRect = { x: rs2x * chunkSize, z: rs2z * chunkSize, xsize: chunkSize, zsize: chunkSize };
	return {
		rect,
		tiles: mappedtiles,
		locs,
		levels: 1
	};
}
function intToMods(int: number) {
	// Magic sentinel used by classic data to mean fully transparent/hidden.
	if (int == 12345678) {
		//TODO should be transparent/hidden
		return { material: 0, color: HSL2packHSL(...RGB2HSL(0, 0, 0)), invisible: true };
	}
	if (int < 0) {
		// Negative values encode an rgb555 color as -(color+1).
		const packed = -int - 1;
		const red = (packed >> 10) & 0x1f;
		const green = (packed >> 5) & 0x1f;
		const blue = packed & 0x1f;
		return { material: 0, color: HSL2packHSL(...RGB2HSL(red, green, blue)), invisible: false };
	}
	// Non-negative values select a material/texture id directly.
	return { material: int, color: 0, invisible: false };
}
export function classicIntToModelMods(int1: number, int2: number) {
	// Translate the two classic decor ints into rs2-style replacement tables
	// for model slots 0 and 1.
	const primary = intToMods(int1);
	const secondary = intToMods(int2);
	const result: objects = {
		color_replacements: [
			[0, primary.color],
			[1, secondary.color]
		],
		material_replacements: [
			[0, primary.material + 1],
			[1, secondary.material + 1]
		]
	};
	// Hide the model entirely when either slot uses the invisible sentinel.
	if (primary.invisible || secondary.invisible) {
		result.models = null;
	}
	return result;
}
export function classicUnderlays() {
	// Reconstructs the 256-entry classic terrain palette as rs2-style
	// underlays: four 64-step gradient bands.
	let underlays: mapsquare_underlays[] = [];
	const pushBand = (gen: (i: number) => [number, number, number]) => {
		for (let i = 0; i < 64; i++) {
			underlays.push({ color: gen(i) });
		}
	};
	pushBand(i => [255 - i * 4, 255 - ((i * 1.75) | 0), 255 - i * 4]);
	pushBand(i => [i * 3, 144, 0]);
	pushBand(i => [192 - ((i * 1.5) | 0), 144 - ((i * 1.5) | 0), 0]);
	pushBand(i => [96 - ((i * 1.5) | 0), 48 + ((i * 1.5) | 0), 0]);
	return underlays;
}

4
src/cache/index.ts vendored
View File

@@ -1,5 +1,5 @@
import { crc32, crc32_backward, forge_crcbytes } from "../libs/crc32util";
import { cacheConfigPages, cacheMajors, lastLegacyBuildnr, latestBuildNumber } from "../constants";
import { cacheConfigPages, cacheMajors, lastClassicBuildnr, lastLegacyBuildnr, latestBuildNumber } from "../constants";
import { parse } from "../opdecoder";
import { cacheFilenameHash } from "../utils";
import { parseLegacyArchive } from "./legacycache";
@@ -342,7 +342,7 @@ export abstract class DirectCacheFileSource extends CacheFileSource {
async getFileArchive(meta: CacheIndex) {
let file = await this.getFile(meta.major, meta.minor, meta.crc);
if (this.getBuildNr() <= lastLegacyBuildnr) {
return parseLegacyArchive(file, meta.major, meta.minor);
return parseLegacyArchive(file, meta.major, this.getBuildNr() <= lastClassicBuildnr);
} else {
return unpackBufferArchive(file, meta.subindices, meta.subnames);
}

View File

@@ -1,7 +1,9 @@
import { SubFile } from "./index";
import { CacheFileSource, SubFile } from "./index";
import { EngineCache } from "../3d/modeltothree";
import { cacheFilenameHash, Stream } from "../utils";
import { legacybz2 } from "./compression";
import { parseLegacySprite, parseTgaSprite } from "../3d/sprite";
import { makeImageData } from "../imgutils";
export const legacyMajors = {
data: 0,//mostly index 2 in dat2
@@ -9,7 +11,7 @@ export const legacyMajors = {
oldframebases: 2,//index 0 in dat2
//3? has 636 files sprites?
map: 4// index 5 in dat2
}
} as const;
export const legacyGroups = {
//1 login
@@ -18,11 +20,11 @@ export const legacyGroups = {
sprites: 4,
index: 5,
textures: 6
}
} as const;
//pre-2006 caches
export function parseLegacyArchive(file: Buffer, major: number, minor: number): SubFile[] {
if (major != 0) {
export function parseLegacyArchive(file: Buffer, major: number, isclassic: boolean): SubFile[] {
if (!isclassic && major != 0) {
return [{
buffer: file,
fileid: 0,
@@ -32,10 +34,11 @@ export function parseLegacyArchive(file: Buffer, major: number, minor: number):
}];
}
let stream = new Stream(file);
let compressedlen = stream.readTribyte();
let len = stream.readTribyte();
let compressedlen = stream.readTribyte();
if (compressedlen != len) {
stream = new Stream(legacybz2(stream.readBuffer()));
if (stream.bytesLeft() != len) { throw new Error("decompress failed"); }
}
let files: SubFile[] = [];
@@ -49,6 +52,7 @@ export function parseLegacyArchive(file: Buffer, major: number, minor: number):
let subfile = filestream.readBuffer(subcomplen);
if (subdecomplen != subcomplen) {
subfile = legacybz2(subfile);
if (subfile.length != subdecomplen) { throw new Error("decompress failed"); }
}
files.push({
fileid: i,
@@ -60,7 +64,6 @@ export function parseLegacyArchive(file: Buffer, major: number, minor: number):
}
return files;
}
globalThis.parseLegacyArchive = parseLegacyArchive;
type Mapinfo = Map<number, { map: number, loc: number, crc: number, version: number }>;
type LegacyKeys = "items" | "objects" | "overlays" | "underlays" | "npcs" | "spotanims";
@@ -128,3 +131,50 @@ function readLegacySubGroup(group: SubFile[], groupname: string) {
}
return files;
}
async function getLegacyImage(source: CacheFileSource, name: string, usetga) {
let filename = `${name}.${usetga ? "tga" : "dat"}`;
let spritefile = await source.findSubfileByName(legacyMajors.data, legacyGroups.textures, filename);
if (usetga) {
return parseTgaSprite(spritefile!.buffer);
} else {
return parseLegacyImageFile(source, spritefile!.buffer);
}
}
export async function parseLegacyImageFile(source: CacheFileSource, buf: Buffer) {
let metafile = await source.findSubfileByName(legacyMajors.data, legacyGroups.textures, "INDEX.DAT");
return parseLegacySprite(metafile!.buffer, buf);
}
export async function combineLegacyTexture(engine: EngineCache, name: string, subname: string, useTga: boolean) {
let img = await getLegacyImage(engine, name, useTga);
if (!subname) {
return img;
}
let subimg = await getLegacyImage(engine, subname, useTga);
if (subimg.img.width + subimg.x > img.img.width || subimg.img.height + subimg.y > img.img.height) {
throw new Error("tried to overlay image outside of dest bounds");
}
let combined = makeImageData(img.img.data.slice(), img.img.width, img.img.height);
for (let srcy = 0; srcy < subimg.img.height; srcy++) {
for (let srcx = 0; srcx < subimg.img.width; srcx++) {
let srci = (srcy * subimg.img.width + srcx) * 4;
let dsti = ((srcy + subimg.y) * img.img.width + (srcx + subimg.x)) * 4;
let subr = subimg.img.data[srci + 0];
let subg = subimg.img.data[srci + 1];
let subb = subimg.img.data[srci + 2];
let suba = subimg.img.data[srci + 3];
let forcetrans = (subr == 0 && subg == 255 && subb == 0 && suba == 255);
let usesub = (suba == 255);
combined.data[dsti + 0] = (forcetrans ? 0 : usesub ? subr : img.img.data[dsti + 0]);
combined.data[dsti + 1] = (forcetrans ? 0 : usesub ? subg : img.img.data[dsti + 1]);
combined.data[dsti + 2] = (forcetrans ? 0 : usesub ? subb : img.img.data[dsti + 2]);
combined.data[dsti + 3] = (forcetrans ? 0 : usesub ? suba : img.img.data[dsti + 3]);
}
}
return { x: img.x, y: img.y, img: combined };
}

View File

@@ -76,6 +76,19 @@ export class WasmGameCacheLoader extends cache.CacheFileSource {
Object.assign(this.dbfiles, blobs);
this.sendWorker({ type: "blobs", blobs });
}
async giveFsDirectory(dir: FileSystemDirectoryHandle) {
let files: Record<string, Blob> = {};
if (await dir.queryPermission() != "granted") {
console.log("tried to open cache without permission");
return null;
}
// await source.handle.requestPermission();
for await (let file of dir.values()) {
if (file.kind == "file") {
files[file.name] = await file.getFile();
}
}
}
async getFile(major: number, minor: number, crc?: number) {
if (major == cacheMajors.index) { return this.getIndexFile(minor); }

339
src/cache/updater.ts vendored
View File

@@ -1,339 +0,0 @@
import { CacheDownloader } from "./downloader";
import * as fs from "fs";
import * as sqlite3 from "sqlite3";//.verbose();
import { CacheIndex, CacheFileSource, unpackBufferArchive, indexBufferToObject } from "./index";
import { ParsedTexture } from "../3d/textures";
import { cacheMajors } from "../constants";
var cachedir: string;
var progressDelay = 20;
type DatabaseInst = sqlite3.Database & {
statements: {
insert: sqlite3.Statement,
update: sqlite3.Statement
}
}
export type SaveFileArguments = {
singular: string,
plural: string,
folder: string,
commitFrequency?: number,
fileExtension: string,
bufferCallback(staticArguments: SaveFileArguments, recordIndex: number, buffer: Buffer): void,
//converts an fs file back to cache file format
hydrateFsFile?(staticArguments: SaveFileArguments, recordIndex: number, buffer: Buffer): Buffer
}
type CacheUpdateHook = {
callback: (downloader: CacheDownloader, index: CacheIndex & { isNew: boolean }, db: DatabaseInst, db_state: DatabaseState, staticArguments: SaveFileArguments) => Promise<void>;
staticArguments: SaveFileArguments
}
type DatabaseState = { [major: number]: { [minor: number]: { version: number, crc: number } } };
function commit(db: DatabaseInst, major: number, minor: number, version: number, crc: number, isNew?: boolean) {
if (isNew) db.statements.insert.run(major, minor, version, crc);
else db.statements.update.run(version, crc, major, minor);
}
//the pogress event was causing about 50% of all cpu usage during update!!
let progressDebounceInterval: any = null;
let queuedProcessMsg: any = null;
function progress(message: string, value: number | null = null, max: number | null = null) {
let msg: any;
if (value !== null || max !== null) msg = { "message": message, "value": value, "max": max };
else msg = { "message": message };
if (!progressDebounceInterval) {
progressDebounceInterval = setInterval(progressDebounceTick, progressDelay);
events.emit("update-progress", msg);
} else {
queuedProcessMsg = msg;
}
}
function progressDebounceTick() {
if (queuedProcessMsg) {
events.emit("update-progress", queuedProcessMsg);
queuedProcessMsg = null;
} else {
clearInterval(progressDebounceInterval);
progressDebounceInterval = 0;
}
}
function prepareDatabase() {
var db: DatabaseInst = new sqlite3.Database(`${cachedir}/db.sql`, sqlite3.OPEN_READWRITE | sqlite3.OPEN_CREATE, (e) => { if (e) throw e; }) as any;
return new Promise<{ db: DatabaseInst, db_state: DatabaseState }>((resolve, reject) => {
// Create index if it's missing
db.get("SELECT name FROM sqlite_master WHERE type='table' AND name='index';", (e, row) => {
if (e) throw e;
db.serialize(() => {
if (!row)
db.run("CREATE TABLE 'index' (" +
"major INT NOT NULL, " +
"minor INT NOT NULL, " +
"version TIMESTAMP(0) NOT NULL, " +
"crc INT NOT NULL, " +
"PRIMARY KEY (major, minor));", (e) => { if (e) throw e; });
// Create our db_state and resolve the promise
db.all("SELECT major, minor, version, crc FROM 'index'", (e, rows) => {
if (e) throw e;
var db_state: DatabaseState = {};
for (var i = 0; i < rows.length; ++i) {
var row = rows[i];
if (!db_state[row.major]) db_state[row.major] = {};
db_state[row.major][row.minor] = { "version": row.version, "crc": row.crc };
}
db.statements = {
insert: db.prepare("INSERT INTO 'index' (major, minor, version, crc) VALUES (?, ?, ?, ?);"),
update: db.prepare("UPDATE 'index' SET version=?, crc=? WHERE major=? AND minor=?;")
}
resolve({ db, db_state });
});
});
});
});
}
function prepareFolder(dir: string) {
if (!fs.existsSync(`${cachedir}/${dir}`)) {
fs.mkdirSync(`${cachedir}/${dir}`);
return true;
}
return false;
}
function findMissingIndices<T extends CacheIndex>(db_state: DatabaseState, indices: T[]) {
var pile: (T & { isNew: boolean })[] = [];
// Loop through our indices and check the database for updates
for (var i in indices) {
var index = indices[i];
var row = (db_state[index.major] ? db_state[index.major] : {})[index.minor];
// If row doesn't exist, or the version or crc are incorrect, place it into the pile
if (row == null || row.version != index.version || row.crc != index.crc) {
pile.push({ ...index, isNew: (row == null) });
}
}
return pile;
}
async function updateRecords(downloader: CacheDownloader, index: CacheIndex & { isNew: boolean }, db: DatabaseInst, db_state: DatabaseState, staticArguments: SaveFileArguments) {
var singular = staticArguments.singular;
var plural = staticArguments.plural;
var folder = staticArguments.folder;
var commitFrequency = (staticArguments.commitFrequency || 128);
progress(`Finding ${singular} updates...`);
prepareFolder(folder);
var file = await downloader.getFile(index.major, index.minor, index.crc);
var allRecordIndices = indexBufferToObject(index.minor, file, downloader);
var recordIndices = findMissingIndices(db_state, allRecordIndices);
var newRecords = 0;
var updatedRecords = 0;
var n = 0;
for (var i = 0; i < recordIndices.length; ++i) {
if ((n % commitFrequency) == 0) db.run("BEGIN TRANSACTION");
var recordIndex = recordIndices[i];
var buffer = await downloader.getFile(recordIndex.major, recordIndex.minor, recordIndex.crc);
var subbuffers = unpackBufferArchive(buffer, recordIndex.subindices, recordIndex.subnames);
for (var j = 0; j < recordIndex.subindices.length; ++j, ++n) {
var recordSubindex = recordIndex.subindices[j];
progress(`Downloading ${singular} ${recordSubindex}`, i + (j / recordIndex.subindices.length), recordIndices.length);
staticArguments.bufferCallback(staticArguments, recordSubindex, subbuffers[j].buffer);
if (recordIndex.isNew) newRecords++; else updatedRecords++; // Just verbose so the CLI looks professional af
}
// Add it to the index so we know we've processed it
commit(db, recordIndex.major, recordIndex.minor, recordIndex.version, recordIndex.crc, recordIndex.isNew);
if ((n % commitFrequency) == commitFrequency - 1) await new Promise<void>((resolve, reject) => { db.run("COMMIT", () => { resolve(); }); });
}
// v
//if ((n % commitFrequency) != commitFrequency - 1) await new Promise((resolve, reject) => { db.run("COMMIT", () => { resolve(); }); }); // If we have any uncommitted transactions
process.stdout.write((`\rDownloaded ${newRecords} new ${plural}, updated ${updatedRecords} ${plural}`).padEnd(54, " ") + "\n");
// Finished updating models, commit to the database
commit(db, index.major, index.minor, index.version, index.crc, index.isNew);
await new Promise<void>((resolve, reject) => { db.run("COMMIT", () => { resolve(); }); });
}
var aysncWriteCount = 0;
function dumpBufferToFile(staticArguments: SaveFileArguments, recordIndex: number, buffer: Buffer) {
let filename = `${cachedir}/${staticArguments.folder}/${recordIndex}.${staticArguments.fileExtension}`;
//can actually overload node's async file writer...
if (aysncWriteCount < 100) {
aysncWriteCount++;
fs.promises.writeFile(filename, buffer).finally(() => aysncWriteCount--);
} else {
fs.writeFileSync(filename, buffer);
}
}
export const updateCallbacks: { [major: number]: { [minor: number]: CacheUpdateHook } } = {
"255": {
"16": {
"callback": updateRecords, "staticArguments": {
"singular": "object", "plural": "objects", "folder": "objects", "commitFrequency": 1,
"fileExtension": "rsobj",
"bufferCallback": dumpBufferToFile
}
},
"18": {
"callback": updateRecords, "staticArguments": {
"singular": "NPC", "plural": "NPCs", "folder": "npcs", "commitFrequency": 1,
"fileExtension": "rsnpc",
"bufferCallback": dumpBufferToFile
}
},
"19": {
"callback": updateRecords, "staticArguments": {
"singular": "item", "plural": "items", "folder": "items", "commitFrequency": 1,
"fileExtension": "rsitem",
"bufferCallback": dumpBufferToFile
}
},
"26": {
"callback": updateRecords, "staticArguments": {
"singular": "material", "plural": "materials", "folder": "materials",
"fileExtension": "jmat",
"bufferCallback": dumpBufferToFile
}
},
"47": {
"callback": updateRecords, "staticArguments": {
"singular": "model", "plural": "models", "folder": "models",
"fileExtension": "ob3",
"bufferCallback": dumpBufferToFile
}
},
"53": {
"callback": updateRecords, "staticArguments": {
"singular": "texture", "plural": "textures", "folder": "textures",
"fileExtension": "png",
"bufferCallback": (staticArguments, record, buffer) => {
let texture = new ParsedTexture(buffer, true);//TODO destructive loss of alpha here
if (texture.type != "png") { throw new Error("png image expected"); }
//TODO actually extract all subimgs/mipmaps
fs.writeFile(`${cachedir}/${staticArguments.folder}/${record}.png`, texture.imagefiles[0], () => { });
},
hydrateFsFile: (staticarguments, record, buffer) => {
//TODO read all subimgs/mipmaps
// let texture = ParsedTexture.fromFile([buffer]);
// return texture.fullfile;
return null!;//TODO remove
}
}
}
}
};
var events = {
"emit": (event: string, ...args: any[]) => {
if (event in events) {
var eventList = events[event];
for (var i = 0; i < eventList.length; ++i)
eventList[i](args);
}
},
"add": (event: string, callback) => {
if (!(event in events)) events[event] = [];
events[event].push(callback);
}
};
export async function run(cachedirarg: string, progressDebounceDelay?: number) {
if (progressDebounceDelay) { progressDelay = progressDebounceDelay; }
cachedir = cachedirarg;
fs.mkdirSync(cachedir, { recursive: true });
progress("Preparing database...");
var { db, db_state } = await prepareDatabase();
progress("Connecting to servers...");
var downloader = new CacheDownloader();
progress("Downloading index...");
let indices = downloader.getCacheIndex(cacheMajors.index);
progress("Finding updates...");
for (let i in indices) {
if (!(i.toString() in updateCallbacks["255"])) continue;
var pile = findMissingIndices(db_state, [indices[i]]);
for (let i = 0; i < pile.length; ++i) {
var index = pile[i];
var major = index.major.toString();
var minor = index.minor.toString();
if (major in updateCallbacks) {
if (minor in updateCallbacks[major]) {
let hook = updateCallbacks[major][minor] as CacheUpdateHook;
await hook.callback(downloader, index, db, db_state, hook.staticArguments);
}
}
}
}
downloader.close();
//prevent extra progress events from firing after completion
if (progressDebounceInterval) {
clearInterval(progressDebounceInterval);
progressDebounceInterval = 0;
}
queuedProcessMsg = null;
//not sure if these are necessary
db.statements.insert.finalize((e) => { if (e) throw e; });
db.statements.update.finalize((e) => { if (e) throw e; });
db.close();
}
export function on(event: string, callback) {
events.add(event, callback);
}
export const fileSource = new class extends CacheFileSource {
getFile(major, minor) {
//the original (packed) files are lost, would have to rebuild it completely
throw new Error("not implemented");
return null as any;//return to make typescript shut up
}
getCacheIndex(major) {
throw new Error("not implemented");
return null as any;
}
getFileArchive(index) {
	//the updater script already places the subfiles in separate files
//would have to find out which subfile belong to which minor from the sqlite database
throw new Error("not implemented");
return null as any;
}
//TODO not sure if this is still correct
async getFileById(major, fileid) {
let meta = updateCallbacks[255][major].staticArguments as SaveFileArguments;
if (!meta) { throw new Error("this file source does not have this file major index"); }
let filename = `${cachedir}/${meta.folder}/${fileid}.${meta.fileExtension}`;
let file = await fs.promises.readFile(filename);
if (meta.hydrateFsFile) {
file = meta.hydrateFsFile(meta, fileid, file);
}
return file;
}
};

View File

@@ -3,32 +3,22 @@ import { ArgParser } from "cmd-ts/dist/cjs/argparser";
import { CacheFileSource, CallbackCacheLoader } from "./cache";
import { CacheDownloader } from "./cache/downloader";
import * as updater from "./cache/updater";
import { GameCacheLoader } from "./cache/sqlite";
import { RawFileLoader } from "./cache/rawfiles";
import { Openrs2CacheSource } from "./cache/openrs2loader";
import type { MapRect } from "./3d/mapsquare";
import { stringToFileRange, stringToMapArea } from "./utils";
import { selectFsCache } from "./cache/autocache";
import { CLIScriptFS } from "./viewer/scriptsui";
export type Rect = { x: number, y: number, width: number, height: number };
let loadingIndicator = {
interval: 1000,
start: async () => { },
progress: (d: any) => { console.log(`${d.message}${d.max ? ` ${d.value}/${d.max}` : ""}`) },
done: async () => { console.log("done"); }
}
//expose here so we can override it for ui
export function setLoadingIndicator(ind: typeof loadingIndicator) {
loadingIndicator = ind;
}
export type CacheOpts = { writable?: boolean } | undefined;
function cacheSourceFromString(str: string) {
let [mode, ...argparts] = str.split(":",);
let arg = argparts.join(":");
return async (opts?: { writable?: boolean }) => {
return async (opts: CacheOpts) => {
switch (mode) {
case "live":
return new CacheDownloader();
@@ -38,14 +28,9 @@ function cacheSourceFromString(str: string) {
throw new Error("the 'global' cache source requires a callback function with name <arg> to be exposed on the global scope");
}
return new CallbackCacheLoader(fn, false);
case "local":
updater.on("update-progress", loadingIndicator.progress.bind(loadingIndicator));
await loadingIndicator.start();
await updater.run(arg || "cache", loadingIndicator.interval);
await loadingIndicator.done();
return updater.fileSource;
case "cache":
return new GameCacheLoader(arg, !!opts?.writable);
let fs = new CLIScriptFS(arg);
return selectFsCache(fs, opts);
case "files":
return new RawFileLoader(arg, 0);
case "openrs":

View File

@@ -71,4 +71,7 @@ export const cacheConfigPages = {
spotanim_old: 13
}
export const lastLegacyBuildnr = 377;
export const lastLegacyBuildnr = 377;
//unclear if there ended up being overlap with (public) rs2 since this was 12 years after rs2 release
//first known rs2 is 254
export const lastClassicBuildnr = 235;

View File

@@ -1,13 +1,11 @@
import { disposeThreeTree, ThreeJsRenderer } from "../viewer/threejsrender";
import { ParsemapOpts, TileGrid, ChunkData, ChunkModelData, MapRect, worldStride, CombinedTileGrid, squareSize } from "../3d/mapsquare";
import { ParsemapOpts, MapRect, worldStride, CombinedTileGrid } from "../3d/mapsquare";
import { CacheFileSource } from "../cache";
import type { Material, Object3D } from "three";
import { svgfloor } from "./svgrender";
import { cacheMajors } from "../constants";
import { parse } from "../opdecoder";
import { canvasToImageFile, flipImage, isImageEqual, pixelsToImageFile } from "../imgutils";
import * as THREE from "three";
import { EngineCache, ThreejsSceneCache } from "../3d/modeltothree";
import { crc32addInt, DependencyGraph, getDependencies } from "../scripts/dependencies";
import { CLIScriptOutput, ScriptOutput } from "../viewer/scriptsui";
@@ -658,10 +656,10 @@ export async function renderMapsquare(engine: EngineCache, config: MapRender, re
let grid = new CombinedTileGrid(chunks.map(ch => ({
src: ch.chunk.loaded!.grid,
rect: {
x: ch.chunk.rect.x * squareSize,
z: ch.chunk.rect.z * squareSize,
xsize: ch.chunk.rect.xsize * squareSize,
zsize: ch.chunk.rect.zsize * squareSize,
x: ch.chunk.rect.x * ch.chunk.loaded!.chunkSize,
z: ch.chunk.rect.z * ch.chunk.loaded!.chunkSize,
xsize: ch.chunk.rect.xsize * ch.chunk.loaded!.chunkSize,
zsize: ch.chunk.rect.zsize * ch.chunk.loaded!.chunkSize,
}
})));
let locs = chunks.flatMap(ch => ch.chunk.loaded!.chunks.flatMap(q => q.locs));
@@ -696,8 +694,8 @@ export async function renderMapsquare(engine: EngineCache, config: MapRender, re
hash: depcrc,
async run() {
let chunks = await renderer.setArea(x, z, 1, 1);
let { grid, modeldata } = await chunks[0].chunk.model;
let res = await chunkSummary(engine, grid, modeldata, { x: x * squareSize, z: z * squareSize, xsize: squareSize, zsize: squareSize });
let { grid, modeldata, chunkSize } = await chunks[0].chunk.model;
let res = await chunkSummary(engine, grid, modeldata, { x: x * chunkSize, z: z * chunkSize, xsize: chunkSize, zsize: chunkSize });
let textual = prettyJson(res, { indent: "\t" });
return { file: () => Promise.resolve(Buffer.from(textual, "utf8")) };
}
@@ -718,10 +716,10 @@ export async function renderMapsquare(engine: EngineCache, config: MapRender, re
let grid = new CombinedTileGrid(chunks.map(ch => ({
src: ch.chunk.loaded!.grid,
rect: {
x: ch.chunk.rect.x * squareSize,
z: ch.chunk.rect.z * squareSize,
xsize: ch.chunk.rect.xsize * squareSize,
zsize: ch.chunk.rect.zsize * squareSize,
x: ch.chunk.rect.x * ch.chunk.loaded!.chunkSize,
z: ch.chunk.rect.z * ch.chunk.loaded!.chunkSize,
xsize: ch.chunk.rect.xsize * ch.chunk.loaded!.chunkSize,
zsize: ch.chunk.rect.zsize * ch.chunk.loaded!.chunkSize,
}
})));
let file = drawCollision(grid, area, thiscnf.level, thiscnf.pxpersquare, 1);

View File

@@ -1,50 +0,0 @@
declare var L: typeof import("leaflet");
var chunkoffsetx = 16;
var chunkoffsetz = 48;
var mapsizex = 100;
var mapsizez = 200;
var chunksize = 64;
var lowestmip = 10;
var pxpertile = 32;
var mip0pxpertile = 1;
var crs = L.CRS.Simple;
//@ts-ignore
crs.transformation = L.transformation(
mip0pxpertile, chunkoffsetx * mip0pxpertile,
-mip0pxpertile, (mapsizez * chunksize + chunkoffsetz) * mip0pxpertile
);
var mymap = L.map('map', {
crs: crs,
minZoom: -5,
maxZoom: 7,
//maxBounds: [[0, 0], [mapsizez * chunksize * mip0pxpertile, mapsizex * chunksize * mip0pxpertile]]
});
//@ts-ignore
mymap.on("click", e => console.log(e.latlen));
const tilebase = "../../cache/map5";
L.tileLayer(tilebase + "/full/{z}/{x}-{y}.png", {
attribution: 'Skillbert',
tileSize: 512,
maxNativeZoom: 5,
minZoom: -5
}).addTo(mymap);
let layers: Record<string, any> = {};
for (let floor = 2; floor >=0; floor--) {
layers["indoors-" + floor] = L.tileLayer(tilebase + "/indoors-" + floor + "/{z}/{x}-{y}.png", {
attribution: 'Skillbert',
tileSize: 512,
maxNativeZoom: 5,
minZoom: -5
});
}
mymap.setView([3200, 3000], 4);
L.control.layers(undefined, layers).addTo(mymap);
// export { };

View File

@@ -1,4 +1,4 @@
import { TileGridSource, ChunkData, squareLevels, squareSize, TileVertex, WorldLocation, MapRect, TileProps } from "../3d/mapsquare";
import { TileGridSource, squareLevels, WorldLocation, MapRect, TileProps } from "../3d/mapsquare";
import { parseSprite } from "../3d/sprite";
import { cacheMajors } from "../constants";
import { EngineCache } from "../3d/modeltothree";

View File

@@ -0,0 +1,21 @@
["struct",
["vertexcount","ushort"],
["facecount","ushort"],
["xpos",["array",["ref","vertexcount"],"short"]],
["ypos",["array",["ref","vertexcount"],"short"]],
["zpos",["array",["ref","vertexcount"],"short"]],
["faces",["chunkedarray",["ref","facecount"],[
["$nverts","ubyte"]
],[
["color","ushort"]
],[
["backcolor","ushort"]
],[
["intensity","ubyte"]
],[
["verts",["array",["ref","$nverts"],["match",["ref","vertexcount"],{
"<256":"ubyte",
"other":"ushort"
}]]]
]]]
]

View File

@@ -100,6 +100,7 @@ function allParsers() {
animgroupConfigs: FileParser.fromJson<import("../generated/animgroupconfigs").animgroupconfigs>(require("./opcodes/animgroupconfigs.jsonc")),
models: FileParser.fromJson<import("../generated/models").models>(require("./opcodes/models.jsonc")),
oldmodels: FileParser.fromJson<import("../generated/oldmodels").oldmodels>(require("./opcodes/oldmodels.jsonc")),
classicmodels: FileParser.fromJson<import("../generated/classicmodels").classicmodels>(require("./opcodes/classicmodels.jsonc")),
spotAnims: FileParser.fromJson<import("../generated/spotanims").spotanims>(require("./opcodes/spotanims.json")),
rootCacheIndex: FileParser.fromJson<import("../generated/rootcacheindex").rootcacheindex>(require("./opcodes/rootcacheindex.jsonc")),
skeletalAnim: FileParser.fromJson<import("../generated/skeletalanim").skeletalanim>(require("./opcodes/skeletalanim.jsonc")),

View File

@@ -3,7 +3,7 @@
import { cacheConfigPages, cacheMajors, cacheMapFiles } from "../constants";
import { parse } from "../opdecoder";
import { archiveToFileId, CacheFileSource } from "../cache";
import { defaultMorphId, squareSize } from "../3d/mapsquare";
import { defaultMorphId } from "../3d/mapsquare";
import { convertMaterial } from "../3d/jmat";
import { crc32 } from "../libs/crc32util";
import { arrayEnum } from "../utils";

View File

@@ -1,18 +1,19 @@
import { cacheConfigPages, cacheMajors, cacheMapFiles, lastLegacyBuildnr } from "../constants";
import { cacheConfigPages, cacheMajors, cacheMapFiles, lastClassicBuildnr, lastLegacyBuildnr } from "../constants";
import { parse, FileParser } from "../opdecoder";
import { Archive, archiveToFileId, CacheFileSource, CacheIndex, fileIdToArchiveminor, SubFile } from "../cache";
import { cacheFilenameHash, constrainedMap } from "../utils";
import prettyJson from "json-stringify-pretty-compact";
import { ScriptFS, ScriptOutput } from "../viewer/scriptsui";
import { JSONSchema6Definition } from "json-schema";
import { parseLegacySprite, parseSprite } from "../3d/sprite";
import { parseLegacySprite, parseSprite, parseTgaSprite } from "../3d/sprite";
import { pixelsToImageFile } from "../imgutils";
import { crc32, CrcBuilder } from "../libs/crc32util";
import { getModelHashes } from "../3d/modeltothree";
import { ParsedTexture } from "../3d/textures";
import { parseMusic } from "./musictrack";
import { legacyGroups, legacyMajors } from "../cache/legacycache";
import { classicGroups } from "../cache/classicloader";
type CacheFileId = {
@@ -34,12 +35,7 @@ async function filerange(source: CacheFileSource, startindex: FileId, endindex:
//bit silly since we download the files and then only return their ids
//however it doesn't matter that much since the entire cache is <20mb
let group: SubFile[] = [];
if (startindex.major == 0) {
group = await source.getArchiveById(startindex.major, minor);
} else {
await source.getFile(startindex.major, minor);
group = [{ buffer: null!, fileid: 0, namehash: null, offset: 0, size: 0 }];
}
group = await source.getArchiveById(startindex.major, minor);
let groupindex: CacheIndex = {
major: startindex.major,
minor,
@@ -400,7 +396,7 @@ const decodeLegacySprite = (minor: number): DecodeModeFactory => () => {
async read(b, id, source) {
let metafile = await source.findSubfileByName(legacyMajors.data, minor, "INDEX.DAT");
let img = parseLegacySprite(metafile!.buffer, b);
return pixelsToImageFile(img, "png", 1);
return pixelsToImageFile(img.img, "png", 1);
}
}
}
@@ -514,6 +510,8 @@ export const cacheFileJsonModes = constrainedMap<JsonBasedFile>()({
proctextures: { parser: parse.proctexture, lookup: noArchiveIndex(cacheMajors.texturesOldPng) },
oldproctextures: { parser: parse.oldproctexture, lookup: singleMinorIndex(cacheMajors.texturesOldPng, 0) },
classicmodels: { parser: parse.classicmodels, lookup: singleMinorIndex(0, classicGroups.models) },
indices: { parser: parse.cacheIndex, lookup: indexfileIndex() },
rootindex: { parser: parse.rootCacheIndex, lookup: rootindexfileIndex() }
});

View File

@@ -1,77 +0,0 @@
import { filesource, cliArguments, mapareasource } from "../cliparser";
import { run, command, number, option, string, boolean, Type, flag, oneOf } from "cmd-ts";
import * as fs from "fs";
import { parse } from "../opdecoder";
import { ParsemapOpts, parseMapsquare } from "../3d/mapsquare";
import { mapsquare_locations } from "../../generated/mapsquare_locations";
import { EngineCache } from "../3d/modeltothree";
import { makeImageData, pixelsToImageFile } from "../imgutils";
// CLI tool: dump map-chunk data (models / heights / raw objects / floor config)
// for a selected map area to files under --save.
let cmd = command({
	name: "download",
	args: {
		...filesource,
		...mapareasource,
		// output directory; created if it does not exist
		save: option({ long: "save", short: "s", type: string, defaultValue: () => "cache/mapmodels" }),
		// which kind of dump to produce
		mode: option({ long: "mode", short: "m", type: oneOf(["model", "height", "objects", "floor"]), defaultValue: () => "model" as any })
	},
	handler: async (args) => {
		let opts: ParsemapOpts = { invisibleLayers: false };
		let filesource = await args.source();
		let engine = await EngineCache.create(filesource);
		let { chunks, grid } = await parseMapsquare(engine, args.area, opts);
		fs.mkdirSync(args.save, { recursive: true });
		if (args.mode == "model") {
			//TODO
			// fixed typo in the message ("repimplementation")
			console.log("needs reimplementation");
		}
		if (args.mode == "objects") {
			// one entry per chunk: its square coordinates plus its raw location list
			let locs: { squarex: number, squarez: number, locs: mapsquare_locations["locations"] }[] = [];
			for (let chunk of chunks) {
				let locationindex = chunk.cacheIndex.subindices.indexOf(0);
				// fixed: skip chunks that have no location subfile instead of aborting
				// the whole dump (previously `return []` exited the handler and wrote nothing)
				if (locationindex == -1) { continue; }
				let locations = parse.mapsquareLocations.read(chunk.archive[locationindex].buffer, filesource).locations;
				locs.push({
					squarex: chunk.xoffset,
					squarez: chunk.zoffset,
					locs: locations
				});
			}
			fs.writeFileSync(args.save + "/" + Date.now() + ".json", JSON.stringify(locs, undefined, "\t"));
		}
		if (args.mode == "floor") {
			// flatten every chunk's tiles, tagging each with its in-chunk coordinate
			let alltiles = chunks.flatMap(q => q.tiles.map((t, i) => ({ $coord: `${i / 64 | 0}_${i % 64}`, ...t })));
			let usedunderlays = new Set(alltiles.map(q => q.underlay).filter(q => typeof q != "undefined"));
			let usedoverlays = new Set(alltiles.map(q => q.overlay).filter(q => typeof q != "undefined"));
			// tile ids are 1-based while the config arrays are 0-based, hence the -1
			let allunderlays = Object.fromEntries([...usedunderlays].map(q => [q, { $actualid: q! - 1, ...engine.mapUnderlays[q! - 1] }]));
			let alloverlays = Object.fromEntries([...usedoverlays].map(q => [q, { $actualid: q! - 1, ...engine.mapOverlays[q! - 1] }]));
			let r = {
				allunderlays,
				alloverlays,
				tiles: alltiles
			}
			fs.writeFileSync(args.save + "/" + Date.now() + ".json", JSON.stringify(r, undefined, "\t"));
		}
		if (args.mode == "height") {
			// grayscale heightmap PNG, one pixel per tile on level 0
			let imgw = args.area.xsize * 64;
			let imgh = args.area.zsize * 64;
			let data = new Uint8ClampedArray(imgw * imgh * 4);
			for (let dz = 0; dz < args.area.zsize * 64; dz++) {
				for (let dx = 0; dx < args.area.xsize * 64; dx++) {
					let i = dx * 4 + dz * imgw * 4;
					let tile = grid.getTile(args.area.x * 64 + dx, args.area.z * 64 + dz, 0);
					if (!tile) { continue; }
					//1/32=1/(tiledimensions*heightscale)
					data[i + 0] = tile.y / 32 | 0;
					data[i + 1] = tile.y / 32 | 0;
					data[i + 2] = tile.y / 32 | 0;
					data[i + 3] = 255;
				}
			}
			let imgfile = await pixelsToImageFile(makeImageData(data, imgw, imgh), "png", 1);
			fs.writeFileSync(args.save + "/" + Date.now() + ".png", imgfile);
		}
	}
});
run(cmd, cliArguments());

View File

@@ -30,6 +30,7 @@ export type Stream = {
export function cacheFilenameHash(name: string, oldhash: boolean) {
let hash = 0;
if (oldhash) {
name = name.toUpperCase();
for (let ch of name) {
hash = (Math.imul(hash, 61) + ch.charCodeAt(0) - 32) | 0;
}

View File

@@ -9,7 +9,7 @@ import { EngineCache, ThreejsSceneCache } from "../3d/modeltothree";
import { InputCommitted, StringInput, JsonDisplay, IdInput, LabeledInput, TabStrip, CanvasView, BlobImage, BlobAudio, CopyButton } from "./commoncontrols";
import { Openrs2CacheMeta, Openrs2CacheSource, validOpenrs2Caches } from "../cache/openrs2loader";
import { GameCacheLoader } from "../cache/sqlite";
import { DomWrap, UIScriptFile } from "./scriptsui";
import { CLIScriptFS, DomWrap, ScriptFS, UIScriptFile, UIScriptFS } from "./scriptsui";
import { DecodeErrorJson } from "../scripts/testdecode";
import prettyJson from "json-stringify-pretty-compact";
import { delay, drawTexture, TypedEmitter } from "../utils";
@@ -18,6 +18,7 @@ import { CacheDownloader } from "../cache/downloader";
import { parse } from "../opdecoder";
import * as path from "path";
import classNames from "classnames";
import { selectFsCache } from "../cache/autocache";
//work around typescript being weird when compiling for browser
const electron = require("electron/renderer");
@@ -26,7 +27,7 @@ const hasElectrion = !!electron.ipcRenderer;
export type SavedCacheSource = {
type: string
} & ({
type: "sqlitehandle",
type: "autohandle",
handle: FileSystemDirectoryHandle
} | {
type: "sqliteblobs",
@@ -35,7 +36,7 @@ export type SavedCacheSource = {
type: "openrs2",
cachename: string
} | {
type: "sqlitenodejs",
type: "autofs",
location: string
} | {
type: "live"
@@ -198,7 +199,7 @@ export class CacheSelector extends React.Component<{ onOpen: (c: SavedCacheSourc
@boundMethod
async clickOpen() {
let dir = await showDirectoryPicker();
this.props.onOpen({ type: "sqlitehandle", handle: dir });
this.props.onOpen({ type: "autohandle", handle: dir });
}
@boundMethod
@@ -206,7 +207,7 @@ export class CacheSelector extends React.Component<{ onOpen: (c: SavedCacheSourc
if (!hasElectrion) { return; }
let dir = await electron.ipcRenderer.invoke("openfolder", path.resolve(process.env.ProgramData!, "jagex/runescape"));
if (!dir.canceled) {
this.props.onOpen({ type: "sqlitenodejs", location: dir.filePaths[0] });
this.props.onOpen({ type: "autofs", location: dir.filePaths[0] });
}
}
@@ -219,7 +220,7 @@ export class CacheSelector extends React.Component<{ onOpen: (c: SavedCacheSourc
async clickReopen() {
if (!this.state.lastFolderOpen) { return; }
if (await this.state.lastFolderOpen.requestPermission() == "granted") {
this.props.onOpen({ type: "sqlitehandle", handle: this.state.lastFolderOpen });
this.props.onOpen({ type: "autohandle", handle: this.state.lastFolderOpen });
}
}
@@ -257,7 +258,7 @@ export class CacheSelector extends React.Component<{ onOpen: (c: SavedCacheSourc
if (folderhandles.length == 1 && filehandles.length == 0) {
console.log("stored folder " + folderhandles[0].name);
datastore.set("lastfolderopen", folderhandles[0]);
this.props.onOpen({ type: "sqlitehandle", handle: folderhandles[0] });
this.props.onOpen({ type: "autohandle", handle: folderhandles[0] });
} else {
console.log(`added ${Object.keys(files).length} files`);
this.props.onOpen({ type: "sqliteblobs", blobs: files });
@@ -386,35 +387,27 @@ export class UIContext extends TypedEmitter<{ openfile: UIScriptFile | null, sta
export async function openSavedCache(source: SavedCacheSource, remember: boolean) {
let handle: FileSystemDirectoryHandle | null = null;
let cache: CacheFileSource | null = null;
if (source.type == "sqliteblobs" || source.type == "sqlitehandle") {
let files: Record<string, Blob> = {};
if (source.type == "sqlitehandle") {
handle = source.handle;
if (await source.handle.queryPermission() != "granted") {
console.log("tried to open cache without permission");
return null;
}
// await source.handle.requestPermission();
for await (let handle of source.handle.values()) {
if (handle.kind == "file") {
files[handle.name] = await handle.getFile();
}
}
navigator.serviceWorker.ready.then(q => q.active?.postMessage({ type: "sethandle", handle }));
if (source.type == "sqliteblobs" || source.type == "autohandle") {
let wasmcache = new WasmGameCacheLoader();
if (source.type == "autohandle") {
let fs = new UIScriptFS(null);
await fs.setSaveDirHandle(source.handle);
cache = await selectFsCache(fs);
// await wasmcache.giveFsDirectory(source.handle);
navigator.serviceWorker.ready.then(q => q.active?.postMessage({ type: "sethandle", handle: source.handle }));
} else {
files = source.blobs;
wasmcache.giveBlobs(source.blobs);
}
cache = new WasmGameCacheLoader();
(cache as WasmGameCacheLoader).giveBlobs(files);
cache = wasmcache;
}
if (source.type == "openrs2") {
cache = await Openrs2CacheSource.fromId(+source.cachename);
}
if (hasElectrion && source.type == "sqlitenodejs") {
cache = new GameCacheLoader(source.location);
if (hasElectrion && source.type == "autofs") {
let fs = new CLIScriptFS(source.location);
cache = await selectFsCache(fs);
}
if (source.type == "live") {
cache = new CacheDownloader();

View File

@@ -1,8 +1,8 @@
import { ThreejsSceneCache, EngineCache } from '../3d/modeltothree';
import { ThreejsSceneCache, EngineCache, constModelsIds } from '../3d/modeltothree';
import { delay, packedHSL2HSL, HSL2RGB, RGB2HSL, HSL2packHSL, drawTexture, ModelModifications, stringToFileRange, stringToMapArea } from '../utils';
import { boundMethod } from 'autobind-decorator';
import { CacheFileSource } from '../cache';
import { MapRect, TileGrid, squareSize, CombinedTileGrid, getTileHeight } from '../3d/mapsquare';
import { MapRect, TileGrid, CombinedTileGrid, getTileHeight, rs2ChunkSize, classicChunkSize } from '../3d/mapsquare';
import { Euler, Quaternion, Vector3 } from "three";
import { cacheMajors } from "../constants";
import * as React from "react";
@@ -21,7 +21,7 @@ import { findImageBounds, makeImageData } from "../imgutils";
import { avataroverrides } from "../../generated/avataroverrides";
import { InputCommitted, StringInput, JsonDisplay, IdInput, LabeledInput, TabStrip, IdInputSearch, CanvasView, PasteButton, CopyButton } from "./commoncontrols";
import { items } from "../../generated/items";
import { itemToModel, locToModel, materialToModel, modelToModel, npcBodyToModel, npcToModel, playerDataToModel, playerToModel, RSMapChunk, RSMapChunkData, RSModel, SimpleModelDef, SimpleModelInfo, spotAnimToModel } from "../3d/modelnodes";
import { itemToModel, locToModel, modelToModel, npcBodyToModel, npcToModel, playerDataToModel, playerToModel, RSMapChunk, RSMapChunkData, RSModel, SimpleModelDef, SimpleModelInfo, spotAnimToModel } from "../3d/modelnodes";
import fetch from "node-fetch";
import { mapsquare_overlays } from '../../generated/mapsquare_overlays';
import { mapsquare_underlays } from '../../generated/mapsquare_underlays';
@@ -592,8 +592,8 @@ export class SceneScenario extends React.Component<LookupModeProps, ScenarioInte
let hasmap = Object.values(this.state.components).some(q => q.type == "map");
if (!hasmap || !this.mapoffset) {
this.mapoffset = {
x: (newcomp.mapRect.x + newcomp.mapRect.xsize / 2) * squareSize,
z: (newcomp.mapRect.z + newcomp.mapRect.zsize / 2) * squareSize
x: (newcomp.mapRect.x + newcomp.mapRect.xsize / 2) * rs2ChunkSize,
z: (newcomp.mapRect.z + newcomp.mapRect.zsize / 2) * rs2ChunkSize
};
}
newmodel.rootnode.position.set(-this.mapoffset.x * tiledimensions, 0, -this.mapoffset.z * tiledimensions);
@@ -656,10 +656,10 @@ export class SceneScenario extends React.Component<LookupModeProps, ScenarioInte
grids.push({
src: model.loaded.grid,
rect: {
x: model.rect.x * squareSize,
z: model.rect.z * squareSize,
xsize: model.rect.xsize * squareSize,
zsize: model.rect.zsize * squareSize
x: model.rect.x * rs2ChunkSize,
z: model.rect.z * rs2ChunkSize,
xsize: model.rect.xsize * rs2ChunkSize,
zsize: model.rect.zsize * rs2ChunkSize
}
});
}
@@ -1276,10 +1276,13 @@ async function materialIshToModel(sceneCache: ThreejsSceneCache, reqid: { mode:
if (matid != -1) {
let assetid = 93808;//"RuneTek_Asset" jagex test model
let assetid = constModelsIds.materialCube;
let mods: ModelModifications = {
replaceMaterials: [[4311, matid]],
replaceColors: [[20287, HSL2packHSL(...RGB2HSL(...color as [number, number, number]))]]
replaceMaterials: [[0, matid]],
replaceColors: [[
HSL2packHSL(...RGB2HSL(255, 255, 255)),
HSL2packHSL(...RGB2HSL(...color as [number, number, number]))
]]
};
let mat = sceneCache.engine.getMaterialData(matid);
for (let tex in mat.textures) {
@@ -1336,25 +1339,9 @@ function SceneMaterialIsh(p: LookupModeProps) {
function SceneRawModel(p: LookupModeProps) {
let initid = (typeof p.initialId == "number" ? p.initialId : 0);
let [data, model, id, setId] = useAsyncModelData(p.ctx, modelToModel);
let [preferOld, setPreferOld] = React.useState(false);
let hasbothmodels = !p.ctx || (p.ctx.sceneCache.engine.hasNewModels && p.ctx.sceneCache.engine.hasOldModels);
let oldcheckbox = (hasbothmodels ? preferOld : !!p.ctx && !p.ctx.sceneCache.engine.hasNewModels);
React.useEffect(() => {
if (!p.ctx) { return; }
let prevmode = p.ctx.sceneCache.useOldModels;
p.ctx.sceneCache.useOldModels = oldcheckbox;
if (typeof id == "number") { setId(id); }
return () => {
if (p.ctx?.sceneCache) { p.ctx!.sceneCache.useOldModels = prevmode; }
}
}, [oldcheckbox, p.ctx?.sceneCache])
return (
<React.Fragment>
<IdInput onChange={setId} initialid={id ?? initid} />
<label>
<input type="checkbox" disabled={!hasbothmodels} checked={oldcheckbox} onChange={e => setPreferOld(e.currentTarget.checked)} />
Use old model format
</label>
{id == null && (
<React.Fragment>
<p>Enter a model id.</p>
@@ -1664,9 +1651,10 @@ export class SceneMapModel extends React.Component<LookupModeProps, SceneMapStat
let center = this.state.center;
if (this.state.chunkgroups.length == 0) {
let chunksize = (sceneCache.engine.classicData ? classicChunkSize : rs2ChunkSize);
center = {
x: (rect.x + rect.xsize / 2) * 64 * 512,
z: (rect.z + rect.zsize / 2) * 64 * 512,
x: (rect.x + rect.xsize / 2) * chunksize * 512,
z: (rect.z + rect.zsize / 2) * chunksize * 512,
}
}
let chunkentry = { rect, chunk, background: "" };

View File

@@ -94,9 +94,9 @@ export class UIScriptFS extends TypedEmitter<{ writefile: undefined }> implement
files: UIScriptFile[] = [];
rootdirhandle: FileSystemDirectoryHandle | null = null;
outdirhandles = new Map<string, FileSystemDirectoryHandle | null>();
output: UIScriptOutput;
output: UIScriptOutput | null;
constructor(output: UIScriptOutput) {
constructor(output: UIScriptOutput | null) {
super();
this.output = output;
}
@@ -104,13 +104,13 @@ export class UIScriptFS extends TypedEmitter<{ writefile: undefined }> implement
async mkDir(name: string) {
this.outdirhandles.set(name, null);
this.emit("writefile", undefined);
this.output.emit("writefile", undefined);
this.output?.emit("writefile", undefined);
}
async writeFile(name: string, data: Buffer | string) {
this.files.push({ name, data });
if (this.rootdirhandle) { await this.saveLocalFile(name, data); }
this.emit("writefile", undefined);
this.output.emit("writefile", undefined);
this.output?.emit("writefile", undefined);
}
readFileBuffer(name: string): Promise<Buffer> {
throw new Error("not implemented");
@@ -137,7 +137,7 @@ export class UIScriptFS extends TypedEmitter<{ writefile: undefined }> implement
}
await Promise.all(this.files.map(q => this.saveLocalFile(q.name, q.data)));
}
this.output.emit("statechange", undefined);
this.output?.emit("statechange", undefined);
}
async mkdirLocal(path: string[]) {