<2013 cache investigation

Skillbert
2023-03-04 18:03:52 +01:00
parent fbd2feadbb
commit d915af2f3f
8 changed files with 58 additions and 27 deletions

View File

@@ -4,7 +4,7 @@
 export type cacheindex = {
     format: number,
-    timestamp: number,
+    timestamp: (number|0),
     flags: number,
     indices: {
         minor: number,
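Note: the regenerated type reflects that index formats below 6 never serialize a timestamp, so the parser substitutes a literal 0. A minimal sketch of the resulting read logic, assuming Node Buffer reads and illustrative names (not the repo's actual FileParser internals):

function readCacheIndexHeader(buf: Buffer) {
    let offset = 0;
    const format = buf.readUInt8(offset); offset += 1;
    // formats older than 6 carry no timestamp field at all, hence the (number|0) type
    const timestamp = format >= 6 ? buf.readUInt32BE(offset) : 0;
    if (format >= 6) { offset += 4; }
    const flags = buf.readUInt8(offset); offset += 1;
    return { format, timestamp, flags, offset };
}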

generated/proctexture.d.ts vendored Normal file
View File

@@ -0,0 +1,8 @@
+// GENERATED DO NOT EDIT
+// This source data is located at '..\src\opcodes\proctexture.jsonc'
+// run `npm run filetypes` to rebuild
+export type proctexture = {
+    op: number,
+    args: (Uint8Array),
+}[];
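Note: a hypothetical consumer of the generated type; Buffer.from is used here only to hex-print the args payload:

import type { proctexture } from "../generated/proctexture";

function dumpProctexture(tex: proctexture) {
    for (const instr of tex) {
        console.log(`op 0x${instr.op.toString(16)}: ${Buffer.from(instr.args).toString("hex")}`);
    }
}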

View File

@@ -31,20 +31,21 @@ var cachelist: Promise<Openrs2CacheMeta[]> | null = null;
 export function validOpenrs2Caches() {
     if (!cachelist) {
         cachelist = (async () => {
-            const openrs2Blacklist = [
-                423,//osrs cache wrongly labeled as rs3
+            const openrs2Blacklist: number[] = [
+                //some of these might actually be fine
+                423,//osrs cache wrongly labeled as rs3?
                 623,//seems to have different builds in it
                 693,//wrong timestamp?
-                621,619,618,620,617,//wrong timestamp/osrs?
+                621, 619, 618, 620, 617,//wrong timestamp/osrs?
                 840,//multiple builds
+                734, 736, 733,//don't have items index
+                20, 19, 17, 13, 10, 9, 8, 7, 6, 5,//don't have items index
             ];
-            let allcaches = await fetch(`${endpoint}/caches.json`).then(q => q.json());
+            let allcaches: Openrs2CacheMeta[] = await fetch(`${endpoint}/caches.json`).then(q => q.json());
             let checkedcaches = allcaches.filter(q =>
                 q.language == "en" && q.environment == "live" && !openrs2Blacklist.includes(q.id)
                 && q.game == "runescape" && q.timestamp && q.builds.length != 0
-            ).sort((a, b) => +new Date(b.timestamp!) - +new Date(a.timestamp!));
+            ).sort((a, b) => b.builds[0].major - a.builds[0].major || (b.builds[0].minor ?? 0) - (a.builds[0].minor ?? 0) || +new Date(b.timestamp!) - +new Date(a.timestamp!));
             return checkedcaches;
         })();
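Note: the sort no longer trusts openrs2 timestamps alone (several blacklist entries exist precisely because of wrong timestamps); build numbers are now the primary key. A toy demonstration of the chained comparator, with a reduced Openrs2CacheMeta shape assumed for illustration:

type CacheMeta = { id: number, timestamp: string, builds: { major: number, minor: number | null }[] };

const newestFirst = (a: CacheMeta, b: CacheMeta) =>
    b.builds[0].major - a.builds[0].major                   // primary: build major
    || (b.builds[0].minor ?? 0) - (a.builds[0].minor ?? 0)  // tie-break: build minor
    || +new Date(b.timestamp) - +new Date(a.timestamp);     // last resort: timestamp

// example: the build number wins even when the timestamp disagrees
const sorted: CacheMeta[] = [
    { id: 1, timestamp: "2010-01-01", builds: [{ major: 600, minor: 1 }] },
    { id: 2, timestamp: "2012-01-01", builds: [{ major: 550, minor: null }] },
].sort(newestFirst);
// sorted[0].id == 1: build 600 outranks 550 despite the older timestamp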

View File

@@ -1,9 +1,9 @@
 ["struct",
     ["format","unsigned byte"],
-    ["timestamp","unsigned int"],
+    ["timestamp",["match",["ref","format"],{">=6":"uint","other":0}]],
     ["flags","unsigned byte"],
     ["$minorindex","0"],
-    ["indices",["chunkedarray","variable unsigned int",
+    ["indices",["chunkedarray",["match",["ref","format"],{">=7":"varuint","other":"ushort"}],
         [
             ["minor",["accum","$minorindex","unsigned short"]]
         ],
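Note: the `match` node picks a wire type based on an already-decoded field. A toy evaluator for the subset used here (only `>=N` and `other` cases; the real FileParser may support more):

type MatchCases = Record<string, string | number>;

function evalMatch(refValue: number, cases: MatchCases): string | number {
    for (const [cond, result] of Object.entries(cases)) {
        if (cond.startsWith(">=") && refValue >= Number(cond.slice(2))) { return result; }
    }
    return cases["other"];
}

evalMatch(5, { ">=6": "uint", "other": 0 });            // -> 0: no timestamp stored
evalMatch(7, { ">=7": "varuint", "other": "ushort" });  // -> "varuint" chunk lengths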

View File

@@ -0,0 +1,6 @@
+["nullarray",["struct",
+    ["op",["ref","$opcode"]],
+    ["args",["match",["ref","$opcode"],{
+        "0x01":["buffer",8,"hex"]
+    }]]
+]]
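Note: a sketch of the byte stream this definition describes, assuming `nullarray` reads structs until a zero opcode (the usual RuneScape config convention; that termination rule is an assumption here):

// hypothetical input: opcode 0x01, its fixed 8-byte payload, then the 0x00 terminator
const sample = Buffer.from([0x01, 1, 2, 3, 4, 5, 6, 7, 8, 0x00]);
// expected parse: [{ op: 1, args: <8 bytes, rendered as hex> }]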

View File

@@ -115,6 +115,7 @@ function allParsers() {
         params: FileParser.fromJson<import("../generated/params").params>(require("./opcodes/params.jsonc")),
         particles_0: FileParser.fromJson<import("../generated/particles_0").particles_0>(require("./opcodes/particles_0.jsonc")),
         particles_1: FileParser.fromJson<import("../generated/particles_1").particles_1>(require("./opcodes/particles_1.jsonc")),
-        audio: FileParser.fromJson<import("../generated/audio").audio>(require("./opcodes/audio.jsonc"))
+        audio: FileParser.fromJson<import("../generated/audio").audio>(require("./opcodes/audio.jsonc")),
+        proctexture: FileParser.fromJson<import("../generated/proctexture").proctexture>(require("./opcodes/proctexture.jsonc"))
     }
 }
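Note: each registry entry pairs a runtime jsonc definition with its generated compile-time type, so parse results are typed end to end. A hypothetical new format `foo` would follow the same two-file pattern:

// foo: FileParser.fromJson<import("../generated/foo").foo>(require("./opcodes/foo.jsonc")),
// plus src/opcodes/foo.jsonc, then `npm run filetypes` to emit generated/foo.d.ts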

View File

@@ -453,6 +453,7 @@ export const cacheFileJsonModes = constrainedMap<JsonBasedFile>()({
     models: { parser: parse.models, lookup: noArchiveIndex(cacheMajors.models) },
     oldmodels: { parser: parse.oldmodels, lookup: noArchiveIndex(cacheMajors.oldmodels) },
     skeletons: { parser: parse.skeletalAnim, lookup: noArchiveIndex(cacheMajors.skeletalAnims) },
+    proctextures: { parser: parse.proctexture, lookup: noArchiveIndex(cacheMajors.texturesOldPng) },
     indices: { parser: parse.cacheIndex, lookup: indexfileIndex() },
     rootindex: { parser: parse.rootCacheIndex, lookup: rootindexfileIndex() }
@@ -503,7 +504,8 @@ export const cacheFileDecodeModes = constrainedMap<DecodeModeFactory>()({
     npcmodels: npcmodels,
-    ...(Object.fromEntries(Object.entries(cacheFileJsonModes).map(([k, v]) => [k, standardFile(v.parser, v.lookup)])) as Record<keyof typeof cacheFileJsonModes, DecodeModeFactory>)
+    ...(Object.fromEntries(Object.entries(cacheFileJsonModes)
+        .map(([k, v]) => [k, standardFile(v.parser, v.lookup)])) as Record<keyof typeof cacheFileJsonModes, DecodeModeFactory>)
 });
 export async function extractCacheFiles(output: ScriptOutput, outdir: ScriptFS, source: CacheFileSource, args: { batched: boolean, batchlimit: number, mode: string, files: FileRange, edit: boolean, keepbuffers: boolean }) {
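Note: the reformatted spread is the usual map-over-object idiom; extracted as a generic helper for clarity (a sketch, not code from this repo):

function mapValues<T, U>(obj: Record<string, T>, fn: (v: T) => U): Record<string, U> {
    return Object.fromEntries(Object.entries(obj).map(([k, v]) => [k, fn(v)]));
}
// cacheFileDecodeModes gains one DecodeModeFactory per JSON mode:
// ...mapValues(cacheFileJsonModes, v => standardFile(v.parser, v.lookup))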

View File

@@ -5,11 +5,18 @@ import { ScriptOutput } from "../viewer/scriptsui";
 export async function openrs2Ids(output: ScriptOutput, date: string, near: string, logcontents: boolean) {
     let allids = await validOpenrs2Caches();
     if (date) {
-        let m = date.match(/20\d\d/);
-        if (!m) { throw new Error("4 digit year expected"); }
-        let year = +m[0];
-        let enddate = new Date((year + 1) + "");
-        let startdate = new Date(year + "");
+        let startdate = new Date("");//nan
+        let enddate = new Date("");//nan
+        if (date.match(/^\d{4}$/)) {
+            startdate = new Date(date);
+            enddate = new Date((+date + 1) + "");
+        } else if (date.match(/-/)) {
+            let parts = date.split("-");
+            startdate = new Date(parts[0]);
+            enddate = new Date(parts[1]);
+        }
+        if (isNaN(+enddate)) { enddate = new Date("2100"); }
+        if (isNaN(+startdate)) { startdate = new Date("1900"); }
         allids = allids.filter(q => q.timestamp && new Date(q.timestamp) >= startdate && new Date(q.timestamp) <= enddate);
     }
     if (near) {
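Note: inputs the new parsing accepts (worked examples; the open-ended forms rely on `new Date("")` being NaN and falling through to the 1900/2100 defaults):

// "2009"      -> [2009-01-01, 2010-01-01]   four-digit year, end is year + 1
// "2005-2008" -> [2005-01-01, 2008-01-01]   explicit range
// "2010-"     -> [2010-01-01, 2100-01-01]   open end: parts[1] is "", NaN -> 2100
// "-2006"     -> [1900-01-01, 2006-01-01]   open start: parts[0] is "", NaN -> 1900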
if (near) {
@@ -34,24 +41,30 @@ export async function openrs2Ids(output: ScriptOutput, date: string, near: string, logcontents: boolean) {
     }
     let src = new Openrs2CacheSource(cache);
     try {
-        if (cache.builds[0].major >= 410) {
-            let index = await src.getCacheIndex(cacheMajors.index);
-            for (let i = 0; i < index.length; i++) {
-                let config = index[i];
-                if (!config) {
-                    line += " ".repeat(10);
-                } else {
-                    let subcount = 0;
-                    if (config.crc != 0 && config.subindexcount == 0) {
+        // if (cache.builds[0].major >= 410) {
+        let index = await src.getCacheIndex(cacheMajors.index);
+        for (let i = 0; i < index.length; i++) {
+            let config = index[i];
+            if (!config) {
+                line += " ".repeat(10);
+            } else {
+                let subcount = 0;
+                if (config.crc != 0 && config.subindexcount == 0) {
+                    try {
                         let subindex = await src.getCacheIndex(config.minor);
                         subcount = subindex.reduce((a, v) => a + (v ? 1 : 0), 0);
-                } else {
-                    subcount = config.subindexcount;
+                    } catch (e) {
+                        subcount = NaN;
                     }
-                    line += ` ${subcount.toString().padStart(9)}`;
+                } else {
+                    subcount = config.subindexcount;
+                }
+                line += ` ${subcount.toString().padStart(9)}`;
             }
         }
+        // }
     } catch (e) {
         line += ` Error ${e}`;
     } finally {
         src.close();
     }
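Note: with the build-410 gate commented out, pre-410 caches are probed too; a missing subindex now yields NaN for that column instead of aborting the whole row. Both idioms in miniature:

const subindex = [null, {}, undefined, {}, {}];                  // sparse index entries
const subcount = subindex.reduce((a, v) => a + (v ? 1 : 0), 0);  // -> 3 non-empty
` ${NaN.toString().padStart(9)}`;                                // -> "       NaN", still 10 chars wide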