support fonts

This commit is contained in:
Skillbert
2025-12-08 14:01:29 +01:00
parent 000f6d5cb0
commit 5cbea1275d
5 changed files with 1532 additions and 1524 deletions

View File

@@ -1,245 +1,245 @@
import { makeImageData } from "../imgutils";
import { crc32 } from "../libs/crc32util";
import { Stream } from "../utils";
//A decoded image that may be a cutout of a larger sprite canvas.
export type SubImageData = {
	//offset of this cutout inside the full sprite canvas
	x: number,
	y: number,
	//dimensions of the full sprite canvas this cutout belongs to
	fullwidth: number,
	fullheight: number,
	img: ImageData
}
//Decodes one palette-indexed subsprite from raw bytes into RGBA pixels.
//Layout of buf: [width*height palette index bytes][width*height alpha bytes, only if `alpha`].
//Palette index 0 is fully transparent; other indices are 1-based lookups into `palette` (3 bytes rgb each).
//Returns the decoded image and the number of bytes consumed from buf.
export function parseSubsprite(buf: Buffer, palette: Buffer, width: number, height: number, alpha: boolean, transposed: boolean) {
	let imgsize = width * height;
	let offset = 0;
	let imgdata = new Uint8ClampedArray(imgsize * 4);
	let indexoffset = offset;
	let alphaoffset = offset + imgsize;
	offset += imgsize + (alpha ? imgsize : 0);
	for (let y = 0; y < height; y++) {
		for (let x = 0; x < width; x++) {
			let outoffset = x * 4 + y * width * 4;
			//transposed sprites are stored column-major
			let inoffset = (transposed ? y + x * height : x + y * width);
			let pxindex = buf.readUInt8(indexoffset + inoffset);
			if (pxindex == 0) {
				//index 0 marks a fully transparent pixel
				imgdata[outoffset + 0] = 0;
				imgdata[outoffset + 1] = 0;
				imgdata[outoffset + 2] = 0;
				imgdata[outoffset + 3] = 0;
			} else {
				let paletteoffset = (pxindex - 1) * 3;
				imgdata[outoffset + 0] = palette[paletteoffset + 0];
				imgdata[outoffset + 1] = palette[paletteoffset + 1];
				imgdata[outoffset + 2] = palette[paletteoffset + 2];
				imgdata[outoffset + 3] = alpha ? buf.readUInt8(alphaoffset + inoffset) : 255;
			}
		}
	}
	return {
		img: makeImageData(imgdata, width, height),
		bytesused: offset
	}
}
//Parses a legacy sprite: pixel data lives in `buf`, while dimensions and the shared
//palette come from `metafile` at the offset stored in buf's first two bytes.
//Returns the first subimage (normally the only one; other counts are logged).
export function parseLegacySprite(metafile: Buffer, buf: Buffer) {
	let file = new Stream(buf);
	let metaoffset = file.readUShort(true);
	if (!metafile) { throw new Error("sprite meta file not found"); }
	let meta = new Stream(metafile);
	meta.skip(metaoffset);
	let totalwidth = meta.readUShort(true);
	let totalheight = meta.readUShort(true);
	//stored count includes the implicit transparent index 0
	let palettecount = meta.readUByte() - 1;
	let palette = meta.readBuffer(palettecount * 3);
	let imgs: SubImageData[] = [];
	while (!file.eof()) {
		//per-subimage header is read from the meta stream, pixels from the file stream
		let offsetx = meta.readUByte();
		let offsety = meta.readUByte();
		let width = meta.readUShort(true);
		let height = meta.readUShort(true);
		let transpose = meta.readUByte() != 0;
		let imgbytes = file.readBuffer(width * height);
		imgs.push({
			x: offsetx,
			y: offsety,
			fullwidth: totalwidth,
			fullheight: totalheight,
			img: parseSubsprite(imgbytes, palette, width, height, false, transpose).img
		});
	}
	if (imgs.length != 1) {
		//unexpected, dump for debugging
		console.log(imgs);
	}
	return imgs[0];
}
//Blits a subimage onto a blank canvas of its full sprite size at its (x,y) offset.
//Returns the subimage's ImageData unchanged when it already covers the full canvas.
export function expandSprite(subimg: SubImageData) {
	if (subimg.x == 0 && subimg.y == 0 && subimg.fullwidth == subimg.img.width && subimg.fullheight == subimg.img.height) {
		return subimg.img;
	}
	let img = new ImageData(subimg.fullwidth, subimg.fullheight);
	for (let dy = 0; dy < subimg.img.height; dy++) {
		//copy one row at a time into the right spot of the larger canvas
		let instride = subimg.img.width * 4;
		let inoffset = dy * instride;
		let outstride = img.width * 4;
		let outoffset = (dy + subimg.y) * outstride + subimg.x * 4;
		img.data.set(subimg.img.data.subarray(inoffset, inoffset + instride), outoffset);
	}
	return img;
}
//Parses a sprite group file. The last two bytes hold a footer word: high bit selects
//the format, low 15 bits are the subimage count.
export function parseSprite(buf: Buffer) {
	let data = buf.readUInt16BE(buf.length - 2);
	let format = data >> 15;
	let count = (data & 0x7FFF);
	let spriteimgs: SubImageData[] = [];
	if (format == 0) {
		//palette-based format. footer layout: maxwidth u16, maxheight u16, palettecount u8,
		//then four u16 arrays with one entry per subimage (x, y, width, height)
		let footsize = 7 + 8 * count;
		let offset = buf.length - footsize;
		let maxwidth = buf.readUInt16BE(offset); offset += 2;
		let maxheight = buf.readUInt16BE(offset); offset += 2;
		let palette_count = buf.readUInt8(offset); offset++;
		let subimgs: { x: number, y: number, width: number, height: number }[] = [];
		for (let subimg = 0; subimg < count; subimg++) {
			subimgs.push({
				x: buf.readUInt16BE(offset + count * 0 + subimg * 2),
				y: buf.readUInt16BE(offset + count * 2 + subimg * 2),
				width: buf.readUInt16BE(offset + count * 4 + subimg * 2),
				height: buf.readUInt16BE(offset + count * 6 + subimg * 2),
			});
		}
		//palette sits directly before the footer, 3 rgb bytes per color
		let palette = buf.slice(buf.length - footsize - 3 * palette_count, buf.length - footsize);
		// if (palette[0] == 0 && palette[1] == 0 && palette[2] == 0) {
		// palette[2] = 1;//yep, the game does this, i don't know why
		// }
		offset = 0;
		for (let imgdef of subimgs) {
			//zero-sized entries have no pixel data at all (not even a flags byte)
			if (imgdef.width != 0 && imgdef.height != 0) {
				let flags = buf.readUInt8(offset); offset++;
				let transposed = (flags & 1) != 0;
				let alpha = (flags & 2) != 0;
				let subimg = parseSubsprite(buf.slice(offset), palette, imgdef.width, imgdef.height, alpha, transposed);
				offset += subimg.bytesused;
				spriteimgs.push({
					x: imgdef.x,
					y: imgdef.y,
					fullwidth: maxwidth,
					fullheight: maxheight,
					img: subimg.img
				});
			}
		}
	} else {
		//raw truecolor format: single image, planar layout (rgb plane, then optional alpha plane)
		let offset = 0;
		let type = buf.readUInt8(offset); offset++;
		if (type != 0) { throw new Error("unknown type"); }
		let flags = buf.readUInt8(offset); offset++;
		let alpha = (flags & 1) != 0;
		let width = buf.readUInt16BE(offset); offset += 2;
		let height = buf.readUInt16BE(offset); offset += 2;
		let coloroffset = offset;
		offset += width * height * 3;
		let alphaoffset = offset;
		offset += (alpha ? width * height : 0);
		let imgdata = new Uint8ClampedArray(width * height * 4);
		for (let y = 0; y < height; y++) {
			for (let x = 0; x < width; x++) {
				let outoffset = x * 4 + y * width * 4;
				let inoffset = x + y * width;
				imgdata[outoffset + 0] = buf.readUInt8(coloroffset + inoffset * 3 + 0);
				imgdata[outoffset + 1] = buf.readUInt8(coloroffset + inoffset * 3 + 1);
				imgdata[outoffset + 2] = buf.readUInt8(coloroffset + inoffset * 3 + 2);
				//NOTE(review): the "+ 2" looks off — the alpha plane is indexed per pixel
				//elsewhere (parseSubsprite uses alphaoffset + inoffset) and this reads past
				//the plane for the last two pixels; confirm against the game client
				imgdata[outoffset + 3] = alpha ? buf.readUInt8(alphaoffset + inoffset + 2) : 255;
			}
		}
		spriteimgs.push({
			x: 0,
			y: 0,
			fullwidth: width,
			fullheight: height,
			img: makeImageData(imgdata, width, height)
		});
	}
	return spriteimgs;
}
//Parses an uncompressed palette-based TGA file into a SubImageData.
//Only 8bpp indexed images with a 24bpp colormap and a zero image descriptor are accepted;
//anything else throws. Pixels equal to magenta (255,0,255) become fully transparent.
export function parseTgaSprite(file: Buffer) {
	let str = new Stream(file);
	let idlength = str.readUByte();
	let colormaptype = str.readUByte();
	//these header fields are read only to advance the stream
	let datatypecode = str.readUByte();
	let colormapoffset = str.readUShort(false);
	let colormaplen = str.readUShort(false);
	let colormapdepth = str.readUByte();
	let originx = str.readUShort(false);
	let originy = str.readUShort(false);
	let width = str.readUShort(false);
	let height = str.readUShort(false);
	let bpp = str.readUByte();
	let imgdescr = str.readUByte();
	str.skip(idlength);//possible text content
	if (colormaptype != 1 || bpp != 8) { throw new Error("only palette based uncompressed TGA supported"); }
	if (colormapdepth != 24) { throw new Error("only 24bpp rgb TGA supported"); }
	if (imgdescr != 0) { throw new Error("no fancy TGA's allowed"); }
	let palette = str.readBuffer(colormaplen * 3);
	let imgdata = new Uint8ClampedArray(width * height * 4);
	//NOTE(review): descriptor 0 means bottom-left origin per the TGA spec while rows are
	//filled top-down here — confirm the intended orientation
	for (let y = 0; y < height; y++) {
		for (let x = 0; x < width; x++) {
			let outoffset = x * 4 + y * width * 4;
			let pxindex = str.readUByte();
			let paletteoffset = pxindex * 3;
			//bgr->rgb flip!!
			imgdata[outoffset + 0] = palette[paletteoffset + 2];
			imgdata[outoffset + 1] = palette[paletteoffset + 1];
			imgdata[outoffset + 2] = palette[paletteoffset + 0];
			imgdata[outoffset + 3] = 255;
			//jagex treats 255,0,255 as transparent
			if (imgdata[outoffset + 0] == 255 && imgdata[outoffset + 1] == 0 && imgdata[outoffset + 2] == 255) {
				imgdata[outoffset + 0] = 0;
				imgdata[outoffset + 1] = 0;
				imgdata[outoffset + 2] = 0;
				imgdata[outoffset + 3] = 0;
			}
		}
	}
	//bugfix: eof is a method (cf. file.eof() in parseLegacySprite); the previous
	//`!str.eof` tested the function object itself and could never warn
	if (!str.eof()) {
		console.warn("didn't parse TGA sprite to completion");
	}
	let r: SubImageData = {
		x: originx,
		y: originy,
		fullwidth: width,
		fullheight: height,
		img: makeImageData(imgdata, width, height)
	};
	return r;
}
//Hashes a sprite's pixels with crc32 after normalizing the blue channel.
export function spriteHash(img: ImageData) {
	// copy since we need to modify it
	const data = img.data.slice();
	// for some reason 0 blue isn't possible in-game
	for (let i = 0; i < data.length; i += 4) {
		if (data[i + 2] == 0) { data[i + 2] = 1; }
	}
	//NOTE(review): this hashes the unmodified img.data, so the blue-channel fixup
	//into `data` above has no effect on the result — likely should be crc32(data)
	return crc32(img.data);
import { makeImageData } from "../imgutils";
import { crc32 } from "../libs/crc32util";
import { Stream } from "../utils";
//A decoded image that may be a cutout of a larger sprite canvas.
export type SubImageData = {
	//offset of this cutout inside the full sprite canvas
	x: number,
	y: number,
	//dimensions of the full sprite canvas this cutout belongs to
	fullwidth: number,
	fullheight: number,
	img: ImageData
}
//Decodes one palette-indexed subsprite from raw bytes into RGBA pixels.
//Layout of buf: [pixelcount index bytes][pixelcount alpha bytes, only when `alpha` is set].
//Palette index 0 is fully transparent; other indices are 1-based lookups into `palette`
//(3 bytes rgb per color). Returns the decoded image and the bytes consumed from buf.
export function parseSubsprite(buf: Buffer, palette: Buffer, width: number, height: number, alpha: boolean, transposed: boolean) {
	const pixelcount = width * height;
	const rgba = new Uint8ClampedArray(pixelcount * 4);
	const indexbase = 0;
	const alphabase = pixelcount;
	const consumed = pixelcount + (alpha ? pixelcount : 0);
	for (let row = 0; row < height; row++) {
		for (let col = 0; col < width; col++) {
			const dst = (col + row * width) * 4;
			//transposed sprites are stored column-major
			const src = (transposed ? row + col * height : col + row * width);
			const index = buf.readUInt8(indexbase + src);
			if (index == 0) {
				//index 0 marks a fully transparent pixel
				rgba[dst + 0] = 0;
				rgba[dst + 1] = 0;
				rgba[dst + 2] = 0;
				rgba[dst + 3] = 0;
			} else {
				const pal = (index - 1) * 3;
				rgba[dst + 0] = palette[pal + 0];
				rgba[dst + 1] = palette[pal + 1];
				rgba[dst + 2] = palette[pal + 2];
				rgba[dst + 3] = (alpha ? buf.readUInt8(alphabase + src) : 255);
			}
		}
	}
	return {
		img: makeImageData(rgba, width, height),
		bytesused: consumed
	};
}
//Parses a legacy sprite: pixel data lives in `buf`, while dimensions and the shared
//palette come from `metafile` at the offset stored in buf's first two bytes.
//Returns the first subimage (normally the only one; other counts are logged).
export function parseLegacySprite(metafile: Buffer, buf: Buffer) {
	const pixelstream = new Stream(buf);
	const metaoffset = pixelstream.readUShort(true);
	if (!metafile) { throw new Error("sprite meta file not found"); }
	const meta = new Stream(metafile);
	meta.skip(metaoffset);
	const fullwidth = meta.readUShort(true);
	const fullheight = meta.readUShort(true);
	//stored count includes the implicit transparent index 0
	const palettecount = meta.readUByte() - 1;
	const palette = meta.readBuffer(palettecount * 3);
	const imgs: SubImageData[] = [];
	while (!pixelstream.eof()) {
		//per-subimage header comes from the meta stream, pixels from the file stream
		const x = meta.readUByte();
		const y = meta.readUByte();
		const width = meta.readUShort(true);
		const height = meta.readUShort(true);
		const transposed = meta.readUByte() != 0;
		const pixelbytes = pixelstream.readBuffer(width * height);
		const decoded = parseSubsprite(pixelbytes, palette, width, height, false, transposed);
		imgs.push({ x, y, fullwidth, fullheight, img: decoded.img });
	}
	if (imgs.length != 1) {
		//unexpected, dump for debugging
		console.log(imgs);
	}
	return imgs[0];
}
//Blits a subimage onto a blank canvas of its full sprite size at its (x,y) offset.
//Returns the subimage's ImageData unchanged when it already covers the full canvas.
export function expandSprite(subimg: SubImageData) {
	const src = subimg.img;
	const coversall = subimg.x == 0 && subimg.y == 0
		&& subimg.fullwidth == src.width && subimg.fullheight == src.height;
	if (coversall) {
		return src;
	}
	const out = new ImageData(subimg.fullwidth, subimg.fullheight);
	const instride = src.width * 4;
	const outstride = out.width * 4;
	//copy one row at a time into the right spot of the larger canvas
	for (let row = 0; row < src.height; row++) {
		const srcstart = row * instride;
		const dststart = (row + subimg.y) * outstride + subimg.x * 4;
		out.data.set(src.data.subarray(srcstart, srcstart + instride), dststart);
	}
	return out;
}
//Parses a sprite group file. The last two bytes hold a footer word: high bit selects
//the format, low 15 bits are the subimage count.
export function parseSprite(buf: Buffer) {
	let data = buf.readUInt16BE(buf.length - 2);
	let format = data >> 15;
	let count = (data & 0x7FFF);
	let spriteimgs: SubImageData[] = [];
	if (format == 0) {
		//palette-based format. footer layout: maxwidth u16, maxheight u16, palettecount u8,
		//then four u16 arrays with one entry per subimage (x, y, width, height)
		let footsize = 7 + 8 * count;
		let offset = buf.length - footsize;
		let maxwidth = buf.readUInt16BE(offset); offset += 2;
		let maxheight = buf.readUInt16BE(offset); offset += 2;
		let palette_count = buf.readUInt8(offset); offset++;
		let subimgs: { x: number, y: number, width: number, height: number }[] = [];
		for (let subimg = 0; subimg < count; subimg++) {
			subimgs.push({
				x: buf.readUInt16BE(offset + count * 0 + subimg * 2),
				y: buf.readUInt16BE(offset + count * 2 + subimg * 2),
				width: buf.readUInt16BE(offset + count * 4 + subimg * 2),
				height: buf.readUInt16BE(offset + count * 6 + subimg * 2),
			});
		}
		//palette sits directly before the footer, 3 rgb bytes per color
		let palette = buf.slice(buf.length - footsize - 3 * palette_count, buf.length - footsize);
		// if (palette[0] == 0 && palette[1] == 0 && palette[2] == 0) {
		// palette[2] = 1;//yep, the game does this, i don't know why
		// }
		offset = 0;
		for (let imgdef of subimgs) {
			//zero-sized entries have no pixel data at all (not even a flags byte)
			if (imgdef.width != 0 && imgdef.height != 0) {
				let flags = buf.readUInt8(offset); offset++;
				let transposed = (flags & 1) != 0;
				let alpha = (flags & 2) != 0;
				let subimg = parseSubsprite(buf.slice(offset), palette, imgdef.width, imgdef.height, alpha, transposed);
				offset += subimg.bytesused;
				spriteimgs.push({
					x: imgdef.x,
					y: imgdef.y,
					fullwidth: maxwidth,
					fullheight: maxheight,
					img: subimg.img
				});
			}
		}
	} else {
		//raw truecolor format: single image, planar layout (rgb plane, then optional alpha plane)
		let offset = 0;
		let type = buf.readUInt8(offset); offset++;
		if (type != 0) { throw new Error("unknown type"); }
		let flags = buf.readUInt8(offset); offset++;
		let alpha = (flags & 1) != 0;
		let width = buf.readUInt16BE(offset); offset += 2;
		let height = buf.readUInt16BE(offset); offset += 2;
		let coloroffset = offset;
		offset += width * height * 3;
		let alphaoffset = offset;
		offset += (alpha ? width * height : 0);
		let imgdata = new Uint8ClampedArray(width * height * 4);
		for (let y = 0; y < height; y++) {
			for (let x = 0; x < width; x++) {
				let outoffset = x * 4 + y * width * 4;
				let inoffset = x + y * width;
				imgdata[outoffset + 0] = buf.readUInt8(coloroffset + inoffset * 3 + 0);
				imgdata[outoffset + 1] = buf.readUInt8(coloroffset + inoffset * 3 + 1);
				imgdata[outoffset + 2] = buf.readUInt8(coloroffset + inoffset * 3 + 2);
				//NOTE(review): the "+ 2" looks off — the alpha plane is indexed per pixel
				//elsewhere (parseSubsprite uses alphaoffset + inoffset) and this reads past
				//the plane for the last two pixels; confirm against the game client
				imgdata[outoffset + 3] = alpha ? buf.readUInt8(alphaoffset + inoffset + 2) : 255;
			}
		}
		spriteimgs.push({
			x: 0,
			y: 0,
			fullwidth: width,
			fullheight: height,
			img: makeImageData(imgdata, width, height)
		});
	}
	return spriteimgs;
}
//Parses an uncompressed palette-based TGA file into a SubImageData.
//Only 8bpp indexed images with a 24bpp colormap and a zero image descriptor are accepted;
//anything else throws. Pixels equal to magenta (255,0,255) become fully transparent.
export function parseTgaSprite(file: Buffer) {
	let str = new Stream(file);
	let idlength = str.readUByte();
	let colormaptype = str.readUByte();
	//these header fields are read only to advance the stream
	let datatypecode = str.readUByte();
	let colormapoffset = str.readUShort(false);
	let colormaplen = str.readUShort(false);
	let colormapdepth = str.readUByte();
	let originx = str.readUShort(false);
	let originy = str.readUShort(false);
	let width = str.readUShort(false);
	let height = str.readUShort(false);
	let bpp = str.readUByte();
	let imgdescr = str.readUByte();
	str.skip(idlength);//possible text content
	if (colormaptype != 1 || bpp != 8) { throw new Error("only palette based uncompressed TGA supported"); }
	if (colormapdepth != 24) { throw new Error("only 24bpp rgb TGA supported"); }
	if (imgdescr != 0) { throw new Error("no fancy TGA's allowed"); }
	let palette = str.readBuffer(colormaplen * 3);
	let imgdata = new Uint8ClampedArray(width * height * 4);
	//NOTE(review): descriptor 0 means bottom-left origin per the TGA spec while rows are
	//filled top-down here — confirm the intended orientation
	for (let y = 0; y < height; y++) {
		for (let x = 0; x < width; x++) {
			let outoffset = x * 4 + y * width * 4;
			let pxindex = str.readUByte();
			let paletteoffset = pxindex * 3;
			//bgr->rgb flip!!
			imgdata[outoffset + 0] = palette[paletteoffset + 2];
			imgdata[outoffset + 1] = palette[paletteoffset + 1];
			imgdata[outoffset + 2] = palette[paletteoffset + 0];
			imgdata[outoffset + 3] = 255;
			//jagex treats 255,0,255 as transparent
			if (imgdata[outoffset + 0] == 255 && imgdata[outoffset + 1] == 0 && imgdata[outoffset + 2] == 255) {
				imgdata[outoffset + 0] = 0;
				imgdata[outoffset + 1] = 0;
				imgdata[outoffset + 2] = 0;
				imgdata[outoffset + 3] = 0;
			}
		}
	}
	//bugfix: eof is a method (cf. file.eof() in parseLegacySprite); the previous
	//`!str.eof` tested the function object itself and could never warn
	if (!str.eof()) {
		console.warn("didn't parse TGA sprite to completion");
	}
	let r: SubImageData = {
		x: originx,
		y: originy,
		fullwidth: width,
		fullheight: height,
		img: makeImageData(imgdata, width, height)
	};
	return r;
}
//Computes a crc32 over a sprite's pixels, first forcing every 0 blue channel to 1
//(0 blue isn't possible in-game). Works on a copy; the input image is untouched.
export function spriteHash(img: ImageData) {
	const pixels = img.data.slice();
	//blue channel lives at offset 2 within each rgba quad
	for (let blue = 2; blue < pixels.length; blue += 4) {
		if (pixels[blue] == 0) { pixels[blue] = 1; }
	}
	return crc32(pixels);
}

View File

@@ -1,99 +1,100 @@
//named ids for the cache's major indices
export const cacheMajors = {
	framemaps: 1,
	config: 2,
	interfaces: 3,
	mapsquares: 5,
	oldmodels: 7,
	sprites: 8,
	clientscript: 12,
	fontmetricsOld: 13,
	sounds: 14,
	objects: 16,
	enums: 17,
	npcs: 18,
	items: 19,
	sequences: 20,
	spotanims: 21,
	structs: 22,
	quickchat: 24,
	materials: 26,
	particles: 27,
	worldmap: 23,
	music: 40,
	models: 47,
	frames: 48,
	//texture majors for the different engine eras/encodings
	texturesOldPng: 9,
	texturesOldCompoundPng: 37,
	textures2015Png: 43,
	textures2015CompoundPng: 44,
	textures2015Dds: 45,
	textures2015CompoundPngMips: 46,
	textures2015CompoundDds: 50,
	textures2015PngMips: 51,
	texturesDds: 52,
	texturesPng: 53,
	texturesBmp: 54,
	texturesKtx: 55,
	skeletalAnims: 56,
	achievements: 57,
	fontmetrics: 58,
	cutscenes: 66,
	//255 is the meta index that indexes the other majors
	index: 255
} as const;
//represents the largest build number that this application is aware of
//is used as default value when a cache is considered "current"
//only needs to be updated when backward incompatible code paths are added
export const latestBuildNumber = 940;
//file ids within a mapsquare group
export const cacheMapFiles = {
	locations: 0,
	squares: 3,
	squaresWater: 4,
	square_nxt: 5,
	env: 6
} as const;
//group ids within the config major
export const cacheConfigPages = {
	mapunderlays: 1,
	identityKit: 3,
	mapoverlays: 4,
	params: 11,
	environments: 29,
	animgroups: 32,
	mapscenes: 34,
	maplabels: 36,
	dbtables: 40,
	dbrows: 41,
	varplayer: 60,
	varnpc: 61,
	varclient: 62,
	varworld: 63,
	varregion: 64,
	varobject: 65,
	varclan: 66,
	varclansettings: 67,
	varcampaign: 68,
	varplayergroup: 75,
	varbits: 69,
	//used before 488 (feb 2008)
	locs_old: 6,
	npcs_old: 9,
	items_old: 10,
	spotanim_old: 13
} as const;
export const lastLegacyBuildnr = 377;
//unclear if there ended up being overlap with (public) rs2 since this was 12 years after rs2 release
//first known rs2 is 254
//TODO apparently there was some overlap with rs2 beta caches which are technically not possible to support because of this
//named ids for the cache's major indices
export const cacheMajors = {
	framemaps: 1,
	config: 2,
	interfaces: 3,
	mapsquares: 5,
	oldmodels: 7,
	sprites: 8,
	clientscript: 12,
	fontmetricsOld: 13,
	sounds: 14,
	objects: 16,
	enums: 17,
	npcs: 18,
	items: 19,
	sequences: 20,
	spotanims: 21,
	structs: 22,
	quickchat: 24,
	materials: 26,
	particles: 27,
	worldmap: 23,
	music: 40,
	models: 47,
	frames: 48,
	//texture majors for the different engine eras/encodings
	texturesOldPng: 9,
	texturesOldCompoundPng: 37,
	textures2015Png: 43,
	textures2015CompoundPng: 44,
	textures2015Dds: 45,
	textures2015CompoundPngMips: 46,
	textures2015CompoundDds: 50,
	textures2015PngMips: 51,
	texturesDds: 52,
	texturesPng: 53,
	texturesBmp: 54,
	texturesKtx: 55,
	skeletalAnims: 56,
	achievements: 57,
	fontmetrics: 58,
	//added with font support, presumably vector font data — confirm contents
	vectorfonts: 59,
	cutscenes: 66,
	//255 is the meta index that indexes the other majors
	index: 255
} as const;
//represents the largest build number that this application is aware of
//is used as default value when a cache is considered "current"
//only needs to be updated when backward incompatible code paths are added
export const latestBuildNumber = 940;
//file ids within a mapsquare group
export const cacheMapFiles = {
	locations: 0,
	squares: 3,
	squaresWater: 4,
	square_nxt: 5,
	env: 6
} as const;
//group ids within the config major
export const cacheConfigPages = {
	mapunderlays: 1,
	identityKit: 3,
	mapoverlays: 4,
	params: 11,
	environments: 29,
	animgroups: 32,
	mapscenes: 34,
	maplabels: 36,
	dbtables: 40,
	dbrows: 41,
	varplayer: 60,
	varnpc: 61,
	varclient: 62,
	varworld: 63,
	varregion: 64,
	varobject: 65,
	varclan: 66,
	varclansettings: 67,
	varcampaign: 68,
	varplayergroup: 75,
	varbits: 69,
	//used before 488 (feb 2008)
	locs_old: 6,
	npcs_old: 9,
	items_old: 10,
	spotanim_old: 13
} as const;
export const lastLegacyBuildnr = 377;
//unclear if there ended up being overlap with (public) rs2 since this was 12 years after rs2 release
//first known rs2 is 254
//TODO apparently there was some overlap with rs2 beta caches which are technically not possible to support because of this
export const lastClassicBuildnr = 235;

View File

@@ -1,253 +1,253 @@
import { CacheFileSource, CacheIndex, SubFile } from "../cache";
import { GameCacheLoader } from "../cache/sqlite";
import { FileRange, getOrInsert } from "../utils";
import { ScriptFS, ScriptOutput } from "../scriptrunner";
import { cacheFileDecodeModes, DecodeMode, DecodeModeFactory } from "./filetypes";
//Dumps a range of cache files to `outdir` using the decode mode named in args.mode,
//optionally batching many subfiles into combined output files. With args.edit set it
//then waits for the user, reads the (edited) files back and writes them into the cache.
export async function extractCacheFiles(output: ScriptOutput, outdir: ScriptFS, source: CacheFileSource, args: { batched: boolean, batchlimit: number, mode: string, files: FileRange[], edit: boolean, skipread: boolean }, decoderflags: Record<string, string>) {
	let modeconstr: DecodeModeFactory = cacheFileDecodeModes[args.mode];
	if (!modeconstr) { throw new Error("unknown mode"); }
	let flags = { ...decoderflags };
	if (args.batched || args.batchlimit != -1) { flags.batched = "true"; }
	let mode = modeconstr(flags);
	await mode.prepareDump(outdir, source);
	let batchMaxFiles = args.batchlimit;
	let batchSubfile = args.batched;
	let ranges = args.files;
	//resolve logical ranges to concrete (index, subfile) pairs, sorted so files
	//from the same archive are processed consecutively
	let allfiles = (await Promise.all(ranges.map(q => mode.logicalRangeToFiles(source, q.start, q.end))))
		.flat()
		.sort((a, b) => a.index.major != b.index.major ? a.index.major - b.index.major : a.index.minor != b.index.minor ? a.index.minor - b.index.minor : a.subindex - b.subindex);
	if (!args.skipread) {
		//cache the last decoded archive since consecutive files usually share it
		let lastarchive: null | { index: CacheIndex, subfiles: SubFile[], error: Error | null } = null;
		let currentBatch: { name: string, startIndex: CacheIndex, arch: SubFile[], outputs: (string | Buffer)[], batchchunknr: number } | null = null;
		//writes out the pending batch file (if any) and clears it
		let flushbatch = () => {
			if (currentBatch) {
				//return promise instead of async function so we only switch stacks when actually doing anything
				return (async () => {
					let filename = `${args.mode}-${currentBatch.startIndex.major}_${currentBatch.startIndex.minor}.batch`;
					if (batchMaxFiles != -1) { filename += "." + currentBatch.batchchunknr; }
					filename += `.${mode.ext}`;
					//NOTE(review): this writeFile is not awaited (unlike the non-batched path
					//below), the write may still be in flight when the batch is cleared
					outdir.writeFile(filename, mode.combineSubs(currentBatch.outputs));
					currentBatch = null;
				})();
			}
		}
		for (let fileid of allfiles) {
			if (output.state != "running") { break; }
			let arch: SubFile[];
			if (lastarchive && lastarchive.index == fileid.index) {
				arch = lastarchive.subfiles;
			} else {
				let err: Error | null = null;
				try {
					arch = await source.getFileArchive(fileid.index);
				} catch (e) {
					err = e;
					arch = [];
				}
				lastarchive = { index: fileid.index, subfiles: arch, error: err };
			}
			let file = arch[fileid.subindex];
			if (!file) {
				output.log(`skipped ${mode.fileToLogical(source, fileid.index.major, fileid.index.minor, fileid.subindex).join(".")} due to error: ${lastarchive.error}`);
				continue;
			}
			let logicalid = mode.fileToLogical(source, fileid.index.major, fileid.index.minor, file.fileid);
			try {
				var res = mode.read(file.buffer, logicalid, source);
				if (res instanceof Promise) { res = await res; }
			} catch (e) {
				output.log(`file ${logicalid.join(".")}: ${e}`);
				continue;
			}
			if (batchSubfile || batchMaxFiles != -1) {
				//start a new batch when none is open, the current one is full, or (in
				//per-archive mode) the source archive changed
				let maxedbatchsize = currentBatch && batchMaxFiles != -1 && currentBatch.outputs.length >= batchMaxFiles;
				let newarch = currentBatch && currentBatch.arch != arch
				if (!currentBatch || maxedbatchsize || (batchSubfile && newarch)) {
					let nextbatchchunknr = (newarch || !maxedbatchsize || !currentBatch ? 0 : currentBatch.batchchunknr + 1);
					let p = flushbatch();
					if (p) { await p; }
					currentBatch = {
						name: "",
						startIndex: fileid.index,
						arch,
						outputs: [],
						batchchunknr: nextbatchchunknr
					};
				}
				currentBatch.outputs.push(res);
			} else {
				let filename = `${args.mode}${logicalid.length == 0 ? "" : "-" + logicalid.join("_")}.${mode.ext}`;
				await outdir.writeFile(filename, res);
			}
		}
		//NOTE(review): the returned promise is not awaited, the final batch may not be
		//fully written when "done" is logged
		flushbatch();
	}
	if (args.edit) {
		output.log("press enter to save edits");
		await new Promise<any>(d => process.stdin.once('data', d));
		let lastarchive: null | { index: CacheIndex, subfiles: SubFile[], error: Error | null } = null;
		//writes the currently cached archive (with edited buffers) back into the cache
		let archedited = () => {
			if (!(source instanceof GameCacheLoader)) { throw new Error("can only do this on file source of type gamecacheloader"); }
			if (lastarchive) {
				console.log("writing archive", lastarchive.index.major, lastarchive.index.minor, "files", lastarchive.subfiles.length);
				console.log(lastarchive.index);
				// let arch = new Archive(lastarchive.subfiles.map(q => q.buffer));
				// arch.forgecrc(lastarchive.index.uncompressed_crc, lastarchive.index.subindices.indexOf(3), 10);
				// return source.writeFile(lastarchive.index.major, lastarchive.index.minor, arch.packSqlite());
				return source.writeFileArchive(lastarchive.index.major, lastarchive.index.minor, lastarchive.subfiles.map(q => q.buffer));
			}
		}
		for (let fileid of allfiles) {
			let arch: SubFile[];
			if (lastarchive && lastarchive.index == fileid.index) {
				arch = lastarchive.subfiles;
			} else {
				//flush the previous archive before loading the next one
				await archedited();
				arch = await source.getFileArchive(fileid.index);
				lastarchive = { index: fileid.index, subfiles: arch, error: null };
			}
			let logicalid = mode.fileToLogical(source, fileid.index.major, fileid.index.minor, arch[fileid.subindex].fileid);
			let newfile = await outdir.readFileBuffer(`${args.mode}-${logicalid.join("_")}.${mode.ext}`);
			arch[fileid.subindex].buffer = await mode.write(newfile, logicalid, source);
		}
		await archedited();
	}
	output.log("done");
}
//Writes previously extracted (and possibly edited) files back into the cache.
//Input files are matched by name pattern to a decode mode, grouped per archive,
//merged with the archive's existing subfiles, then written out per archive.
export async function writeCacheFiles(output: ScriptOutput, source: CacheFileSource, inputdir: ScriptFS | undefined, inputfiles: { name: string, file: Buffer }[]) {
	//decode modes are constructed lazily and reused across files
	let cachedmodes: Record<string, DecodeMode> = {};
	//major -> minor -> pending subfiles for that archive
	let incompletearchs: Map<number, Map<number, { fetchsiblings: boolean, files: { subid: number, file: Buffer }[] }>> = new Map();
	let getmode = async (str: string) => {
		let mode = cachedmodes[str]
		if (!mode) {
			let modecontr = cacheFileDecodeModes[str as keyof typeof cacheFileDecodeModes];
			if (!modecontr) { throw new Error(`cache decode mode "${str}" not recognized`); }
			mode = cacheFileDecodeModes[str as keyof typeof cacheFileDecodeModes]({});
			cachedmodes[str] = mode;
			await mode.prepareWrite(source);
		}
		return mode;
	}
	//returns (creating if needed) the pending group for an archive
	let getarch = (major: number, minor: number, mode: DecodeMode, fetchsiblings = mode.usesArchieves) => {
		let majormap = getOrInsert(incompletearchs, major, () => new Map());
		let group = getOrInsert(majormap, minor, () => ({ fetchsiblings, files: [] }));
		return group;
	}
	let processfile = async (filename: string, file: Buffer) => {
		//single file named like "<mode>-<id>_<id>.<ext>"
		let singlematch = filename.match(/(^|\/)(\w+)-([\d_]+)\.(\w+)$/);
		if (singlematch) {
			let logicalid = singlematch[3].split(/_/g).map(q => +q);
			let mode = await getmode(singlematch[2]);
			let archid = mode.logicalToFile(source, logicalid);
			let arch = getarch(archid.major, archid.minor, mode);
			let buf = await mode.write(file, logicalid, source);
			arch.files.push({ subid: archid.subid, file: buf });
			return;
		}
		//batched json file named like "<mode>-<id>.batch.json", one entry per subfile
		let batchjson = filename.match(/(^|\/)(\w+)-([\d_]+)\.batch\.json$/);
		if (batchjson) {
			let mode = await getmode(batchjson[2]);
			let raw: { files: any[] } = JSON.parse(file.toString("utf-8"));
			if (!mode.parser) { throw new Error(`batch files only supported for json based modes, mode ${batchjson[2]} does not have a json parser`); }
			for (let file of raw.files) {
				let archid = mode.logicalToFile(source, file.$fileid);
				let arch = getarch(archid.major, archid.minor, mode);
				let buf = mode.parser!.write(file, source.getDecodeArgs());
				arch.files.push({ subid: archid.subid, file: buf });
			}
			return;
		}
		output.log("can't interpret file: " + filename);
	}
	//recursively walks a directory tree, feeding every file into processfile
	let processdir = async (inputdir: ScriptFS, node: string) => {
		let files = await inputdir.readDir(node);
		let base = (node == "." ? "" : node + "/")
		for (let file of files) {
			//ignore dotfiles
			if (file.name.match(/(^|\/)\.[^\/]*$/)) { continue; }
			let filename = base + file.name;
			if (file.kind == "file") { await processfile(filename, await inputdir.readFileBuffer(filename)); }
			if (file.kind == "directory") { await processdir(inputdir, filename); }
		}
	}
	if (inputdir) { await processdir(inputdir, "."); }
	for (let file of inputfiles) {
		await processfile(file.name, file.file);
	}
	for (let [major, majormap] of incompletearchs) {
		let indexfile = await source.getCacheIndex(major);
		for (let [minor, group] of majormap) {
			let index = indexfile[minor] as CacheIndex | undefined;
			let prevarch: SubFile[] = [];
			if (index && group.fetchsiblings) {
				prevarch = await source.getFileArchive(index);
			}
			let newfiles = group.files;
			newfiles.sort((a, b) => a.subid - b.subid);
			//merge the sorted new files with the archive's existing subfiles;
			//a new file with a matching id replaces the old one
			let p = 0, a = 0;
			let fileids: number[] = [];
			let files: Buffer[] = [];
			while (true) {
				let hasold = p < prevarch.length;
				let hasnew = a < newfiles.length;
				if (hasnew && (!hasold || newfiles[a].subid <= prevarch[p].fileid)) {
					fileids.push(newfiles[a].subid);
					files.push(newfiles[a].file);
					if (hasold && prevarch[p].fileid == newfiles[a].subid) {
						p++;
					}
					a++;
				} else if (hasold) {
					fileids.push(prevarch[p].fileid);
					files.push(prevarch[p].buffer);
					p++;
				} else {
					break;
				}
			}
			//the merged file id list must match the index exactly, otherwise the
			//index file itself would need rewriting
			let matches = true;
			if (!index) {
				output.log(`group ${major}.${minor} does not have an index entry, writing anyway`);
			} else if (files.length != index.subindices.length) {
				matches = false;
			} else {
				for (let a = 0; a < files.length; a++) {
					if (fileids[a] != index.subindices[a]) {
						matches = false;
					}
				}
			}
			if (!matches) {
				throw new Error("tried to replace archive with different subfile ids, need to rewrite index file to make this work");
			}
			console.log("writing", major, minor, fileids);
			await source.writeFileArchive(major, minor, files);
		}
	}
}
import { CacheFileSource, CacheIndex, SubFile } from "../cache";
import { GameCacheLoader } from "../cache/sqlite";
import { FileRange, getOrInsert } from "../utils";
import { ScriptFS, ScriptOutput } from "../scriptrunner";
import { cacheFileDecodeModes, DecodeMode, DecodeModeFactory } from "./filetypes";
//Dumps a range of cache files to `outdir` using the decode mode named in args.mode,
//optionally batching many subfiles into combined output files. With args.edit set it
//then waits for the user, reads the (edited) files back and writes them into the cache.
export async function extractCacheFiles(output: ScriptOutput, outdir: ScriptFS, source: CacheFileSource, args: { batched: boolean, batchlimit: number, mode: string, files: FileRange[], edit: boolean, skipread: boolean }, decoderflags: Record<string, string>) {
	let modeconstr: DecodeModeFactory = cacheFileDecodeModes[args.mode];
	if (!modeconstr) { throw new Error("unknown mode"); }
	let flags = { ...decoderflags };
	if (args.batched || args.batchlimit != -1) { flags.batched = "true"; }
	let mode = modeconstr(flags);
	await mode.prepareDump(outdir, source);
	let batchMaxFiles = args.batchlimit;
	let batchSubfile = args.batched;
	let ranges = args.files;
	//resolve logical ranges to concrete (index, subfile) pairs, sorted so files
	//from the same archive are processed consecutively
	let allfiles = (await Promise.all(ranges.map(q => mode.logicalRangeToFiles(source, q.start, q.end))))
		.flat()
		.sort((a, b) => a.index.major != b.index.major ? a.index.major - b.index.major : a.index.minor != b.index.minor ? a.index.minor - b.index.minor : a.subindex - b.subindex);
	if (!args.skipread) {
		//cache the last decoded archive since consecutive files usually share it
		let lastarchive: null | { index: CacheIndex, subfiles: SubFile[], error: Error | null } = null;
		let currentBatch: { name: string, startIndex: CacheIndex, arch: SubFile[], outputs: (string | Buffer)[], batchchunknr: number } | null = null;
		//writes out the pending batch file (if any) and clears it
		let flushbatch = () => {
			if (currentBatch) {
				//return promise instead of async function so we only switch stacks when actually doing anything
				return (async () => {
					let filename = `${args.mode}-${currentBatch.startIndex.major}_${currentBatch.startIndex.minor}.batch`;
					if (batchMaxFiles != -1) { filename += "." + currentBatch.batchchunknr; }
					filename += `.${mode.ext}`;
					//bugfix: await the write (matching the non-batched path) so the batch
					//file is fully on disk before the batch is considered flushed
					await outdir.writeFile(filename, mode.combineSubs(currentBatch.outputs));
					currentBatch = null;
				})();
			}
		}
		for (let fileid of allfiles) {
			if (output.state != "running") { break; }
			let arch: SubFile[];
			if (lastarchive && lastarchive.index == fileid.index) {
				arch = lastarchive.subfiles;
			} else {
				let err: Error | null = null;
				try {
					arch = await source.getFileArchive(fileid.index);
				} catch (e) {
					err = e;
					arch = [];
				}
				lastarchive = { index: fileid.index, subfiles: arch, error: err };
			}
			let file = arch[fileid.subindex];
			if (!file) {
				output.log(`skipped ${mode.fileToLogical(source, fileid.index.major, fileid.index.minor, fileid.subindex).join(".")} due to error: ${lastarchive.error}`);
				continue;
			}
			let logicalid = mode.fileToLogical(source, fileid.index.major, fileid.index.minor, file.fileid);
			try {
				var res = mode.read(file.buffer, logicalid, source);
				if (res instanceof Promise) { res = await res; }
			} catch (e) {
				output.log(`file ${logicalid.join(".")}: ${e}`);
				continue;
			}
			if (batchSubfile || batchMaxFiles != -1) {
				//start a new batch when none is open, the current one is full, or (in
				//per-archive mode) the source archive changed
				let maxedbatchsize = currentBatch && batchMaxFiles != -1 && currentBatch.outputs.length >= batchMaxFiles;
				let newarch = currentBatch && currentBatch.arch != arch
				if (!currentBatch || maxedbatchsize || (batchSubfile && newarch)) {
					let nextbatchchunknr = (newarch || !maxedbatchsize || !currentBatch ? 0 : currentBatch.batchchunknr + 1);
					let p = flushbatch();
					if (p) { await p; }
					currentBatch = {
						name: "",
						startIndex: fileid.index,
						arch,
						outputs: [],
						batchchunknr: nextbatchchunknr
					};
				}
				currentBatch.outputs.push(res);
			} else {
				let filename = `${args.mode}${logicalid.length == 0 ? "" : "-" + logicalid.join("_")}.${mode.ext}`;
				await outdir.writeFile(filename, res);
			}
		}
		//bugfix: await the final flush so the last batch is written before returning
		await flushbatch();
	}
	if (args.edit) {
		output.log("press enter to save edits");
		await new Promise<any>(d => process.stdin.once('data', d));
		let lastarchive: null | { index: CacheIndex, subfiles: SubFile[], error: Error | null } = null;
		//writes the currently cached archive (with edited buffers) back into the cache
		let archedited = () => {
			if (!(source instanceof GameCacheLoader)) { throw new Error("can only do this on file source of type gamecacheloader"); }
			if (lastarchive) {
				console.log("writing archive", lastarchive.index.major, lastarchive.index.minor, "files", lastarchive.subfiles.length);
				console.log(lastarchive.index);
				// let arch = new Archive(lastarchive.subfiles.map(q => q.buffer));
				// arch.forgecrc(lastarchive.index.uncompressed_crc, lastarchive.index.subindices.indexOf(3), 10);
				// return source.writeFile(lastarchive.index.major, lastarchive.index.minor, arch.packSqlite());
				return source.writeFileArchive(lastarchive.index.major, lastarchive.index.minor, lastarchive.subfiles.map(q => q.buffer));
			}
		}
		for (let fileid of allfiles) {
			let arch: SubFile[];
			if (lastarchive && lastarchive.index == fileid.index) {
				arch = lastarchive.subfiles;
			} else {
				//flush the previous archive before loading the next one
				await archedited();
				arch = await source.getFileArchive(fileid.index);
				lastarchive = { index: fileid.index, subfiles: arch, error: null };
			}
			let logicalid = mode.fileToLogical(source, fileid.index.major, fileid.index.minor, arch[fileid.subindex].fileid);
			let newfile = await outdir.readFileBuffer(`${args.mode}-${logicalid.join("_")}.${mode.ext}`);
			arch[fileid.subindex].buffer = await mode.write(newfile, logicalid, source);
		}
		await archedited();
	}
	output.log("done");
}
// Re-encodes previously extracted/edited files and writes them back into the cache.
// Input comes from a ScriptFS directory tree and/or an explicit list of named buffers.
// File names encode the decode mode and logical id ("<mode>-<id_id_...>.<ext>" or
// "<mode>-<id>.batch.json"); each file is re-encoded with its mode, grouped per
// archive (major.minor), merged with the archive's untouched sibling subfiles,
// and written back with writeFileArchive.
export async function writeCacheFiles(output: ScriptOutput, source: CacheFileSource, inputdir: ScriptFS | undefined, inputfiles: { name: string, file: Buffer }[]) {
	// one prepared DecodeMode instance per mode name (prepareWrite may be expensive)
	let cachedmodes: Record<string, DecodeMode> = {};
	// major -> minor -> replacement subfiles collected for that archive
	let incompletearchs: Map<number, Map<number, { fetchsiblings: boolean, files: { subid: number, file: Buffer }[] }>> = new Map();
	// lazily instantiate and prepare the decode mode for a given mode name
	let getmode = async (str: string) => {
		let mode = cachedmodes[str]
		if (!mode) {
			let modecontr = cacheFileDecodeModes[str as keyof typeof cacheFileDecodeModes];
			if (!modecontr) { throw new Error(`cache decode mode "${str}" not recognized`); }
			mode = cacheFileDecodeModes[str as keyof typeof cacheFileDecodeModes]({});
			cachedmodes[str] = mode;
			await mode.prepareWrite(source);
		}
		return mode;
	}
	// get (or create) the pending subfile group for an archive; fetchsiblings
	// controls whether the archive's current subfiles get merged in later
	let getarch = (major: number, minor: number, mode: DecodeMode, fetchsiblings = mode.usesArchieves) => {
		let majormap = getOrInsert(incompletearchs, major, () => new Map());
		let group = getOrInsert(majormap, minor, () => ({ fetchsiblings, files: [] }));
		return group;
	}
	// parse one input file name, re-encode its contents and queue it for writing
	let processfile = async (filename: string, file: Buffer) => {
		// single-file form: "<mode>-<logicalid parts joined by _>.<ext>"
		let singlematch = filename.match(/(^|\/)(\w+)-([\d_]+)\.(\w+)$/);
		if (singlematch) {
			let logicalid = singlematch[3].split(/_/g).map(q => +q);
			let mode = await getmode(singlematch[2]);
			let archid = mode.logicalToFile(source, logicalid);
			let arch = getarch(archid.major, archid.minor, mode);
			let buf = await mode.write(file, logicalid, source);
			arch.files.push({ subid: archid.subid, file: buf });
			return;
		}
		// batch form: one json file holding many entries, each tagged with $fileid
		let batchjson = filename.match(/(^|\/)(\w+)-([\d_]+)\.batch\.json$/);
		if (batchjson) {
			let mode = await getmode(batchjson[2]);
			let raw: { files: any[] } = JSON.parse(file.toString("utf-8"));
			if (!mode.parser) { throw new Error(`batch files only supported for json based modes, mode ${batchjson[2]} does not have a json parser`); }
			for (let file of raw.files) {
				let archid = mode.logicalToFile(source, file.$fileid);
				let arch = getarch(archid.major, archid.minor, mode);
				let buf = mode.parser!.write(file, source.getDecodeArgs());
				arch.files.push({ subid: archid.subid, file: buf });
			}
			return;
		}
		output.log("can't interpret file: " + filename);
	}
	// recursively walk a ScriptFS directory and process every regular file
	let processdir = async (inputdir: ScriptFS, node: string) => {
		let files = await inputdir.readDir(node);
		let base = (node == "." ? "" : node + "/")
		for (let file of files) {
			//ignore dotfiles
			if (file.name.match(/(^|\/)\.[^\/]*$/)) { continue; }
			let filename = base + file.name;
			if (file.kind == "file") { await processfile(filename, await inputdir.readFileBuffer(filename)); }
			if (file.kind == "directory") { await processdir(inputdir, filename); }
		}
	}
	if (inputdir) { await processdir(inputdir, "."); }
	for (let file of inputfiles) {
		await processfile(file.name, file.file);
	}
	// second phase: merge the queued subfiles with the existing archives and write
	for (let [major, majormap] of incompletearchs) {
		let indexfile = await source.getCacheIndex(major);
		for (let [minor, group] of majormap) {
			let index = indexfile[minor] as CacheIndex | undefined;
			let prevarch: SubFile[] = [];
			if (index && group.fetchsiblings) {
				prevarch = await source.getFileArchive(index);
			}
			let newfiles = group.files;
			newfiles.sort((a, b) => a.subid - b.subid);
			// two-pointer merge of the old archive (p) with the new files (a),
			// both sorted by subfile id; a new file replaces an old one with the
			// same id, otherwise ids are interleaved in sorted order
			let p = 0, a = 0;
			let fileids: number[] = [];
			let files: Buffer[] = [];
			while (true) {
				let hasold = p < prevarch.length;
				let hasnew = a < newfiles.length;
				if (hasnew && (!hasold || newfiles[a].subid <= prevarch[p].fileid)) {
					fileids.push(newfiles[a].subid);
					files.push(newfiles[a].file);
					if (hasold && prevarch[p].fileid == newfiles[a].subid) {
						// same id exists in the old archive, skip it (replaced by the new file)
						p++;
					}
					a++;
				} else if (hasold) {
					fileids.push(prevarch[p].fileid);
					files.push(prevarch[p].buffer);
					p++;
				} else {
					break;
				}
			}
			// sanity check: the merged id list must exactly match the index's
			// subindices, otherwise the index file itself would need rewriting
			let matches = true;
			if (!index) {
				output.log(`group ${major}.${minor} does not have an index entry, writing anyway`);
			} else if (files.length != index.subindices.length) {
				matches = false;
			} else {
				for (let a = 0; a < files.length; a++) {
					if (fileids[a] != index.subindices[a]) {
						matches = false;
					}
				}
			}
			if (!matches) {
				throw new Error("tried to replace archive with different subfile ids, need to rewrite index file to make this work");
			}
			console.log("writing", major, minor, fileids);
			await source.writeFileArchive(major, minor, files);
		}
	}
}

View File

File diff suppressed because it is too large Load Diff

View File

@@ -1,146 +1,153 @@
import { parseSprite, spriteHash } from "../3d/sprite";
import { CacheFileSource } from "../cache";
import { cacheMajors } from "../constants";
import { pixelsToDataUrl, sliceImage } from "../imgutils";
import { parse } from "../opdecoder";
// Serializable description of a bitmap font: per-charcode glyph placement in a
// sprite sheet plus the vertical metrics needed to lay out text.
export type ParsedFontJson = {
	// glyph lookup indexed by charcode; null for codes without a glyph
	characters: (FontCharacter | null)[],
	median: number,
	// vertical advance per line, also distance from line top to text baseline
	baseline: number,
	maxascent: number,
	maxdescent: number,
	scale: number,
	// content hash of the full sprite sheet (spriteHash)
	sheethash: number,
	sheetwidth: number,
	sheetheight: number,
	// sprite sheet image encoded as a data url
	sheet: string
}

// Placement and metrics of a single glyph inside the font's sprite sheet.
export type FontCharacter = {
	// the character itself (String.fromCharCode of its charcode index)
	name: string,
	// top-left corner of the glyph within the sheet
	x: number,
	y: number,
	width: number,
	height: number,
	// vertical offset added to the pen y position when drawing the glyph
	bearingy: number,
	// content hash of the glyph's pixels (spriteHash)
	hash: number
}
export async function loadFontMetrics(cache: CacheFileSource, buf: Buffer) {
let fontdata = parse.fontmetrics.read(buf, cache);
if (!fontdata.sprite) {
throw new Error("fontmetrics missing sprite data");
}
let sprite = await cache.getFileById(cacheMajors.sprites, fontdata.sprite.sourceid);
let imgs = parseSprite(sprite);
if (imgs.length != 1) {
throw new Error("fontmetrics sprite did not contain exactly 1 image");
}
let img = imgs[0];
if (img.fullwidth != fontdata.sprite.sheetwidth || img.fullheight != fontdata.sprite.sheetheight) {
throw new Error("fontmetrics sprite image dimensions do not match metadata");
}
let font: ParsedFontJson = {
characters: [],
median: fontdata.sprite.median,
baseline: fontdata.sprite.baseline,
maxascent: fontdata.sprite.maxascent,
maxdescent: fontdata.sprite.maxdescent,
scale: fontdata.sprite.scale,
sheethash: spriteHash(img.img),
sheetwidth: fontdata.sprite.sheetwidth,
sheetheight: fontdata.sprite.sheetheight,
sheet: await pixelsToDataUrl(img.img)
};
for (let i = 0; i < fontdata.sprite.positions.length; i++) {
let pos = fontdata.sprite.positions[i];
let size = fontdata.sprite.chars[i];
if (size.width === 0 || size.height === 0) {
font.characters.push(null);
continue;
}
let subimg = sliceImage(img.img, { x: pos.x, y: pos.y, width: size.width, height: size.height });
font.characters.push({
name: String.fromCharCode(i),
x: pos.x,
y: pos.y,
width: size.width,
height: size.height,
bearingy: size.bearingy,
hash: spriteHash(subimg)
});
}
return font;
}
/**
 * Computes the pixel bounding box of a text string rendered with the given font.
 * Width is the widest line; height starts at one full line (baseline+maxdescent)
 * and grows by one baseline per newline. Characters without a glyph are skipped.
 */
export function measureFontText(font: ParsedFontJson, text: string) {
	let height = font.baseline + font.maxdescent;
	let widest = 0;
	let cursor = 0;
	for (let i = 0; i < text.length; i++) {
		const code = text.charCodeAt(i);
		if (code === 0x0a) {
			// newline: next line starts one baseline further down
			height += font.baseline;
			cursor = 0;
		} else {
			const glyph = font.characters[code];
			if (glyph) {
				cursor += glyph.width;
				widest = Math.max(widest, cursor);
			}
		}
	}
	return { width: widest, height };
}
/**
 * Renders a text string into a freshly created canvas by blitting glyphs from
 * the font's sprite sheet image. The canvas is sized to the measured text
 * (minimum 1x1) and uniformly scaled by `scale`.
 */
export function fontTextCanvas(font: ParsedFontJson, sheet: HTMLImageElement, text: string, scale: number) {
	const size = measureFontText(font, text);
	const canvas = document.createElement("canvas");
	canvas.width = Math.max(1, size.width * scale);
	canvas.height = Math.max(1, size.height * scale);
	const ctx = canvas.getContext("2d")!;
	ctx.scale(scale, scale);
	let penx = 0;
	let peny = 0;
	for (let i = 0; i < text.length; i++) {
		if (text[i] == "\n") {
			// move the pen to the start of the next line
			peny += font.baseline;
			penx = 0;
			continue;
		}
		const glyph = font.characters[text.charCodeAt(i)];
		if (!glyph) { continue; }
		// bearingy shifts the glyph down relative to the line top
		ctx.drawImage(sheet, glyph.x, glyph.y, glyph.width, glyph.height, penx, peny + glyph.bearingy, glyph.width, glyph.height);
		penx += glyph.width;
	}
	return canvas;
}
// Tints a rendered text canvas with a solid color and optionally bakes in a
// 1px black drop shadow. Returns a new canvas; the input canvas is untouched.
export function composeTexts(cnv: HTMLCanvasElement, color: string, shadow: boolean) {
	let tmp = document.createElement("canvas");
	// reserve 1 extra pixel on each axis so the 1px shadow offset isn't clipped
	tmp.width = cnv.width + (shadow ? 1 : 0);
	tmp.height = cnv.height + (shadow ? 1 : 0);
	// gotto do some sorcery to colorize the font while preserving alpha because canvas "multiply" messes with alpha
	let ctx = tmp.getContext("2d")!;
	ctx.fillStyle = color;
	ctx.fillRect(0, 0, tmp.width, tmp.height);
	// multiply the flat color with the glyph pixels (this mangles alpha)
	ctx.globalCompositeOperation = "multiply";
	ctx.drawImage(cnv, 0, 0);
	// restore the original alpha channel by masking with the source image
	ctx.globalCompositeOperation = "destination-in";
	ctx.drawImage(cnv, 0, 0);
	if (shadow) {
		// redraw the canvas onto itself with a drop-shadow filter; "copy"
		// replaces the previous contents in a single pass
		ctx.filter = "drop-shadow(1px 1px 0px black)";
		ctx.globalCompositeOperation = "copy";
		ctx.drawImage(tmp, 0, 0);
	}
	return tmp;
}
import { parseSprite, spriteHash } from "../3d/sprite";
import { CacheFileSource } from "../cache";
import { cacheMajors } from "../constants";
import { pixelsToDataUrl, sliceImage } from "../imgutils";
import { parse } from "../opdecoder";
// Placement and metrics of a single glyph inside the font's sprite sheet.
export type FontCharacterJson = {
	// the character itself (String.fromCharCode(charcode))
	chr: string,
	// UTF-16 code unit this glyph represents (its index in the characters array)
	charcode: number,
	// top-left corner of the glyph within the sheet
	x: number,
	y: number,
	width: number,
	height: number,
	// vertical offset added to the pen y position when drawing the glyph
	bearingy: number,
	// content hash of the glyph's pixels (spriteHash)
	hash: number
}

// Serializable description of a bitmap font: per-charcode glyph placement in a
// sprite sheet plus the vertical metrics needed to lay out text.
export type ParsedFontJson = {
	// id of the fontmetrics cache file this font was loaded from
	fontid: number,
	// id of the sprite cache file holding the glyph sheet
	spriteid: number,
	// glyph lookup indexed by charcode; null for codes without a glyph
	characters: (FontCharacterJson | null)[],
	median: number,
	// vertical advance per line, also distance from line top to text baseline
	baseline: number,
	maxascent: number,
	maxdescent: number,
	scale: number,
	// content hash of the full sprite sheet (spriteHash)
	sheethash: number,
	sheetwidth: number,
	sheetheight: number,
	// sprite sheet as a data url (currently left empty by loadFontMetrics)
	sheet: string
}
export async function loadFontMetrics(cache: CacheFileSource, buf: Buffer, fontid: number) {
let fontdata = parse.fontmetrics.read(buf, cache);
if (!fontdata.sprite) {
throw new Error("fontmetrics missing sprite data");
}
let sprite = await cache.getFileById(cacheMajors.sprites, fontdata.sprite.sourceid);
let imgs = parseSprite(sprite);
if (imgs.length != 1) {
throw new Error("fontmetrics sprite did not contain exactly 1 image");
}
let img = imgs[0];
if (img.fullwidth != fontdata.sprite.sheetwidth || img.fullheight != fontdata.sprite.sheetheight) {
throw new Error("fontmetrics sprite image dimensions do not match metadata");
}
let font: ParsedFontJson = {
fontid: fontid,
spriteid: fontdata.sprite.sourceid,
characters: [],
median: fontdata.sprite.median,
baseline: fontdata.sprite.baseline,
maxascent: fontdata.sprite.maxascent,
maxdescent: fontdata.sprite.maxdescent,
scale: fontdata.sprite.scale,
sheethash: spriteHash(img.img),
sheetwidth: fontdata.sprite.sheetwidth,
sheetheight: fontdata.sprite.sheetheight,
// sheet: await pixelsToDataUrl(img.img)
sheet: ""
};
for (let i = 0; i < fontdata.sprite.positions.length; i++) {
let pos = fontdata.sprite.positions[i];
let size = fontdata.sprite.chars[i];
if (size.width === 0 || size.height === 0) {
font.characters.push(null);
continue;
}
let subimg = sliceImage(img.img, { x: pos.x, y: pos.y, width: size.width, height: size.height });
font.characters.push({
chr: String.fromCharCode(i),
charcode: i,
x: pos.x,
y: pos.y,
width: size.width,
height: size.height,
bearingy: size.bearingy,
hash: spriteHash(subimg)
});
}
return font;
}
/**
 * Computes the pixel bounding box of a text string rendered with the given font.
 * Width is the widest line; height starts at one full line (baseline+maxdescent)
 * and grows by one baseline per newline. Characters without a glyph are skipped.
 */
export function measureFontText(font: ParsedFontJson, text: string) {
	let height = font.baseline + font.maxdescent;
	let widest = 0;
	let cursor = 0;
	for (let i = 0; i < text.length; i++) {
		const code = text.charCodeAt(i);
		if (code === 0x0a) {
			// newline: next line starts one baseline further down
			height += font.baseline;
			cursor = 0;
		} else {
			const glyph = font.characters[code];
			if (glyph) {
				cursor += glyph.width;
				widest = Math.max(widest, cursor);
			}
		}
	}
	return { width: widest, height };
}
/**
 * Renders a text string into a freshly created canvas by blitting glyphs from
 * the font's sprite sheet image. The canvas is sized to the measured text
 * (minimum 1x1) and uniformly scaled by `scale`.
 */
export function fontTextCanvas(font: ParsedFontJson, sheet: HTMLImageElement, text: string, scale: number) {
	const size = measureFontText(font, text);
	const canvas = document.createElement("canvas");
	canvas.width = Math.max(1, size.width * scale);
	canvas.height = Math.max(1, size.height * scale);
	const ctx = canvas.getContext("2d")!;
	ctx.scale(scale, scale);
	let penx = 0;
	let peny = 0;
	for (let i = 0; i < text.length; i++) {
		if (text[i] == "\n") {
			// move the pen to the start of the next line
			peny += font.baseline;
			penx = 0;
			continue;
		}
		const glyph = font.characters[text.charCodeAt(i)];
		if (!glyph) { continue; }
		// bearingy shifts the glyph down relative to the line top
		ctx.drawImage(sheet, glyph.x, glyph.y, glyph.width, glyph.height, penx, peny + glyph.bearingy, glyph.width, glyph.height);
		penx += glyph.width;
	}
	return canvas;
}
// Tints a rendered text canvas with a solid color and optionally bakes in a
// 1px black drop shadow. Returns a new canvas; the input canvas is untouched.
export function composeTexts(cnv: HTMLCanvasElement, color: string, shadow: boolean) {
	let tmp = document.createElement("canvas");
	// reserve 1 extra pixel on each axis so the 1px shadow offset isn't clipped
	tmp.width = cnv.width + (shadow ? 1 : 0);
	tmp.height = cnv.height + (shadow ? 1 : 0);
	// gotto do some sorcery to colorize the font while preserving alpha because canvas "multiply" messes with alpha
	let ctx = tmp.getContext("2d")!;
	ctx.fillStyle = color;
	ctx.fillRect(0, 0, tmp.width, tmp.height);
	// multiply the flat color with the glyph pixels (this mangles alpha)
	ctx.globalCompositeOperation = "multiply";
	ctx.drawImage(cnv, 0, 0);
	// restore the original alpha channel by masking with the source image
	ctx.globalCompositeOperation = "destination-in";
	ctx.drawImage(cnv, 0, 0);
	if (shadow) {
		// redraw the canvas onto itself with a drop-shadow filter; "copy"
		// replaces the previous contents in a single pass
		ctx.filter = "drop-shadow(1px 1px 0px black)";
		ctx.globalCompositeOperation = "copy";
		ctx.drawImage(tmp, 0, 0);
	}
	return tmp;
}