* custom updater with toasts
* new state management + updated router route
* tauri-specific update route
* ref
* update in prod only
* change 'Install' to 'Update'
* fix tsconfig
* desktop tauri
* remove tauri patch
* tauri 1.5
* tauri 1.5
* use tauri script
* native-deps
* Rework preprep and tauri script to better support tauri 1.5
* Update to tauri 1.5.1
  - Update workspace and apps/desktop dependencies
  - Fix mustache import, @types/mustache is not compatible with ES imports
  - Replace arm64 with aarch64 in machineID, they should be treated the same and this simplifies the code
* Fix tauri updater not building due to missing key
  - Fix dmg background not being found
  - Generate an adhoc key for the tauri updater when it is enabled and the user is doing a prod build
* Fix ctrl+c/ctrl+v typo
* Normalize @tanstack/react-query version through workspace
  - Use undici in scripts instead of global fetch
  - Fix typecheck
* Fix linux prod and dev builds
  - Improve error handling in tauri.mjs
* Normalize dev deps in workspace
  - Improve linux shared libs setup
* Fix CI and server docker
* Fix windows
  - Remove superfluous envvar
* Attempt to fix server, mobile, deb and release updater
* Attempt to fix deb and mobile again
  - Fix typo on deb dependency
  - Enable release deb for aarch64-unknown-linux-gnu
* Github doesn't have arm runners
  - Fix typo in server Dockerfile
* Publish deb and updater artifacts
* remove version from asset name
* update commands
* log release
* Some logs on updater errors
* show updater errors on frontend
* fix desktop ui caching

---------

Co-authored-by: Vítor Vasconcellos <vasconcellos.dev@gmail.com>
Co-authored-by: Ericson Fogo Soares <ericson.ds999@gmail.com>
import * as fs from 'node:fs/promises'
import * as os from 'node:os'
import * as path from 'node:path'
import { env } from 'node:process'

import { extractTo } from 'archive-wasm/src/fs.mjs'

import {
	FFMPEG_SUFFFIX,
	FFMPEG_WORKFLOW,
	getConst,
	getSuffix,
	LIBHEIF_SUFFIX,
	LIBHEIF_WORKFLOW,
	PDFIUM_SUFFIX,
	PROTOC_SUFFIX,
} from './consts.mjs'
import {
	getGh,
	getGhArtifactContent,
	getGhReleasesAssets,
	getGhWorkflowRunArtifacts,
} from './github.mjs'
import { which } from './which.mjs'

const noop = () => {}

const __debug = env.NODE_ENV === 'debug'
const __osType = os.type()

// Github repos
const PDFIUM_REPO = 'bblanchon/pdfium-binaries'
const PROTOBUF_REPO = 'protocolbuffers/protobuf'
const SPACEDRIVE_REPO = 'spacedriveapp/spacedrive'

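// Each download helper below follows the same pattern: match a
// machine-specific suffix regex against GitHub release assets (or CI workflow
// artifacts), download the first match, extract it into `nativeDeps`, and
// retry with the next candidate on failure.
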
/**
 * Download and extract protobuf compiler binary
 * @param {string[]} machineId
 * @param {string} nativeDeps
 */
export async function downloadProtc(machineId, nativeDeps) {
	if (await which('protoc')) return

	console.log('Downloading protoc...')

	const protocSuffix = getSuffix(PROTOC_SUFFIX, machineId)
	if (protocSuffix == null) throw new Error('NO_PROTOC')

	let found = false
	for await (const release of getGhReleasesAssets(PROTOBUF_REPO)) {
		if (!protocSuffix.test(release.name)) continue
		try {
			await extractTo(await getGh(release.downloadUrl), nativeDeps, {
				chmod: 0o600,
				overwrite: true,
			})
			found = true
			break
		} catch (error) {
			console.warn('Failed to download protoc, retrying...')
			if (__debug) console.error(error)
		}
	}

	if (!found) throw new Error('NO_PROTOC')

	// cleanup
	await fs.unlink(path.join(nativeDeps, 'readme.txt')).catch(__debug ? console.error : noop)
}
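
// Example usage (sketch; `machineId` and `nativeDeps` normally come from the
// preprep entrypoint, the values here are illustrative):
//   await downloadProtc(['Linux', 'x86_64'], './native-deps')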

/**
 * Download and extract pdfium library for generating PDF thumbnails
 * @param {string[]} machineId
 * @param {string} nativeDeps
 */
export async function downloadPDFium(machineId, nativeDeps) {
	console.log('Downloading pdfium...')

	const pdfiumSuffix = getSuffix(PDFIUM_SUFFIX, machineId)
	if (pdfiumSuffix == null) throw new Error('NO_PDFIUM')

	let found = false
	for await (const release of getGhReleasesAssets(PDFIUM_REPO)) {
		if (!pdfiumSuffix.test(release.name)) continue
		try {
			await extractTo(await getGh(release.downloadUrl), nativeDeps, {
				chmod: 0o600,
				overwrite: true,
			})
			found = true
			break
		} catch (error) {
			console.warn('Failed to download pdfium, retrying...')
			if (__debug) console.error(error)
		}
	}

	if (!found) throw new Error('NO_PDFIUM')

	// cleanup
	const cleanup = [
		fs.rename(path.join(nativeDeps, 'LICENSE'), path.join(nativeDeps, 'LICENSE.pdfium')),
		...['args.gn', 'PDFiumConfig.cmake', 'VERSION'].map(file =>
			fs.unlink(path.join(nativeDeps, file)).catch(__debug ? console.error : noop)
		),
	]

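	// extractTo chmods extracted files to 0o600; bump the shared library to
	// 0o750 (presumably to restore conventional shared-library permissions
	// before bundling).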
	switch (__osType) {
		case 'Linux':
			cleanup.push(fs.chmod(path.join(nativeDeps, 'lib', 'libpdfium.so'), 0o750))
			break
		case 'Darwin':
			cleanup.push(fs.chmod(path.join(nativeDeps, 'lib', 'libpdfium.dylib'), 0o750))
			break
	}

	await Promise.all(cleanup)
}

/**
 * Download and extract ffmpeg libs for video thumbnails
 * @param {string[]} machineId
 * @param {string} nativeDeps
 * @param {string[]} branches
 */
export async function downloadFFMpeg(machineId, nativeDeps, branches) {
	const workflow = getConst(FFMPEG_WORKFLOW, machineId)
	if (workflow == null) {
		console.log('Checking FFmpeg...')
		if (await which('ffmpeg')) {
			// TODO: check that the ffmpeg version matches what we need
			return
		} else {
			throw new Error('NO_FFMPEG')
		}
	}

	console.log('Downloading FFmpeg...')

	const ffmpegSuffix = getSuffix(FFMPEG_SUFFFIX, machineId)
	if (ffmpegSuffix == null) throw new Error('NO_FFMPEG')

	let found = false
	for await (const artifact of getGhWorkflowRunArtifacts(SPACEDRIVE_REPO, workflow, branches)) {
		if (!ffmpegSuffix.test(artifact.name)) continue
		try {
			const data = await getGhArtifactContent(SPACEDRIVE_REPO, artifact.id)
			await extractTo(data, nativeDeps, {
				chmod: 0o600,
				recursive: true,
				overwrite: true,
			})
			found = true
			break
		} catch (error) {
			console.warn('Failed to download FFmpeg, retrying...')
			if (__debug) console.error(error)
		}
	}

	if (!found) throw new Error('NO_FFMPEG')
}
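
// When no FFmpeg CI workflow is configured for this machineId, the function
// falls back to accepting a system-wide ffmpeg on PATH (see the
// `workflow == null` branch above).
// Example usage (sketch; the branch list is illustrative):
//   await downloadFFMpeg(machineId, nativeDeps, ['main'])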

/**
 * Download and extract libheif libs for HEIF thumbnails
 * @param {string[]} machineId
 * @param {string} nativeDeps
 * @param {string[]} branches
 */
export async function downloadLibHeif(machineId, nativeDeps, branches) {
	const workflow = getConst(LIBHEIF_WORKFLOW, machineId)
	if (workflow == null) return

	console.log('Downloading LibHeif...')

	const libHeifSuffix = getSuffix(LIBHEIF_SUFFIX, machineId)
	if (libHeifSuffix == null) throw new Error('NO_LIBHEIF')

	let found = false
	for await (const artifact of getGhWorkflowRunArtifacts(SPACEDRIVE_REPO, workflow, branches)) {
		if (!libHeifSuffix.test(artifact.name)) continue
		try {
			const data = await getGhArtifactContent(SPACEDRIVE_REPO, artifact.id)
			await extractTo(data, nativeDeps, {
				chmod: 0o600,
				recursive: true,
				overwrite: true,
			})
			found = true
			break
		} catch (error) {
			console.warn('Failed to download LibHeif, retrying...')
			if (__debug) console.error(error)
		}
	}

	if (!found) throw new Error('NO_LIBHEIF')
}
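
// Unlike downloadFFMpeg, downloadLibHeif silently skips platforms without a
// configured workflow (the early `workflow == null` return) rather than
// falling back to a system binary.
// Example usage (sketch; values are illustrative):
//   await downloadLibHeif(machineId, nativeDeps, ['main'])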