diff --git a/.cargo/config.toml.mustache b/.cargo/config.toml.mustache index 0ff5b5d11..42bb1b1b8 100644 --- a/.cargo/config.toml.mustache +++ b/.cargo/config.toml.mustache @@ -2,48 +2,48 @@ {{#protoc}} PROTOC = "{{{protoc}}}" {{/protoc}} -{{#ffmpeg}} -FFMPEG_DIR = "{{{ffmpeg}}}" -{{/ffmpeg}} +{{^isLinux}} +FFMPEG_DIR = "{{{nativeDeps}}}" +{{/isLinux}} {{#isMacOS}} [target.x86_64-apple-darwin] -rustflags = ["-L", "{{{projectRoot}}}/target/Frameworks/lib"] +rustflags = ["-L", "{{{nativeDeps}}}/lib"] [target.x86_64-apple-darwin.heif] -rustc-link-search = ["{{{projectRoot}}}/target/Frameworks/lib"] +rustc-link-search = ["{{{nativeDeps}}}/lib"] rustc-link-lib = ["heif"] [target.aarch64-apple-darwin] -rustflags = ["-L", "{{{projectRoot}}}/target/Frameworks/lib"] +rustflags = ["-L", "{{{nativeDeps}}}/lib"] [target.aarch64-apple-darwin.heif] -rustc-link-search = ["{{{projectRoot}}}/target/Frameworks/lib"] +rustc-link-search = ["{{{nativeDeps}}}/lib"] rustc-link-lib = ["heif"] {{/isMacOS}} {{#isWin}} [target.x86_64-pc-windows-msvc] -rustflags = ["-L", "{{{projectRoot}}}\\target\\Frameworks\\lib"] +rustflags = ["-L", "{{{nativeDeps}}}\\lib"] [target.x86_64-pc-windows-msvc.heif] -rustc-link-search = ["{{{projectRoot}}}\\target\\Frameworks\\lib"] +rustc-link-search = ["{{{nativeDeps}}}\\lib"] rustc-link-lib = ["heif"] {{/isWin}} {{#isLinux}} [target.x86_64-unknown-linux-gnu] -rustflags = ["-L", "{{{projectRoot}}}/target/Frameworks/lib", "-C", "link-arg=-Wl,-rpath=${ORIGIN}/../lib/spacedrive"] +rustflags = ["-L", "{{{nativeDeps}}}/lib", "-C", "link-arg=-Wl,-rpath=${ORIGIN}/../lib/spacedrive"] [target.x86_64-unknown-linux-gnu.heif] -rustc-link-search = ["{{{projectRoot}}}/target/Frameworks/lib"] +rustc-link-search = ["{{{nativeDeps}}}/lib"] rustc-link-lib = ["heif"] [target.aarch64-unknown-linux-gnu] -rustflags = ["-L", "{{{projectRoot}}}/target/Frameworks/lib", "-C", "link-arg=-Wl,-rpath=${ORIGIN}/../lib/spacedrive"] +rustflags = ["-L", "{{{nativeDeps}}}/lib", "-C", 
"link-arg=-Wl,-rpath=${ORIGIN}/../lib/spacedrive"] [target.aarch64-unknown-linux-gnu.heif] -rustc-link-search = ["{{{projectRoot}}}/target/Frameworks/lib"] +rustc-link-search = ["{{{nativeDeps}}}/lib"] rustc-link-lib = ["heif"] {{/isLinux}} diff --git a/.github/actions/publish-artifacts/action.yaml b/.github/actions/publish-artifacts/action.yaml index e6443a6e0..76a0ffeb2 100644 --- a/.github/actions/publish-artifacts/action.yaml +++ b/.github/actions/publish-artifacts/action.yaml @@ -23,14 +23,14 @@ runs: if-no-files-found: error retention-days: 1 - # - name: Publish artifacts (Debian - deb) - # if: ${{ matrix.settings.host == 'ubuntu-20.04' }} - # uses: actions/upload-artifact@v3 - # with: - # name: Spacedrive-deb-${{ inputs.target }}-${{ env.GITHUB_SHA_SHORT }} - # path: target/${{ inputs.target }}/${{ inputs.profile }}/bundle/deb/*.deb - # if-no-files-found: error - # retention-days: 1 + - name: Publish artifacts (Debian - deb) + if: ${{ matrix.settings.host == 'ubuntu-20.04' }} + uses: actions/upload-artifact@v3 + with: + name: Spacedrive-deb-${{ inputs.target }}-${{ env.GITHUB_SHA_SHORT }} + path: target/${{ inputs.target }}/${{ inputs.profile }}/bundle/deb/*.deb + if-no-files-found: error + retention-days: 1 - name: Publish artifacts (Windows - msi) if: ${{ matrix.settings.host == 'windows-latest' }} @@ -50,13 +50,13 @@ runs: if-no-files-found: error retention-days: 1 - # - name: Publish updater binaries - # uses: actions/upload-artifact@v3 - # with: - # name: Spacedrive-Updaters-${{ inputs.target }}-${{ env.GITHUB_SHA_SHORT }} - # path: | - # target/${{ inputs.target }}/${{ inputs.profile }}/bundle/**/*.tar.gz* - # target/${{ inputs.target }}/${{ inputs.profile }}/bundle/**/*.zip* - # !target/**/deb/**/*.tar.gz - # if-no-files-found: error - # retention-days: 1 + - name: Publish updater binaries + uses: actions/upload-artifact@v3 + with: + name: Spacedrive-Updater-${{ inputs.target }}-${{ env.GITHUB_SHA_SHORT }} + path: | + target/${{ inputs.target }}/${{ 
inputs.profile }}/bundle/**/*.tar.gz* + target/${{ inputs.target }}/${{ inputs.profile }}/bundle/**/*.zip* + !target/**/deb/**/*.tar.gz + if-no-files-found: error + retention-days: 1 diff --git a/.github/actions/setup-system/action.yml b/.github/actions/setup-system/action.yml index 806762a92..678fe3b12 100644 --- a/.github/actions/setup-system/action.yml +++ b/.github/actions/setup-system/action.yml @@ -64,7 +64,7 @@ runs: TARGET_TRIPLE: ${{ inputs.target }} GITHUB_TOKEN: ${{ inputs.token }} run: | - pushd .. - npm i archive-wasm mustache + pushd scripts + npm i --production popd node scripts/preprep.mjs diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f81a9415a..0f7670a89 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -24,11 +24,12 @@ jobs: # target: aarch64-pc-windows-msvc - host: ubuntu-20.04 target: x86_64-unknown-linux-gnu - bundles: appimage + bundles: appimage,deb # - host: ubuntu-20.04 # target: x86_64-unknown-linux-musl # - host: ubuntu-20.04 # target: aarch64-unknown-linux-gnu + # bundles: deb # no appimage for now unfortunetly # - host: ubuntu-20.04 # target: aarch64-unknown-linux-musl # - host: ubuntu-20.04 @@ -95,7 +96,7 @@ jobs: - name: Build run: | - pnpm tauri build --ci -v --target ${{ matrix.settings.target }} --bundles ${{ matrix.settings.bundles }} + pnpm tauri build --ci -v --target ${{ matrix.settings.target }} --bundles ${{ matrix.settings.bundles }},updater env: TAURI_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }} TAURI_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }} @@ -107,7 +108,6 @@ jobs: APPLE_API_ISSUER: ${{ secrets.APPLE_API_ISSUER }} APPLE_API_KEY: ${{ secrets.APPLE_API_KEY }} SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} - NODE_OPTIONS: --max-old-space-size=4096 - name: Publish Artifacts uses: ./.github/actions/publish-artifacts diff --git a/.github/workflows/server.yml b/.github/workflows/server.yml index 3172ded67..e25eb4d03 100644 --- 
a/.github/workflows/server.yml +++ b/.github/workflows/server.yml @@ -61,7 +61,6 @@ jobs: build-args: | REPO=${{ steps.image_info.outputs.repo }} REPO_REF=${{ steps.image_info.outputs.repo_ref }} - NODE_OPTIONS: "--max-old-space-size=4096" containerfiles: | ./apps/server/docker/Dockerfile diff --git a/.gitignore b/.gitignore index ab416e845..9641838ee 100644 --- a/.gitignore +++ b/.gitignore @@ -25,6 +25,7 @@ packages/*/node_modules packages/*/data apps/*/data apps/*/stats.html +apps/.deps apps/releases/.vscode apps/desktop/src-tauri/tauri.conf.patch.json apps/desktop/src-tauri/*.dll @@ -78,7 +79,6 @@ dev.db-journal sd_init.json spacedrive -scripts/.tmp .cargo/config .cargo/config.toml .github/scripts/deps diff --git a/Cargo.lock b/Cargo.lock index cbe9c1191..3f85c7d70 100644 Binary files a/Cargo.lock and b/Cargo.lock differ diff --git a/apps/desktop/package.json b/apps/desktop/package.json index a325ef541..332263e06 100644 --- a/apps/desktop/package.json +++ b/apps/desktop/package.json @@ -1,14 +1,11 @@ { "name": "@sd/desktop", - "version": "1.0.0", - "main": "index.ts", - "license": "GPL-3.0-only", "private": true, "scripts": { "vite": "vite", "dev": "vite dev", "build": "vite build", - "tauri": "node ./src-tauri/scripts/tauri.js", + "tauri": "pnpm --filter @sd/scripts -- tauri", "dmg": "open ../../target/release/bundle/dmg/", "typecheck": "tsc -b", "lint": "eslint src --cache" @@ -19,27 +16,26 @@ "@sd/client": "workspace:*", "@sd/interface": "workspace:*", "@sd/ui": "workspace:*", - "@sentry/vite-plugin": "^2.7.0", - "@tanstack/react-query": "^4.24.4", - "@tauri-apps/api": "1.3.0", + "@sentry/vite-plugin": "^2.7.1", + "@tanstack/react-query": "^4.35", + "@tauri-apps/api": "1.5.0", "react": "^18.2.0", "react-dom": "^18.2.0", - "react-router-dom": "6.9.0" + "react-router-dom": "6.9.0", + "sonner": "^1.0.3" }, "devDependencies": { - "@iarna/toml": "^2.2.5", "@sd/config": "workspace:*", - "@tauri-apps/cli": "1.3.1", - "@types/babel__core": "^7.20.1", - 
"@types/react": "^18.0.21", - "@types/react-dom": "^18.0.6", - "@vitejs/plugin-react": "^2.1.0", - "sass": "^1.55.0", - "semver": "^7.5.0", - "typescript": "^5.0.4", - "vite": "^4.0.4", - "vite-plugin-html": "^3.2.0", - "vite-plugin-svgr": "^2.2.1", - "vite-tsconfig-paths": "^4.0.3" + "@tauri-apps/cli": "^1.5", + "@types/babel__core": "^7.20", + "@types/react": "^18.2.0", + "@types/react-dom": "^18.2.0", + "@vitejs/plugin-react": "^4.1", + "sass": "^1.68", + "typescript": "^5.2", + "vite": "^4.4", + "vite-plugin-html": "^3.2", + "vite-plugin-svgr": "^3.3", + "vite-tsconfig-paths": "^4.2" } } diff --git a/apps/desktop/src-tauri/Cargo.toml b/apps/desktop/src-tauri/Cargo.toml index e8231ad1f..fe296e8d4 100644 --- a/apps/desktop/src-tauri/Cargo.toml +++ b/apps/desktop/src-tauri/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "sd-desktop" -version = "0.1.0" +version = "0.0.1" description = "The universal file manager." authors = ["Spacedrive Technology Inc."] default-run = "sd-desktop" @@ -9,7 +9,8 @@ repository = { workspace = true } edition = { workspace = true } [dependencies] -tauri = { version = "=1.3.0", features = ["dialog-all", "linux-protocol-headers", "macos-private-api", "os-all", "path-all", "protocol-all", "shell-all", "window-all"] } +tauri = { version = "1.5.1", features = ["dialog-all", "linux-protocol-headers", "macos-private-api", "os-all", "path-all", "protocol-all", "shell-all", "updater", "window-all"] } + rspc = { workspace = true, features = ["tauri"] } sd-core = { path = "../../../core", features = [ "ffmpeg", @@ -44,9 +45,8 @@ sd-desktop-macos = { path = "../crates/macos" } sd-desktop-windows = { path = "../crates/windows" } [build-dependencies] -tauri-build = { version = "1.4.0", features = [] } +tauri-build = { version = "1.5.0", features = [] } [features] default = ["custom-protocol"] custom-protocol = ["tauri/custom-protocol"] -updater = ["tauri/updater"] diff --git a/apps/desktop/src-tauri/scripts/spawn.js 
b/apps/desktop/src-tauri/scripts/spawn.js deleted file mode 100644 index b426b8625..000000000 --- a/apps/desktop/src-tauri/scripts/spawn.js +++ /dev/null @@ -1,30 +0,0 @@ -const { spawn } = require('node:child_process'); - -module.exports.spawn = (command, args) => { - if (typeof command !== 'string' || command.length === 0) - throw new Error('Command must be a string and not empty'); - - if (args == null) args = []; - else if (!Array.isArray(args) || args.some((arg) => typeof arg !== 'string')) - throw new Error('Args must be an array of strings'); - - return new Promise((resolve, reject) => { - const child = spawn(command, args, { shell: true, stdio: 'inherit' }); - process.on('SIGTERM', () => child.kill('SIGTERM')); - process.on('SIGINT', () => child.kill('SIGINT')); - process.on('SIGBREAK', () => child.kill('SIGBREAK')); - process.on('SIGHUP', () => child.kill('SIGHUP')); - child.on('error', (error) => { - console.error(error); - reject(1); - }); - child.on('exit', (code, signal) => { - if (code === null) code = signal === 'SIGINT' ? 
0 : 1; - if (code === 0) { - resolve(); - } else { - reject(code); - } - }); - }); -}; diff --git a/apps/desktop/src-tauri/scripts/tauri.js b/apps/desktop/src-tauri/scripts/tauri.js deleted file mode 100644 index 8ead83e76..000000000 --- a/apps/desktop/src-tauri/scripts/tauri.js +++ /dev/null @@ -1,193 +0,0 @@ -const fs = require('node:fs'); -const path = require('node:path'); - -const toml = require('@iarna/toml'); -const semver = require('semver'); - -const { spawn } = require('./spawn.js'); - -const workspace = path.resolve(__dirname, '../../../../'); -const cargoConfig = toml.parse( - fs.readFileSync(path.resolve(workspace, '.cargo/config.toml'), { encoding: 'binary' }) -); -if (cargoConfig.env && typeof cargoConfig.env === 'object') - for (const [name, value] of Object.entries(cargoConfig.env)) - if (!process.env[name]) process.env[name] = value; - -const toRemove = []; -const [_, __, ...args] = process.argv; - -if (args.length === 0) args.push('build'); - -const tauriConf = JSON.parse( - fs.readFileSync(path.resolve(__dirname, '..', 'tauri.conf.json'), 'utf-8') -); - -const framework = path.join(workspace, 'target/Frameworks'); - -switch (args[0]) { - case 'dev': { - if (process.platform === 'win32') setupSharedLibs('dll', path.join(framework, 'bin'), true); - break; - } - case 'build': { - if ( - !process.env.NODE_OPTIONS || - !process.env.NODE_OPTIONS.includes('--max_old_space_size') - ) { - process.env.NODE_OPTIONS = `--max_old_space_size=4096 ${ - process.env.NODE_OPTIONS ?? 
'' - }`; - } - - if (args.findIndex((e) => e === '-c' || e === '--config') !== -1) { - throw new Error('Custom tauri build config is not supported.'); - } - - const targets = args - .filter((_, index, args) => { - if (index === 0) return false; - const previous = args[index - 1]; - return previous === '-t' || previous === '--target'; - }) - .flatMap((target) => target.split(',')); - - const tauriPatch = { - tauri: { bundle: { macOS: {}, resources: [] } } - }; - - switch (process.platform) { - case 'darwin': { - // ARM64 support was added in macOS 11, but we need at least 11.2 due to our ffmpeg build - let macOSMinimumVersion = tauriConf?.tauri?.bundle?.macOS?.minimumSystemVersion; - let macOSArm64MinimumVersion = '11.2'; - if ( - (targets.includes('aarch64-apple-darwin') || - (targets.length === 0 && process.arch === 'arm64')) && - (macOSMinimumVersion == null || - semver.lt( - semver.coerce(macOSMinimumVersion), - semver.coerce(macOSArm64MinimumVersion) - )) - ) { - macOSMinimumVersion = macOSArm64MinimumVersion; - console.log( - `aarch64-apple-darwin target detected, setting minimum system version to ${macOSMinimumVersion}` - ); - } - - if (macOSMinimumVersion) { - process.env.MACOSX_DEPLOYMENT_TARGET = macOSMinimumVersion; - tauriPatch.tauri.bundle.macOS.minimumSystemVersion = macOSMinimumVersion; - } - - // Point tauri to our ffmpeg framework - tauriPatch.tauri.bundle.macOS.frameworks = [ - path.join(workspace, 'target/Frameworks/FFMpeg.framework') - ]; - - // Configure DMG background - process.env.BACKGROUND_FILE = path.resolve(__dirname, '..', 'dmg-background.png'); - process.env.BACKGROUND_FILE_NAME = path.basename(process.env.BACKGROUND_FILE); - process.env.BACKGROUND_CLAUSE = `set background picture of opts to file ".background:${process.env.BACKGROUND_FILE_NAME}"`; - - if (!fs.existsSync(process.env.BACKGROUND_FILE)) - console.warn( - `WARNING: DMG background file not found at ${process.env.BACKGROUND_FILE}` - ); - - break; - } - case 'linux': - 
fs.rmSync(path.join(workspace, 'target/release/bundle/appimage'), { - recursive: true, - force: true - }); - // Point tauri to the ffmpeg DLLs - tauriPatch.tauri.bundle.resources.push( - ...setupSharedLibs('so', path.join(framework, 'lib')) - ); - break; - case 'win32': - // Point tauri to the ffmpeg DLLs - tauriPatch.tauri.bundle.resources.push( - ...setupSharedLibs('dll', path.join(framework, 'bin')) - ); - break; - } - - toRemove.push( - ...tauriPatch.tauri.bundle.resources.map((file) => - path.join(workspace, 'apps/desktop/src-tauri', file) - ) - ); - - const tauriPatchConf = path.resolve(__dirname, '..', 'tauri.conf.patch.json'); - fs.writeFileSync(tauriPatchConf, JSON.stringify(tauriPatch, null, 2)); - - toRemove.push(tauriPatchConf); - args.splice(1, 0, '-c', tauriPatchConf); - } -} - -process.on('SIGINT', () => { - for (const file of toRemove) - try { - fs.unlinkSync(file); - } catch (e) {} -}); - -let code = 0; -spawn('pnpm', ['exec', 'tauri', ...args]) - .catch((exitCode) => { - if (args[0] === 'build' || process.platform === 'linux') { - // Work around appimage buindling not working sometimes - appimageDir = path.join(workspace, 'target/release/bundle/appimage'); - appDir = path.join(appimageDir, 'spacedrive.AppDir'); - if ( - fs.existsSync(path.join(appimageDir, 'build_appimage.sh')) && - fs.existsSync(appDir) && - !fs.readdirSync(appimageDir).filter((file) => file.endsWith('.AppImage')).length - ) { - process.chdir(appimageDir); - fs.rmSync(appDir, { recursive: true, force: true }); - return spawn('bash', ['build_appimage.sh']).catch((exitCode) => { - code = exitCode; - console.error(`tauri ${args[0]} failed with exit code ${exitCode}`); - }); - } - } - - code = exitCode; - console.error(`tauri ${args[0]} failed with exit code ${exitCode}`); - console.error( - `If you got an error related to FFMpeg or Protoc/Protobuf you may need to re-run \`pnpm i\`` - ); - }) - .finally(() => { - for (const file of toRemove) - try { - fs.unlinkSync(file); - } catch 
(e) {} - - process.exit(code); - }); - -function setupSharedLibs(sufix, binDir, dev = false) { - const sharedLibs = fs - .readdirSync(binDir) - .filter((file) => file.endsWith(`.${sufix}`) || file.includes(`.${sufix}.`)); - - let targetDir = path.join(workspace, 'apps/desktop/src-tauri'); - if (dev) { - targetDir = path.join(workspace, 'target/debug'); - // Ensure the target/debug directory exists - fs.mkdirSync(targetDir, { recursive: true }); - } - - // Copy all shared libs to targetDir - for (const dll of sharedLibs) - fs.copyFileSync(path.join(binDir, dll), path.join(targetDir, dll)); - - return sharedLibs; -} diff --git a/apps/desktop/src-tauri/src/main.rs b/apps/desktop/src-tauri/src/main.rs index df11de836..d4ccbbb42 100644 --- a/apps/desktop/src-tauri/src/main.rs +++ b/apps/desktop/src-tauri/src/main.rs @@ -18,6 +18,7 @@ mod theme; mod file; mod menu; +mod updater; #[tauri::command(async)] #[specta::specta] @@ -133,9 +134,6 @@ async fn main() -> tauri::Result<()> { let app = app .setup(|app| { - #[cfg(feature = "updater")] - tauri::updater::builder(app.handle()).should_install(|_current, _latest| true); - let app = app.handle(); app.windows().iter().for_each(|(_, window)| { @@ -178,6 +176,7 @@ async fn main() -> tauri::Result<()> { }) .on_menu_event(menu::handle_menu_event) .menu(menu::get_menu()) + .manage(updater::State::default()) .invoke_handler(tauri_handlers![ app_ready, reset_spacedrive, @@ -189,7 +188,9 @@ async fn main() -> tauri::Result<()> { file::open_file_path_with, file::open_ephemeral_file_with, file::reveal_items, - theme::lock_app_theme + theme::lock_app_theme, + updater::check_for_update, + updater::install_update ]) .build(tauri::generate_context!())?; diff --git a/apps/desktop/src-tauri/src/updater.rs b/apps/desktop/src-tauri/src/updater.rs new file mode 100644 index 000000000..720cfeb06 --- /dev/null +++ b/apps/desktop/src-tauri/src/updater.rs @@ -0,0 +1,95 @@ +use tauri::Manager; +use tokio::sync::Mutex; +use tracing::{error, warn}; + 
+#[derive(Debug, Clone, specta::Type, serde::Serialize)] +pub struct Update { + pub version: String, + pub body: Option, +} + +impl Update { + fn new(update: &tauri::updater::UpdateResponse) -> Self { + Self { + version: update.latest_version().to_string(), + body: update.body().map(|b| b.to_string()), + } + } +} + +#[derive(Default)] +pub struct State { + install_lock: Mutex<()>, +} + +async fn get_update( + app: tauri::AppHandle, +) -> Result, String> { + tauri::updater::builder(app) + .header("X-Spacedrive-Version", "stable") + .map_err(|e| e.to_string())? + .check() + .await + .map_err(|e| e.to_string()) +} + +#[derive(Clone, serde::Serialize, specta::Type)] +#[serde(rename_all = "camelCase", tag = "status")] +pub enum UpdateEvent { + Loading, + Error(String), + UpdateAvailable { update: Update }, + NoUpdateAvailable, + Installing, +} + +#[tauri::command] +#[specta::specta] +pub async fn check_for_update(app: tauri::AppHandle) -> Result, String> { + app.emit_all("updater", UpdateEvent::Loading).ok(); + + let update = match get_update(app.clone()).await { + Ok(update) => update, + Err(e) => { + app.emit_all("updater", UpdateEvent::Error(e.clone())).ok(); + return Err(e); + } + }; + + let update = update.is_update_available().then(|| Update::new(&update)); + + app.emit_all( + "updater", + update + .clone() + .map(|update| UpdateEvent::UpdateAvailable { update }) + .unwrap_or(UpdateEvent::NoUpdateAvailable), + ) + .ok(); + + Ok(update) +} + +#[tauri::command] +#[specta::specta] +pub async fn install_update( + app: tauri::AppHandle, + state: tauri::State<'_, State>, +) -> Result<(), String> { + let lock = match state.install_lock.try_lock() { + Ok(lock) => lock, + Err(_) => return Err("Update already installing".into()), + }; + + app.emit_all("updater", UpdateEvent::Installing).ok(); + + get_update(app.clone()) + .await? 
+ .download_and_install() + .await + .map_err(|e| e.to_string())?; + + drop(lock); + + Ok(()) +} diff --git a/apps/desktop/src-tauri/tauri.conf.json b/apps/desktop/src-tauri/tauri.conf.json index 52ae00ae0..fb792d00e 100644 --- a/apps/desktop/src-tauri/tauri.conf.json +++ b/apps/desktop/src-tauri/tauri.conf.json @@ -1,13 +1,12 @@ { "package": { - "productName": "Spacedrive", - "version": "0.1.0" + "productName": "Spacedrive" }, "build": { "distDir": "../dist", "devPath": "http://localhost:8001", "beforeDevCommand": "pnpm dev", - "beforeBuildCommand": "pnpm turbo run build --filter @sd/desktop" + "beforeBuildCommand": "pnpm turbo run build --filter=@sd/desktop..." }, "tauri": { "macOSPrivateApi": true, @@ -31,10 +30,16 @@ "shortDescription": "The universal file manager.", "longDescription": "A cross-platform universal file explorer, powered by an open-source virtual distributed filesystem.", "deb": { - "depends": [] + "depends": [ + "ffmpeg", + "gstreamer1.0-plugins-bad", + "gstreamer1.0-plugins-ugly", + "gstreamer1.0-gtk3", + "gstreamer1.0-libav" + ] }, "macOS": { - "frameworks": [], + "frameworks": ["../../.deps/FFMpeg.framework"], "minimumSystemVersion": "10.15", "exceptionDomain": "", "entitlements": null @@ -50,9 +55,12 @@ } }, "updater": { - "active": false, + "active": true, + "dialog": false, "pubkey": "dW50cnVzdGVkIGNvbW1lbnQ6IG1pbmlzaWduIHB1YmxpYyBrZXk6IEZBMURCMkU5NEU3NDAyOEMKUldTTUFuUk82YklkK296dlkxUGkrTXhCT3ZMNFFVOWROcXNaS0RqWU1kMUdRV2tDdFdIS0Y3YUsK", - "endpoints": ["https://spacedrive.com/api/releases/alpha/{{target}}/{{arch}}"] + "endpoints": [ + "https://spacedrive-landing-git-eng-927-fix-updater-spacedrive.vercel.app/api/releases/tauri/{{target}}/{{arch}}" + ] }, "allowlist": { "all": false, diff --git a/apps/desktop/src/App.tsx b/apps/desktop/src/App.tsx index b85c572fa..59025553d 100644 --- a/apps/desktop/src/App.tsx +++ b/apps/desktop/src/App.tsx @@ -22,6 +22,7 @@ import { getSpacedropState } from '@sd/interface/hooks/useSpacedropState'; import 
'@sd/ui/style'; import * as commands from './commands'; +import { updater, useUpdater } from './updater'; // TODO: Bring this back once upstream is fixed up. // const client = hooks.createClient({ @@ -57,7 +58,7 @@ if (customUriServerUrl && !customUriServerUrl?.endsWith('/')) { } const queryParams = customUriAuthToken ? `?token=${encodeURIComponent(customUriAuthToken)}` : ''; -const platform: Platform = { +const platform = { platform: 'tauri', getThumbnailUrlByThumbKey: (keyParts) => `${customUriServerUrl}thumbnail/${keyParts @@ -75,13 +76,14 @@ const platform: Platform = { showDevtools: () => invoke('show_devtools'), confirm: (msg, cb) => confirm(msg).then(cb), userHomeDir: homeDir, + updater, auth: { start(url) { open(url); } }, ...commands -}; +} satisfies Platform; const queryClient = new QueryClient({ defaultOptions: { @@ -119,6 +121,8 @@ export default function App() { }; }, []); + useUpdater(); + return ( diff --git a/apps/desktop/src/commands.ts b/apps/desktop/src/commands.ts index 11563f250..70485e77c 100644 --- a/apps/desktop/src/commands.ts +++ b/apps/desktop/src/commands.ts @@ -54,6 +54,15 @@ export function lockAppTheme(themeType: AppThemeType) { return invoke()("lock_app_theme", { themeType }) } +export function checkForUpdate() { + return invoke()("check_for_update") +} + +export function installUpdate() { + return invoke()("install_update") +} + +export type Update = { version: string; body: string | null } export type OpenWithApplication = { url: string; name: string } export type AppThemeType = "Auto" | "Light" | "Dark" export type EphemeralFileOpenResult = { t: "Ok"; c: string } | { t: "Err"; c: string } diff --git a/apps/desktop/src/index.html b/apps/desktop/src/index.html index 011a80c58..9e256ff06 100644 --- a/apps/desktop/src/index.html +++ b/apps/desktop/src/index.html @@ -1,13 +1,16 @@ - - - - - Spacedrive - - -
- - + + + + + + Spacedrive + + + +
+ + + diff --git a/apps/desktop/src/updater.tsx b/apps/desktop/src/updater.tsx new file mode 100644 index 000000000..800a99e58 --- /dev/null +++ b/apps/desktop/src/updater.tsx @@ -0,0 +1,87 @@ +import { listen } from '@tauri-apps/api/event'; +import { useEffect, useRef } from 'react'; +import { proxy, useSnapshot } from 'valtio'; +import { UpdateStore } from '@sd/interface'; +import { toast, ToastId } from '@sd/ui'; + +import * as commands from './commands'; + +export const updateStore = proxy({ + status: 'idle' +}); + +listen('updater', (e) => { + Object.assign(updateStore, e.payload); + console.log(updateStore); +}); + +const onInstallCallbacks = new Set<() => void>(); + +export const updater = { + useSnapshot: () => useSnapshot(updateStore), + checkForUpdate: commands.checkForUpdate, + installUpdate: () => { + for (const cb of onInstallCallbacks) { + cb(); + } + + const promise = commands.installUpdate(); + + toast.promise(promise, { + loading: 'Downloading Update', + success: 'Update Downloaded. Restart Spacedrive to install', + error: (e: any) => ( + <> +

Failed to download update

+

Error: {e.toString()}

+ + ) + }); + + return promise; + } +}; + +async function checkForUpdate() { + const update = await updater.checkForUpdate(); + + if (!update) return; + + let id: ToastId | null = null; + + const cb = () => { + if (id !== null) toast.dismiss(id); + }; + + onInstallCallbacks.add(cb); + + toast.info( + (_id) => { + id = _id; + + return { + title: 'New Update Available', + body: `Version ${update.version}` + }; + }, + { + onClose() { + onInstallCallbacks.delete(cb); + }, + duration: 10 * 1000, + action: { + label: 'Update', + onClick: () => updater.installUpdate() + } + } + ); +} + +export function useUpdater() { + const alreadyChecked = useRef(false); + + useEffect(() => { + if (!alreadyChecked.current && import.meta.env.PROD) checkForUpdate(); + alreadyChecked.current = true; + }, []); +} diff --git a/apps/mobile/modules/sd-core/ios/build-rust.sh b/apps/mobile/modules/sd-core/ios/build-rust.sh index 2e57e09b2..ae697c85a 100755 --- a/apps/mobile/modules/sd-core/ios/build-rust.sh +++ b/apps/mobile/modules/sd-core/ios/build-rust.sh @@ -1,40 +1,49 @@ -#!/usr/bin/env zsh +#!/usr/bin/env sh -set -e +set -eu -echo "Building \'sd-mobile-ios\' library..." +if [ "${CI:-}" = "true" ]; then + set -x +fi + +if [ -z "${HOME:-}" ]; then + HOME="$(CDPATH='' cd -- "$(osascript -e 'set output to (POSIX path of (path to home folder))')" && pwd)" + export HOME +fi + +echo "Building 'sd-mobile-ios' library..." 
__dirname="$(CDPATH='' cd -- "$(dirname -- "$0")" && pwd)" -TARGET_DIRECTORY="$(CDPATH='' cd -- "${__dirname}/../../../../../target" && pwd)" -if [[ $CONFIGURATION != "Debug" ]]; then +# Ensure target dir exists +TARGET_DIRECTORY="${__dirname}/../../../../../target" +mkdir -p "$TARGET_DIRECTORY" +TARGET_DIRECTORY="$(CDPATH='' cd -- "$TARGET_DIRECTORY" && pwd)" + +if [ "${CONFIGURATION:-}" != "Debug" ]; then CARGO_FLAGS=--release export CARGO_FLAGS fi -export PROTOC="${TARGET_DIRECTORY}/Frameworks/bin/protoc" - # TODO: Also do this for non-Apple Silicon Macs -if [[ $SPACEDRIVE_CI == "1" ]]; then +if [ "${SPACEDRIVE_CI:-}" = "1" ]; then # Required for CI - export PATH="$HOME/.cargo/bin:$PATH" + export PATH="${CARGO_HOME:-"${HOME}/.cargo"}/bin:$PATH" cargo build -p sd-mobile-ios --target x86_64-apple-ios - if [[ $PLATFORM_NAME = "iphonesimulator" ]] - then - lipo -create -output $TARGET_DIRECTORY/libsd_mobile_iossim.a $TARGET_DIRECTORY/x86_64-apple-ios/debug/libsd_mobile_ios.a + if [ "${PLATFORM_NAME:-}" = "iphonesimulator" ]; then + lipo -create -output "$TARGET_DIRECTORY"/libsd_mobile_iossim.a "$TARGET_DIRECTORY"/x86_64-apple-ios/debug/libsd_mobile_ios.a else - lipo -create -output $TARGET_DIRECTORY/libsd_mobile_ios.a $TARGET_DIRECTORY/x86_64-apple-ios/debug/libsd_mobile_ios.a + lipo -create -output "$TARGET_DIRECTORY"/libsd_mobile_ios.a "$TARGET_DIRECTORY"/x86_64-apple-ios/debug/libsd_mobile_ios.a fi exit 0 fi -if [[ $PLATFORM_NAME = "iphonesimulator" ]] -then - cargo build -p sd-mobile-ios --target aarch64-apple-ios-sim - lipo -create -output $TARGET_DIRECTORY/libsd_mobile_iossim.a $TARGET_DIRECTORY/aarch64-apple-ios-sim/debug/libsd_mobile_ios.a +if [ "${PLATFORM_NAME:-}" = "iphonesimulator" ]; then + cargo build -p sd-mobile-ios --target aarch64-apple-ios-sim + lipo -create -output "$TARGET_DIRECTORY"/libsd_mobile_iossim.a "$TARGET_DIRECTORY"/aarch64-apple-ios-sim/debug/libsd_mobile_ios.a else - cargo build -p sd-mobile-ios --target aarch64-apple-ios - lipo 
-create -output $TARGET_DIRECTORY/libsd_mobile_ios.a $TARGET_DIRECTORY/aarch64-apple-ios/debug/libsd_mobile_ios.a + cargo build -p sd-mobile-ios --target aarch64-apple-ios + lipo -create -output "$TARGET_DIRECTORY"/libsd_mobile_ios.a "$TARGET_DIRECTORY"/aarch64-apple-ios/debug/libsd_mobile_ios.a fi diff --git a/apps/mobile/package.json b/apps/mobile/package.json index 18ab0e7c9..1314251b8 100644 --- a/apps/mobile/package.json +++ b/apps/mobile/package.json @@ -28,7 +28,7 @@ "@sd/assets": "workspace:*", "@sd/client": "workspace:*", "@shopify/flash-list": "1.5.0", - "@tanstack/react-query": "^4.29.1", + "@tanstack/react-query": "^4.35", "class-variance-authority": "^0.5.3", "dayjs": "^1.11.8", "event-target-polyfill": "^0.0.3", @@ -42,7 +42,7 @@ "lottie-react-native": "6.2.0", "moti": "^0.26.0", "phosphor-react-native": "^1.1.2", - "react": "18.2.0", + "react": "^18.2.0", "react-hook-form": "~7.45.2", "react-native": "0.72.4", "react-native-document-picker": "^9.0.1", @@ -61,13 +61,13 @@ "zod": "~3.22.2" }, "devDependencies": { - "@babel/core": "^7.22.11", + "@babel/core": "~7", "@rnx-kit/metro-config": "^1.3.8", "@sd/config": "workspace:*", - "@types/react": "~18.0.38", + "@types/react": "^18.2.0", "babel-plugin-module-resolver": "^5.0.0", "eslint-plugin-react-native": "^4.0.0", "react-native-svg-transformer": "^1.1.0", - "typescript": "^5.1.3" + "typescript": "^5.2" } } diff --git a/apps/server/docker/Dockerfile b/apps/server/docker/Dockerfile index 7e7b97290..9baaba88d 100644 --- a/apps/server/docker/Dockerfile +++ b/apps/server/docker/Dockerfile @@ -67,6 +67,8 @@ ENV PATH="/root/.cargo/bin:$PATH" RUN --mount=type=cache,target=/var/cache/apt --mount=type=cache,target=/var/lib/apt \ env CI=true ./scripts/setup.sh +RUN cd ./scripts; npm i --production + RUN --mount=type=cache,target=/root/.cache/prisma/binaries/cli/ \ pnpm prep @@ -89,11 +91,12 @@ ENV TZ=UTC \ # Note: This needs to happen before the apt call to avoid locking issues with the previous step COPY 
--from=server /srv/spacedrive/target/release/sd-server /usr/bin/ +COPY --from=server /srv/spacedrive/apps/.deps/lib /usr/lib/spacedrive RUN --mount=type=cache,target=/var/cache/apt --mount=type=cache,target=/var/lib/apt \ apt-get install \ libavdevice59 libpostproc56 libswscale6 libswresample4 libavformat59 libavutil57 libavfilter8 \ - libavcodec59 libheif1 + libavcodec59 COPY --chmod=755 entrypoint.sh /usr/bin/ diff --git a/apps/storybook/package.json b/apps/storybook/package.json index bebfde3da..827fe29c2 100644 --- a/apps/storybook/package.json +++ b/apps/storybook/package.json @@ -21,15 +21,15 @@ "devDependencies": { "@sd/config": "workspace:*", "@sd/ui": "workspace:*", - "@types/react": "^18.0.28", - "@types/react-dom": "^18.0.11", - "@vitejs/plugin-react": "^3.1.0", + "@types/react": "^18.2.0", + "@types/react-dom": "^18.2.0", + "@vitejs/plugin-react": "^4.1", "autoprefixer": "^10.4.12", - "postcss": "^8.4.17", + "postcss": "^8.4", "prop-types": "^15.8.1", "storybook": "^7.0.5", "tailwindcss": "^3.3.2", - "typescript": "^5.0.4", - "vite": "^4.2.0" + "typescript": "^5.2", + "vite": "^4.4" } } diff --git a/apps/web/package.json b/apps/web/package.json index ac786e8f0..51ab39003 100644 --- a/apps/web/package.json +++ b/apps/web/package.json @@ -14,7 +14,7 @@ "@rspc/client": "=0.0.0-main-799eec5d", "@sd/client": "workspace:*", "@sd/interface": "workspace:*", - "@tanstack/react-query": "^4.12.0", + "@tanstack/react-query": "^4.35", "react": "^18.2.0", "react-dom": "^18.2.0", "react-router-dom": "6.9.0" @@ -23,16 +23,16 @@ "@playwright/test": "^1.30.0", "@sd/config": "workspace:*", "@sd/ui": "workspace:*", - "@types/react": "^18.0.21", - "@types/react-dom": "^18.0.6", - "@vitejs/plugin-react": "^2.1.0", + "@types/react": "^18.2.0", + "@types/react-dom": "^18.2.0", + "@vitejs/plugin-react": "^4.1", "autoprefixer": "^10.4.12", - "postcss": "^8.4.17", + "postcss": "^8.4", "rollup-plugin-visualizer": "^5.9.0", - "typescript": "^5.0.4", - "vite": "^4.0.4", - 
"vite-plugin-html": "^3.2.0", - "vite-plugin-svgr": "^2.2.1", - "vite-tsconfig-paths": "^4.0.3" + "typescript": "^5.2", + "vite": "^4.4", + "vite-plugin-html": "^3.2", + "vite-plugin-svgr": "^3.3", + "vite-tsconfig-paths": "^4.2" } } diff --git a/crates/sync/example/web/package.json b/crates/sync/example/web/package.json index e01ebc072..e219e9a5f 100644 --- a/crates/sync/example/web/package.json +++ b/crates/sync/example/web/package.json @@ -10,10 +10,10 @@ }, "license": "MIT", "devDependencies": { - "@tanstack/react-query": "^4.10.1", - "@vitejs/plugin-react": "^2.1.0", - "typescript": "^4.8.2", - "vite": "^4.0.4" + "@tanstack/react-query": "^4.35", + "@vitejs/plugin-react": "^4.1", + "typescript": "^5.2", + "vite": "^4.4" }, "dependencies": { "clsx": "^1.2.1", diff --git a/interface/app/$libraryId/Layout/Sidebar/Footer.tsx b/interface/app/$libraryId/Layout/Sidebar/Footer.tsx index 8ac8ffcd1..c04caa0cb 100644 --- a/interface/app/$libraryId/Layout/Sidebar/Footer.tsx +++ b/interface/app/$libraryId/Layout/Sidebar/Footer.tsx @@ -4,6 +4,7 @@ import { useKeys } from 'rooks'; import { JobManagerContextProvider, useClientContext, useDebugState } from '@sd/client'; import { Button, ButtonLink, dialogManager, modifierSymbols, Popover, Tooltip } from '@sd/ui'; import { useKeyMatcher } from '~/hooks'; +import { usePlatform } from '~/util/Platform'; import DebugPopover from './DebugPopover'; import FeedbackDialog from './FeedbackDialog'; @@ -20,8 +21,24 @@ export default () => { navigate('settings/client/general'); }); + const updater = usePlatform().updater; + const updaterState = updater?.useSnapshot(); + return (
+ {updater && updaterState && ( + <> + {updaterState.status === 'updateAvailable' && ( + + )} + + )}
{ const libraries = useCachedLibraries(); diff --git a/interface/package.json b/interface/package.json index dff5a5094..e290e9515 100644 --- a/interface/package.json +++ b/interface/package.json @@ -1,16 +1,8 @@ { "name": "@sd/interface", - "version": "1.0.0", - "license": "GPL-3.0-only", "private": true, "main": "index.tsx", "types": "index.tsx", - "exports": { - ".": "./index.tsx", - "./assets/*": "./assets/*", - "./components/*": "./components/*", - "./hooks/*": "./hooks/*" - }, "scripts": { "lint": "eslint . --cache", "typecheck": "tsc -b" @@ -19,6 +11,7 @@ "@fontsource/inter": "^4.5.13", "@headlessui/react": "^1.7.3", "@icons-pack/react-simple-icons": "^7.2.0", + "@phosphor-icons/react": "^2.0.10", "@radix-ui/react-progress": "^1.0.1", "@radix-ui/react-slider": "^1.1.0", "@radix-ui/react-toast": "^1.1.2", @@ -31,13 +24,10 @@ "@splinetool/react-spline": "^2.2.3", "@splinetool/runtime": "^0.9.128", "@tailwindcss/forms": "^0.5.3", - "@tanstack/react-query": "^4.12.0", - "@tanstack/react-query-devtools": "^4.22.0", + "@tanstack/react-query": "^4.35", + "@tanstack/react-query-devtools": "^4.35", "@tanstack/react-table": "^8.8.5", "@tanstack/react-virtual": "3.0.0-beta.61", - "@types/react-scroll-sync": "^0.8.4", - "@types/uuid": "^9.0.2", - "@vitejs/plugin-react": "^2.1.0", "autoprefixer": "^10.4.12", "class-variance-authority": "^0.5.3", "clsx": "^1.2.1", @@ -45,7 +35,6 @@ "dayjs": "^1.11.8", "dragselect": "^2.7.4", "framer-motion": "^10.11.5", - "@phosphor-icons/react": "^2.0.10", "prismjs": "^1.29.0", "react": "^18.2.0", "react-colorful": "^5.6.1", @@ -76,15 +65,16 @@ }, "devDependencies": { "@sd/config": "workspace:*", - "@types/babel__core": "^7.20.1", + "@types/babel__core": "^7.20", "@types/loadable__component": "^5.13.4", - "@types/node": "^18.11.9", - "@types/react": "^18.0.21", - "@types/react-dom": "^18.0.6", + "@types/node": "^18.17", + "@types/react": "^18.2.0", + "@types/react-dom": "^18.2.0", "@types/react-router-dom": "^5.3.3", - 
"@vitejs/plugin-react": "^1.3.1", - "typescript": "5.0.4", - "vite": "^4.0.4", - "vite-plugin-svgr": "^2.2.1" + "@types/uuid": "^9.0.2", + "@vitejs/plugin-react": "^4.1", + "typescript": "^5.2", + "vite": "^4.4", + "vite-plugin-svgr": "^3.3" } } diff --git a/interface/tsconfig.json b/interface/tsconfig.json index 3ad612626..30a4e12ed 100644 --- a/interface/tsconfig.json +++ b/interface/tsconfig.json @@ -1,20 +1,13 @@ { "extends": "../packages/config/base.tsconfig.json", "compilerOptions": { - "declarationDir": "dist", "paths": { "~/*": ["./*"] }, - "types": ["vite-plugin-svgr/client", "vite/client", "node"] + "types": ["vite-plugin-svgr/client", "vite/client", "node"], + "declarationDir": "dist" }, - "include": ["./**/*"], + "include": ["**/*"], "exclude": ["dist"], - "references": [ - { - "path": "../packages/ui" - }, - { - "path": "../packages/client" - } - ] + "references": [{ "path": "../packages/ui" }, { "path": "../packages/client" }] } diff --git a/interface/util/Platform.tsx b/interface/util/Platform.tsx index c12b6ccb1..35814d5d7 100644 --- a/interface/util/Platform.tsx +++ b/interface/util/Platform.tsx @@ -37,9 +37,23 @@ export type Platform = { openFilePathWith?(library: string, fileIdsAndAppUrls: [number, string][]): Promise; openEphemeralFileWith?(pathsAndUrls: [string, string][]): Promise; lockAppTheme?(themeType: 'Auto' | 'Light' | 'Dark'): any; + updater?: { + useSnapshot: () => UpdateStore; + checkForUpdate(): Promise; + installUpdate(): Promise; + }; auth: auth.ProviderConfig; }; +export type Update = { version: string; body: string | null }; +export type UpdateStore = + | { status: 'idle' } + | { status: 'loading' } + | { status: 'error' } + | { status: 'updateAvailable'; update: Update } + | { status: 'noUpdateAvailable' } + | { status: 'installing' }; + // Keep this private and use through helpers below const context = createContext(undefined!); diff --git a/package.json b/package.json index 913707efe..e94c9906c 100644 --- a/package.json +++ 
b/package.json @@ -1,7 +1,7 @@ { "private": true, "scripts": { - "preprep": "pnpm exec node scripts/preprep.mjs", + "preprep": "pnpm --filter @sd/scripts -- prep", "prep": "pnpm gen:prisma", "postprep": "pnpm codegen", "build": "turbo run build", @@ -27,7 +27,7 @@ "typecheck": "pnpm -r typecheck", "lint": "turbo run lint", "lint:fix": "turbo run lint -- --fix", - "clean": "rimraf -g \"node_modules/\" \"**/node_modules/\" \"target/\" \"**/.build/\" \"**/.next/\" \"**/dist/!(.gitignore)**\" \"**/tsconfig.tsbuildinfo\"" + "clean": "git clean -qfX ." }, "pnpm": { "overrides": { @@ -35,25 +35,22 @@ } }, "devDependencies": { - "@babel/plugin-syntax-import-assertions": "^7.22.5", - "@cspell/dict-rust": "^2.0.1", - "@cspell/dict-typescript": "^2.0.2", + "@babel/plugin-syntax-import-assertions": "~7", + "@cspell/dict-rust": "^4.0.1", + "@cspell/dict-typescript": "^3.1.2", "@ianvs/prettier-plugin-sort-imports": "^4.1.0", - "@storybook/react-vite": "^7.0.20", - "archive-wasm": "^1.5.3", - "cspell": "^6.31.1", - "mustache": "^4.2.0", + "@storybook/react-vite": "^7.4.6", + "cspell": "^7.3.7", "prettier": "^3.0.3", - "prettier-plugin-tailwindcss": "^0.5.3", - "rimraf": "^4.4.1", - "turbo": "^1.10.2", - "turbo-ignore": "^0.3.0", - "typescript": "^5.0.4", - "vite": "^4.3.9" + "prettier-plugin-tailwindcss": "^0.5.5", + "turbo": "^1.10.14", + "turbo-ignore": "^1.10.14", + "typescript": "^5.2", + "vite": "^4.4" }, "overrides": { "vite-plugin-svgr": "https://github.com/spacedriveapp/vite-plugin-svgr#cb4195b69849429cdb18d1f12381676bf9196a84", - "@types/node": "^18.0.0" + "@types/node": "^18.17" }, "engines": { "pnpm": ">=8.0.0", @@ -62,23 +59,6 @@ "node": ">=18.17 <19 || >=20.1" }, "eslintConfig": { - "root": true, - "overrides": [ - { - "files": [ - "*.mjs" - ], - "env": { - "node": true, - "es2022": true, - "browser": false, - "commonjs": false, - "shared-node-browser": false - }, - "parserOptions": { - "sourceType": "module" - } - } - ] + "root": true } } diff --git 
a/packages/client/package.json b/packages/client/package.json index ca98cbcb2..a5bf246a7 100644 --- a/packages/client/package.json +++ b/packages/client/package.json @@ -1,11 +1,8 @@ { "name": "@sd/client", - "version": "0.0.0", "private": true, "main": "./src/index.ts", - "files": [ - "dist/**" - ], + "types": "./src/index.ts", "scripts": { "test": "jest", "lint": "eslint src --cache", @@ -16,7 +13,7 @@ "@rspc/client": "=0.0.0-main-799eec5d", "@rspc/react": "=0.0.0-main-799eec5d", "@sd/config": "workspace:*", - "@tanstack/react-query": "^4.12.0", + "@tanstack/react-query": "^4.35", "@zxcvbn-ts/core": "^2.1.0", "@zxcvbn-ts/language-common": "^2.0.1", "@zxcvbn-ts/language-en": "^2.1.0", @@ -29,7 +26,7 @@ "@types/react": "^18.0.21", "scripts": "*", "tsconfig": "*", - "typescript": "^5.0.4" + "typescript": "^5.2" }, "peerDependencies": { "react": "^18.2.0" diff --git a/packages/client/tsconfig.json b/packages/client/tsconfig.json index 8e080d56b..41d393a37 100644 --- a/packages/client/tsconfig.json +++ b/packages/client/tsconfig.json @@ -2,7 +2,8 @@ "extends": "../config/base.tsconfig.json", "compilerOptions": { "rootDir": "src", - "declarationDir": "dist" + "outDir": "./dist", + "emitDeclarationOnly": false }, "include": ["src"] } diff --git a/packages/config/package.json b/packages/config/package.json index a1f9a78d2..2438f6ec6 100644 --- a/packages/config/package.json +++ b/packages/config/package.json @@ -11,19 +11,19 @@ "lint": "eslint . 
--cache" }, "devDependencies": { - "@typescript-eslint/eslint-plugin": "^5.59.6", - "@typescript-eslint/parser": "^5.59.6", - "eslint": "^8.41.0", + "@typescript-eslint/eslint-plugin": "^6.7", + "@typescript-eslint/parser": "^6.7", + "eslint": "^8.50", "eslint-config-next": "13.3.0", - "eslint-config-prettier": "^8.8.0", + "eslint-config-prettier": "^9.0", "eslint-config-turbo": "^1.9.8", - "eslint-plugin-prettier": "^4.2.1", + "eslint-plugin-prettier": "^5.0", "eslint-plugin-react": "^7.32.2", "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-tailwindcss": "^3.12.0", "eslint-utils": "^3.0.0", "regexpp": "^3.2.0", - "vite-plugin-html": "^3.2.0", - "vite-plugin-svgr": "^2.2.1" + "vite-plugin-html": "^3.2", + "vite-plugin-svgr": "^3.3" } } diff --git a/packages/ui/package.json b/packages/ui/package.json index 4466a2193..8955b308d 100644 --- a/packages/ui/package.json +++ b/packages/ui/package.json @@ -36,7 +36,6 @@ "class-variance-authority": "^0.5.3", "clsx": "^1.2.1", "@phosphor-icons/react": "^2.0.10", - "postcss": "^8.4.17", "react": "^18.2.0", "react-dom": "^18.2.0", "react-loading-icons": "^1.1.0", @@ -48,20 +47,18 @@ "zod": "~3.22.2" }, "devDependencies": { - "@babel/core": "^7.22.11", + "@babel/core": "~7", "@sd/config": "workspace:*", "@storybook/types": "^7.0.24", "@tailwindcss/typography": "^0.5.7", - "@types/node": "^18.15.1", - "@types/react": "^18.0.21", - "@types/react-dom": "^18.0.6", + "@types/node": "^18.17", + "@types/react": "^18.2.0", + "@types/react-dom": "^18.2.0", "autoprefixer": "^10.4.12", - "babel-loader": "^8.2.5", - "sass": "^1.55.0", - "sass-loader": "^13.0.2", - "style-loader": "^3.3.1", + "sass": "^1.68", + "postcss": "^8.4", "tailwindcss": "^3.3.2", "tailwindcss-animate": "^1.0.5", - "typescript": "5.0.4" + "typescript": "^5.2" } } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 869883e46..86fc67bda 100644 Binary files a/pnpm-lock.yaml and b/pnpm-lock.yaml differ diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index 
0656b5b95..15e02b506 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -5,3 +5,4 @@ packages: - 'interface' - 'docs' - 'crates/sync/example/web' + - 'scripts' diff --git a/scripts/.eslintrc.cjs b/scripts/.eslintrc.cjs new file mode 100644 index 000000000..f567f53f3 --- /dev/null +++ b/scripts/.eslintrc.cjs @@ -0,0 +1,70 @@ +module.exports = { + root: true, + env: { + node: true, + es2022: true, + browser: false, + commonjs: false, + 'shared-node-browser': false, + }, + rules: { + 'no-void': [ + 'error', + { + allowAsStatement: true, + }, + ], + 'no-proto': 'error', + 'valid-jsdoc': 'off', + 'import/order': [ + 'error', + { + alphabetize: { + order: 'asc', + }, + 'newlines-between': 'always', + }, + ], + 'no-unused-vars': [ + 'error', + { argsIgnorePattern: '^_', destructuredArrayIgnorePattern: '^_' }, + ], + 'jsdoc/require-returns-check': 'off', + 'jsdoc/require-param-description': 'off', + 'jsdoc/require-returns-description': 'off', + 'standard/no-callback-literal': 'off', + }, + parser: '@babel/eslint-parser', + plugins: ['@babel'], + extends: [ + 'eslint:recommended', + 'standard', + 'plugin:import/recommended', + 'plugin:prettier/recommended', + 'plugin:jsdoc/recommended-typescript-flavor', + ], + settings: { + jsdoc: { + mode: 'typescript', + tagNamePreference: { + typicalname: 'typicalname', + }, + }, + }, + parserOptions: { + project: './tsconfig.json', + sourceType: 'module', + babelOptions: { + presets: [ + [ + '@babel/preset-env', + { + shippedProposals: true, + }, + ], + ], + }, + tsconfigRootDir: __dirname, + requireConfigFile: false, + }, +} diff --git a/scripts/.gitignore b/scripts/.gitignore new file mode 100644 index 000000000..b152df746 --- /dev/null +++ b/scripts/.gitignore @@ -0,0 +1,2 @@ +.tmp +node_modules diff --git a/scripts/deps.mjs b/scripts/deps.mjs deleted file mode 100644 index fb8b9f8de..000000000 --- a/scripts/deps.mjs +++ /dev/null @@ -1,197 +0,0 @@ -import * as fs from 'node:fs/promises'; -import * as os from 'node:os'; 
-import * as path from 'node:path'; -import { env } from 'node:process'; -import { extractTo } from 'archive-wasm/src/fs.mjs'; - -import { - getGh, - getGhArtifactContent, - getGhReleasesAssets, - getGhWorkflowRunArtifacts -} from './github.mjs'; -import { - FFMPEG_SUFFFIX, - FFMPEG_WORKFLOW, - getConst, - getSuffix, - LIBHEIF_SUFFIX, - LIBHEIF_WORKFLOW, - PDFIUM_SUFFIX, - PROTOC_SUFFIX -} from './suffix.mjs'; -import { which } from './which.mjs'; - -const noop = () => {}; - -const __debug = env.NODE_ENV === 'debug'; -const __osType = os.type(); - -// Github repos -const PDFIUM_REPO = 'bblanchon/pdfium-binaries'; -const PROTOBUF_REPO = 'protocolbuffers/protobuf'; -const SPACEDRIVE_REPO = 'spacedriveapp/spacedrive'; - -/** - * Download and extract protobuff compiler binary - * @param {string[]} machineId - * @param {string} framework - */ -export async function downloadProtc(machineId, framework) { - if (await which('protoc')) return; - - console.log('Downloading protoc...'); - - const protocSuffix = getSuffix(PROTOC_SUFFIX, machineId); - if (protocSuffix == null) throw new Error('NO_PROTOC'); - - let found = false; - for await (const release of getGhReleasesAssets(PROTOBUF_REPO)) { - if (!protocSuffix.test(release.name)) continue; - try { - await extractTo(await getGh(release.downloadUrl), framework, { - chmod: 0o600, - overwrite: true - }); - found = true; - break; - } catch (error) { - console.warn('Failed to download protoc, re-trying...'); - if (__debug) console.error(error); - } - } - - if (!found) throw new Error('NO_PROTOC'); - - // cleanup - await fs.unlink(path.join(framework, 'readme.txt')).catch(__debug ? 
console.error : noop); -} - -/** - * Download and extract pdfium library for generating PDFs thumbnails - * @param {string[]} machineId - * @param {string} framework - */ -export async function downloadPDFium(machineId, framework) { - console.log('Downloading pdfium...'); - - const pdfiumSuffix = getSuffix(PDFIUM_SUFFIX, machineId); - if (pdfiumSuffix == null) throw new Error('NO_PDFIUM'); - - let found = false; - for await (const release of getGhReleasesAssets(PDFIUM_REPO)) { - if (!pdfiumSuffix.test(release.name)) continue; - try { - await extractTo(await getGh(release.downloadUrl), framework, { - chmod: 0o600, - overwrite: true - }); - found = true; - break; - } catch (error) { - console.warn('Failed to download pdfium, re-trying...'); - if (__debug) console.error(error); - } - } - - if (!found) throw new Error('NO_PDFIUM'); - - // cleanup - const cleanup = [ - fs.rename(path.join(framework, 'LICENSE'), path.join(framework, 'LICENSE.pdfium')), - ...['args.gn', 'PDFiumConfig.cmake', 'VERSION'].map((file) => - fs.unlink(path.join(framework, file)).catch(__debug ? 
console.error : noop) - ) - ]; - - switch (__osType) { - case 'Linux': - cleanup.push(fs.chmod(path.join(framework, 'lib', 'libpdfium.so'), 0o750)); - break; - case 'Darwin': - cleanup.push(fs.chmod(path.join(framework, 'lib', 'libpdfium.dylib'), 0o750)); - break; - } - - await Promise.all(cleanup); -} - -/** - * Download and extract ffmpeg libs for video thumbnails - * @param {string[]} machineId - * @param {string} framework - * @param {string[]} branches - */ -export async function downloadFFMpeg(machineId, framework, branches) { - const workflow = getConst(FFMPEG_WORKFLOW, machineId); - if (workflow == null) { - console.log('Checking FFMPeg...'); - if (await which('ffmpeg')) { - // TODO: check ffmpeg version match what we need - return; - } else { - throw new Error('NO_FFMPEG'); - } - } - - console.log('Downloading FFMPeg...'); - - const ffmpegSuffix = getSuffix(FFMPEG_SUFFFIX, machineId); - if (ffmpegSuffix == null) throw new Error('NO_FFMPEG'); - - let found = false; - for await (const artifact of getGhWorkflowRunArtifacts(SPACEDRIVE_REPO, workflow, branches)) { - if (!ffmpegSuffix.test(artifact.name)) continue; - try { - const data = await getGhArtifactContent(SPACEDRIVE_REPO, artifact.id); - await extractTo(data, framework, { - chmod: 0o600, - recursive: true, - overwrite: true - }); - found = true; - break; - } catch (error) { - console.warn('Failed to download FFMpeg, re-trying...'); - if (__debug) console.error(error); - } - } - - if (!found) throw new Error('NO_FFMPEG'); -} - -/** - * Download and extract libheif libs for heif thumbnails - * @param {string[]} machineId - * @param {string} framework - * @param {string[]} branches - */ -export async function downloadLibHeif(machineId, framework, branches) { - const workflow = getConst(LIBHEIF_WORKFLOW, machineId); - if (workflow == null) return; - - console.log('Downloading LibHeif...'); - - const libHeifSuffix = getSuffix(LIBHEIF_SUFFIX, machineId); - if (libHeifSuffix == null) throw new 
Error('NO_LIBHEIF'); - - let found = false; - for await (const artifact of getGhWorkflowRunArtifacts(SPACEDRIVE_REPO, workflow, branches)) { - if (!libHeifSuffix.test(artifact.name)) continue; - try { - const data = await getGhArtifactContent(SPACEDRIVE_REPO, artifact.id); - await extractTo(data, framework, { - chmod: 0o600, - recursive: true, - overwrite: true - }); - found = true; - break; - } catch (error) { - console.warn('Failed to download LibHeif, re-trying...'); - if (__debug) console.error(error); - } - } - - if (!found) throw new Error('NO_LIBHEIF'); -} diff --git a/scripts/git.mjs b/scripts/git.mjs deleted file mode 100644 index 9e33b28ee..000000000 --- a/scripts/git.mjs +++ /dev/null @@ -1,86 +0,0 @@ -import { exec as execCb } from 'node:child_process'; -import * as fs from 'node:fs/promises'; -import * as path from 'node:path'; -import { env } from 'node:process'; -import { promisify } from 'node:util'; - -const __debug = env.NODE_ENV === 'debug'; - -const exec = promisify(execCb); - -/** - * @param {string} repoPath - * @returns {string?} - */ -async function getRemoteBranchName(repoPath) { - let branchName; - try { - branchName = (await exec('git symbolic-ref --short HEAD', { cwd: repoPath })).stdout.trim(); - if (!branchName) throw 'Empty local branch name'; - } catch (error) { - if (__debug) { - console.warn(`Failed to read git local branch name`); - console.error(error); - } - return null; - } - - let remoteBranchName; - try { - remoteBranchName = ( - await exec(`git for-each-ref --format="%(upstream:short)" refs/heads/${branchName}`, { - cwd: repoPath - }) - ).stdout.trim(); - const [remote, branch] = remoteBranchName.split('/'); - if (!branch) throw 'Empty remote branch name'; - remoteBranchName = branch; - } catch (error) { - if (__debug) { - console.warn(`Failed to read git remote branch name`); - console.error(error); - } - return null; - } - - return remoteBranchName; -} - -// https://stackoverflow.com/q/3651860#answer-67151923 -const 
REF_REGEX = /ref:\s+refs\/heads\/(?[^\s\x00-\x1F\:\?\[\\\^\~]+)/; -const GITHUB_REF_REGEX = /^refs\/heads\//; - -/** - * @param {string} repoPath - * @returns {Promise} - */ -export async function getGitBranches(repoPath) { - const branches = ['main', 'master']; - - if (env.GITHUB_HEAD_REF) { - branches.unshift(env.GITHUB_HEAD_REF); - } else if (env.GITHUB_REF) { - branches.unshift(env.GITHUB_REF.replace(GITHUB_REF_REGEX, '')); - } - - const remoteBranchName = await getRemoteBranchName(repoPath); - if (remoteBranchName) { - branches.unshift(remoteBranchName); - } else { - let head; - try { - head = await fs.readFile(path.join(repoPath, '.git', 'HEAD'), { encoding: 'utf8' }); - } catch (error) { - if (__debug) { - console.warn(`Failed to read git HEAD file`); - console.error(error); - } - return branches; - } - - const match = REF_REGEX.exec(head); - if (match?.groups?.branch) branches.unshift(match.groups.branch); - } - - return branches; -} diff --git a/scripts/machineId.mjs b/scripts/machineId.mjs deleted file mode 100644 index eb8ad56bd..000000000 --- a/scripts/machineId.mjs +++ /dev/null @@ -1,60 +0,0 @@ -import { exec as execCb } from 'node:child_process'; -import * as os from 'node:os'; -import { env } from 'node:process'; -import { promisify } from 'node:util'; - -const __debug = env.NODE_ENV === 'debug'; - -let libc = 'glibc'; -if (os.type() === 'Linux') { - try { - const exec = promisify(execCb); - if ((await exec('ldd /bin/ls')).stdout.includes('musl')) { - libc = 'musl'; - } - } catch (error) { - if (__debug) { - console.warn(`Failed to check libc type`); - console.error(error); - } - } -} - -const OS_TYPE = { - darwin: 'Darwin', - windows: 'Windows_NT', - linux: 'Linux' -}; - -export function getMachineId() { - let machineId; - - /** - * Possible TARGET_TRIPLE: - * x86_64-apple-darwin - * aarch64-apple-darwin - * x86_64-pc-windows-msvc - * aarch64-pc-windows-msvc - * x86_64-unknown-linux-gnu - * x86_64-unknown-linux-musl - * aarch64-unknown-linux-gnu - 
* aarch64-unknown-linux-musl - * armv7-unknown-linux-gnueabihf - */ - if (env.TARGET_TRIPLE) { - const target = env.TARGET_TRIPLE.split('-'); - const osType = OS_TYPE[target[2]]; - - if (!osType) throw new Error(`Unknown OS type: ${target[2]}`); - if (!target[0]) throw new Error(`Unknown machine type: ${target[0]}`); - - machineId = [osType, target[0]]; - if (machineId[0] === 'Linux') machineId.push(target[3].includes('musl') ? 'musl' : 'glibc'); - } else { - // Current machine identifiers - machineId = [os.type(), os.machine()]; - if (machineId[0] === 'Linux') machineId.push(libc); - } - - return machineId; -} diff --git a/scripts/package.json b/scripts/package.json new file mode 100644 index 000000000..82651d6ea --- /dev/null +++ b/scripts/package.json @@ -0,0 +1,42 @@ +{ + "name": "@sd/scripts", + "private": true, + "main": "./preprep.mjs", + "type": "module", + "scripts": { + "prep": "node preprep.mjs", + "tauri": "node tauri.mjs", + "lint": "eslint --cache", + "typecheck": "tsc" + }, + "prettier": { + "semi": false, + "endOfLine": "lf", + "printWidth": 99, + "singleQuote": true, + "arrowParens": "avoid", + "trailingComma": "es5" + }, + "dependencies": { + "@iarna/toml": "^2.2.5", + "archive-wasm": "^1.5.3", + "mustache": "^4.2.0", + "semver": "^7.5.0", + "undici": "^5.25.4" + }, + "devDependencies": { + "@babel/core": "~7", + "@babel/eslint-parser": "~7", + "@babel/eslint-plugin": "~7", + "@types/mustache": "^4.2.3", + "@types/node": "^18.17", + "@typescript-eslint/eslint-plugin": "^6.7", + "@typescript-eslint/parser": "^6.7", + "eslint": "^8.50", + "eslint-config-prettier": "^9.0", + "eslint-config-standard": "^17.1", + "eslint-plugin-jsdoc": "^46.8", + "eslint-plugin-prettier": "^5.0", + "typescript": "^5.2" + } +} diff --git a/scripts/preprep.mjs b/scripts/preprep.mjs index 58495ec89..269c9b1ee 100644 --- a/scripts/preprep.mjs +++ b/scripts/preprep.mjs @@ -1,229 +1,156 @@ -import { exec as _exec } from 'node:child_process'; -import * as fs from 
'node:fs/promises'; -import * as os from 'node:os'; -import * as path from 'node:path'; -import { env, umask } from 'node:process'; -import { fileURLToPath } from 'node:url'; -import { promisify } from 'node:util'; -import mustache from 'mustache'; +import * as fs from 'node:fs/promises' +import * as path from 'node:path' +import { env, exit, umask } from 'node:process' +import { fileURLToPath } from 'node:url' -import { downloadFFMpeg, downloadLibHeif, downloadPDFium, downloadProtc } from './deps.mjs'; -import { getGitBranches } from './git.mjs'; -import { getMachineId } from './machineId.mjs'; -import { which } from './which.mjs'; +import * as _mustache from 'mustache' -umask(0o026); +import { downloadFFMpeg, downloadLibHeif, downloadPDFium, downloadProtc } from './utils/deps.mjs' +import { getGitBranches } from './utils/git.mjs' +import { getMachineId } from './utils/machineId.mjs' +import { + setupMacOsFramework, + symlinkSharedLibsMacOS, + symlinkSharedLibsLinux, +} from './utils/shared.mjs' +import { which } from './utils/which.mjs' if (/^(msys|mingw|cygwin)$/i.test(env.OSTYPE ?? 
'')) { - console.error('Bash for windows is not supported, please execute this from Powershell or CMD'); - process.exit(255); + console.error( + 'Bash for windows is not supported, please interact with this repo from Powershell or CMD' + ) + exit(255) } -const exec = promisify(_exec); +// @ts-expect-error +const mustache = /** @type {import("mustache")} */ (_mustache.default) -const __debug = env.NODE_ENV === 'debug'; -const __filename = fileURLToPath(import.meta.url); -const __dirname = path.dirname(__filename); +// Limit file permissions +umask(0o026) + +const __debug = env.NODE_ENV === 'debug' +const __filename = fileURLToPath(import.meta.url) +const __dirname = path.dirname(__filename) // NOTE: Must point to package root path -const __root = path.resolve(path.join(__dirname, '..')); +const __root = path.resolve(path.join(__dirname, '..')) + +const bugWarn = + 'This is probably a bug, please open an issue with your system info at: ' + + 'https://github.com/spacedriveapp/spacedrive/issues/new/choose' // Current machine identifiers -const machineId = getMachineId(); +const machineId = getMachineId() // Basic dependeny check -if ( - (await Promise.all([which('cargo'), which('rustc'), which('pnpm'), which('node')])).some( - (found) => !found - ) -) { +if ((await Promise.all([which('cargo'), which('rustc'), which('pnpm')])).some(found => !found)) { console.error(`Basic dependencies missing. -Make sure you have rust, node.js and pnpm installed: +Make sure you have rust and pnpm installed: https://rustup.rs -https://nodejs.org/en/download https://pnpm.io/installation Also that you have run the setup script: packages/scripts/${machineId[0] === 'Windows_NT' ?
'setup.ps1' : 'setup.sh'} -`); +`) } -// Accepted git branches for querying for artifacts (current, main, master) -const branches = await getGitBranches(__root); - -// Create the basic target directory hierarchy -const framework = path.join(__root, 'target', 'Frameworks'); -await fs.rm(framework, { force: true, recursive: true }); +// Directory where the native deps will be downloaded +const nativeDeps = path.join(__root, 'apps', '.deps') +await fs.rm(nativeDeps, { force: true, recursive: true }) await Promise.all( - ['bin', 'lib', 'include'].map((dir) => - fs.mkdir(path.join(framework, dir), { mode: 0o750, recursive: true }) + ['bin', 'lib', 'include'].map(dir => + fs.mkdir(path.join(nativeDeps, dir), { mode: 0o750, recursive: true }) ) -); +) + +// Accepted git branches for querying for artifacts (current, main, master) +const branches = await getGitBranches(__root) // Download all necessary external dependencies await Promise.all([ - downloadProtc(machineId, framework).catch((e) => { + downloadProtc(machineId, nativeDeps).catch(e => { console.error( - 'Failed to download protoc, this is required for Spacedrive to compile. ' + + 'Failed to download protobuf compiler, this is required to build Spacedrive. ' + 'Please install it with your system package manager' - ); - throw e; + ) + throw e }), - downloadPDFium(machineId, framework).catch((e) => { + downloadPDFium(machineId, nativeDeps).catch(e => { console.warn( 'Failed to download pdfium lib. ' + - "This is optional, but if one isn't configured Spacedrive won't be able to generate thumbnails for PDF files" - ); - if (__debug) console.error(e); + "This is optional, but if one isn't present Spacedrive won't be able to generate thumbnails for PDF files" + ) + if (__debug) console.error(e) }), - downloadFFMpeg(machineId, framework, branches).catch((e) => { - console.error( - 'Failed to download ffmpeg. 
This is probably a bug, please open a issue with you system info at: ' + - 'https://github.com/spacedriveapp/spacedrive/issues/new/choose' - ); - throw e; + downloadFFMpeg(machineId, nativeDeps, branches).catch(e => { + console.error(`Failed to download ffmpeg. ${bugWarn}`) + throw e }), - downloadLibHeif(machineId, framework, branches).catch((e) => { - console.error( - 'Failed to download libheif. This is probably a bug, please open a issue with you system info at: ' + - 'https://github.com/spacedriveapp/spacedrive/issues/new/choose' - ); - throw e; - }) -]).catch((e) => { - if (__debug) console.error(e); - process.exit(1); -}); + downloadLibHeif(machineId, nativeDeps, branches).catch(e => { + console.error(`Failed to download libheif. ${bugWarn}`) + throw e + }), +]).catch(e => { + if (__debug) console.error(e) + exit(1) +}) + +// Extra OS specific setup +try { + if (machineId[0] === 'Linux') { + console.log(`Symlink shared libs...`) + symlinkSharedLibsLinux(__root, nativeDeps).catch(e => { + console.error(`Failed to symlink shared libs. ${bugWarn}`) + throw e + }) + } else if (machineId[0] === 'Darwin') { + console.log(`Setup Framework...`) + await setupMacOsFramework(nativeDeps).catch(e => { + console.error(`Failed to setup Framework. ${bugWarn}`) + throw e + }) + // This is still required due to how ffmpeg-sys-next builds script works + console.log(`Symlink shared libs...`) + await symlinkSharedLibsMacOS(nativeDeps).catch(e => { + console.error(`Failed to symlink shared libs. ${bugWarn}`) + throw e + }) + } +} catch (error) { + if (__debug) console.error(error) + exit(1) +} // Generate .cargo/config.toml -console.log('Generating cargo config...'); +console.log('Generating cargo config...') try { await fs.writeFile( path.join(__root, '.cargo', 'config.toml'), mustache .render( await fs.readFile(path.join(__root, '.cargo', 'config.toml.mustache'), { - encoding: 'utf8' + encoding: 'utf8', }), { - ffmpeg: machineId[0] === 'Linux' ? 
false : framework.replaceAll('\\', '\\\\'), + isWin: machineId[0] === 'Windows_NT', + isMacOS: machineId[0] === 'Darwin', + isLinux: machineId[0] === 'Linux', + // Escape windows path separator to be compatible with TOML parsing protoc: path .join( - framework, + nativeDeps, 'bin', machineId[0] === 'Windows_NT' ? 'protoc.exe' : 'protoc' ) .replaceAll('\\', '\\\\'), - projectRoot: __root.replaceAll('\\', '\\\\'), - isWin: machineId[0] === 'Windows_NT', - isMacOS: machineId[0] === 'Darwin', - isLinux: machineId[0] === 'Linux' + nativeDeps: nativeDeps.replaceAll('\\', '\\\\'), } ) .replace(/\n\n+/g, '\n'), { mode: 0o751, flag: 'w+' } - ); + ) } catch (error) { - console.error( - 'Failed to generate .cargo/config.toml, please open an issue on: ' + - 'https://github.com/spacedriveapp/spacedrive/issues/new/choose' - ); - if (__debug) console.error(error); - process.exit(1); -} - -if (machineId[0] === 'Linux') { - // Setup Linux libraries - const libDir = path.join(__root, 'target', 'lib'); - await fs.rm(libDir, { force: true, recursive: true }); - await fs.mkdir(libDir, { recursive: true, mode: 0o751 }); - await fs.symlink(path.join(framework, 'lib'), path.join(__root, 'target', 'lib', 'spacedrive')); -} else if (machineId[0] === 'Darwin') { - // Setup macOS Frameworks - try { - console.log('Setup Frameworks & Sign libraries...'); - const ffmpegFramework = path.join(framework, 'FFMpeg.framework'); - // Move pdfium License to FFMpeg.framework - await fs.rename( - path.join(framework, 'LICENSE.pdfium'), - path.join( - ffmpegFramework, - 'Resources', - 'English.lproj', - 'Documentation', - 'LICENSE.pdfium' - ) - ); - // Move include files to FFMpeg.framework - const include = path.join(framework, 'include'); - const headers = path.join(ffmpegFramework, 'Headers'); - const includeFiles = await fs.readdir(include, { recursive: true, withFileTypes: true }); - const moveIncludes = includeFiles - .filter( - (entry) => - (entry.isFile() || entry.isSymbolicLink()) && 
!entry.name.endsWith('.proto') - ) - .map(async (entry) => { - const file = path.join(entry.path, entry.name); - const newFile = path.resolve(headers, path.relative(include, file)); - await fs.mkdir(path.dirname(newFile), { mode: 0o751, recursive: true }); - await fs.rename(file, newFile); - }); - // Move libs to FFMpeg.framework - const lib = path.join(framework, 'lib'); - const libraries = path.join(ffmpegFramework, 'Libraries'); - const libFiles = await fs.readdir(lib, { recursive: true, withFileTypes: true }); - const moveLibs = libFiles - .filter( - (entry) => - (entry.isFile() || entry.isSymbolicLink()) && entry.name.endsWith('.dylib') - ) - .map(async (entry) => { - const file = path.join(entry.path, entry.name); - const newFile = path.resolve(libraries, path.relative(lib, file)); - await fs.mkdir(path.dirname(newFile), { mode: 0o751, recursive: true }); - await fs.rename(file, newFile); - }); - - await Promise.all([...moveIncludes, ...moveLibs]); - - // Symlink headers - const headerFiles = await fs.readdir(headers, { recursive: true, withFileTypes: true }); - const linkHeaders = headerFiles - .filter((entry) => entry.isFile() || entry.isSymbolicLink()) - .map(async (entry) => { - const file = path.join(entry.path, entry.name); - const link = path.resolve(include, path.relative(headers, file)); - const linkDir = path.dirname(link); - await fs.mkdir(linkDir, { mode: 0o751, recursive: true }); - await fs.symlink(path.relative(linkDir, file), link); - }); - // Symlink libraries - const libraryFiles = await fs.readdir(libraries, { recursive: true, withFileTypes: true }); - const linkLibs = libraryFiles - .filter( - (entry) => - (entry.isFile() || entry.isSymbolicLink()) && entry.name.endsWith('.dylib') - ) - .map(async (entry) => { - const file = path.join(entry.path, entry.name); - const link = path.resolve(lib, path.relative(libraries, file)); - const linkDir = path.dirname(link); - await fs.mkdir(linkDir, { mode: 0o751, recursive: true }); - await 
fs.symlink(path.relative(linkDir, file), link); - if (entry.isFile()) { - // Sign the lib with the local machine certificate (Required for it to work on macOS 13+) - await exec(`codesign -s "${env.APPLE_SIGNING_IDENTITY || '-'}" -f "${file}"`); - } - }); - - await Promise.all([...linkHeaders, ...linkLibs]); - } catch (error) { - console.error( - 'Failed to configure required Frameworks.This is probably a bug, please open a issue with you system info at: ' + - 'https://github.com/spacedriveapp/spacedrive/issues/new/choose' - ); - if (__debug) console.error(error); - process.exit(1); - } + console.error(`Failed to generate .cargo/config.toml. ${bugWarn}`) + if (__debug) console.error(error) + exit(1) } diff --git a/scripts/setup.sh b/scripts/setup.sh index de5d8fefb..d9ff58e2d 100755 --- a/scripts/setup.sh +++ b/scripts/setup.sh @@ -32,6 +32,12 @@ script_failure() { trap 'script_failure ${LINENO:-}' ERR +case "${OSTYPE:-}" in + 'msys' | 'mingw' | 'cygwin') + err 'Bash for windows is not supported, please interact with this repo from Powershell or CMD' + ;; +esac + if [ "${CI:-}" != "true" ]; then echo 'Spacedrive Development Environment Setup' echo 'To set up your machine for Spacedrive development, this script will install some required dependencies with your system package manager' @@ -106,7 +112,7 @@ case "$(uname)" in echo fi ;; - "Linux") # https://github.com/tauri-apps/tauri-docs/blob/dev/docs/guides/getting-started/prerequisites.md + "Linux") # https://github.com/tauri-apps/tauri-docs/blob/dev/docs/guides/getting-started/prerequisites.md#setting-up-linux if has apt-get; then echo "Detected apt!" echo "Installing dependencies with apt..." 
diff --git a/scripts/tauri.mjs b/scripts/tauri.mjs new file mode 100644 index 000000000..08b8cbc16 --- /dev/null +++ b/scripts/tauri.mjs @@ -0,0 +1,139 @@ +import * as fs from 'node:fs/promises' +import * as path from 'node:path' +import { env, exit, umask, platform } from 'node:process' +import { fileURLToPath } from 'node:url' + +import * as toml from '@iarna/toml' + +import { patchTauri } from './utils/patchTauri.mjs' +import spawn from './utils/spawn.mjs' + +if (/^(msys|mingw|cygwin)$/i.test(env.OSTYPE ?? '')) { + console.error( + 'Bash for windows is not supported, please interact with this repo from Powershell or CMD' + ) + exit(255) +} + +// Limit file permissions +umask(0o026) + +const __filename = fileURLToPath(import.meta.url) +const __dirname = path.dirname(__filename) +const [_, __, ...args] = process.argv + +// NOTE: Must point to package root path +const __root = path.resolve(path.join(__dirname, '..')) + +// Location for desktop app +const desktopApp = path.join(__root, 'apps', 'desktop') + +// Location of the native dependencies +const nativeDeps = path.join(__root, 'apps', '.deps') + +// Files to be removed when script finish executing +const __cleanup = /** @type {string[]} */ ([]) +const cleanUp = () => Promise.all(__cleanup.map(file => fs.unlink(file).catch(() => {}))) +process.on('SIGINT', cleanUp) + +// Check if file/dir exists +const exists = (/** @type {string} */ path) => + fs + .access(path, fs.constants.R_OK) + .then(() => true) + .catch(() => false) + +// Export environment variables defined in cargo.toml +const cargoConfig = await fs + .readFile(path.resolve(__root, '.cargo', 'config.toml'), { encoding: 'binary' }) + .then(toml.parse) +if (cargoConfig.env && typeof cargoConfig.env === 'object') + for (const [name, value] of Object.entries(cargoConfig.env)) if (!env[name]) env[name] = value + +// Default command +if (args.length === 0) args.push('build') + +let code = 0 +try { + switch (args[0]) { + case 'dev': { + 
__cleanup.push(...(await patchTauri(__root, nativeDeps, args))) + break + } + case 'build': { + if (!env.NODE_OPTIONS || !env.NODE_OPTIONS.includes('--max_old_space_size')) { + env.NODE_OPTIONS = `--max_old_space_size=4096 ${env.NODE_OPTIONS ?? ''}` + } + + __cleanup.push(...(await patchTauri(__root, nativeDeps, args))) + + switch (process.platform) { + case 'darwin': { + // Configure DMG background + env.BACKGROUND_FILE = path.resolve( + desktopApp, + 'src-tauri', + 'dmg-background.png' + ) + env.BACKGROUND_FILE_NAME = path.basename(env.BACKGROUND_FILE) + env.BACKGROUND_CLAUSE = `set background picture of opts to file ".background:${env.BACKGROUND_FILE_NAME}"` + + if (!(await exists(env.BACKGROUND_FILE))) + console.warn( + `WARNING: DMG background file not found at ${env.BACKGROUND_FILE}` + ) + + break + } + case 'linux': + // Cleanup appimage bundle to avoid build_appimage.sh failing + await fs.rm(path.join(__root, 'target', 'release', 'bundle', 'appimage'), { + recursive: true, + force: true, + }) + break + } + } + } + + await spawn('pnpm', ['exec', 'tauri', ...args], desktopApp).catch(async error => { + if (args[0] === 'build' || platform === 'linux') { + // Work around appimage bundling not working sometimes + const appimageDir = path.join(__root, 'target', 'release', 'bundle', 'appimage') + if ( + (await exists(path.join(appimageDir, 'build_appimage.sh'))) && + (await fs.readdir(appimageDir).then(f => f.every(f => !f.endsWith('.AppImage')))) + ) { + // Remove AppDir to allow build_appimage to rebuild it + await fs.rm(path.join(appimageDir, 'spacedrive.AppDir'), { + recursive: true, + force: true, + }) + return spawn('bash', ['build_appimage.sh'], appimageDir).catch(exitCode => { + code = exitCode + console.error(`tauri ${args[0]} failed with exit code ${exitCode}`) + }) + } + } + + console.error( + `tauri ${args[0]} failed with exit code ${typeof error === 'number' ?
error : 1}` + ) + + console.warn( + `If you got an error related to FFMpeg or Protoc/Protobuf you may need to re-run \`pnpm prep\`` + ) + + throw error + }) +} catch (error) { + if (typeof error === 'number') { + code = error + } else { + if (error instanceof Error) console.error(error) + code = 1 + } +} finally { + cleanUp() + exit(code) +} diff --git a/scripts/tsconfig.json b/scripts/tsconfig.json new file mode 100644 index 000000000..353936000 --- /dev/null +++ b/scripts/tsconfig.json @@ -0,0 +1,33 @@ +{ + "compilerOptions": { + "lib": ["esnext"], + "noEmit": true, + "outDir": "src", + "strict": true, + "checkJs": true, + "allowJs": true, + "module": "esnext", + "target": "esnext", + "declaration": true, + "incremental": true, + "skipLibCheck": true, + "removeComments": false, + "noUnusedLocals": true, + "isolatedModules": true, + "esModuleInterop": false, + "disableSizeLimit": true, + "moduleResolution": "node", + "noImplicitReturns": true, + "resolveJsonModule": true, + "noUnusedParameters": true, + "experimentalDecorators": true, + "useDefineForClassFields": true, + "noUncheckedIndexedAccess": true, + "exactOptionalPropertyTypes": true, + "forceConsistentCasingInFileNames": true, + "noPropertyAccessFromIndexSignature": false + }, + "include": ["./**/*.mjs"], + "exclude": ["node_modules"], + "$schema": "https://json.schemastore.org/tsconfig" +} diff --git a/scripts/suffix.mjs b/scripts/utils/consts.mjs similarity index 58% rename from scripts/suffix.mjs rename to scripts/utils/consts.mjs index 69f6f71d0..44255b4aa 100644 --- a/scripts/suffix.mjs +++ b/scripts/utils/consts.mjs @@ -4,78 +4,69 @@ export const PROTOC_SUFFIX = { i386: 'linux-x86_32', i686: 'linux-x86_32', x86_64: 'linux-x86_64', - arm64: 'linux-aarch_64', - aarch64: 'linux-aarch_64' + aarch64: 'linux-aarch_64', }, Darwin: { x86_64: 'osx-x86_64', - arm64: 'osx-aarch_64', - aarch64: 'osx-aarch_64' + + aarch64: 'osx-aarch_64', }, Windows_NT: { i386: 'win32', i686: 'win32', - x86_64: 'win64' - } -}; + 
x86_64: 'win64', + }, +} export const PDFIUM_SUFFIX = { Linux: { x86_64: { musl: 'linux-musl-x64', - glibc: 'linux-x64' + glibc: 'linux-x64', }, - arm64: 'linux-arm64', - aarch64: 'linux-arm64' + aarch64: 'linux-arm64', }, Darwin: { x86_64: 'mac-x64', - arm64: 'mac-arm64', - aarch64: 'mac-arm64' + aarch64: 'mac-arm64', }, Windows_NT: { x86_64: 'win-x64', - arm64: 'win-arm64', - aarch64: 'win-arm64' - } -}; + aarch64: 'win-arm64', + }, +} export const FFMPEG_SUFFFIX = { Darwin: { x86_64: 'x86_64', - arm64: 'arm64', - aarch64: 'arm64' + aarch64: 'arm64', }, Windows_NT: { - x86_64: 'x86_64' - } -}; + x86_64: 'x86_64', + }, +} export const FFMPEG_WORKFLOW = { Darwin: 'ffmpeg-macos.yml', - Windows_NT: 'ffmpeg-windows.yml' -}; + Windows_NT: 'ffmpeg-windows.yml', +} export const LIBHEIF_SUFFIX = { Linux: { x86_64: { musl: 'x86_64-linux-musl', - glibc: 'x86_64-linux-gnu' - }, - arm64: { - musl: 'aarch64-linux-musl', - glibc: 'aarch64-linux-gnu' + glibc: 'x86_64-linux-gnu', }, aarch64: { musl: 'aarch64-linux-musl', - glibc: 'aarch64-linux-gnu' - } - } -}; + glibc: 'aarch64-linux-gnu', + }, + }, +} export const LIBHEIF_WORKFLOW = { - Linux: 'libheif-linux.yml' -}; + Linux: 'libheif-linux.yml', +} /** * @param {Record} constants @@ -84,15 +75,15 @@ export const LIBHEIF_WORKFLOW = { */ export function getConst(constants, identifiers) { /** @type {string | Record} */ - let constant = constants; + let constant = constants for (const id of identifiers) { - constant = /** @type {string | Record} */ (constant[id]); - if (!constant) return null; - if (typeof constant !== 'object') break; + constant = /** @type {string | Record} */ (constant[id]) + if (!constant) return null + if (typeof constant !== 'object') break } - return typeof constant === 'string' ? constant : null; + return typeof constant === 'string' ? 
constant : null } /** @@ -101,6 +92,6 @@ export function getConst(constants, identifiers) { * @returns {RegExp?} */ export function getSuffix(suffixes, identifiers) { - const suffix = getConst(suffixes, identifiers); - return suffix ? new RegExp(`${suffix}(\\.[^\\.]+)*$`) : null; + const suffix = getConst(suffixes, identifiers) + return suffix ? new RegExp(`${suffix}(\\.[^\\.]+)*$`) : null } diff --git a/scripts/utils/deps.mjs b/scripts/utils/deps.mjs new file mode 100644 index 000000000..e10db4bfc --- /dev/null +++ b/scripts/utils/deps.mjs @@ -0,0 +1,198 @@ +import * as fs from 'node:fs/promises' +import * as os from 'node:os' +import * as path from 'node:path' +import { env } from 'node:process' + +import { extractTo } from 'archive-wasm/src/fs.mjs' + +import { + FFMPEG_SUFFFIX, + FFMPEG_WORKFLOW, + getConst, + getSuffix, + LIBHEIF_SUFFIX, + LIBHEIF_WORKFLOW, + PDFIUM_SUFFIX, + PROTOC_SUFFIX, +} from './consts.mjs' +import { + getGh, + getGhArtifactContent, + getGhReleasesAssets, + getGhWorkflowRunArtifacts, +} from './github.mjs' +import { which } from './which.mjs' + +const noop = () => {} + +const __debug = env.NODE_ENV === 'debug' +const __osType = os.type() + +// Github repos +const PDFIUM_REPO = 'bblanchon/pdfium-binaries' +const PROTOBUF_REPO = 'protocolbuffers/protobuf' +const SPACEDRIVE_REPO = 'spacedriveapp/spacedrive' + +/** + * Download and extract protobuf compiler binary + * @param {string[]} machineId + * @param {string} nativeDeps + */ +export async function downloadProtc(machineId, nativeDeps) { + if (await which('protoc')) return + + console.log('Downloading protoc...') + + const protocSuffix = getSuffix(PROTOC_SUFFIX, machineId) + if (protocSuffix == null) throw new Error('NO_PROTOC') + + let found = false + for await (const release of getGhReleasesAssets(PROTOBUF_REPO)) { + if (!protocSuffix.test(release.name)) continue + try { + await extractTo(await getGh(release.downloadUrl), nativeDeps, { + chmod: 0o600, + overwrite: true, + }) + found =
true + break + } catch (error) { + console.warn('Failed to download protoc, re-trying...') + if (__debug) console.error(error) + } + } + + if (!found) throw new Error('NO_PROTOC') + + // cleanup + await fs.unlink(path.join(nativeDeps, 'readme.txt')).catch(__debug ? console.error : noop) +} + +/** + * Download and extract pdfium library for generating PDF thumbnails + * @param {string[]} machineId + * @param {string} nativeDeps + */ +export async function downloadPDFium(machineId, nativeDeps) { + console.log('Downloading pdfium...') + + const pdfiumSuffix = getSuffix(PDFIUM_SUFFIX, machineId) + if (pdfiumSuffix == null) throw new Error('NO_PDFIUM') + + let found = false + for await (const release of getGhReleasesAssets(PDFIUM_REPO)) { + if (!pdfiumSuffix.test(release.name)) continue + try { + await extractTo(await getGh(release.downloadUrl), nativeDeps, { + chmod: 0o600, + overwrite: true, + }) + found = true + break + } catch (error) { + console.warn('Failed to download pdfium, re-trying...') + if (__debug) console.error(error) + } + } + + if (!found) throw new Error('NO_PDFIUM') + + // cleanup + const cleanup = [ + fs.rename(path.join(nativeDeps, 'LICENSE'), path.join(nativeDeps, 'LICENSE.pdfium')), + ...['args.gn', 'PDFiumConfig.cmake', 'VERSION'].map(file => + fs.unlink(path.join(nativeDeps, file)).catch(__debug ?
console.error : noop) + ), + ] + + switch (__osType) { + case 'Linux': + cleanup.push(fs.chmod(path.join(nativeDeps, 'lib', 'libpdfium.so'), 0o750)) + break + case 'Darwin': + cleanup.push(fs.chmod(path.join(nativeDeps, 'lib', 'libpdfium.dylib'), 0o750)) + break + } + + await Promise.all(cleanup) +} + +/** + * Download and extract ffmpeg libs for video thumbnails + * @param {string[]} machineId + * @param {string} nativeDeps + * @param {string[]} branches + */ +export async function downloadFFMpeg(machineId, nativeDeps, branches) { + const workflow = getConst(FFMPEG_WORKFLOW, machineId) + if (workflow == null) { + console.log('Checking FFMPeg...') + if (await which('ffmpeg')) { + // TODO: check ffmpeg version match what we need + return + } else { + throw new Error('NO_FFMPEG') + } + } + + console.log('Downloading FFMPeg...') + + const ffmpegSuffix = getSuffix(FFMPEG_SUFFFIX, machineId) + if (ffmpegSuffix == null) throw new Error('NO_FFMPEG') + + let found = false + for await (const artifact of getGhWorkflowRunArtifacts(SPACEDRIVE_REPO, workflow, branches)) { + if (!ffmpegSuffix.test(artifact.name)) continue + try { + const data = await getGhArtifactContent(SPACEDRIVE_REPO, artifact.id) + await extractTo(data, nativeDeps, { + chmod: 0o600, + recursive: true, + overwrite: true, + }) + found = true + break + } catch (error) { + console.warn('Failed to download FFMpeg, re-trying...') + if (__debug) console.error(error) + } + } + + if (!found) throw new Error('NO_FFMPEG') +} + +/** + * Download and extract libheif libs for heif thumbnails + * @param {string[]} machineId + * @param {string} nativeDeps + * @param {string[]} branches + */ +export async function downloadLibHeif(machineId, nativeDeps, branches) { + const workflow = getConst(LIBHEIF_WORKFLOW, machineId) + if (workflow == null) return + + console.log('Downloading LibHeif...') + + const libHeifSuffix = getSuffix(LIBHEIF_SUFFIX, machineId) + if (libHeifSuffix == null) throw new Error('NO_LIBHEIF') + + let found 
= false + for await (const artifact of getGhWorkflowRunArtifacts(SPACEDRIVE_REPO, workflow, branches)) { + if (!libHeifSuffix.test(artifact.name)) continue + try { + const data = await getGhArtifactContent(SPACEDRIVE_REPO, artifact.id) + await extractTo(data, nativeDeps, { + chmod: 0o600, + recursive: true, + overwrite: true, + }) + found = true + break + } catch (error) { + console.warn('Failed to download LibHeif, re-trying...') + if (__debug) console.error(error) + } + } + + if (!found) throw new Error('NO_LIBHEIF') +} diff --git a/scripts/utils/git.mjs b/scripts/utils/git.mjs new file mode 100644 index 000000000..35750da21 --- /dev/null +++ b/scripts/utils/git.mjs @@ -0,0 +1,87 @@ +import { exec as execCb } from 'node:child_process' +import * as fs from 'node:fs/promises' +import * as path from 'node:path' +import { env } from 'node:process' +import { promisify } from 'node:util' + +const __debug = env.NODE_ENV === 'debug' + +const exec = promisify(execCb) + +/** + * @param {string} repoPath + * @returns {Promise} + */ +async function getRemoteBranchName(repoPath) { + let branchName + try { + branchName = (await exec('git symbolic-ref --short HEAD', { cwd: repoPath })).stdout.trim() + if (!branchName) throw new Error('Empty local branch name') + } catch (error) { + if (__debug) { + console.warn(`Failed to read git local branch name`) + console.error(error) + } + return null + } + + let remoteBranchName + try { + remoteBranchName = ( + await exec(`git for-each-ref --format="%(upstream:short)" refs/heads/${branchName}`, { + cwd: repoPath, + }) + ).stdout.trim() + const [_, branch] = remoteBranchName.split('/') + if (!branch) throw new Error('Empty remote branch name') + remoteBranchName = branch + } catch (error) { + if (__debug) { + console.warn(`Failed to read git remote branch name`) + console.error(error) + } + return null + } + + return remoteBranchName +} + +// https://stackoverflow.com/q/3651860#answer-67151923 +// eslint-disable-next-line no-control-regex 
+const REF_REGEX = /ref:\s+refs\/heads\/(?[^\s\x00-\x1F:?[\\^~]+)/ +const GITHUB_REF_REGEX = /^refs\/heads\// + +/** + * @param {string} repoPath + * @returns {Promise} + */ +export async function getGitBranches(repoPath) { + const branches = ['main', 'master'] + + if (env.GITHUB_HEAD_REF) { + branches.unshift(env.GITHUB_HEAD_REF) + } else if (env.GITHUB_REF) { + branches.unshift(env.GITHUB_REF.replace(GITHUB_REF_REGEX, '')) + } + + const remoteBranchName = await getRemoteBranchName(repoPath) + if (remoteBranchName) { + branches.unshift(remoteBranchName) + } else { + let head + try { + head = await fs.readFile(path.join(repoPath, '.git', 'HEAD'), { encoding: 'utf8' }) + } catch (error) { + if (__debug) { + console.warn(`Failed to read git HEAD file`) + console.error(error) + } + return branches + } + + const match = REF_REGEX.exec(head) + if (match?.groups?.branch) branches.unshift(match.groups.branch) + } + + return branches +} diff --git a/scripts/github.mjs b/scripts/utils/github.mjs similarity index 67% rename from scripts/github.mjs rename to scripts/utils/github.mjs index db7664a9c..7d38bcf2f 100644 --- a/scripts/github.mjs +++ b/scripts/utils/github.mjs @@ -1,35 +1,36 @@ -import * as fs from 'node:fs/promises'; -import { dirname, join as joinPath, posix as path } from 'node:path'; -import { env } from 'node:process'; -import { setTimeout } from 'node:timers/promises'; -import { fileURLToPath } from 'node:url'; -import { extract } from 'archive-wasm'; +import * as fs from 'node:fs/promises' +import { dirname, join as joinPath, posix as path } from 'node:path' +import { env } from 'node:process' +import { setTimeout } from 'node:timers/promises' +import { fileURLToPath } from 'node:url' -const __debug = env.NODE_ENV === 'debug'; -const __offline = env.OFFLINE === 'true'; -const __filename = fileURLToPath(import.meta.url); -const __dirname = dirname(__filename); -const cacheDir = joinPath(__dirname, '.tmp'); -await fs.mkdir(cacheDir, { recursive: true, mode: 
0o751 }); +import { fetch, Headers } from 'undici' + +const __debug = env.NODE_ENV === 'debug' +const __offline = env.OFFLINE === 'true' +const __filename = fileURLToPath(import.meta.url) +const __dirname = dirname(__filename) +const cacheDir = joinPath(__dirname, '.tmp') +await fs.mkdir(cacheDir, { recursive: true, mode: 0o751 }) // Note: Trailing slashs are important to correctly append paths -const GH = 'https://api.github.com/repos/'; -const NIGTHLY = 'https://nightly.link/'; +const GH = 'https://api.github.com/repos/' +const NIGTHLY = 'https://nightly.link/' // Github routes -const RELEASES = 'releases'; -const WORKFLOWS = 'actions/workflows'; -const ARTIFACTS = 'actions/artifacts'; +const RELEASES = 'releases' +const WORKFLOWS = 'actions/workflows' +const ARTIFACTS = 'actions/artifacts' // Default GH headers const GH_HEADERS = new Headers({ - 'Accept': 'application/vnd.github+json', - 'X-GitHub-Api-Version': '2022-11-28' -}); + Accept: 'application/vnd.github+json', + 'X-GitHub-Api-Version': '2022-11-28', +}) // Load github auth token if available if ('GITHUB_TOKEN' in env && env.GITHUB_TOKEN) - GH_HEADERS.append('Authorization', `Bearer ${env.GITHUB_TOKEN}`); + GH_HEADERS.append('Authorization', `Bearer ${env.GITHUB_TOKEN}`) /** * @param {string} resource @@ -38,69 +39,69 @@ if ('GITHUB_TOKEN' in env && env.GITHUB_TOKEN) */ async function getCache(resource, headers) { /** @type {Buffer | undefined} */ - let data; + let data /** @type {[string, string] | undefined} */ - let header; + let header // Don't cache in CI - if (env.CI === 'true') return null; + if (env.CI === 'true') return null if (headers) resource += Array.from(headers.entries()) .filter(([name]) => name !== 'If-None-Match' && name !== 'If-Modified-Since') .flat() - .join(':'); + .join(':') try { const cache = JSON.parse( await fs.readFile(joinPath(cacheDir, Buffer.from(resource).toString('base64url')), { - encoding: 'utf8' + encoding: 'utf8', }) - ); + ) if (cache && typeof cache === 'object') { 
if (cache.etag && typeof cache.etag === 'string') { - header = ['If-None-Match', cache.etag]; + header = ['If-None-Match', cache.etag] } else if (cache.modifiedSince && typeof cache.modifiedSince === 'string') { - header = ['If-Modified-Since', cache.modifiedSince]; + header = ['If-Modified-Since', cache.modifiedSince] } if (cache.data && typeof cache.data === 'string') - data = Buffer.from(cache.data, 'base64'); + data = Buffer.from(cache.data, 'base64') } } catch (error) { if (__debug) { - console.warn(`CACHE MISS: ${resource}`); - console.error(error); + console.warn(`CACHE MISS: ${resource}`) + console.error(error) } } - return data ? { data, header } : null; + return data ? { data, header } : null } /** - * @param {Response} response + * @param {import('undici').Response} response * @param {string} resource * @param {Buffer} [cachedData] * @param {Headers} [headers] * @returns {Promise} */ async function setCache(response, resource, cachedData, headers) { - const data = Buffer.from(await response.arrayBuffer()); + const data = Buffer.from(await response.arrayBuffer()) // Don't cache in CI - if (env.CI === 'true') return data; + if (env.CI === 'true') return data - const etag = response.headers.get('ETag') || undefined; - const modifiedSince = response.headers.get('Last-Modified') || undefined; + const etag = response.headers.get('ETag') || undefined + const modifiedSince = response.headers.get('Last-Modified') || undefined if (headers) resource += Array.from(headers.entries()) .filter(([name]) => name !== 'If-None-Match' && name !== 'If-Modified-Since') .flat() - .join(':'); + .join(':') if (response.status === 304 || (response.ok && data.length === 0)) { // Cache hit - if (!cachedData) throw new Error('Empty cache hit ????'); - return cachedData; + if (!cachedData) throw new Error('Empty cache hit ????') + return cachedData } try { @@ -109,18 +110,18 @@ async function setCache(response, resource, cachedData, headers) { JSON.stringify({ etag, modifiedSince, - 
data: data.toString('base64') + data: data.toString('base64'), }), { mode: 0o640, flag: 'w+' } - ); + ) } catch (error) { if (__debug) { - console.warn(`CACHE WRITE FAIL: ${resource}`); - console.error(error); + console.warn(`CACHE WRITE FAIL: ${resource}`) + console.error(error) } } - return data; + return data } /** @@ -130,30 +131,30 @@ async function setCache(response, resource, cachedData, headers) { * @returns {Promise} */ export async function get(resource, headers, preferCache) { - if (headers == null) headers = new Headers(); - if (resource instanceof URL) resource = resource.toString(); + if (headers == null) headers = new Headers() + if (resource instanceof URL) resource = resource.toString() - const cache = await getCache(resource, headers); + const cache = await getCache(resource, headers) if (__offline) { if (cache?.data == null) - throw new Error(`OFFLINE MODE: Cache for request ${resource} doesn't exist`); - return cache.data; + throw new Error(`OFFLINE MODE: Cache for request ${resource} doesn't exist`) + return cache.data } - if (preferCache && cache?.data != null) return cache.data; + if (preferCache && cache?.data != null) return cache.data - if (cache?.header) headers.append(...cache.header); + if (cache?.header) headers.append(...cache.header) - const response = await fetch(resource, { headers }); + const response = await fetch(resource, { headers }) if (!response.ok) { if (cache?.data) { - if (__debug) console.warn(`CACHE HIT due to fail: ${resource} ${response.statusText}`); - return cache.data; + if (__debug) console.warn(`CACHE HIT due to fail: ${resource} ${response.statusText}`) + return cache.data } - throw new Error(response.statusText); + throw new Error(response.statusText) } - return await setCache(response, resource, cache?.data, headers); + return await setCache(response, resource, cache?.data, headers) } // Header name Description @@ -163,8 +164,8 @@ export async function get(resource, headers, preferCache) { // x-ratelimit-reset 
The time at which the current rate limit window resets in UTC epoch seconds. const RATE_LIMIT = { reset: 0, - remaining: Infinity -}; + remaining: Infinity, +} /** * Get resource from a Github route with some pre-defined parameters @@ -172,52 +173,52 @@ const RATE_LIMIT = { * @returns {Promise} */ export async function getGh(route) { - route = new URL(route, GH).toString(); + route = new URL(route, GH).toString() - const cache = await getCache(route); + const cache = await getCache(route) if (__offline) { if (cache?.data == null) - throw new Error(`OFFLINE MODE: Cache for request ${route} doesn't exist`); - return cache?.data; + throw new Error(`OFFLINE MODE: Cache for request ${route} doesn't exist`) + return cache?.data } if (RATE_LIMIT.remaining === 0) { - if (cache?.data) return cache.data; + if (cache?.data) return cache.data console.warn( `RATE LIMIT: Waiting ${RATE_LIMIT.reset} seconds before contacting Github again... [CTRL+C to cancel]` - ); - await setTimeout(RATE_LIMIT.reset * 1000); + ) + await setTimeout(RATE_LIMIT.reset * 1000) } - const headers = new Headers(GH_HEADERS); - if (cache?.header) headers.append(...cache.header); + const headers = new Headers(GH_HEADERS) + if (cache?.header) headers.append(...cache.header) - const response = await fetch(route, { method: 'GET', headers }); + const response = await fetch(route, { method: 'GET', headers }) - const rateReset = Number.parseInt(response.headers.get('x-ratelimit-reset') ?? ''); - const rateRemaining = Number.parseInt(response.headers.get('x-ratelimit-remaining') ?? ''); + const rateReset = Number.parseInt(response.headers.get('x-ratelimit-reset') ?? '') + const rateRemaining = Number.parseInt(response.headers.get('x-ratelimit-remaining') ?? 
'') if (!(Number.isNaN(rateReset) || Number.isNaN(rateRemaining))) { - const reset = rateReset - Date.now() / 1000; - if (reset > RATE_LIMIT.reset) RATE_LIMIT.reset = reset; + const reset = rateReset - Date.now() / 1000 + if (reset > RATE_LIMIT.reset) RATE_LIMIT.reset = reset if (rateRemaining < RATE_LIMIT.remaining) { - RATE_LIMIT.remaining = rateRemaining; + RATE_LIMIT.remaining = rateRemaining if (__debug) { - console.warn(`Github remaining requests: ${RATE_LIMIT.remaining}`); - await setTimeout(5000); + console.warn(`Github remaining requests: ${RATE_LIMIT.remaining}`) + await setTimeout(5000) } } } if (!response.ok) { if (cache?.data) { - if (__debug) console.warn(`CACHE HIT due to fail: ${route} ${response.statusText}`); - return cache.data; + if (__debug) console.warn(`CACHE HIT due to fail: ${route} ${response.statusText}`) + return cache.data } - if (response.status === 403 && RATE_LIMIT.remaining === 0) return await getGh(route); - throw new Error(response.statusText); + if (response.status === 403 && RATE_LIMIT.remaining === 0) return await getGh(route) + throw new Error(response.statusText) } - return await setCache(response, route, cache?.data); + return await setCache(response, route, cache?.data) } /** @@ -225,17 +226,17 @@ export async function getGh(route) { * @yields {{name: string, downloadUrl: string}} */ export async function* getGhReleasesAssets(repo) { - let page = 0; + let page = 0 while (true) { // "${_gh_url}/protocolbuffers/protobuf/releases?page=${_page}&per_page=100" const releases = JSON.parse( (await getGh(path.join(repo, `${RELEASES}?page=${page++}&per_page=100`))).toString( 'utf8' ) - ); + ) - if (!Array.isArray(releases)) throw new Error(`Error: ${JSON.stringify(releases)}`); - if (releases.length === 0) return; + if (!Array.isArray(releases)) throw new Error(`Error: ${JSON.stringify(releases)}`) + if (releases.length === 0) return for (const release of /** @type {unknown[]} */ (releases)) { if ( @@ -246,9 +247,9 @@ export async 
function* getGhReleasesAssets(repo) { Array.isArray(release.assets) ) ) - throw new Error(`Invalid release: ${release}`); + throw new Error(`Invalid release: ${release}`) - if ('prerelease' in release && release.prerelease) continue; + if ('prerelease' in release && release.prerelease) continue for (const asset of /** @type {unknown[]} */ (release.assets)) { if ( @@ -261,9 +262,9 @@ export async function* getGhReleasesAssets(repo) { typeof asset.browser_download_url === 'string' ) ) - throw new Error(`Invalid release.asset: ${asset}`); + throw new Error(`Invalid release.asset: ${asset}`) - yield { name: asset.name, downloadUrl: asset.browser_download_url }; + yield { name: asset.name, downloadUrl: asset.browser_download_url } } } } @@ -276,11 +277,11 @@ export async function* getGhReleasesAssets(repo) { * @yields {{ id: number, name: string }} */ export async function* getGhWorkflowRunArtifacts(repo, yaml, branch) { - if (!branch) branch = 'main'; - if (typeof branch === 'string') branch = [branch]; - if (!(branch instanceof Set)) branch = new Set(branch); + if (!branch) branch = 'main' + if (typeof branch === 'string') branch = [branch] + if (!(branch instanceof Set)) branch = new Set(branch) - let page = 0; + let page = 0 while (true) { const workflow = /** @type {unknown} */ ( JSON.parse( @@ -295,7 +296,7 @@ export async function* getGhWorkflowRunArtifacts(repo, yaml, branch) { ) ).toString('utf8') ) - ); + ) if ( !( workflow && @@ -304,9 +305,9 @@ export async function* getGhWorkflowRunArtifacts(repo, yaml, branch) { Array.isArray(workflow.workflow_runs) ) ) - throw new Error(`Error: ${JSON.stringify(workflow)}`); + throw new Error(`Error: ${JSON.stringify(workflow)}`) - if (workflow.workflow_runs.length === 0) return; + if (workflow.workflow_runs.length === 0) return for (const run of /** @type {unknown[]} */ (workflow.workflow_runs)) { if ( @@ -319,13 +320,13 @@ export async function* getGhWorkflowRunArtifacts(repo, yaml, branch) { typeof run.artifacts_url 
=== 'string' ) ) - throw new Error(`Invalid Workflow run: ${run}`); + throw new Error(`Invalid Workflow run: ${run}`) - if (!branch.has(run.head_branch)) continue; + if (!branch.has(run.head_branch)) continue const response = /** @type {unknown} */ ( JSON.parse((await getGh(run.artifacts_url)).toString('utf8')) - ); + ) if ( !( @@ -335,7 +336,7 @@ export async function* getGhWorkflowRunArtifacts(repo, yaml, branch) { Array.isArray(response.artifacts) ) ) - throw new Error(`Error: ${JSON.stringify(response)}`); + throw new Error(`Error: ${JSON.stringify(response)}`) for (const artifact of /** @type {unknown[]} */ (response.artifacts)) { if ( @@ -348,9 +349,9 @@ export async function* getGhWorkflowRunArtifacts(repo, yaml, branch) { typeof artifact.name === 'string' ) ) - throw new Error(`Invalid artifact: ${artifact}`); + throw new Error(`Invalid artifact: ${artifact}`) - yield { id: artifact.id, name: artifact.name }; + yield { id: artifact.id, name: artifact.name } } } } @@ -366,11 +367,11 @@ export async function getGhArtifactContent(repo, id) { if (GH_HEADERS.has('Authorization')) { try { // "${_gh_url}/${_sd_gh_path}/actions/artifacts/${_artifact_id}/zip" - return await getGh(path.join(repo, ARTIFACTS, id.toString(), 'zip')); + return await getGh(path.join(repo, ARTIFACTS, id.toString(), 'zip')) } catch (error) { if (__debug) { - console.warn('Failed to download artifact from github, fallback to nightly.link'); - console.error(error); + console.warn('Failed to download artifact from github, fallback to nightly.link') + console.error(error) } } } @@ -381,5 +382,5 @@ export async function getGhArtifactContent(repo, id) { * Use it when running in evironments that are not authenticated with github * "https://nightly.link/${_sd_gh_path}/actions/artifacts/${_artifact_id}.zip" */ - return await get(new URL(path.join(repo, ARTIFACTS, `${id}.zip`), NIGTHLY), null, true); + return await get(new URL(path.join(repo, ARTIFACTS, `${id}.zip`), NIGTHLY), null, true) } diff 
--git a/scripts/utils/machineId.mjs b/scripts/utils/machineId.mjs new file mode 100644 index 000000000..0351dc420 --- /dev/null +++ b/scripts/utils/machineId.mjs @@ -0,0 +1,68 @@ +import { exec as execCb } from 'node:child_process' +import * as os from 'node:os' +import { env } from 'node:process' +import { promisify } from 'node:util' + +const __debug = env.NODE_ENV === 'debug' + +/** @type {'musl' | 'glibc'} */ +let libc = 'glibc' +if (os.type() === 'Linux') { + try { + const exec = promisify(execCb) + if ((await exec('ldd /bin/ls')).stdout.includes('musl')) { + libc = 'musl' + } + } catch (error) { + if (__debug) { + console.warn(`Failed to check libc type`) + console.error(error) + } + } +} + +/** @type {Record} */ +const OS_TYPE = { + darwin: 'Darwin', + windows: 'Windows_NT', + linux: 'Linux', +} + +/** @returns {['Darwin' | 'Windows_NT', 'x86_64' | 'aarch64'] | ['Linux', 'x86_64' | 'aarch64', 'musl' | 'glibc']} */ +export function getMachineId() { + let _os, _arch + let _libc = libc + + /** + * Supported TARGET_TRIPLE: + * x86_64-apple-darwin + * aarch64-apple-darwin + * x86_64-pc-windows-msvc + * aarch64-pc-windows-msvc + * x86_64-unknown-linux-gnu + * x86_64-unknown-linux-musl + * aarch64-unknown-linux-gnu + * aarch64-unknown-linux-musl + */ + if (env.TARGET_TRIPLE) { + const target = env.TARGET_TRIPLE.split('-') + _os = OS_TYPE[target[2] ?? ''] + _arch = target[0] + if (_os === 'Linux') _libc = target[3]?.includes('musl') ? 
'musl' : 'glibc' + } else { + // Current machine identifiers + _os = os.type() + _arch = os.machine() + if (_arch === 'arm64') _arch = 'aarch64' + } + + if (_arch !== 'x86_64' && _arch !== 'aarch64') throw new Error(`Unsuported architecture`) + + if (_os === 'Linux') { + return [_os, _arch, _libc] + } else if (_os !== 'Darwin' && _os !== 'Windows_NT') { + throw new Error(`Unsuported OS`) + } + + return [_os, _arch] +} diff --git a/scripts/utils/patchTauri.mjs b/scripts/utils/patchTauri.mjs new file mode 100644 index 000000000..efb3a1802 --- /dev/null +++ b/scripts/utils/patchTauri.mjs @@ -0,0 +1,142 @@ +import { exec as _exec } from 'node:child_process' +import * as fs from 'node:fs/promises' +import * as os from 'node:os' +import * as path from 'node:path' +import { env } from 'node:process' +import { promisify } from 'node:util' + +import * as semver from 'semver' + +import { copyLinuxLibs, copyWindowsDLLs } from './shared.mjs' + +const exec = promisify(_exec) +const __debug = env.NODE_ENV === 'debug' + +/** + * @param {string} nativeDeps + * @returns {Promise} + */ +export async function tauriUpdaterKey(nativeDeps) { + if (env.TAURI_PRIVATE_KEY) return null + + // pnpm exec tauri signer generate -w + const privateKeyPath = path.join(nativeDeps, 'tauri.key') + const publicKeyPath = path.join(nativeDeps, 'tauri.key.pub') + const readKeys = () => + Promise.all([ + fs.readFile(publicKeyPath, { encoding: 'utf-8' }), + fs.readFile(privateKeyPath, { encoding: 'utf-8' }), + ]) + + let privateKey, publicKey + try { + ;[publicKey, privateKey] = await readKeys() + if (!(publicKey && privateKey)) throw new Error('Empty keys') + } catch (err) { + if (__debug) { + console.warn('Failed to read tauri updater keys') + console.error(err) + } + + const quote = os.type() === 'Windows_NT' ? 
'"' : "'" + await exec(`pnpm exec tauri signer generate --ci -w ${quote}${privateKeyPath}${quote}`) + ;[publicKey, privateKey] = await readKeys() + if (!(publicKey && privateKey)) throw new Error('Empty keys') + } + + env.TAURI_PRIVATE_KEY = privateKey + return publicKey +} + +/** + * @param {string} root + * @param {string} nativeDeps + * @param {string[]} args + * @returns {Promise} + */ +export async function patchTauri(root, nativeDeps, args) { + if (args.findIndex(e => e === '-c' || e === '--config') !== -1) { + throw new Error('Custom tauri build config is not supported.') + } + + // Location for desktop app tauri code + const tauriRoot = path.join(root, 'apps', 'desktop', 'src-tauri') + + const osType = os.type() + const resources = + osType === 'Linux' + ? await copyLinuxLibs(root, nativeDeps) + : osType === 'Windows_NT' + ? await copyWindowsDLLs(root, nativeDeps) + : { files: [], toClean: [] } + const tauriPatch = { + tauri: { + bundle: { + macOS: { + minimumSystemVersion: '', + }, + resources: resources.files, + }, + updater: /** @type {{ pubkey?: string }} */ ({}), + }, + } + + const tauriConfig = await fs + .readFile(path.join(tauriRoot, 'tauri.conf.json'), 'utf-8') + .then(JSON.parse) + + if (args[0] === 'build') { + if (tauriConfig?.tauri?.updater?.active) { + const pubKey = await tauriUpdaterKey(nativeDeps) + if (pubKey != null) tauriPatch.tauri.updater.pubkey = pubKey + } + } + + if (osType === 'Darwin') { + // ARM64 support was added in macOS 11, but we need at least 11.2 due to our ffmpeg build + const macOSArm64MinimumVersion = '11.2' + + let macOSMinimumVersion = tauriConfig?.tauri?.bundle?.macOS?.minimumSystemVersion + + const targets = args + .filter((_, index, args) => { + if (index === 0) return false + const previous = args[index - 1] + return previous === '-t' || previous === '--target' + }) + .flatMap(target => target.split(',')) + + if ( + (targets.includes('aarch64-apple-darwin') || + (targets.length === 0 && process.arch === 'arm64')) 
&& + (macOSMinimumVersion == null || + semver.lt( + /** @type {import('semver').SemVer} */ (semver.coerce(macOSMinimumVersion)), + /** @type {import('semver').SemVer} */ ( + semver.coerce(macOSArm64MinimumVersion) + ) + )) + ) { + macOSMinimumVersion = macOSArm64MinimumVersion + console.log( + `aarch64-apple-darwin target detected, setting minimum system version to ${macOSMinimumVersion}` + ) + } + + if (macOSMinimumVersion) { + env.MACOSX_DEPLOYMENT_TARGET = macOSMinimumVersion + tauriPatch.tauri.bundle.macOS.minimumSystemVersion = macOSMinimumVersion + } else { + throw new Error('No minimum macOS version detected, please review tauri.conf.json') + } + } + + const tauriPatchConf = path.join(tauriRoot, 'tauri.conf.patch.json') + await fs.writeFile(tauriPatchConf, JSON.stringify(tauriPatch, null, 2)) + + // Modify args to load patched tauri config + args.splice(1, 0, '-c', tauriPatchConf) + + // Files to be removed + return [tauriPatchConf, ...resources.toClean] +} diff --git a/scripts/utils/shared.mjs b/scripts/utils/shared.mjs new file mode 100644 index 000000000..c50dd00b6 --- /dev/null +++ b/scripts/utils/shared.mjs @@ -0,0 +1,200 @@ +import { exec as execCb } from 'node:child_process' +import * as fs from 'node:fs/promises' +import * as path from 'node:path' +import { env } from 'node:process' +import { promisify } from 'node:util' + +const exec = promisify(execCb) +const signId = env.APPLE_SIGNING_IDENTITY || '-' + +/** + * @param {string} origin + * @param {string} target + * @param {boolean} [rename] + */ +async function link(origin, target, rename) { + const parent = path.dirname(target) + await fs.mkdir(parent, { recursive: true, mode: 0o751 }) + await (rename ? 
fs.rename(origin, target) : fs.symlink(path.relative(parent, origin), target)) +} + +/** + * Move headers and dylibs of external deps to our framework + * @param {string} nativeDeps + */ +export async function setupMacOsFramework(nativeDeps) { + // External deps + const lib = path.join(nativeDeps, 'lib') + const include = path.join(nativeDeps, 'include') + + // Framework + const framework = path.join(nativeDeps, 'FFMpeg.framework') + const headers = path.join(framework, 'Headers') + const libraries = path.join(framework, 'Libraries') + const documentation = path.join(framework, 'Resources', 'English.lproj', 'Documentation') + + // Move files + await Promise.all([ + // Move pdfium license to framework + fs.rename( + path.join(nativeDeps, 'LICENSE.pdfium'), + path.join(documentation, 'LICENSE.pdfium') + ), + // Move dylibs to framework + fs.readdir(lib, { recursive: true, withFileTypes: true }).then(file => + file + .filter( + entry => + (entry.isFile() || entry.isSymbolicLink()) && entry.name.endsWith('.dylib') + ) + .map(entry => { + const file = path.join(entry.path, entry.name) + const newFile = path.resolve(libraries, path.relative(lib, file)) + return link(file, newFile, true) + }) + ), + // Move headers to framework + fs.readdir(include, { recursive: true, withFileTypes: true }).then(file => + file + .filter( + entry => + (entry.isFile() || entry.isSymbolicLink()) && + !entry.name.endsWith('.proto') + ) + .map(entry => { + const file = path.join(entry.path, entry.name) + const newFile = path.resolve(headers, path.relative(include, file)) + return link(file, newFile, true) + }) + ), + ]) +} + +/** + * Symlink shared libs paths for Linux + * @param {string} root + * @param {string} nativeDeps + * @returns {Promise} + */ +export async function symlinkSharedLibsLinux(root, nativeDeps) { + // rpath=${ORIGIN}/../lib/spacedrive + const targetLib = path.join(root, 'target', 'lib') + const targetRPath = path.join(targetLib, 'spacedrive') + await 
fs.unlink(targetRPath).catch(() => {}) + await fs.mkdir(targetLib, { recursive: true }) + await link(path.join(nativeDeps, 'lib'), targetRPath) +} + +/** + * Symlink shared libs paths for macOS + * @param {string} nativeDeps + */ +export async function symlinkSharedLibsMacOS(nativeDeps) { + // External deps + const lib = path.join(nativeDeps, 'lib') + const include = path.join(nativeDeps, 'include') + + // Framework + const framework = path.join(nativeDeps, 'FFMpeg.framework') + const headers = path.join(framework, 'Headers') + const libraries = path.join(framework, 'Libraries') + + // Link files + await Promise.all([ + // Link header files + fs.readdir(headers, { recursive: true, withFileTypes: true }).then(files => + Promise.all( + files + .filter(entry => entry.isFile() || entry.isSymbolicLink()) + .map(entry => { + const file = path.join(entry.path, entry.name) + return link(file, path.resolve(include, path.relative(headers, file))) + }) + ) + ), + // Link dylibs + fs.readdir(libraries, { recursive: true, withFileTypes: true }).then(files => + Promise.all( + files + .filter( + entry => + (entry.isFile() || entry.isSymbolicLink()) && + entry.name.endsWith('.dylib') + ) + .map(entry => { + const file = path.join(entry.path, entry.name) + /** @type {Promise[]} */ + const actions = [ + link(file, path.resolve(lib, path.relative(libraries, file))), + ] + + // Sign dylib (Required for it to work on macOS 13+) + if (entry.isFile()) + actions.push(exec(`codesign -s "${signId}" -f "${file}"`)) + + return actions.length > 1 ? 
Promise.all(actions) : actions[0] + }) + ) + ), + ]) +} + +/** + * Copy Windows DLLs for tauri build + * @param {string} root + * @param {string} nativeDeps + * @returns {Promise<{files: string[], toClean: string[]}>} + */ +export async function copyWindowsDLLs(root, nativeDeps) { + const tauriSrc = path.join(root, 'apps', 'desktop', 'src-tauri') + const files = await Promise.all( + await fs.readdir(path.join(nativeDeps, 'bin'), { withFileTypes: true }).then(files => + files + .filter(entry => entry.isFile() && entry.name.endsWith(`.dll`)) + .map(async entry => { + await fs.copyFile( + path.join(entry.path, entry.name), + path.join(tauriSrc, entry.name) + ) + return entry.name + }) + ) + ) + + return { files, toClean: files.map(file => path.join(tauriSrc, file)) } +} + +/** + * Symlink shared libs paths for Linux + * @param {string} root + * @param {string} nativeDeps + * @returns {Promise<{files: string[], toClean: string[]}>} + */ +export async function copyLinuxLibs(root, nativeDeps) { + // rpath=${ORIGIN}/../lib/spacedrive + const tauriSrc = path.join(root, 'apps', 'desktop', 'src-tauri') + const files = await fs + .readdir(path.join(nativeDeps, 'lib'), { withFileTypes: true }) + .then(files => + Promise.all( + files + .filter( + entry => + (entry.isFile() || entry.isSymbolicLink()) && + (entry.name.endsWith('.so') || entry.name.includes('.so.')) + ) + .map(async entry => { + await fs.copyFile( + path.join(entry.path, entry.name), + path.join(tauriSrc, entry.name) + ) + return entry.name + }) + ) + ) + + return { + files, + toClean: files.map(file => path.join(tauriSrc, file)), + } +} diff --git a/scripts/utils/spawn.mjs b/scripts/utils/spawn.mjs new file mode 100644 index 000000000..590dd6f5b --- /dev/null +++ b/scripts/utils/spawn.mjs @@ -0,0 +1,33 @@ +import { spawn } from 'node:child_process' + +/** + * @param {string} command + * @param {string[]} args + * @param {string} [cwd] + * @returns {Promise} + */ +export default function (command, args, cwd) { + 
if (typeof command !== 'string' || command.length === 0) + throw new Error('Command must be a string and not empty') + + if (args == null) args = [] + else if (!Array.isArray(args) || args.some(arg => typeof arg !== 'string')) + throw new Error('Args must be an array of strings') + + return new Promise((resolve, reject) => { + const child = spawn(command, args, { cwd, shell: true, stdio: 'inherit' }) + process.on('SIGTERM', () => child.kill('SIGTERM')) + process.on('SIGINT', () => child.kill('SIGINT')) + process.on('SIGBREAK', () => child.kill('SIGBREAK')) + process.on('SIGHUP', () => child.kill('SIGHUP')) + child.on('error', reject) + child.on('exit', (code, signal) => { + if (code === null) code = signal === 'SIGINT' ? 0 : 1 + if (code === 0) { + resolve() + } else { + reject(code) + } + }) + }) +} diff --git a/scripts/utils/which.mjs b/scripts/utils/which.mjs new file mode 100644 index 000000000..7fa316384 --- /dev/null +++ b/scripts/utils/which.mjs @@ -0,0 +1,41 @@ +import { exec as execCb } from 'node:child_process' +import * as fs from 'node:fs/promises' +import * as os from 'node:os' +import * as path from 'node:path' +import { env } from 'node:process' +import { promisify } from 'node:util' + +const exec = promisify(execCb) + +/** + * @param {string} progName + * @returns {Promise} + */ +async function where(progName) { + // Reject paths + if (/[\\]/.test(progName)) return false + try { + await exec(`where "${progName}"`) + } catch { + return false + } + + return true +} + +/** + * @param {string} progName + * @returns {Promise} + */ +export async function which(progName) { + return os.type() === 'Windows_NT' + ? 
where(progName) + : Promise.any( + Array.from(new Set(env.PATH?.split(':'))).map(dir => + fs.access(path.join(dir, progName), fs.constants.X_OK) + ) + ).then( + () => true, + () => false + ) +} diff --git a/scripts/which.mjs b/scripts/which.mjs deleted file mode 100644 index 8bcd1ccab..000000000 --- a/scripts/which.mjs +++ /dev/null @@ -1,41 +0,0 @@ -import { exec as execCb } from 'node:child_process'; -import * as fs from 'node:fs/promises'; -import * as os from 'node:os'; -import * as path from 'node:path'; -import { env } from 'node:process'; -import { promisify } from 'node:util'; - -const exec = promisify(execCb); - -/** - * @param {string} progName - * @returns {Promise} - */ -async function where(progName) { - // Reject paths - if (/[\\]/.test(progName)) return false; - try { - await exec(`where "${progName}"`); - } catch { - return false; - } - - return true; -} - -/** - * @param {string} progName - * @returns {Promise} - */ -export async function which(progName) { - return os.type() === 'Windows_NT' - ? where(progName) - : Promise.any( - Array.from(new Set(env.PATH?.split(':'))).map((dir) => - fs.access(path.join(dir, progName), fs.constants.X_OK) - ) - ).then( - () => true, - () => false - ); -} diff --git a/turbo.json b/turbo.json index 0d26e706a..0afb03a45 100644 --- a/turbo.json +++ b/turbo.json @@ -2,7 +2,7 @@ "$schema": "https://turborepo.org/schema.json", "pipeline": { "build": { - "inputs": ["!src-tauri/**"], + "inputs": ["**/*.ts", "!src-tauri/**", "!node_modules/**"], "dependsOn": ["^build"], "outputs": ["dist/**"] },