Compare commits


1 Commit

Author: Michael Telatynski
SHA1: 4955643960
Message: Specify node-version for setup-node action
Date: 2022-10-13 08:52:52 +01:00
156 changed files with 7534 additions and 10021 deletions


@@ -1,22 +0,0 @@
module.exports = {
plugins: ["matrix-org"],
extends: [".eslintrc.js"],
parserOptions: {
project: ["hak/tsconfig.json"],
},
overrides: [
{
files: ["hak/**/*.ts"],
extends: ["plugin:matrix-org/typescript"],
rules: {
// Things we do that break the ideal style
"prefer-promise-reject-errors": "off",
"quotes": "off",
"@typescript-eslint/no-explicit-any": "off",
// We're okay with assertion errors when we ask for them
"@typescript-eslint/no-non-null-assertion": "off",
},
},
],
};


@@ -1,22 +0,0 @@
module.exports = {
plugins: ["matrix-org"],
extends: [".eslintrc.js"],
parserOptions: {
project: ["scripts/tsconfig.json"],
},
overrides: [
{
files: ["scripts/**/*.ts"],
extends: ["plugin:matrix-org/typescript"],
rules: {
// Things we do that break the ideal style
"prefer-promise-reject-errors": "off",
"quotes": "off",
"@typescript-eslint/no-explicit-any": "off",
// We're okay with assertion errors when we ask for them
"@typescript-eslint/no-non-null-assertion": "off",
},
},
],
};


@@ -1,22 +0,0 @@
module.exports = {
plugins: ["matrix-org"],
extends: [".eslintrc.js"],
parserOptions: {
project: ["test/tsconfig.json"],
},
overrides: [
{
files: ["test/**/*.ts"],
extends: ["plugin:matrix-org/typescript"],
rules: {
// Things we do that break the ideal style
"prefer-promise-reject-errors": "off",
"quotes": "off",
"@typescript-eslint/no-explicit-any": "off",
// We're okay with assertion errors when we ask for them
"@typescript-eslint/no-non-null-assertion": "off",
},
},
],
};


@@ -1,9 +1,10 @@
module.exports = {
plugins: ["matrix-org"],
extends: ["plugin:matrix-org/javascript"],
extends: [
"plugin:matrix-org/javascript",
],
parserOptions: {
ecmaVersion: 2021,
project: ["tsconfig.json"],
},
env: {
es6: true,
@@ -19,19 +20,18 @@ module.exports = {
"prefer-promise-reject-errors": "off",
"no-async-promise-executor": "off",
},
overrides: [
{
files: ["src/**/*.ts"],
extends: ["plugin:matrix-org/typescript"],
rules: {
// Things we do that break the ideal style
"prefer-promise-reject-errors": "off",
"quotes": "off",
overrides: [{
files: ["{src,scripts,hak}/**/*.{ts,tsx}"],
extends: [
"plugin:matrix-org/typescript",
],
rules: {
// Things we do that break the ideal style
"prefer-promise-reject-errors": "off",
"quotes": "off",
"@typescript-eslint/no-explicit-any": "off",
// We're okay with assertion errors when we ask for them
"@typescript-eslint/no-non-null-assertion": "off",
},
// We disable this while we're transitioning
"@typescript-eslint/no-explicit-any": "off",
},
],
}],
};

.github/CODEOWNERS (5 changes)

@@ -1,4 +1 @@
* @vector-im/element-web
/.github/workflows/** @vector-im/element-web-app-team
/package.json @vector-im/element-web-app-team
/yarn.lock @vector-im/element-web-app-team
* @vector-im/element-web


@@ -1,76 +0,0 @@
name: Bug report for the Element desktop app (not in a browser)
description: File a bug report if you are using the desktop Element application.
labels: [T-Defect]
body:
- type: markdown
attributes:
value: |
Thanks for taking the time to fill out this bug report!
Please report security issues by email to security@matrix.org
- type: textarea
id: reproduction-steps
attributes:
label: Steps to reproduce
description: Please attach screenshots, videos or logs if you can.
placeholder: Tell us what you see!
value: |
1. Where are you starting? What can you see?
2. What do you click?
3. More steps…
validations:
required: true
- type: textarea
id: result
attributes:
label: Outcome
placeholder: Tell us what went wrong
value: |
#### What did you expect?
#### What happened instead?
validations:
required: true
- type: input
id: os
attributes:
label: Operating system
placeholder: Windows, macOS, Ubuntu, Arch Linux…
validations:
required: false
- type: input
id: version
attributes:
label: Application version
description: You can find the version information in Settings -> Help & About.
placeholder: e.g. Element version 1.7.34, olm version 3.2.3
validations:
required: false
- type: input
id: source
attributes:
label: How did you install the app?
description: Where did you install the app from? Please give a link or a description.
placeholder: e.g. From https://element.io/get-started
validations:
required: false
- type: input
id: homeserver
attributes:
label: Homeserver
description: |
Which server is your account registered on? If it is a local or non-public homeserver, please tell us what is the homeserver implementation (ex: Synapse/Dendrite/etc.) and the version.
placeholder: e.g. matrix.org or Synapse 1.50.0rc1
validations:
required: false
- type: dropdown
id: rageshake
attributes:
label: Will you send logs?
description: |
Did you know that you can send a /rageshake command from your application to submit logs for this issue? Trigger the defect, then type `/rageshake` into the message input area followed by a description of the problem and send the command. You will be able to add a link to this defect report and submit anonymous logs to the developers.
options:
- "Yes"
- "No"
validations:
required: true


@@ -1 +0,0 @@
blank_issues_enabled: false


@@ -1,36 +0,0 @@
name: Enhancement request
description: Do you have a suggestion or feature request?
labels: [T-Enhancement]
body:
- type: markdown
attributes:
value: |
Thank you for taking the time to propose an enhancement to an existing feature. If you would like to propose a new feature or a major cross-platform change, please [start a discussion here](https://github.com/vector-im/element-meta/discussions/new?category=ideas).
- type: textarea
id: usecase
attributes:
label: Your use case
description: What would you like to be able to do? Please feel welcome to include screenshots or mock ups.
placeholder: Tell us what you would like to do!
value: |
#### What would you like to do?
#### Why would you like to do it?
#### How would you like to achieve it?
validations:
required: true
- type: textarea
id: alternative
attributes:
label: Have you considered any alternatives?
placeholder: A clear and concise description of any alternative solutions or features you've considered.
validations:
required: false
- type: textarea
id: additional-context
attributes:
label: Additional context
placeholder: Is there anything else you'd like to add?
validations:
required: false


@@ -2,9 +2,9 @@
## Checklist
- [ ] Ensure your code works with manual testing
- [ ] Linter and other CI checks pass
- [ ] Sign-off given on the changes (see [CONTRIBUTING.md](https://github.com/vector-im/element-desktop/blob/develop/CONTRIBUTING.md))
* [ ] Ensure your code works with manual testing
* [ ] Linter and other CI checks pass
* [ ] Sign-off given on the changes (see [CONTRIBUTING.md](https://github.com/vector-im/element-desktop/blob/develop/CONTRIBUTING.md))
<!--
If you would like to specify text for the changelog entry other than your PR title, add the following:


@@ -1,4 +1,6 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": ["github>matrix-org/renovate-config-element-web"]
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"github>matrix-org/renovate-config-element-web"
]
}


@@ -1,30 +1,30 @@
name: Backport
on:
pull_request_target:
types:
- closed
- labeled
branches:
- develop
pull_request_target:
types:
- closed
- labeled
branches:
- develop
jobs:
backport:
name: Backport
runs-on: ubuntu-latest
# Only react to merged PRs for security reasons.
# See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target.
if: >
github.event.pull_request.merged
&& (
github.event.action == 'closed'
|| (
github.event.action == 'labeled'
&& contains(github.event.label.name, 'backport')
)
)
steps:
- uses: tibdex/backport@9565281eda0731b1d20c4025c43339fb0a23812e # v2
with:
labels_template: "<%= JSON.stringify([...labels, 'X-Release-Blocker']) %>"
# We can't use GITHUB_TOKEN here or CI won't run on the new PR
github_token: ${{ secrets.ELEMENT_BOT_TOKEN }}
backport:
name: Backport
runs-on: ubuntu-latest
# Only react to merged PRs for security reasons.
# See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target.
if: >
github.event.pull_request.merged
&& (
github.event.action == 'closed'
|| (
github.event.action == 'labeled'
&& contains(github.event.label.name, 'backport')
)
)
steps:
- uses: tibdex/backport@v2
with:
labels_template: "<%= JSON.stringify([...labels, 'X-Release-Blocker']) %>"
# We can't use GITHUB_TOKEN here or CI won't run on the new PR
github_token: ${{ secrets.ELEMENT_BOT_TOKEN }}
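The `labels_template` above is an embedded JavaScript template the backport action evaluates against the merged PR's labels; a quick illustrative check of what it renders (the label names below are hypothetical examples):

```sh
# Hypothetical labels on the merged PR; the template appends X-Release-Blocker.
node -e 'const labels = ["backport staging", "T-Defect"]; console.log(JSON.stringify([...labels, "X-Release-Blocker"]));'
# -> ["backport staging","T-Defect","X-Release-Blocker"]
```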

.github/workflows/build.yaml (new file, 288 lines)

@@ -0,0 +1,288 @@
name: Build and Test
on:
pull_request: { }
push:
branches: [ develop, master ]
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
fetch:
name: Prepare
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
cache: "yarn"
node-version: 16
- name: Install Deps
run: "yarn install --pure-lockfile"
- name: Fetch Element Web (develop)
if: github.event.pull_request.base.ref == 'develop'
run: yarn run fetch --noverify develop -d element.io/nightly
- name: Fetch Element Web
if: github.event.pull_request.base.ref != 'develop'
run: yarn run fetch --noverify --cfgdir element.io/release
- uses: actions/upload-artifact@v3
with:
name: webapp
retention-days: 1
path: |
webapp.asar
package.json
windows:
needs: fetch
strategy:
matrix:
include:
- target: x86_64-pc-windows-msvc
arch: x64
- target: i686-pc-windows-msvc
arch: x86
build-args: --ia32
name: Windows (${{ matrix.arch }})
runs-on: windows-latest
steps:
- uses: actions/checkout@v3
- uses: actions/download-artifact@v3
with:
name: webapp
- name: Cache .hak
uses: actions/cache@v3
with:
key: ${{ runner.os }}-${{ hashFiles('./yarn.lock') }}
path: |
./.hak
- name: Set up build tools
uses: ilammy/msvc-dev-cmd@v1
with:
arch: ${{ matrix.arch }}
# ActiveTCL package on choco is from 2015,
# this one is newer but includes more than we need
- name: Choco install tclsh
shell: pwsh
run: |
choco install -y magicsplat-tcl-tk --no-progress
echo "${HOME}/AppData/Local/Apps/Tcl86/bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
- name: Choco install NetWide Assembler
shell: pwsh
run: |
choco install -y nasm --no-progress
echo "C:/Program Files/NASM" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: stable
target: ${{ matrix.target }}
- uses: actions/setup-node@v3
with:
cache: "yarn"
node-version: 16
# Does not need branch matching as only analyses this layer
- name: Install Deps
run: "yarn install --pure-lockfile"
- name: Build Natives
run: |
refreshenv
yarn build:native --target ${{ matrix.target }}
- name: Build App
run: "yarn build --publish never -w ${{ matrix.build-args }}"
- name: Upload Artifacts
uses: actions/upload-artifact@v3
with:
name: win-${{ matrix.arch }}
path: dist
retention-days: 1
linux:
needs: fetch
strategy:
matrix:
include:
- sqlcipher: system
- sqlcipher: static
static: 1
name: 'Linux (sqlcipher: ${{ matrix.sqlcipher }})'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/download-artifact@v3
with:
name: webapp
- name: Cache .hak
uses: actions/cache@v3
with:
key: ${{ hashFiles('./yarn.lock') }}
path: |
./.hak
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: stable
- name: Install libsqlcipher-dev
if: matrix.sqlcipher == 'system'
run: sudo apt-get install -y libsqlcipher-dev
- uses: actions/setup-node@v3
with:
cache: "yarn"
node-version: 16
# Does not need branch matching as only analyses this layer
- name: Install Deps
run: "yarn install --pure-lockfile"
- name: Build Natives
run: "yarn build:native"
env:
SQLCIPHER_STATIC: ${{ matrix.static }}
- name: Build App
run: "yarn build --publish never"
- name: Install .deb
run: "sudo apt install ./dist/*.deb"
- name: Upload Artifacts
uses: actions/upload-artifact@v3
with:
name: linux-sqlcipher-${{ matrix.sqlcipher }}
path: dist
retention-days: 1
macos:
needs: fetch
name: macOS (universal)
runs-on: macos-latest
steps:
- uses: actions/checkout@v3
- uses: actions/download-artifact@v3
with:
name: webapp
- name: Cache .hak
uses: actions/cache@v3
with:
key: ${{ hashFiles('./yarn.lock') }}
path: |
./.hak
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: stable
target: aarch64-apple-darwin
- uses: actions/setup-node@v3
with:
cache: "yarn"
node-version: 16
# Does not need branch matching as only analyses this layer
- name: Install Deps
run: "yarn install --pure-lockfile"
- name: Build Natives
run: "yarn build:native:universal"
- name: Build App
run: "yarn build:universal --publish never"
- name: Upload Artifacts
uses: actions/upload-artifact@v3
with:
name: macos
path: dist
retention-days: 1
test:
needs:
- macos
- linux
- windows
strategy:
matrix:
include:
# Disable macOS tests for now, they fail to run in CI, needs investigation.
# - name: macOS Universal
# os: macos
# artifact: macos
# executable: "./dist/mac-universal/Element.app/Contents/MacOS/Element"
# prepare_cmd: "chmod +x ./dist/mac-universal/Element.app/Contents/MacOS/Element"
- name: 'Linux (sqlcipher: system)'
os: ubuntu
artifact: linux-sqlcipher-system
executable: "element-desktop"
prepare_cmd: "sudo apt install ./dist/*.deb"
- name: 'Linux (sqlcipher: static)'
os: ubuntu
artifact: linux-sqlcipher-static
executable: "element-desktop"
prepare_cmd: "sudo apt install ./dist/*.deb"
- name: Windows (x86)
os: windows
artifact: win-x86
executable: "./dist/win-ia32-unpacked/Element.exe"
- name: Windows (x64)
os: windows
artifact: win-x64
executable: "./dist/win-unpacked/Element.exe"
name: Test ${{ matrix.name }}
runs-on: ${{ matrix.os }}-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
cache: "yarn"
node-version: 16
- name: Install Deps
run: "yarn install --pure-lockfile"
- uses: actions/download-artifact@v3
with:
name: ${{ matrix.artifact }}
path: dist
- name: Prepare for tests
run: ${{ matrix.prepare_cmd }}
if: matrix.prepare_cmd
- name: Run tests
uses: GabrielBB/xvfb-action@v1
timeout-minutes: 5
with:
run: "yarn test"
env:
ELEMENT_DESKTOP_EXECUTABLE: ${{ matrix.executable }}
- name: Upload Artifacts
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.artifact }}
path: test_artifacts
retention-days: 1


@@ -1,173 +0,0 @@
name: Build and Deploy
on:
# Nightly build
schedule:
- cron: "0 9 * * *"
# Release build
release:
types: [published]
# Manual nightly & release
workflow_dispatch:
inputs:
mode:
description: What type of build to trigger. Release builds MUST be ran from the `master` branch.
required: true
default: nightly
type: choice
options:
- nightly
- release
macos:
description: Build macOS
required: true
type: boolean
default: true
windows:
description: Build Windows
required: true
type: boolean
default: true
linux:
description: Build Linux
required: true
type: boolean
default: true
deploy:
description: Deploy artifacts
required: true
type: boolean
default: true
run-name: Element ${{ inputs.mode != 'release' && github.event_name != 'release' && 'Nightly' || 'Desktop' }}
concurrency: ${{ github.workflow }}
env:
R2_BUCKET: ${{ vars.R2_BUCKET }}
jobs:
prepare:
uses: ./.github/workflows/build_prepare.yaml
with:
config: element.io/${{ inputs.mode || (github.event_name == 'release' && 'release') || 'nightly' }}
version: ${{ (inputs.mode != 'release' && github.event_name != 'release') && 'develop' || '' }}
nightly: ${{ inputs.mode != 'release' && github.event_name != 'release' }}
secrets:
CF_R2_ACCESS_KEY_ID: ${{ secrets.CF_R2_ACCESS_KEY_ID }}
CF_R2_TOKEN: ${{ secrets.CF_R2_TOKEN }}
windows:
if: github.event_name != 'workflow_dispatch' || inputs.windows
needs: prepare
name: Windows ${{ matrix.arch }}
strategy:
matrix:
arch: [x86, x64]
uses: ./.github/workflows/build_windows.yaml
secrets: inherit
with:
sign: true
deploy-mode: true
arch: ${{ matrix.arch }}
version: ${{ needs.prepare.outputs.nightly-version }}
macos:
if: github.event_name != 'workflow_dispatch' || inputs.macos
needs: prepare
name: macOS
uses: ./.github/workflows/build_macos.yaml
secrets: inherit
with:
sign: true
deploy-mode: true
base-url: https://packages.element.io/${{ needs.prepare.outputs.packages-dir }}
version: ${{ needs.prepare.outputs.nightly-version }}
# We do not put these calls into deploy-mode as we do not want it to add to the packages.element.io artifact
# We ship this build via reprepro only
linux:
if: github.event_name != 'workflow_dispatch' || inputs.linux
needs: prepare
name: Linux ${{ matrix.arch }} (sqlcipher system)
strategy:
matrix:
arch: [amd64, arm64]
uses: ./.github/workflows/build_linux.yaml
with:
arch: ${{ matrix.arch }}
config: ${{ needs.prepare.outputs.config }}
sqlcipher: system
version: ${{ needs.prepare.outputs.nightly-version }}
# We ship the static build via static tarball only
linux_static:
if: github.event_name != 'workflow_dispatch' || inputs.linux
needs: prepare
name: Linux (sqlcipher static)
uses: ./.github/workflows/build_linux.yaml
with:
arch: amd64
deploy-mode: true
config: ${{ needs.prepare.outputs.config }}
sqlcipher: static
version: ${{ needs.prepare.outputs.nightly-version }}
# This deploy job only handles Windows, macOS & linux_static as those are stateless and static.
# Linux will be deployed via reprepro after it, but we list it as a dependency to abort if it fails.
deploy:
needs:
- prepare
- macos
- linux
- linux_static
- windows
runs-on: ubuntu-latest
name: Deploy
if: |
(
github.event_name != 'workflow_dispatch' &&
github.event.release.prerelease != true
) || (
always() && !failure() && !cancelled() && inputs.deploy &&
(inputs.macos || inputs.windows || inputs.linux)
)
environment: packages.element.io
steps:
- name: Download artifacts
uses: actions/download-artifact@v3
with:
name: packages.element.io
path: packages.element.io
- name: Deploy artifacts
run: |
aws s3 cp --recursive packages.element.io/ s3://$R2_BUCKET/$DEPLOYMENT_DIR --endpoint-url $R2_URL --region auto
env:
AWS_ACCESS_KEY_ID: ${{ secrets.CF_R2_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_R2_TOKEN }}
R2_URL: ${{ vars.CF_R2_S3_API }}
DEPLOYMENT_DIR: ${{ needs.prepare.outputs.packages-dir }}
- name: Notify packages.element.io of new files
uses: peter-evans/repository-dispatch@bf47d102fdb849e755b0b0023ea3e81a44b6f570 # v2
with:
token: ${{ secrets.ELEMENT_BOT_TOKEN }}
repository: vector-im/packages.element.io
event-type: packages-index
reprepro:
needs:
- linux
# We queue this after the other deploy stage as we want to abort if that fails
- deploy
name: Run reprepro ${{ matrix.arch }}
strategy:
matrix:
arch: [amd64, arm64]
if: |
(
github.event_name != 'workflow_dispatch' &&
github.event.release.prerelease != true
) || (
always() && !failure() && !cancelled() && inputs.deploy && inputs.linux
)
uses: ./.github/workflows/reprepro.yaml
secrets: inherit
with:
artifact-name: linux-${{ matrix.arch }}-sqlcipher-system


@@ -1,164 +0,0 @@
name: Build and Test
on:
pull_request: {}
push:
branches: [develop, staging, master]
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
fetch:
uses: ./.github/workflows/build_prepare.yaml
with:
config: ${{ github.event.pull_request.base.ref == 'develop' && 'element.io/nightly' || 'element.io/release' }}
version: ${{ github.event.pull_request.base.ref == 'develop' && 'develop' || '' }}
windows:
needs: fetch
name: Windows
uses: ./.github/workflows/build_windows.yaml
strategy:
matrix:
arch: [x64, x86]
with:
arch: ${{ matrix.arch }}
# This allows core contributors to test changes to the dockerbuild image within a pull request
linux_docker:
name: Linux docker
runs-on: ubuntu-latest
if: github.event_name == 'pull_request'
outputs:
docker-image: ${{ steps.docker.outputs.image }}
permissions:
contents: read
packages: write
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}-dockerbuild-pr
steps:
- uses: actions/checkout@v3
- name: "Get modified files"
id: changed_files
uses: tj-actions/changed-files@41960309398d165631f08c5df47a11147e14712b # v39
with:
files: |
dockerbuild/*
- name: Log in to the Container registry
if: steps.changed_files.outputs.any_modified == 'true'
uses: docker/login-action@b4bedf8053341df3b5a9f9e0f2cf4e79e27360c6
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- id: docker
if: steps.changed_files.outputs.any_modified == 'true'
run: |
echo "image=$IMAGE:$PR" >> $GITHUB_OUTPUT
env:
IMAGE: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
PR: ${{ github.event.pull_request.number }}
- name: Build and push Docker image
if: steps.changed_files.outputs.any_modified == 'true'
uses: docker/build-push-action@4c1b68d83ad20cc1a09620ca477d5bbbb5fa14d0
with:
context: dockerbuild
push: true
tags: ${{ steps.docker.outputs.image }}
linux:
needs:
- fetch
- linux_docker
name: "Linux (${{ matrix.arch }}) (sqlcipher: ${{ matrix.sqlcipher }})"
uses: ./.github/workflows/build_linux.yaml
strategy:
matrix:
sqlcipher: [system, static]
arch: [amd64, arm64]
exclude:
# FIXME: This combination yields a broken Seshat at this time
# Errors at launch with `undefined symbol: PKCS5_PBKDF2_HMAC
- arch: arm64
sqlcipher: static
with:
config: ${{ github.event.pull_request.base.ref == 'develop' && 'element.io/nightly' || 'element.io/release' }}
sqlcipher: ${{ matrix.sqlcipher }}
docker-image: ${{ needs.linux_docker.outputs.docker-image }}
arch: ${{ matrix.arch }}
macos:
needs: fetch
name: macOS
uses: ./.github/workflows/build_macos.yaml
test:
needs:
- macos
- linux
- windows
strategy:
matrix:
include:
- name: macOS Universal
os: macos
artifact: macos
executable: "/Volumes/Element/Element.app/Contents/MacOS/Element"
prepare_cmd: "hdiutil attach ./dist/*.dmg -mountpoint /Volumes/Element"
- name: "Linux (amd64) (sqlcipher: system)"
os: ubuntu
artifact: linux-amd64-sqlcipher-system
executable: "element-desktop"
prepare_cmd: "sudo apt install ./dist/*.deb"
- name: "Linux (amd64) (sqlcipher: static)"
os: ubuntu
artifact: linux-amd64-sqlcipher-static
executable: "element-desktop"
prepare_cmd: "sudo apt install ./dist/*.deb"
- name: Windows (x86)
os: windows
artifact: win-x86
executable: "./dist/win-ia32-unpacked/Element.exe"
- name: Windows (x64)
os: windows
artifact: win-x64
executable: "./dist/win-unpacked/Element.exe"
name: Test ${{ matrix.name }}
runs-on: ${{ matrix.os }}-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
cache: "yarn"
- name: Install Deps
run: "yarn install --frozen-lockfile"
- uses: actions/download-artifact@v3
with:
name: ${{ matrix.artifact }}
path: dist
- name: Prepare for tests
run: ${{ matrix.prepare_cmd }}
if: matrix.prepare_cmd
- name: Run tests
uses: coactions/setup-xvfb@b6b4fcfb9f5a895edadc3bc76318fae0ac17c8b3 # v1
timeout-minutes: 5
with:
run: "yarn test"
env:
ELEMENT_DESKTOP_EXECUTABLE: ${{ matrix.executable }}
- name: Upload Artifacts
uses: actions/upload-artifact@v3
if: always()
with:
name: ${{ matrix.artifact }}
path: test_artifacts
retention-days: 1


@@ -1,190 +0,0 @@
# This workflow relies on actions/cache to store the hak dependency artifacts as they take a long time to build
# Due to this extra care must be taken to only ever run all build_* scripts against the same branch to ensure
# the correct cache scoping, and additional care must be taken to not run untrusted actions on the develop branch.
on:
workflow_call:
inputs:
arch:
type: string
required: true
description: "The architecture to build for, one of 'amd64' | 'arm64'"
config:
type: string
required: true
description: "The config directory to use"
version:
type: string
required: false
description: "Version string to override the one in package.json, used for non-release builds"
sqlcipher:
type: string
required: true
description: "How to link sqlcipher, one of 'system' | 'static'"
deploy-mode:
type: boolean
required: false
description: "Whether to arrange artifacts in the arrangement needed for deployment, skipping unrelated ones"
docker-image:
type: string
required: false
description: "The docker image to use for the build, defaults to ghcr.io/vector-im/element-desktop-dockerbuild"
jobs:
build:
runs-on: ubuntu-latest
container:
image: ${{ inputs.docker-image || format('ghcr.io/vector-im/element-desktop-dockerbuild:{0}', github.ref_name == 'master' && 'master' || 'develop') }}
defaults:
run:
shell: bash
steps:
- uses: kanga333/variable-mapper@master
id: config
with:
key: "${{ inputs.arch }}"
export_to: output
map: |
{
"amd64": {
"target": "x86_64-unknown-linux-gnu",
"arch": "x86-64"
},
"arm64": {
"target": "aarch64-unknown-linux-gnu",
"arch": "aarch64",
"build-args": "--arm64"
}
}
- uses: actions/checkout@v3
- uses: actions/download-artifact@v3
with:
name: webapp
- name: Cache .hak
id: cache
uses: actions/cache@v3
with:
key: ${{ runner.os }}-${{ inputs.docker-image || github.ref_name }}-${{ inputs.sqlcipher }}-${{ inputs.arch }}-${{ hashFiles('hakHash', 'electronVersion') }}
path: |
./.hak
- uses: actions/setup-node@v3
with:
cache: "yarn"
env:
# Workaround for https://github.com/actions/setup-node/issues/317
FORCE_COLOR: 0
# Does not need branch matching as only analyses this layer
- name: Install Deps
run: "yarn install --frozen-lockfile"
- name: Prepare for static sqlcipher build
if: inputs.sqlcipher == 'static'
run: |
echo "SQLCIPHER_BUNDLED=1" >> $GITHUB_ENV
# Ideally the docker image would be ready for cross-compilation but libsqlcipher-dev is not Multi-Arch compatible
# https://unix.stackexchange.com/a/349359
- name: Prepare for cross compilation
if: steps.cache.outputs.cache-hit != 'true' && inputs.arch == 'arm64'
run: |
set -x
sed -i 's/deb http/deb [arch=amd64] http/g' /etc/apt/sources.list
echo "deb [arch=arm64] http://ports.ubuntu.com/ubuntu-ports/ bionic main multiverse restricted universe" | tee -a /etc/apt/sources.list
echo "deb [arch=arm64] http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main multiverse restricted universe" | tee -a /etc/apt/sources.list
dpkg --add-architecture arm64
apt-get -qq update
apt-get -qq install --no-install-recommends crossbuild-essential-arm64 libsqlcipher-dev:arm64 libssl-dev:arm64 libsecret-1-dev:arm64 libgnome-keyring-dev:arm64
rustup target add aarch64-unknown-linux-gnu
mv dockerbuild/aarch64/.cargo .
cat dockerbuild/aarch64/.env >> $GITHUB_ENV
- name: Build Natives
if: steps.cache.outputs.cache-hit != 'true'
run: "yarn build:native --target ${{ steps.config.outputs.target }}"
- name: "[Nightly] Resolve version"
id: nightly
if: inputs.version != ''
run: |
echo "config-args=--nightly '${{ inputs.version }}'" >> $GITHUB_OUTPUT
- name: Generate debian files and arguments
id: debian
run: |
if [ -f changelog.Debian ]; then
echo "config-args=--deb-changelog changelog.Debian" >> $GITHUB_OUTPUT
fi
- name: Build App
run: |
npx ts-node scripts/generate-builder-config.ts \
${{ steps.nightly.outputs.config-args }} \
${{ steps.debian.outputs.config-args }}
yarn build --publish never -l --config electron-builder.json ${{ steps.config.outputs.build-args }}
- name: Check native libraries
run: |
set -x
shopt -s globstar
FILES=$(file dist/**/*.node)
echo "$FILES"
if [ grep -v "$ARCH" ]; then
exit 1
fi
LIBS=$(readelf -d dist/**/*.node | grep NEEDED)
echo "$LIBS"
set +x
assert_contains_string() { [[ "$1" == *"$2"* ]]; }
! assert_contains_string "$LIBS" "libcrypto.so.1.1"
if [ "$SQLCIPHER_BUNDLED" == "1" ]; then
! assert_contains_string "$LIBS" "libsqlcipher.so.0"
else
assert_contains_string "$LIBS" "libsqlcipher.so.0"
fi
env:
ARCH: ${{ steps.config.outputs.arch }}
- name: Stash deb package
if: inputs.deploy-mode
uses: actions/upload-artifact@v3
with:
name: linux-sqlcipher-${{ inputs.sqlcipher }}-deb
path: dist/*.deb
retention-days: 1
- name: Prepare artifacts for deployment
if: inputs.deploy-mode
run: |
mv dist _dist
mkdir -p "dist/install/linux/glibc-x86-64/"
mv _dist/*.tar.gz "dist/install/linux/glibc-x86-64"
# We don't wish to store the tarball for every nightly ever, so we only keep the latest
- name: "[Nightly] Strip version from tarball"
if: inputs.deploy-mode && inputs.version != ''
run: |
mv dist/install/linux/glibc-x86-64/*.tar.gz "dist/install/linux/glibc-x86-64/element-desktop-nightly.tar.gz"
- name: "[Release] Prepare release latest symlink"
if: inputs.deploy-mode && inputs.version == ''
shell: bash
run: |
ln -s "$(find . -type f -iname "*.tar.gz" | xargs -0 -n1 -- basename)" "element-desktop.tar.gz"
working-directory: "dist/install/linux/glibc-x86-64"
# We exclude *-unpacked as it loses permissions and the tarball contains it with correct permissions
- name: Upload Artifacts
uses: actions/upload-artifact@v3
with:
name: ${{ inputs.deploy-mode && 'packages.element.io' || format('linux-{0}-sqlcipher-{1}', inputs.arch, inputs.sqlcipher) }}
path: |
dist
!dist/*-unpacked/**
retention-days: 1


@@ -1,157 +0,0 @@
# This workflow relies on actions/cache to store the hak dependency artifacts as they take a long time to build
# Due to this extra care must be taken to only ever run all build_* scripts against the same branch to ensure
# the correct cache scoping, and additional care must be taken to not run untrusted actions on the develop branch.
on:
workflow_call:
secrets:
APPLE_ID:
required: false
APPLE_ID_PASSWORD:
required: false
APPLE_TEAM_ID:
required: false
APPLE_CSC_KEY_PASSWORD:
required: false
APPLE_CSC_LINK:
required: false
inputs:
version:
type: string
required: false
description: "Version string to override the one in package.json, used for non-release builds"
sign:
type: string
required: false
description: "Whether to sign & notarise the build, requires 'packages.element.io' environment"
deploy-mode:
type: boolean
required: false
description: "Whether to arrange artifacts in the arrangement needed for deployment, skipping unrelated ones"
base-url:
type: string
required: false
description: "The URL to which the output will be deployed, required if deploy-mode is enabled."
jobs:
build:
runs-on: macos-latest
environment: ${{ inputs.sign && 'packages.element.io' || '' }}
steps:
- uses: actions/checkout@v3
- uses: actions/download-artifact@v3
with:
name: webapp
- name: Cache .hak
id: cache
uses: actions/cache@v3
with:
key: ${{ runner.os }}-${{ hashFiles('hakHash', 'electronVersion') }}
path: |
./.hak
- name: Install Rust
if: steps.cache.outputs.cache-hit != 'true'
run: |
rustup toolchain install stable --profile minimal --no-self-update
rustup default stable
rustup target add aarch64-apple-darwin
- uses: actions/setup-node@v3
with:
cache: "yarn"
# Does not need branch matching as only analyses this layer
- name: Install Deps
run: "yarn install --frozen-lockfile"
- name: Build Natives
if: steps.cache.outputs.cache-hit != 'true'
run: "yarn build:native:universal"
- name: "[Nightly] Resolve version"
id: nightly
if: inputs.version != ''
run: |
echo "config-args=--nightly '${{ inputs.version }}'" >> $GITHUB_OUTPUT
# We split these because electron-builder gets upset if we set CSC_LINK even to an empty string
- name: "[Signed] Build App"
if: inputs.sign != ''
run: |
scripts/generate-builder-config.ts ${{ steps.nightly.outputs.config-args }} --notarytool-team-id='${{ secrets.APPLE_TEAM_ID }}'
yarn build:universal --publish never --config electron-builder.json
env:
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
CSC_KEY_PASSWORD: ${{ secrets.APPLE_CSC_KEY_PASSWORD }}
CSC_LINK: ${{ secrets.APPLE_CSC_LINK }}
- name: Check app was signed & notarised successfully
if: inputs.sign != ''
run: |
hdiutil attach dist/*.dmg
codesign -dv --verbose=4 /Volumes/Element*/*.app
spctl -a -vvv -t install /Volumes/Element*/*.app
hdiutil detach /Volumes/Element*
- name: "[Unsigned] Build App"
if: inputs.sign == ''
run: |
scripts/generate-builder-config.ts ${{ steps.nightly.outputs.config-args }}
yarn build:universal --publish never --config electron-builder.json
env:
CSC_IDENTITY_AUTO_DISCOVERY: false
- name: Prepare artifacts for deployment
if: inputs.deploy-mode
run: |
mv dist _dist
mkdir -p dist/install/macos dist/update/macos
mv _dist/*-mac.zip dist/update/macos/
mv _dist/*.dmg dist/install/macos/
PKG_JSON_VERSION=$(cat package.json | jq -r .version)
LATEST=$(find dist -type f -iname "*-mac.zip" | xargs -0 -n1 -- basename)
# Encode spaces in the URL as Squirrel.Mac complains about bad JSON otherwise
URL="${{ inputs.base-url }}/update/macos/${LATEST// /%20}"
jq -n --arg version "${VERSION:-$PKG_JSON_VERSION}" --arg url "$URL" '
{
currentRelease: $version,
releases: [{
version: $version,
updateTo: {
version: $version,
url: $url,
},
}],
}
' > dist/update/macos/releases.json
jq -n --arg url "$URL" '
{ url: $url }
' > dist/update/macos/releases-legacy.json
env:
VERSION: ${{ inputs.version }}
# We don't wish to store the installer for every nightly ever, so we only keep the latest
- name: "[Nightly] Strip version from installer file"
if: inputs.deploy-mode && inputs.version != ''
run: |
mv dist/install/macos/*.dmg "dist/install/macos/Element Nightly.dmg"
- name: "[Release] Prepare release latest symlink"
if: inputs.deploy-mode && inputs.version == ''
run: |
ln -s "$(find . -type f -iname "*.dmg" | xargs -0 -n1 -- basename)" "Element.dmg"
working-directory: "dist/install/macos"
# We exclude mac-universal as the unpacked app takes forever to upload and zip and dmg already contain it
- name: Upload Artifacts
uses: actions/upload-artifact@v3
with:
name: ${{ inputs.deploy-mode && 'packages.element.io' || 'macos' }}
path: |
dist
!dist/mac-universal/**
retention-days: 1
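For reference, the `jq` invocation in the deploy-mode step above writes a Squirrel.Mac-style update feed; a sketch of the resulting `dist/update/macos/releases.json`, assuming a hypothetical version and download URL:

```sh
# Illustrative only: the version and zip filename are made-up examples of the feed shape.
cat > releases.json <<'EOF'
{
  "currentRelease": "1.11.0",
  "releases": [
    {
      "version": "1.11.0",
      "updateTo": {
        "version": "1.11.0",
        "url": "https://packages.element.io/desktop/update/macos/Element-1.11.0-universal-mac.zip"
      }
    }
  ]
}
EOF
```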


@@ -1,140 +0,0 @@
# This action helps perform common actions before the build_* actions are started in parallel.
on:
workflow_call:
inputs:
config:
type: string
required: true
description: "The config directory to use"
version:
type: string
required: false
description: "The version tag to fetch, or 'develop', will pick automatically if not passed"
nightly:
type: boolean
required: false
default: false
description: "Whether the build is a Nightly and to calculate the version strings new builds should use"
secrets:
# Required if `nightly` is set
CF_R2_ACCESS_KEY_ID:
required: false
# Required if `nightly` is set
CF_R2_TOKEN:
required: false
outputs:
nightly-version:
description: "The version string the next Nightly should use, only output for nightly"
value: ${{ jobs.prepare.outputs.nightly-version }}
packages-dir:
description: "The directory non-deb packages for this run should live in within packages.element.io"
value: ${{ inputs.nightly && 'nightly' || 'desktop' }}
# This is just a simple pass-through of the input to simplify reuse of complex inline conditions
config:
description: "The relative path to the config file for this run"
value: ${{ inputs.config }}
jobs:
prepare:
name: Prepare
environment: ${{ inputs.nightly && 'packages.element.io' || '' }}
runs-on: ubuntu-latest
outputs:
nightly-version: ${{ steps.versions.outputs.nightly }}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
cache: "yarn"
- name: Install Deps
run: "yarn install --frozen-lockfile"
- name: Fetch Element Web
run: yarn run fetch --noverify -d ${{ inputs.config }} ${{ inputs.version }}
# We split this out to save the build_* scripts having to do it to make use of `hashFiles` in the cache action
- name: Generate cache hash files
run: |
yarn run --silent electron --version > electronVersion
cat package.json | jq -c .hakDependencies | sha1sum > hakHash
find hak -type f -print0 | xargs -0 sha1sum >> hakHash
find scripts/hak -type f -print0 | xargs -0 sha1sum >> hakHash
- name: "[Nightly] Calculate version"
id: versions
if: inputs.nightly
run: |
# Find all latest Nightly versions
aws s3 cp s3://$R2_BUCKET/nightly/update/macos/releases.json - --endpoint-url $R2_URL --region auto | jq -r .currentRelease >> VERSIONS
aws s3 cp s3://$R2_BUCKET/debian/dists/default/main/binary-amd64/Packages - --endpoint-url $R2_URL --region auto | grep "Package: element-nightly" -A 50 | grep Version -m1 | sed -n 's/Version: //p' >> VERSIONS
aws s3 cp s3://$R2_BUCKET/debian/dists/default/main/binary-arm64/Packages - --endpoint-url $R2_URL --region auto | grep "Package: element-nightly" -A 50 | grep Version -m1 | sed -n 's/Version: //p' >> VERSIONS
aws s3 cp s3://$R2_BUCKET/nightly/update/win32/x64/RELEASES - --endpoint-url $R2_URL --region auto | awk '{print $2}' | cut -d "-" -f 5 | cut -c 8- >> VERSIONS
aws s3 cp s3://$R2_BUCKET/nightly/update/win32/ia32/RELEASES - --endpoint-url $R2_URL --region auto | awk '{print $2}' | cut -d "-" -f 5 | cut -c 8- >> VERSIONS
# Pick the greatest one
VERSION=$(cat VERSIONS | sort -uf | tail -n1)
# Increment it
echo "nightly=$(scripts/generate-nightly-version.ts --latest $VERSION)" >> $GITHUB_OUTPUT
env:
AWS_ACCESS_KEY_ID: ${{ secrets.CF_R2_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_R2_TOKEN }}
R2_BUCKET: ${{ vars.R2_BUCKET }}
R2_URL: ${{ vars.CF_R2_S3_API }}
- name: Check version
id: package
run: |
echo "version=$(cat package.json | jq -r .version)" >> $GITHUB_OUTPUT
- name: "[Release] Fetch release"
id: release
if: ${{ !inputs.nightly && inputs.version != 'develop' }}
uses: cardinalby/git-get-release-action@cedef2faf69cb7c55b285bad07688d04430b7ada # v1
env:
GITHUB_TOKEN: ${{ github.token }}
with:
tag: v${{ steps.package.outputs.version }}
- name: "[Release] Write changelog"
if: ${{ !inputs.nightly && inputs.version != 'develop' }}
run: |
TIME=$(date -d "$PUBLISHED_AT" -R)
echo "element-desktop ($VERSION) default; urgency=medium" >> changelog.Debian
echo "$BODY" | sed 's/^##/\n */g;s/^\*/ */g' | perl -pe 's/\[.+?]\((.+?)\)/\1/g' >> changelog.Debian
echo "" >> changelog.Debian
echo " -- $ACTOR <support@element.io> $TIME" >> changelog.Debian
env:
ACTOR: ${{ github.actor }}
VERSION: v${{ steps.package.outputs.version }}
BODY: ${{ steps.release.outputs.body }}
PUBLISHED_AT: ${{ steps.release.outputs.published_at }}
- name: "[Nightly] Write summary"
if: inputs.nightly
run: |
BUNDLE_HASH=$(npx asar l webapp.asar | grep /bundles/ | head -n 1 | sed 's|.*/||')
WEBAPP_VERSION=$(./scripts/get-version.ts)
WEB_VERSION=${WEBAPP_VERSION:0:12}
REACT_VERSION=${WEBAPP_VERSION:19:12}
JS_VERSION=${WEBAPP_VERSION:35:12}
echo "### Nightly build ${{ steps.versions.outputs.nightly }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "| Component | Version |" >> $GITHUB_STEP_SUMMARY
echo "| ----------- | ------- |" >> $GITHUB_STEP_SUMMARY
echo "| Bundle Hash | $BUNDLE_HASH |" >> $GITHUB_STEP_SUMMARY
echo "| Element Web | [$WEB_VERSION](https://github.com/vector-im/element-web/commit/$WEB_VERSION) |" >> $GITHUB_STEP_SUMMARY
echo "| React SDK | [$REACT_VERSION](https://github.com/matrix-org/matrix-react-sdk/commit/$REACT_VERSION) |" >> $GITHUB_STEP_SUMMARY
echo "| JS SDK | [$JS_VERSION](https://github.com/matrix-org/matrix-js-sdk/commit/$JS_VERSION) |" >> $GITHUB_STEP_SUMMARY
- uses: actions/upload-artifact@v3
with:
name: webapp
retention-days: 1
path: |
webapp.asar
package.json
electronVersion
hakHash
changelog.Debian
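A note on the nightly summary step above: the Bash substring expansions (`:0:12`, `:19:12`, `:35:12`) only line up if `get-version.ts` prints a fixed-width composite of three abbreviated commit hashes; a minimal sketch under that assumption (the value below is hypothetical):

```sh
# Assumed layout: <element-web sha>-react-<matrix-react-sdk sha>-js-<matrix-js-sdk sha>,
# each hash abbreviated to 12 characters, so offsets 0, 19 and 35 pick out the three hashes.
WEBAPP_VERSION="0123456789ab-react-23456789abcd-js-456789abcdef"
echo "${WEBAPP_VERSION:0:12}"    # element-web commit      -> 0123456789ab
echo "${WEBAPP_VERSION:19:12}"   # matrix-react-sdk commit -> 23456789abcd
echo "${WEBAPP_VERSION:35:12}"   # matrix-js-sdk commit    -> 456789abcdef
```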


@@ -1,204 +0,0 @@
# This workflow relies on actions/cache to store the hak dependency artifacts as they take a long time to build
# Due to this extra care must be taken to only ever run all build_* scripts against the same branch to ensure
# the correct cache scoping, and additional care must be taken to not run untrusted actions on the develop branch.
on:
workflow_call:
secrets:
ESIGNER_USER_NAME:
required: false
ESIGNER_USER_PASSWORD:
required: false
ESIGNER_USER_TOTP:
required: false
inputs:
arch:
type: string
required: true
description: "The architecture to build for, one of 'x64' | 'x86' | 'arm64'"
version:
type: string
required: false
description: "Version string to override the one in package.json, used for non-release builds"
sign:
type: string
required: false
description: "Whether to sign & notarise the build, requires 'packages.element.io' environment"
deploy-mode:
type: boolean
required: false
description: "Whether to arrange artifacts in the arrangement needed for deployment, skipping unrelated ones"
jobs:
build:
runs-on: windows-latest
environment: ${{ inputs.sign && 'packages.element.io' || '' }}
env:
SIGNTOOL_PATH: "C:/Program Files (x86)/Windows Kits/10/bin/10.0.22000.0/x86/signtool.exe"
steps:
- uses: kanga333/variable-mapper@3681b75f5c6c00162721168fb91ab74925eaebcb
id: config
with:
key: "${{ inputs.arch }}"
export_to: output
map: |
{
"x64": {
"target": "x86_64-pc-windows-msvc",
"dir": "x64"
},
"arm64": {
"target": "aarch64-pc-windows-msvc",
"build-args": "--arm64",
"arch": "amd64_arm64",
"dir": "arm64"
},
"x86": {
"target": "i686-pc-windows-msvc",
"build-args": "--ia32",
"dir": "ia32"
}
}
- uses: actions/checkout@v3
- uses: actions/download-artifact@v3
with:
name: webapp
- name: Cache .hak
id: cache
uses: actions/cache@v3
with:
key: ${{ runner.os }}-${{ inputs.arch }}-${{ hashFiles('hakHash', 'electronVersion') }}
path: |
./.hak
- name: Set up build tools
uses: ilammy/msvc-dev-cmd@cec98b9d092141f74527d0afa6feb2af698cfe89
with:
arch: ${{ steps.config.outputs.arch || inputs.arch }}
# ActiveTCL package on choco is from 2015,
# this one is newer but includes more than we need
- name: Choco install tclsh
if: steps.cache.outputs.cache-hit != 'true'
shell: pwsh
run: |
choco install -y magicsplat-tcl-tk --no-progress
echo "${HOME}/AppData/Local/Apps/Tcl86/bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
- name: Choco install NetWide Assembler
if: steps.cache.outputs.cache-hit != 'true'
shell: pwsh
run: |
choco install -y nasm --no-progress
echo "C:/Program Files/NASM" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
- name: Install Rust
if: steps.cache.outputs.cache-hit != 'true'
run: |
rustup toolchain install stable --profile minimal --no-self-update
rustup default stable
rustup target add ${{ steps.config.outputs.target }}
- uses: actions/setup-node@v3
with:
cache: "yarn"
# Does not need branch matching as only analyses this layer
- name: Install Deps
run: "yarn install --frozen-lockfile"
- name: Build Natives
if: steps.cache.outputs.cache-hit != 'true'
run: |
refreshenv
yarn build:native --target ${{ steps.config.outputs.target }}
- name: Install and configure eSigner CKA
id: esigner
if: inputs.sign
run: |
Set-StrictMode -Version 'Latest'
# Download
Invoke-WebRequest -OutFile eSigner_CKA.exe "https://packages.element.io/tools/SSL.COM%20eSigner%20CKA_1.0.4-build-20230221_signed.exe"
# Install
New-Item -ItemType Directory -Force -Path "$env:INSTALL_DIR"
./eSigner_CKA.exe /CURRENTUSER /VERYSILENT /SUPPRESSMSGBOXES /DIR="${{ env.INSTALL_DIR }}" | Out-Null
# Disable logger
$LogConfig = Get-Content -Path ${{ env.INSTALL_DIR }}/log4net.config
$LogConfig[0] = '<log4net threshold="OFF">'
$LogConfig | Set-Content -Path ${{ env.INSTALL_DIR }}/log4net.config
# Configure
${{ env.INSTALL_DIR }}/eSignerCKATool.exe config -mode product -user "${{ secrets.ESIGNER_USER_NAME }}" -pass "${{ secrets.ESIGNER_USER_PASSWORD }}" -totp "${{ secrets.ESIGNER_USER_TOTP }}" -key "${{ env.MASTER_KEY_FILE }}" -r
${{ env.INSTALL_DIR }}/eSignerCKATool.exe unload
${{ env.INSTALL_DIR }}/eSignerCKATool.exe load
# Find certificate
$CodeSigningCert = Get-ChildItem Cert:\CurrentUser\My -CodeSigningCert | Select-Object -First 1
echo Certificate: $CodeSigningCert
# Extract thumbprint and subject name
$Thumbprint = $CodeSigningCert.Thumbprint
$SubjectName = ($CodeSigningCert.Subject -replace ", ?", "`n" | ConvertFrom-StringData).CN
echo "config-args=--signtool-thumbprint '$Thumbprint' --signtool-subject-name '$SubjectName'" >> $env:GITHUB_OUTPUT
env:
INSTALL_DIR: C:\Users\runneradmin\eSignerCKA
MASTER_KEY_FILE: C:\Users\runneradmin\eSignerCKA\master.key
- name: "[Nightly] Resolve version"
id: nightly
if: inputs.version != ''
shell: bash
run: |
echo "config-args=--nightly '${{ inputs.version }}'" >> $GITHUB_OUTPUT
- name: Build App
run: |
yarn ts-node scripts/generate-builder-config.ts ${{ steps.nightly.outputs.config-args }} ${{ steps.esigner.outputs.config-args }}
yarn build --publish never -w --config electron-builder.json ${{ steps.config.outputs.build-args }}
- name: Check app was signed successfully
if: inputs.sign != ''
run: |
. "$env:SIGNTOOL_PATH" verify /pa (get-item ./dist/squirrel-windows*/*.exe)
- name: Prepare artifacts for deployment
if: inputs.deploy-mode
shell: bash
run: |
mv dist _dist
mkdir -p "dist/install/win32/$DIR/msi" "dist/update/win32/$DIR"
mv _dist/squirrel-windows*/*.exe "dist/install/win32/$DIR"
mv _dist/squirrel-windows*/*.nupkg "dist/update/win32/$DIR/"
mv _dist/squirrel-windows*/RELEASES "dist/update/win32/$DIR/"
# mv _dist/*.msi "dist/install/win32/$DIR/msi/"
env:
DIR: ${{ steps.config.outputs.dir }}
# We don't wish to store the installer for every nightly ever, so we only keep the latest
- name: "[Nightly] Strip version from installer file"
if: inputs.deploy-mode && inputs.version != ''
shell: bash
run: |
mv dist/install/win32/$DIR/*.exe "dist/install/win32/$DIR/Element Nightly Setup.exe"
# mv dist/install/win32/$DIR/msi/*.msi "dist/install/win32/$DIR/msi/Element Nightly Setup.msi"
env:
DIR: ${{ steps.config.outputs.dir }}
- name: "[Release] Prepare release latest symlink"
if: inputs.deploy-mode && inputs.version == ''
shell: bash
run: |
ln -s "$(find . -type f -iname "*.exe" | xargs -0 -n1 -- basename)" "Element Setup.exe"
working-directory: "dist/install/win32/${{ steps.config.outputs.dir }}"
- name: Upload Artifacts
uses: actions/upload-artifact@v3
with:
name: ${{ inputs.deploy-mode && 'packages.element.io' || format('win-{0}', inputs.arch) }}
path: dist
retention-days: 1


@@ -1,43 +0,0 @@
name: Dockerbuild
on:
workflow_dispatch: {}
push:
branches: [master, develop]
paths:
- "dockerbuild/**"
concurrency: ${{ github.workflow }}-${{ github.ref_name }}
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}-dockerbuild
jobs:
build:
name: Docker Build
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- uses: actions/checkout@v3
- name: Log in to the Container registry
uses: docker/login-action@b4bedf8053341df3b5a9f9e0f2cf4e79e27360c6
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata for Docker
id: meta
uses: docker/metadata-action@879dcbb708d40f8b8679d4f7941b938a086e23a7
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=ref,event=branch
- name: Build and push Docker image
uses: docker/build-push-action@4c1b68d83ad20cc1a09620ca477d5bbbb5fa14d0
with:
context: dockerbuild
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}


@@ -1,8 +0,0 @@
name: Localazy Download
on:
workflow_dispatch: {}
jobs:
download:
uses: matrix-org/matrix-web-i18n/.github/workflows/localazy_download.yaml@main
secrets:
ELEMENT_BOT_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}


@@ -1,11 +0,0 @@
name: Localazy Upload
on:
push:
branches: [develop]
paths:
- "src/i18n/strings/en_EN.json"
jobs:
upload:
uses: matrix-org/matrix-web-i18n/.github/workflows/localazy_upload.yaml@main
secrets:
LOCALAZY_WRITE_KEY: ${{ secrets.LOCALAZY_WRITE_KEY }}


@@ -1,9 +1,12 @@
name: Pull Request
on:
pull_request_target:
types: [opened, edited, labeled, unlabeled, synchronize]
pull_request_target:
types: [ opened, edited, labeled, unlabeled, synchronize ]
concurrency: ${{ github.workflow }}-${{ github.event.pull_request.head.ref }}
jobs:
action:
uses: matrix-org/matrix-js-sdk/.github/workflows/pull_request.yaml@develop
secrets:
ELEMENT_BOT_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
action:
uses: matrix-org/matrix-js-sdk/.github/workflows/pull_request.yaml@develop
with:
labels: "T-Defect,T-Enhancement,T-Task"
secrets:
ELEMENT_BOT_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}


@@ -1,49 +0,0 @@
on:
workflow_call:
inputs:
artifact-name:
type: string
required: true
description: "The name of the artifact containing the deb to include"
secrets:
ELEMENT_BOT_TOKEN:
required: true
CF_R2_ACCESS_KEY_ID:
required: true
CF_R2_TOKEN:
required: true
# Protect reprepro database using concurrency
concurrency: reprepro
jobs:
reprepro:
name: Deploy debian package
environment: packages.element.io
runs-on: ubuntu-latest
env:
R2_INCOMING_BUCKET: ${{ vars.R2_INCOMING_BUCKET }}
R2_URL: ${{ vars.CF_R2_S3_API }}
steps:
- name: Download artifact
uses: actions/download-artifact@v3
with:
name: ${{ inputs.artifact-name }}
path: dist
- name: Upload incoming deb
id: upload
run: |
deb="$(ls *.deb | tail -n1)"
echo "incoming=$deb" >> $GITHUB_OUTPUT
aws s3 cp "$deb" "s3://$R2_INCOMING_BUCKET" --endpoint-url "$R2_URL" --region auto
working-directory: dist
env:
AWS_ACCESS_KEY_ID: ${{ secrets.CF_R2_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_R2_TOKEN }}
- name: Notify packages.element.io of incoming deb
uses: peter-evans/repository-dispatch@bf47d102fdb849e755b0b0023ea3e81a44b6f570 # v2
with:
token: ${{ secrets.ELEMENT_BOT_TOKEN }}
repository: vector-im/packages.element.io
event-type: reprepro-incoming
client-payload: '{"incoming": "${{ steps.upload.outputs.incoming }}"}'


@@ -1,43 +1,45 @@
name: Static Analysis
on:
pull_request: {}
push:
branches: [develop, master]
pull_request: { }
push:
branches: [ develop, master ]
jobs:
ts_lint:
name: "Typescript Syntax Check"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
ts_lint:
name: "Typescript Syntax Check"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v3
with:
cache: "yarn"
- uses: actions/setup-node@v3
with:
cache: 'yarn'
node-version: 16
# Does not need branch matching as only analyses this layer
- name: Install Deps
run: "yarn install --frozen-lockfile"
# Does not need branch matching as only analyses this layer
- name: Install Deps
run: "yarn install --pure-lockfile"
- name: Typecheck
run: "yarn run lint:types"
- name: Typecheck
run: "yarn run lint:types"
i18n_lint:
name: "i18n Check"
uses: matrix-org/matrix-web-i18n/.github/workflows/i18n_check.yml@main
i18n_lint:
name: "i18n Check"
uses: matrix-org/matrix-react-sdk/.github/workflows/i18n_check.yml@develop
js_lint:
name: "ESLint"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
js_lint:
name: "ESLint"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v3
with:
cache: "yarn"
- uses: actions/setup-node@v3
with:
cache: 'yarn'
node-version: 16
# Does not need branch matching as only analyses this layer
- name: Install Deps
run: "yarn install --frozen-lockfile"
# Does not need branch matching as only analyses this layer
- name: Install Deps
run: "yarn install --pure-lockfile"
- name: Run Linter
run: "yarn run lint:js"
- name: Run Linter
run: "yarn run lint:js"


@@ -1,8 +1,8 @@
name: Upgrade Dependencies
on:
workflow_dispatch: {}
workflow_dispatch: { }
jobs:
upgrade:
uses: matrix-org/matrix-js-sdk/.github/workflows/upgrade_dependencies.yml@develop
secrets:
ELEMENT_BOT_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
upgrade:
uses: matrix-org/matrix-js-sdk/.github/workflows/upgrade_dependencies.yml@develop
secrets:
ELEMENT_BOT_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}

.gitignore (6 changes)

@@ -4,7 +4,8 @@
/webapp.asar
/packages
/deploys
node_modules/
/node_modules
/docker_node_modules
/pkg/control
/.hak
/.yarnrc
@@ -14,6 +15,3 @@ node_modules/
.vscode/
/test_artifacts/
/coverage/
yarn-error.log
/hak/**/*.js
/scripts/hak/**/*.js


@@ -1,19 +0,0 @@
/build/
/dockerbuild/
/lib/
/node_modules/
/packages.elememt.io/
/webapp
/src/i18n/strings
/CHANGELOG.md
/package-lock.json
/yarn.lock
**/.idea
.vscode
.vscode/
.tmp
.env
/coverage
/.npmrc
/*.log


@@ -1 +0,0 @@
module.exports = require("eslint-plugin-matrix-org/.prettierrc.js");


File diff suppressed because it is too large.


@@ -1,24 +1,25 @@
![Build](https://github.com/vector-im/element-desktop/actions/workflows/build.yaml/badge.svg)
![Static Analysis](https://github.com/vector-im/element-desktop/actions/workflows/static_analysis.yaml/badge.svg)
[![Localazy](https://img.shields.io/endpoint?url=https%3A%2F%2Fconnect.localazy.com%2Fstatus%2Felement-web%2Fdata%3Fcontent%3Dall%26title%3Dlocalazy%26logo%3Dtrue)](https://localazy.com/p/element-web)
[![Weblate](https://translate.element.io/widgets/element-desktop/-/element-desktop/svg-badge.svg)](https://translate.element.io/engage/element-desktop/)
[![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=element-desktop&metric=alert_status)](https://sonarcloud.io/summary/new_code?id=element-desktop)
[![Vulnerabilities](https://sonarcloud.io/api/project_badges/measure?project=element-desktop&metric=vulnerabilities)](https://sonarcloud.io/summary/new_code?id=element-desktop)
[![Bugs](https://sonarcloud.io/api/project_badges/measure?project=element-desktop&metric=bugs)](https://sonarcloud.io/summary/new_code?id=element-desktop)
# Element Desktop
Element Desktop
===============
Element Desktop is a Matrix client for desktop platforms with Element Web at its core.
# First Steps
First Steps
===========
Before you do anything else, fetch the dependencies:
```
yarn install
```
# Fetching Element
Fetching Element
================
Since this package is just the Electron wrapper for Element Web, it doesn't contain any of the Element Web code,
so the first step is to get a working copy of Element Web. There are a few ways of doing this:
@@ -30,7 +31,6 @@ yarn run fetch --noverify --cfgdir ""
```
...or if you'd like to use GPG to verify the downloaded package:
```
# Fetch the Element public key from the element.io web server over a secure connection and import
# it into your local GPG keychain (you'll need GPG installed). You only need to to do this
@@ -41,7 +41,6 @@ yarn run fetch --cfgdir ""
```
...or either of the above, but fetching a specific version of Element:
```
# Fetch the prebuilt release Element package from the element-web GitHub releases page. The version
# fetched will be the same as the local element-desktop package.
@@ -50,7 +49,6 @@ yarn run fetch --noverify --cfgdir "" v1.5.6
If you only want to run the app locally and don't need to build packages, you can
provide the `webapp` directory directly:
```
# Assuming you've checked out and built a copy of element-web in ../element-web
ln -s ../element-web/webapp ./
@@ -58,32 +56,29 @@ ln -s ../element-web/webapp ./
[TODO: add support for fetching develop builds, arbitrary URLs and arbitrary paths]
# Building
Building
========
## Native Build
TODO: List native pre-requisites
Optionally, [build the native modules](https://github.com/vector-im/element-desktop/blob/develop/docs/native-node-modules.md),
which include support for searching in encrypted rooms and secure storage. Skipping this step is fine, you just won't have those features.
Optionally, [build the native modules](https://github.com/vector-im/element-desktop/blob/develop/docs/native-node-modules.md),
which include support for searching in encrypted rooms and secure storage. Skipping this step is fine, you just won't have those features.
Then, run
```
yarn run build
```
This will do a couple of things:
- Run the `setversion` script to set the local package version to match whatever
version of Element you installed above.
- Run electron-builder to build a package. The package built will match the operating system
you're running the build process on.
* Run the `setversion` script to set the local package version to match whatever
version of Element you installed above.
* Run electron-builder to build a package. The package built will match the operating system
you're running the build process on.
## Docker
Alternatively, you can also build using docker, which will always produce the linux package:
```
# Run this once to make the docker image
yarn run docker:setup
@@ -96,10 +91,9 @@ yarn run docker:build
After running, the packages should be in `dist/`.
# Starting
Starting
========
If you'd just like to run the electron app locally for development:
```
# Install electron - we don't normally need electron itself as it's provided
# by electron-builder when building packages
@@ -107,22 +101,21 @@ yarn add electron
yarn start
```
# Config
Config
======
If you'd like the packaged Element to have a configuration file, you can create a
config directory and place `config.json` in there, then specify this directory
with the `--cfgdir` option to `yarn run fetch`, eg:
```
mkdir myconfig
cp /path/to/my/config.json myconfig/
yarn run fetch --cfgdir myconfig
```
The config dir for the official Element app is in `element.io`. If you use this,
your app will auto-update itself using builds from element.io.
# Profiles
Profiles
========
To run multiple instances of the desktop app for different accounts, you can
launch the executable with the `--profile` argument followed by a unique
@@ -132,25 +125,30 @@ not interfere with the default one.
Alternatively, a custom location for the profile data can be specified using the
`--profile-dir` flag followed by the desired path.
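For example (the executable name and paths here are illustrative and vary by platform and install method):
```
# Run a second, independent instance whose data lives under the "Work" profile
element-desktop --profile Work

# ...or keep the profile data in an explicit directory of your choosing
element-desktop --profile-dir ~/element-profiles/work
```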
# User-specified config.json
User-specified config.json
==========================
- `%APPDATA%\$NAME\config.json` on Windows
- `$XDG_CONFIG_HOME/$NAME/config.json` or `~/.config/$NAME/config.json` on Linux
- `~/Library/Application Support/$NAME/config.json` on macOS
+ `%APPDATA%\$NAME\config.json` on Windows
+ `$XDG_CONFIG_HOME/$NAME/config.json` or `~/.config/$NAME/config.json` on Linux
+ `~/Library/Application Support/$NAME/config.json` on macOS
In the paths above, `$NAME` is typically `Element`, unless you use `--profile
$PROFILE`, in which case it becomes `Element-$PROFILE`. If the profile was
created by a pre-1.7 install, it will instead be `Riot` or `Riot-$PROFILE`.
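For example, launching with `--profile Work` on Linux would read the override from `~/.config/Element-Work/config.json`.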
# Translations
Translations
==========================
To add a new translation, head to the [translating doc](https://github.com/vector-im/element-web/blob/develop/docs/translating.md).
For a developer guide, see the [translating dev doc](https://github.com/vector-im/element-web/blob/develop/docs/translating-dev.md).
# Report bugs & give feedback
[<img src="https://translate.element.io/widgets/element-desktop/-/multi-auto.svg" alt="translationsstatus" width="340">](https://translate.element.io/engage/element-desktop/?utm_source=widget)
Report bugs & give feedback
==========================
If you run into any bugs or have feedback you'd like to share, please let us know on GitHub.
To help avoid duplicate issues, please [view existing issues](https://github.com/vector-im/element-web/issues?q=is%3Aopen+is%3Aissue+sort%3Areactions-%2B1-desc) first (and add a +1) or [create a new issue](https://github.com/vector-im/element-web/issues/new/choose) if you can't find it. Please note that this issue tracker is associated with the [element-web](https://github.com/vector-im/element-web) repo, but is also used for the code in this repo.

View File

@@ -1,3 +1,6 @@
module.exports = {
presets: [["@babel/preset-env", { targets: { node: "current" } }], "@babel/preset-typescript"],
presets: [
['@babel/preset-env', { targets: { node: 'current' } }],
'@babel/preset-typescript',
],
};

View File

@@ -0,0 +1,51 @@
#include <windows.h>
#include <stdio.h>
#include <string.h>
#include <tchar.h>
/*
* This just runs 'Element.exe' with the same args as
* this process was invoked with. This gets around the fact that
* squirrel always tries to run an executable with the same name,
* so fails to restart if the app's name has changed.
*/
void _tmain( int argc, TCHAR *argv[] )
{
LPSTR myCmdLine = GetCommandLineA();
char cmdLine[32767];
LPSTR cmdLinePos = cmdLine;
LPSTR toRun = "\"Element.exe\" ";
strncpy(cmdLinePos, toRun, strlen(toRun));
cmdLinePos += strlen(toRun);
if (myCmdLine[0] == '"') ++myCmdLine;
myCmdLine += strlen(argv[0]);
if (myCmdLine[0] == '"') ++myCmdLine;
if (myCmdLine[0] == ' ') ++myCmdLine;
strncpy(cmdLinePos, myCmdLine, (cmdLine + 32767) - cmdLinePos);
STARTUPINFO si;
PROCESS_INFORMATION pi;
ZeroMemory(&si, sizeof(si));
si.cb = sizeof(si);
ZeroMemory(&pi, sizeof(pi));
if (!CreateProcess(NULL,
cmdLine, // Command line
NULL, // Process handle not inheritable
NULL, // Thread handle not inheritable
FALSE, // Set handle inheritance to FALSE
0, // No creation flags
NULL, // Use parent's environment block
NULL, // Use parent's starting directory
&si, // Pointer to STARTUPINFO structure
&pi ) // Pointer to PROCESS_INFORMATION structure
)
{
printf("CreateProcess failed (%d).\n", GetLastError());
return;
}
}

View File

Binary file not shown.

View File

@@ -1,9 +1,8 @@
# Docker image to facilitate building Element Desktop with native bits using a glibc version with broader compatibility
FROM buildpack-deps:bionic-curl
ENV DEBIAN_FRONTEND noninteractive
RUN curl --proto "=https" -L https://yarnpkg.com/latest.tar.gz | tar xvz && mv yarn-* /yarn && ln -s /yarn/bin/yarn /usr/bin/yarn
RUN curl -L https://yarnpkg.com/latest.tar.gz | tar xvz && mv yarn-* /yarn && ln -s /yarn/bin/yarn /usr/bin/yarn
RUN apt-get -qq update && apt-get -qq dist-upgrade && \
# add repo for git-lfs
curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | bash && \
@@ -15,8 +14,6 @@ RUN apt-get -qq update && apt-get -qq dist-upgrade && \
apt-get -qq install --no-install-recommends qtbase5-dev bsdtar build-essential autoconf libssl-dev gcc-multilib g++-multilib lzip rpm python libcurl4 git git-lfs ssh unzip tcl \
libsecret-1-dev libgnome-keyring-dev \
libopenjp2-tools \
# Used by github actions \
jq grep file \
# Used by seshat (when not SQLCIPHER_STATIC) \
libsqlcipher-dev && \
# git-lfs
@@ -34,10 +31,10 @@ ENV LC_ALL C.UTF-8
ENV DEBUG_COLORS true
ENV FORCE_COLOR true
ENV NODE_VERSION 16.18.1
ENV NODE_VERSION 14.17.0
# this package is used for snapcraft and we should not clear apt list - to avoid apt-get update during snap build
RUN curl --proto "=https" -L https://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-x64.tar.gz | tar xz -C /usr/local --strip-components=1 && \
RUN curl -L https://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-x64.tar.gz | tar xz -C /usr/local --strip-components=1 && \
unlink /usr/local/CHANGELOG.md && unlink /usr/local/LICENSE && unlink /usr/local/README.md && \
# https://github.com/npm/npm/issues/4531
npm config set unsafe-perm true

View File

@@ -1,3 +0,0 @@
[target.aarch64-unknown-linux-gnu]
linker = "aarch64-linux-gnu-gcc"
rustflags = ["-L/usr/lib/aarch64-linux-gnu"]

View File

@@ -1,11 +0,0 @@
AS=/usr/bin/aarch64-linux-gnu-as
STRIP=/usr/bin/aarch64-linux-gnu-strip
AR=/usr/bin/aarch64-linux-gnu-ar
CC=/usr/bin/aarch64-linux-gnu-gcc
CPP=/usr/bin/aarch64-linux-gnu-cpp
CXX=/usr/bin/aarch64-linux-gnu-g++
LD=/usr/bin/aarch64-linux-gnu-ld
FC=/usr/bin/aarch64-linux-gnu-gfortran
PKG_CONFIG_PATH=/usr/lib/aarch64-linux-gnu/pkgconfig
CFLAGS=-L/usr/lib/aarch64-linux-gnu
RUSTFLAGS=-L/usr/lib/aarch64-linux-gnu

View File

@@ -1,16 +0,0 @@
# Summary
- [Introduction](../README.md)
# Build
- [Native Node modules](native-node-modules.md)
- [Windows requirements](windows-requirements.md)
# Distribution
- [Updates](updates.md)
# Setup
- [Config](config.md)

View File

@@ -1,15 +0,0 @@
# Configuration
All Element Web options documented [here](https://github.com/vector-im/element-web/blob/develop/docs/config.md) can be used, as well as the following:
---
The app contains a configuration file specified at build time using [these instructions](https://github.com/vector-im/element-desktop/#config).
This config can be overridden by the end user by creating a `config.json` file at the paths described [here](https://github.com/vector-im/element-desktop/#user-specified-configjson).
After changing the config, the app will need to be exited fully (including via the task tray) and re-started.
---
1. `update_base_url`: Specifies the URL of the update server, see [document](https://github.com/vector-im/element-desktop/blob/develop/docs/updates.md).
2. `web_base_url`: Specifies the Element Web URL when performing actions such as popout widget. Defaults to `https://app.element.io/`.
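For instance, a minimal `config.json` overriding both of the options above might look like this (values are illustrative):
```
{
    "update_base_url": "https://packages.element.io/desktop/update/",
    "web_base_url": "https://app.element.io/"
}
```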

View File

@@ -17,14 +17,13 @@ when releasing.
Install the pre-requisites for your system:
- [Windows pre-requisites](https://github.com/vector-im/element-desktop/blob/develop/docs/windows-requirements.md)
- Linux: TODO
- OS X: TODO
* [Windows pre-requisites](https://github.com/vector-im/element-desktop/blob/develop/docs/windows-requirements.md)
* Linux: TODO
* OS X: TODO
Then optionally, [add seshat and dependencies to support search in E2E rooms](#adding-seshat-for-search-in-e2e-encrypted-rooms).
Then, to build for an architecture selected automatically based on your system (recommended), run:
```
yarn run build:native
```
@@ -71,9 +70,9 @@ as usual using:
On Windows & macOS we always statically link libsqlcipher, as it is not generally available there.
On Linux we will use a system package by default; on Debian & Ubuntu this is `libsqlcipher0`,
but this is problematic for some other packages, and we found that it may crash for unknown reasons.
By including `SQLCIPHER_BUNDLED=1` in the build environment, the build scripts will fully statically
link sqlcipher, including a static build of OpenSSL.
but this is problematic for some other packages.
By including `SQLCIPHER_STATIC=1` in the build environment, the build scripts will statically link sqlcipher;
note that this requires a `libcrypto1.1` shared library to be available on the system.
More info can be found at https://github.com/matrix-org/seshat/issues/102
and https://github.com/vector-im/element-web/issues/20926.
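As a sketch of how that is used (the variable spelling depends on which revision of these docs you are following, as noted above):
```
# Request the statically-linked sqlcipher when building the native modules
# (use SQLCIPHER_STATIC=1 instead if following the older variant above)
SQLCIPHER_BUNDLED=1 yarn run build:native
```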
@@ -83,19 +82,15 @@ and https://github.com/vector-im/element-web/issues/20926.
### macOS
On macOS, you can build universal native modules too:
```
yarn run build:native:universal
```
...or you can build for a specific architecture:
```
yarn run build:native --target x86_64-apple-darwin
```
or
```
yarn run build:native --target aarch64-apple-darwin
```
@@ -110,13 +105,10 @@ yarn run build:universal
### Windows
If you're on Windows, you can choose to build specifically for 32 or 64 bit:
```
yarn run build:32
```
or
```
yarn run build:64
```
@@ -152,6 +144,6 @@ The current set of native modules are stored in `.hak/hakModules`,
so you can use this to check what architecture is currently in place, eg:
```
$ lipo -info .hak/hakModules/keytar/build/Release/keytar.node
Architectures in the fat file: .hak/hakModules/keytar/build/Release/keytar.node are: x86_64 arm64
```

View File

@@ -1,15 +0,0 @@
The Desktop app is capable of self-updating on macOS and Windows.
The update server base URL is configurable as `update_base_url` in config.json; updates can be served by a static file host,
CDN or object storage.
Currently all packaging & deployment is handled by [Github actions](https://github.com/vector-im/element-desktop/blob/develop/.github/workflows/build_and_deploy.yaml)
# Windows
On Windows the update mechanism used is [Squirrel.Windows](https://github.com/Squirrel/Squirrel.Windows)
and updates can be served by any compatible Squirrel server, such as https://github.com/Tiliq/squirrel-server
# macOS
On macOS the update mechanism used is [Squirrel.Mac](https://github.com/Squirrel/Squirrel.Mac)
using the newer JSON format as documented [here](https://github.com/Squirrel/Squirrel.Mac#update-file-json-format).
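As a rough illustration of that format (the values below are made up; see the linked Squirrel.Mac documentation for the authoritative schema), the file the client polls looks something like:
```
{
    "currentRelease": "1.11.10",
    "releases": [
        {
            "version": "1.11.10",
            "updateTo": {
                "version": "1.11.10",
                "pub_date": "2022-10-13T08:52:52+01:00",
                "notes": "Example release notes",
                "name": "1.11.10",
                "url": "https://updates.example.org/desktop/update/macos/Element-1.11.10-universal.zip"
            }
        }
    ]
}
```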

View File

@@ -1,30 +1,26 @@
# Windows
## Requirements to build native modules
We rely on GitHub Actions `windows-latest` plus a few extra utilities as per [the workflow](https://github.com/vector-im/element-desktop/blob/develop/.github/workflows/build_windows.yaml).
If you want to build native modules, make sure that the following tools are installed on your system.
- [Git for Windows](https://git-scm.com/download/win)
- [Node 16](https://nodejs.org)
- [Python 3](https://www.python.org/downloads/) (if you type 'python' into the command prompt, it will offer to install it from the Windows Store)
- [Strawberry Perl](https://strawberryperl.com/)
- [Rustup](https://rustup.rs/)
- [NASM](https://www.nasm.us/)
- [Build Tools for Visual Studio 2019](https://visualstudio.microsoft.com/downloads/#build-tools-for-visual-studio-2019) with the following configuration:
- On the Workloads tab:
- Desktop & Mobile -> C++ build tools
- On the Individual components tab:
- MSVC VS 2019 C++ build tools
- Windows 10 SDK (latest version available)
- C++ CMake tools for Windows
- [Git for Windows](https://git-scm.com/download/win)
- [Node 14](https://nodejs.org)
- [Python 3](https://www.python.org/downloads/) (if you type 'python' into the command prompt, it will offer to install it from the Windows Store)
- [Strawberry Perl](https://strawberryperl.com/)
- [Rustup](https://rustup.rs/)
- [NASM](https://www.nasm.us/)
- [Build Tools for Visual Studio 2019](https://visualstudio.microsoft.com/downloads/#build-tools-for-visual-studio-2019) with the following configuration:
- On the Workloads tab:
- Desktop & Mobile -> C++ build tools
- On the Individual components tab:
- MSVC VS 2019 C++ build tools
- Windows 10 SDK (latest version available)
- C++ CMake tools for Windows
Once installed make sure all those utilities are accessible in your `PATH`.
If you want to be able to build x86 targets from an x64 host install the right toolchain:
```cmd
rustup toolchain install stable-i686-pc-windows-msvc
rustup target add i686-pc-windows-msvc

View File

@@ -1,14 +1,6 @@
{
"update_base_url": "https://packages.element.io/nightly/update/",
"default_server_name": "matrix.org",
"default_server_config": {
"m.homeserver": {
"base_url": "https://matrix-client.matrix.org"
},
"m.identity_server": {
"base_url": "https://vector.im"
}
},
"brand": "Element Nightly",
"integrations_ui_url": "https://scalar.vector.im/",
"integrations_rest_url": "https://scalar.vector.im/api",
@@ -19,11 +11,16 @@
"https://scalar-staging.vector.im/api",
"https://scalar-staging.riot.im/scalar/api"
],
"hosting_signup_link": "https://element.io/matrix-services?utm_source=element-web&utm_medium=web",
"bug_report_endpoint_url": "https://element.io/bugreports/submit",
"uisi_autorageshake_app": "element-auto-uisi",
"show_labs_settings": true,
"room_directory": {
"servers": ["matrix.org", "gitter.im", "libera.chat"]
"showLabsSettings": true,
"roomDirectory": {
"servers": [
"matrix.org",
"gitter.im",
"libera.chat"
]
},
"enable_presence_by_hs_url": {
"https://matrix.org": false,
@@ -44,8 +41,8 @@
"environment": "nightly"
},
"posthog": {
"project_api_key": "phc_Jzsm6DTm6V2705zeU5dcNvQDlonOR68XvX2sh1sEOHO",
"api_host": "https://posthog.element.io"
"projectApiKey": "phc_Jzsm6DTm6V2705zeU5dcNvQDlonOR68XvX2sh1sEOHO",
"apiHost": "https://posthog.element.io"
},
"privacy_policy_url": "https://element.io/cookie-policy",
"features": {
@@ -53,7 +50,7 @@
"feature_video_rooms": true
},
"element_call": {
"url": "https://call.element.dev"
"url": "https://element-call.netlify.app"
},
"map_style_url": "https://api.maptiler.com/maps/streets/style.json?key=fU3vlMsMn4Jb6dnEIFsx"
}

View File

@@ -0,0 +1,12 @@
Package: element-nightly
License: Apache-2.0
Vendor: support@element.io
Architecture: amd64
Maintainer: support@element.io
Depends: libgtk-3-0, libnotify4, libnss3, libxss1, libxtst6, xdg-utils, libatspi2.0-0, libuuid1, libsecret-1-0
Recommends: libappindicator3-1, libsqlcipher0
Section: net
Priority: extra
Homepage: https://element.io/
Description:
riot.im A feature-rich client for Matrix.org (nightly unstable build).

View File

@@ -0,0 +1,9 @@
Package: riot-nightly
Version: 2020071502
Depends: element-nightly
Maintainer: packages@element.io
Architecture: all
Priority: optional
Section: oldlibs
Description: transitional package
This is a transitional package. It can safely be removed.

View File

@@ -0,0 +1,81 @@
Origin: riot.im
Codename: default
Architectures: amd64 i386 source
Components: main
SignWith: D7B0B66941D01538
Tracking: minimal
Origin: riot.im
Suite: oldoldstable
Codename: jessie
Architectures: amd64 i386 source
Components: main
SignWith: D7B0B66941D01538
Tracking: minimal
Origin: riot.im
Suite: oldstable
Codename: stretch
Architectures: amd64 i386 source
Components: main
SignWith: D7B0B66941D01538
Tracking: minimal
Origin: riot.im
Suite: stable
Codename: buster
Architectures: amd64 i386 source
Components: main
SignWith: D7B0B66941D01538
Tracking: minimal
Origin: riot.im
Suite: testing
Codename: bullseye
Architectures: amd64 i386 source
Components: main
SignWith: D7B0B66941D01538
Tracking: minimal
Origin: riot.im
Suite: unstable
Codename: sid
Architectures: amd64 i386 source
Components: main
SignWith: D7B0B66941D01538
Tracking: minimal
Origin: riot.im
Codename: xenial
Architectures: amd64 i386 source
Components: main
SignWith: D7B0B66941D01538
Tracking: minimal
Origin: riot.im
Codename: bionic
Architectures: amd64 i386 source
Components: main
SignWith: D7B0B66941D01538
Tracking: minimal
Origin: riot.im
Codename: cosmic
Architectures: amd64 i386 source
Components: main
SignWith: D7B0B66941D01538
Tracking: minimal
Origin: riot.im
Codename: disco
Architectures: amd64 i386 source
Components: main
SignWith: D7B0B66941D01538
Tracking: minimal
Origin: riot.im
Codename: eoan
Architectures: amd64 i386 source
Components: main
SignWith: D7B0B66941D01538
Tracking: minimal

View File

@@ -1,14 +1,6 @@
{
"update_base_url": "https://packages.element.io/desktop/update/",
"default_server_name": "matrix.org",
"default_server_config": {
"m.homeserver": {
"base_url": "https://matrix-client.matrix.org"
},
"m.identity_server": {
"base_url": "https://vector.im"
}
},
"brand": "Element",
"integrations_ui_url": "https://scalar.vector.im/",
"integrations_rest_url": "https://scalar.vector.im/api",
@@ -19,12 +11,17 @@
"https://scalar-staging.vector.im/api",
"https://scalar-staging.riot.im/scalar/api"
],
"hosting_signup_link": "https://element.io/matrix-services?utm_source=element-web&utm_medium=web",
"bug_report_endpoint_url": "https://element.io/bugreports/submit",
"uisi_autorageshake_app": "element-auto-uisi",
"room_directory": {
"servers": ["matrix.org", "gitter.im", "libera.chat"]
"roomDirectory": {
"servers": [
"matrix.org",
"gitter.im",
"libera.chat"
]
},
"show_labs_settings": false,
"showLabsSettings": false,
"enable_presence_by_hs_url": {
"https://matrix.org": false,
"https://matrix-client.matrix.org": false
@@ -40,8 +37,8 @@
}
],
"posthog": {
"project_api_key": "phc_Jzsm6DTm6V2705zeU5dcNvQDlonOR68XvX2sh1sEOHO",
"api_host": "https://posthog.element.io"
"projectApiKey": "phc_Jzsm6DTm6V2705zeU5dcNvQDlonOR68XvX2sh1sEOHO",
"apiHost": "https://posthog.element.io"
},
"privacy_policy_url": "https://element.io/cookie-policy",
"map_style_url": "https://api.maptiler.com/maps/streets/style.json?key=fU3vlMsMn4Jb6dnEIFsx"

View File

@@ -0,0 +1,14 @@
Package: element-desktop
License: Apache-2.0
Vendor: support@element.io
Architecture: amd64
Maintainer: support@element.io
Depends: libgtk-3-0, libnotify4, libnss3, libxss1, libxtst6, xdg-utils, libatspi2.0-0, libuuid1, libsecret-1-0
Recommends: libappindicator3-1, libsqlcipher0
Replaces: riot-desktop (<< 1.7.0), riot-web (<< 1.7.0)
Breaks: riot-desktop (<< 1.7.0), riot-web (<< 1.7.0)
Section: net
Priority: extra
Homepage: https://element.io/
Description:
A feature-rich client for Matrix.org

View File

@@ -0,0 +1,9 @@
Package: riot-desktop
Version: 1.7.0
Depends: element-desktop
Maintainer: packages@element.io
Architecture: all
Priority: optional
Section: oldlibs
Description: transitional package
This is a transitional package. It can safely be removed.

View File

@@ -14,11 +14,11 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
import path from "path";
import childProcess from "child_process";
import path from 'path';
import childProcess from 'child_process';
import HakEnv from "../../scripts/hak/hakEnv";
import { DependencyInfo } from "../../scripts/hak/dep";
import HakEnv from '../../scripts/hak/hakEnv';
import { DependencyInfo } from '../../scripts/hak/dep';
export default async function buildKeytar(hakEnv: HakEnv, moduleInfo: DependencyInfo): Promise<void> {
const env = hakEnv.makeGypEnv();
@@ -26,15 +26,15 @@ export default async function buildKeytar(hakEnv: HakEnv, moduleInfo: Dependency
console.log("Running yarn with env", env);
await new Promise<void>((resolve, reject) => {
const proc = childProcess.spawn(
path.join(moduleInfo.nodeModuleBinDir, "node-gyp" + (hakEnv.isWin() ? ".cmd" : "")),
["rebuild", "--arch", hakEnv.getTargetArch()],
path.join(moduleInfo.nodeModuleBinDir, 'node-gyp' + (hakEnv.isWin() ? '.cmd' : '')),
['rebuild'],
{
cwd: moduleInfo.moduleBuildDir,
env,
stdio: "inherit",
stdio: 'inherit',
},
);
proc.on("exit", (code) => {
proc.on('exit', (code) => {
code ? reject(code) : resolve();
});
});

View File

@@ -14,20 +14,20 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
import childProcess from "child_process";
import childProcess from 'child_process';
import HakEnv from "../../scripts/hak/hakEnv";
import { DependencyInfo } from "../../scripts/hak/dep";
import HakEnv from '../../scripts/hak/hakEnv';
import { DependencyInfo } from '../../scripts/hak/dep';
export default async function (hakEnv: HakEnv, moduleInfo: DependencyInfo): Promise<void> {
const tools = [["python", "--version"]]; // node-gyp uses python for reasons beyond comprehension
export default async function(hakEnv: HakEnv, moduleInfo: DependencyInfo): Promise<void> {
const tools = [['python', '--version']]; // node-gyp uses python for reasons beyond comprehension
for (const tool of tools) {
await new Promise<void>((resolve, reject) => {
const proc = childProcess.spawn(tool[0], tool.slice(1), {
stdio: ["ignore"],
stdio: ['ignore'],
});
proc.on("exit", (code) => {
proc.on('exit', (code) => {
if (code !== 0) {
reject("Can't find " + tool);
} else {

View File

@@ -14,50 +14,319 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
import childProcess from "child_process";
import path from 'path';
import childProcess from 'child_process';
import mkdirp from 'mkdirp';
import fsExtra from 'fs-extra';
import HakEnv from "../../scripts/hak/hakEnv";
import { DependencyInfo } from "../../scripts/hak/dep";
import HakEnv from '../../scripts/hak/hakEnv';
import { DependencyInfo } from '../../scripts/hak/dep';
export default async function (hakEnv: HakEnv, moduleInfo: DependencyInfo): Promise<void> {
export default async function(hakEnv: HakEnv, moduleInfo: DependencyInfo): Promise<void> {
if (hakEnv.isWin()) {
await buildOpenSslWin(hakEnv, moduleInfo);
await buildSqlCipherWin(hakEnv, moduleInfo);
} else if (hakEnv.wantsStaticSqlCipherUnix()) {
await buildSqlCipherUnix(hakEnv, moduleInfo);
}
await buildMatrixSeshat(hakEnv, moduleInfo);
}
async function buildOpenSslWin(hakEnv: HakEnv, moduleInfo: DependencyInfo) {
const version = moduleInfo.cfg.dependencies.openssl;
const openSslDir = path.join(moduleInfo.moduleTargetDotHakDir, `openssl-${version}`);
const openSslArch = hakEnv.getTargetArch() === 'x64' ? 'VC-WIN64A' : 'VC-WIN32';
console.log("Building openssl in " + openSslDir);
await new Promise<void>((resolve, reject) => {
const proc = childProcess.spawn(
'perl',
[
'Configure',
'--prefix=' + moduleInfo.depPrefix,
// sqlcipher only uses about a tiny part of openssl. We link statically
// so will only pull in the symbols we use, but we may as well turn off
// as much as possible to save on build time.
'no-afalgeng',
'no-capieng',
'no-cms',
'no-ct',
'no-deprecated',
'no-dgram',
'no-dso',
'no-ec',
'no-ec2m',
'no-gost',
'no-nextprotoneg',
'no-ocsp',
'no-sock',
'no-srp',
'no-srtp',
'no-tests',
'no-ssl',
'no-tls',
'no-dtls',
'no-shared',
'no-aria',
'no-camellia',
'no-cast',
'no-chacha',
'no-cmac',
'no-des',
'no-dh',
'no-dsa',
'no-ecdh',
'no-ecdsa',
'no-idea',
'no-md4',
'no-mdc2',
'no-ocb',
'no-poly1305',
'no-rc2',
'no-rc4',
'no-rmd160',
'no-scrypt',
'no-seed',
'no-siphash',
'no-sm2',
'no-sm3',
'no-sm4',
'no-whirlpool',
openSslArch,
],
{
cwd: openSslDir,
stdio: 'inherit',
},
);
proc.on('exit', (code) => {
code ? reject(code) : resolve();
});
});
await new Promise<void>((resolve, reject) => {
const proc = childProcess.spawn(
'nmake',
['build_libs'],
{
cwd: openSslDir,
stdio: 'inherit',
},
);
proc.on('exit', (code) => {
code ? reject(code) : resolve();
});
});
await new Promise<void>((resolve, reject) => {
const proc = childProcess.spawn(
'nmake',
['install_dev'],
{
cwd: openSslDir,
stdio: 'inherit',
},
);
proc.on('exit', (code) => {
code ? reject(code) : resolve();
});
});
}
async function buildSqlCipherWin(hakEnv: HakEnv, moduleInfo: DependencyInfo) {
const version = moduleInfo.cfg.dependencies.sqlcipher;
const sqlCipherDir = path.join(moduleInfo.moduleTargetDotHakDir, `sqlcipher-${version}`);
const buildDir = path.join(sqlCipherDir, 'bld');
await mkdirp(buildDir);
await new Promise<void>((resolve, reject) => {
const proc = childProcess.spawn(
'nmake',
['/f', path.join('..', 'Makefile.msc'), 'libsqlite3.lib', 'TOP=..'],
{
cwd: buildDir,
stdio: 'inherit',
env: Object.assign({}, process.env, {
CCOPTS: "-DSQLITE_HAS_CODEC -I" + path.join(moduleInfo.depPrefix, 'include'),
LTLIBPATHS: "/LIBPATH:" + path.join(moduleInfo.depPrefix, 'lib'),
LTLIBS: "libcrypto.lib",
}),
},
);
proc.on('exit', (code) => {
code ? reject(code) : resolve();
});
});
await fsExtra.copy(
path.join(buildDir, 'libsqlite3.lib'),
path.join(moduleInfo.depPrefix, 'lib', 'sqlcipher.lib'),
);
await fsExtra.copy(
path.join(buildDir, 'sqlite3.h'),
path.join(moduleInfo.depPrefix, 'include', 'sqlcipher.h'),
);
}
async function buildSqlCipherUnix(hakEnv: HakEnv, moduleInfo: DependencyInfo) {
const version = moduleInfo.cfg.dependencies.sqlcipher;
const sqlCipherDir = path.join(moduleInfo.moduleTargetDotHakDir, `sqlcipher-${version}`);
const args = [
'--prefix=' + moduleInfo.depPrefix + '',
'--enable-tempstore=yes',
'--enable-shared=no',
'--enable-tcl=no',
];
if (hakEnv.isMac()) {
args.push('--with-crypto-lib=commoncrypto');
}
if (hakEnv.wantsStaticSqlCipherUnix()) {
args.push('--enable-tcl=no');
if (hakEnv.isLinux()) {
args.push('--with-pic=yes');
}
}
if (!hakEnv.isHost()) {
// In the nonsense world of `configure`, it is assumed you are building
// a compiler like `gcc`, so the `host` option actually means the target
// the build output runs on.
args.push(`--host=${hakEnv.getTargetId()}`);
}
const cflags = [
'-DSQLITE_HAS_CODEC',
];
if (!hakEnv.isHost()) {
// `clang` uses more logical option naming.
cflags.push(`--target=${hakEnv.getTargetId()}`);
}
if (cflags.length) {
args.push(`CFLAGS=${cflags.join(' ')}`);
}
const ldflags: string[] = [];
if (hakEnv.isMac()) {
ldflags.push('-framework Security');
ldflags.push('-framework Foundation');
}
if (ldflags.length) {
args.push(`LDFLAGS=${ldflags.join(' ')}`);
}
await new Promise<void>((resolve, reject) => {
const proc = childProcess.spawn(
path.join(sqlCipherDir, 'configure'),
args,
{
cwd: sqlCipherDir,
stdio: 'inherit',
},
);
proc.on('exit', (code) => {
code ? reject(code) : resolve();
});
});
await new Promise<void>((resolve, reject) => {
const proc = childProcess.spawn(
'make',
[],
{
cwd: sqlCipherDir,
stdio: 'inherit',
},
);
proc.on('exit', (code) => {
code ? reject(code) : resolve();
});
});
await new Promise<void>((resolve, reject) => {
const proc = childProcess.spawn(
'make',
['install'],
{
cwd: sqlCipherDir,
stdio: 'inherit',
},
);
proc.on('exit', (code) => {
code ? reject(code) : resolve();
});
});
}
async function buildMatrixSeshat(hakEnv: HakEnv, moduleInfo: DependencyInfo) {
// seshat now uses n-api so we shouldn't need to specify a node version to
// build against, but it does seem to still need something in here, so leaving
// it for now: we should confirm how much of this it still actually needs.
const env = hakEnv.makeGypEnv();
if (!hakEnv.isLinux() || hakEnv.wantsStaticSqlCipherUnix()) {
Object.assign(env, {
SQLCIPHER_STATIC: 1,
SQLCIPHER_LIB_DIR: path.join(moduleInfo.depPrefix, 'lib'),
SQLCIPHER_INCLUDE_DIR: path.join(moduleInfo.depPrefix, 'include'),
});
}
if (hakEnv.isLinux() && hakEnv.wantsStaticSqlCipherUnix()) {
// Ensure Element uses the statically-linked seshat build, and prevent other applications
// from attempting to use this one. Detailed explanation:
//
// RUSTFLAGS
// An environment variable containing a list of arguments to pass to rustc.
// -Clink-arg=VALUE
// A rustc argument to pass a single argument to the linker.
// -Wl,
// gcc syntax to pass an argument (from gcc) to the linker (ld).
// -Bsymbolic:
// Prefer local/statically linked symbols over those in the environment.
// Prevent overriding native libraries by LD_PRELOAD etc.
// --exclude-libs ALL
// Prevent symbols from being exported by any archive libraries.
// Reduces output filesize and prevents being dynamically linked against.
env.RUSTFLAGS = '-Clink-arg=-Wl,-Bsymbolic -Clink-arg=-Wl,--exclude-libs,ALL';
}
if (hakEnv.isWin()) {
env.RUSTFLAGS = '-Ctarget-feature=+crt-static -Clink-args=libcrypto.lib';
// Note that in general, you can specify targets in Rust without having to have
// the matching toolchain, however for this, cargo gets confused when building
// the build scripts since they run on the host, but vcvarsall.bat sets the c
// compiler in the path to be the one for the target, so we just use the matching
// toolchain for the target architecture which makes everything happy.
env.RUSTUP_TOOLCHAIN = `stable-${hakEnv.getTargetId()}`;
}
if (!hakEnv.isHost()) {
env.CARGO_BUILD_TARGET = hakEnv.getTargetId();
}
console.log("Running yarn install");
console.log("Running neon with env", env);
await new Promise<void>((resolve, reject) => {
const proc = childProcess.spawn(
"yarn" + (hakEnv.isWin() ? ".cmd" : ""),
["install"],
path.join(moduleInfo.nodeModuleBinDir, 'neon' + (hakEnv.isWin() ? '.cmd' : '')),
['build', '--release'],
{
cwd: moduleInfo.moduleBuildDir,
env,
shell: true,
stdio: "inherit",
stdio: 'inherit',
},
);
proc.on("exit", (code) => {
code ? reject(code) : resolve();
});
});
const buildTarget = hakEnv.wantsStaticSqlCipher() ? "build-bundled" : "build";
console.log("Running yarn build");
await new Promise<void>((resolve, reject) => {
const proc = childProcess.spawn(
"yarn" + (hakEnv.isWin() ? ".cmd" : ""),
["run", buildTarget],
{
cwd: moduleInfo.moduleBuildDir,
env,
shell: true,
stdio: "inherit",
},
);
proc.on("exit", (code) => {
proc.on('exit', (code) => {
code ? reject(code) : resolve();
});
});

View File

@@ -14,32 +14,49 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
import childProcess from "child_process";
import fsProm from "fs/promises";
import childProcess from 'child_process';
import fsProm from 'fs/promises';
import HakEnv from "../../scripts/hak/hakEnv";
import { DependencyInfo } from "../../scripts/hak/dep";
import HakEnv from '../../scripts/hak/hakEnv';
import { DependencyInfo } from '../../scripts/hak/dep';
export default async function(hakEnv: HakEnv, moduleInfo: DependencyInfo): Promise<void> {
if (hakEnv.wantsStaticSqlCipher()) {
// of course tcl doesn't have a --version
await new Promise<void>((resolve, reject) => {
const proc = childProcess.spawn('tclsh', [], {
stdio: ['pipe', 'ignore', 'ignore'],
});
proc.on('exit', (code) => {
if (code !== 0) {
reject("Can't find tclsh - have you installed TCL?");
} else {
resolve();
}
});
proc.stdin.end();
});
}
export default async function (hakEnv: HakEnv, moduleInfo: DependencyInfo): Promise<void> {
const tools = [
["rustc", "--version"],
["python", "--version"], // node-gyp uses python for reasons beyond comprehension
['rustc', '--version'],
['python', '--version'], // node-gyp uses python for reasons beyond comprehension
];
if (hakEnv.isWin()) {
tools.push(["perl", "--version"]); // for openssl configure
tools.push(["nasm", "-v"]); // for openssl building
tools.push(["patch", "--version"]); // to patch sqlcipher Makefile.msc
tools.push(["nmake", "/?"]);
tools.push(['perl', '--version']); // for openssl configure
tools.push(['nasm', '-v']); // for openssl building
tools.push(['patch', '--version']); // to patch sqlcipher Makefile.msc
tools.push(['nmake', '/?']);
} else {
tools.push(["make", "--version"]);
tools.push(['make', '--version']);
}
for (const tool of tools) {
await new Promise<void>((resolve, reject) => {
const proc = childProcess.spawn(tool[0], tool.slice(1), {
stdio: ["ignore"],
stdio: ['ignore'],
});
proc.on("exit", (code) => {
proc.on('exit', (code) => {
if (code !== 0) {
reject("Can't find " + tool);
} else {
@@ -51,26 +68,19 @@ export default async function (hakEnv: HakEnv, moduleInfo: DependencyInfo): Prom
// Ensure Rust target exists (nb. we avoid depending on rustup)
await new Promise((resolve, reject) => {
const rustc = childProcess.execFile(
"rustc",
["--target", hakEnv.getTargetId(), "--emit=obj", "-o", "tmp", "-"],
(err, out) => {
if (err) {
reject(
"rustc can't build for target " +
hakEnv.getTargetId() +
": ensure target is installed via `rustup target add " +
hakEnv.getTargetId() +
"` " +
"or your package manager if not using `rustup`",
);
}
fsProm.unlink("tmp").then(resolve);
},
);
rustc.stdin!.write("fn main() {}");
rustc.stdout!.pipe(process.stdout);
rustc.stderr!.pipe(process.stderr);
rustc.stdin!.end();
const rustc = childProcess.execFile('rustc', [
'--target', hakEnv.getTargetId(), '-o', 'tmp', '-',
], (err, out) => {
if (err) {
reject(
"rustc can't build for target " + hakEnv.getTargetId() +
": ensure target is installed via `rustup target add " + hakEnv.getTargetId() + "` " +
"or your package manager if not using `rustup`",
);
}
fsProm.unlink('tmp').then(resolve);
});
rustc.stdin.write('fn main() {}');
rustc.stdin.end();
});
}

View File

@@ -0,0 +1,132 @@
/*
Copyright 2020 The Matrix.org Foundation C.I.C.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import path from 'path';
import childProcess from 'child_process';
import fs from 'fs';
import fsProm from 'fs/promises';
import needle from 'needle';
import tar from 'tar';
import HakEnv from '../../scripts/hak/hakEnv';
import { DependencyInfo } from '../../scripts/hak/dep';
export default async function(hakEnv: HakEnv, moduleInfo: DependencyInfo): Promise<void> {
if (hakEnv.wantsStaticSqlCipher()) {
await getSqlCipher(hakEnv, moduleInfo);
}
if (hakEnv.isWin()) {
await getOpenSsl(hakEnv, moduleInfo);
}
}
async function getSqlCipher(hakEnv: HakEnv, moduleInfo: DependencyInfo): Promise<void> {
const version = moduleInfo.cfg.dependencies.sqlcipher;
const sqlCipherDir = path.join(moduleInfo.moduleTargetDotHakDir, `sqlcipher-${version}`);
let haveSqlcipher: boolean;
try {
await fsProm.stat(sqlCipherDir);
haveSqlcipher = true;
} catch (e) {
haveSqlcipher = false;
}
if (haveSqlcipher) return;
const sqlCipherTarball = path.join(moduleInfo.moduleDotHakDir, `sqlcipher-${version}.tar.gz`);
let haveSqlcipherTar: boolean;
try {
await fsProm.stat(sqlCipherTarball);
haveSqlcipherTar = true;
} catch (e) {
haveSqlcipherTar = false;
}
if (!haveSqlcipherTar) {
const bob = needle('get', `https://github.com/sqlcipher/sqlcipher/archive/v${version}.tar.gz`, {
follow: 10,
output: sqlCipherTarball,
});
await bob;
}
// Extract the tarball to per-target directories, so we avoid cross-contaminating archs
await tar.x({
file: sqlCipherTarball,
cwd: moduleInfo.moduleTargetDotHakDir,
});
if (hakEnv.isWin()) {
// On Windows, we need to patch the makefile because it forces TEMP_STORE to
// default to files (1) but the README specifically says you '*must*' set it
// to 2 (default to memory).
const patchFile = path.join(moduleInfo.moduleHakDir, `sqlcipher-${version}-win.patch`);
await new Promise<void>((resolve, reject) => {
const readStream = fs.createReadStream(patchFile);
const proc = childProcess.spawn(
'patch',
['-p1'],
{
cwd: sqlCipherDir,
stdio: ['pipe', 'inherit', 'inherit'],
},
);
proc.on('exit', (code) => {
code ? reject(code) : resolve();
});
readStream.pipe(proc.stdin);
});
}
}
async function getOpenSsl(hakEnv: HakEnv, moduleInfo: DependencyInfo): Promise<void> {
const version = moduleInfo.cfg.dependencies.openssl;
const openSslDir = path.join(moduleInfo.moduleTargetDotHakDir, `openssl-${version}`);
let haveOpenSsl: boolean;
try {
await fsProm.stat(openSslDir);
haveOpenSsl = true;
} catch (e) {
haveOpenSsl = false;
}
if (haveOpenSsl) return;
const openSslTarball = path.join(moduleInfo.moduleDotHakDir, `openssl-${version}.tar.gz`);
let haveOpenSslTar: boolean;
try {
await fsProm.stat(openSslTarball);
haveOpenSslTar = true;
} catch (e) {
haveOpenSslTar = false;
}
if (!haveOpenSslTar) {
await needle('get', `https://www.openssl.org/source/openssl-${version}.tar.gz`, {
follow: 10,
output: openSslTarball,
});
}
console.log("extracting " + openSslTarball + " in " + moduleInfo.moduleTargetDotHakDir);
await tar.x({
file: openSslTarball,
cwd: moduleInfo.moduleTargetDotHakDir,
});
}

View File

@@ -1,7 +1,13 @@
{
"scripts": {
"check": "check.ts",
"fetchDeps": "fetchDeps.ts",
"build": "build.ts"
},
"copy": "index.node"
"prune": "native",
"copy": "native/index.node",
"dependencies": {
"openssl": "1.1.1f",
"sqlcipher": "4.3.0"
}
}

View File

@@ -0,0 +1,14 @@
diff -Nur sqlcipher-4.3.0-orig/Makefile.msc sqlcipher-4.3.0-mod/Makefile.msc
--- sqlcipher-4.3.0-orig/Makefile.msc 2019-12-20 16:40:26.000000000 +0000
+++ sqlcipher-4.3.0-mod/Makefile.msc 2020-02-14 11:31:39.000000000 +0000
@@ -985,8 +985,8 @@
# default to file, 2 to default to memory, and 3 to force temporary
# tables to always be in memory.
#
-TCC = $(TCC) -DSQLITE_TEMP_STORE=1
-RCC = $(RCC) -DSQLITE_TEMP_STORE=1
+TCC = $(TCC) -DSQLITE_TEMP_STORE=2
+RCC = $(RCC) -DSQLITE_TEMP_STORE=2
# Enable/disable loadable extensions, and other optional features
# based on configuration. (-DSQLITE_OMIT*, -DSQLITE_ENABLE*).

View File

@@ -1,14 +1,17 @@
{
"compilerOptions": {
"moduleResolution": "node",
"esModuleInterop": true,
"target": "es2016",
"sourceMap": false,
"strict": true,
"lib": ["es2020"]
},
"include": ["../scripts/@types/*.d.ts", "./**/*.ts"],
"ts-node": {
"transpileOnly": true
}
"compilerOptions": {
"moduleResolution": "node",
"esModuleInterop": true,
"target": "es2016",
"sourceMap": false,
"lib": [
"es2019",
]
},
"include": [
"./**/*.ts"
],
"ts-node": {
"transpileOnly": true
}
}

View File

@@ -1,37 +0,0 @@
{
"readKey": "a7688614897667993891-866e2615b0a22e6ccef56aea9b10e815efa3e1296752a7a30bd9925f1a8f33e7",
"upload": {
"type": "json",
"keySeparator": "|",
"deprecate": "file",
"features": ["plural_object", "filter_untranslated"],
"files": [
{
"pattern": "src/i18n/strings/en_EN.json",
"file": "element-desktop.json",
"lang": "inherited"
},
{
"group": "existing",
"pattern": "src/i18n/strings/*.json",
"file": "element-desktop.json",
"excludes": ["src/i18n/strings/en_EN.json"],
"lang": "${autodetectLang}"
}
]
},
"download": {
"files": [
{
"conditions": "equals: ${file}, element-desktop.json",
"output": "src/i18n/strings/${langLsrUnderscore}.json"
}
],
"includeSourceLang": "${includeSourceLang|false}",
"langAliases": {
"en": "en-EN"
}
}
}

View File

@@ -1,213 +1,168 @@
{
"name": "element-desktop",
"productName": "Element",
"main": "lib/electron-main.js",
"version": "1.11.47",
"description": "A feature-rich client for Matrix.org",
"author": "Element",
"homepage": "https://element.io",
"repository": {
"type": "git",
"url": "https://github.com/vector-im/element-desktop"
"name": "element-desktop",
"productName": "Element",
"main": "lib/electron-main.js",
"version": "1.11.10",
"description": "A feature-rich client for Matrix.org",
"author": "Element",
"repository": {
"type": "git",
"url": "https://github.com/vector-im/element-desktop"
},
"license": "Apache-2.0",
"files": [],
"scripts": {
"i18n": "matrix-gen-i18n",
"prunei18n": "matrix-prune-i18n",
"diff-i18n": "cp src/i18n/strings/en_EN.json src/i18n/strings/en_EN_orig.json && matrix-gen-i18n && matrix-compare-i18n-files src/i18n/strings/en_EN_orig.json src/i18n/strings/en_EN.json",
"mkdirs": "mkdirp packages deploys",
"fetch": "yarn run mkdirs && node scripts/fetch-package.js",
"asar-webapp": "asar p webapp webapp.asar",
"start": "yarn run build:ts && yarn run build:res && electron .",
"lint": "yarn lint:types && yarn lint:js",
"lint:js": "eslint --max-warnings 0 src scripts hak",
"lint:js-fix": "eslint --fix src scripts hak",
"lint:types": "tsc --noEmit && tsc -p scripts/hak/tsconfig.json --noEmit && tsc -p hak/tsconfig.json --noEmit",
"build:native": "yarn run hak",
"build:native:universal": "yarn run hak --target x86_64-apple-darwin fetchandbuild && yarn run hak --target aarch64-apple-darwin fetchandbuild && yarn run hak --target x86_64-apple-darwin --target aarch64-apple-darwin copyandlink",
"build:32": "yarn run build:ts && yarn run build:res && electron-builder --ia32",
"build:64": "yarn run build:ts && yarn run build:res && electron-builder --x64",
"build:universal": "yarn run build:ts && yarn run build:res && electron-builder --universal",
"build": "yarn run build:ts && yarn run build:res && electron-builder",
"build:ts": "tsc",
"build:res": "node scripts/copy-res.js",
"docker:setup": "docker build -t element-desktop-dockerbuild dockerbuild",
"docker:build:native": "scripts/in-docker.sh yarn run hak",
"docker:build": "scripts/in-docker.sh yarn run build",
"docker:install": "scripts/in-docker.sh yarn install",
"debrepo": "scripts/mkrepo.sh",
"clean": "rimraf webapp.asar dist packages deploys lib",
"hak": "ts-node scripts/hak/index.ts",
"test": "jest"
},
"dependencies": {
"auto-launch": "^5.0.5",
"counterpart": "^0.18.6",
"electron-store": "^8.0.2",
"electron-window-state": "^5.0.3",
"minimist": "^1.2.6",
"png-to-ico": "^2.1.1",
"request": "^2.88.2"
},
"devDependencies": {
"@babel/core": "^7.18.10",
"@babel/preset-env": "^7.18.10",
"@babel/preset-typescript": "^7.18.6",
"@types/auto-launch": "^5.0.1",
"@types/counterpart": "^0.18.1",
"@types/detect-libc": "^1.0.0",
"@types/jest": "^28",
"@types/minimist": "^1.2.1",
"@types/mkdirp": "^1.0.2",
"@types/pacote": "^11.1.1",
"@types/rimraf": "^3.0.2",
"@typescript-eslint/eslint-plugin": "^5.6.0",
"@typescript-eslint/parser": "^5.6.0",
"allchange": "^1.0.6",
"app-builder-lib": "^22.14.10",
"asar": "^2.0.1",
"babel-jest": "^28.1.3",
"chokidar": "^3.5.2",
"detect-libc": "^1.0.3",
"electron": "^20",
"electron-builder": "22.11.4",
"electron-builder-squirrel-windows": "22.11.4",
"electron-devtools-installer": "^3.1.1",
"electron-notarize": "^1.0.0",
"eslint": "7.18.0",
"eslint-config-google": "^0.14.0",
"eslint-plugin-import": "^2.25.4",
"eslint-plugin-matrix-org": "^0.4.0",
"expect-playwright": "^0.8.0",
"find-npm-prefix": "^1.0.2",
"fs-extra": "^8.1.0",
"glob": "^7.1.6",
"jest": "^28",
"matrix-web-i18n": "^1.3.0",
"mkdirp": "^1.0.3",
"needle": "^2.5.0",
"node-pre-gyp": "^0.15.0",
"pacote": "^11.3.5",
"playwright": "^1.25.0",
"rimraf": "^3.0.2",
"tar": "^6.1.2",
"ts-jest": "^28.0.8",
"ts-node": "^10.4.0",
"typescript": "4.5.5"
},
"hakDependencies": {
"matrix-seshat": "^2.3.3",
"keytar": "^7.9.0"
},
"resolutions": {
"@types/node": "16.11.38"
},
"build": {
"appId": "im.riot.app",
"asarUnpack": "**/*.node",
"files": [
"package.json",
{
"from": ".hak/hakModules",
"to": "node_modules"
},
"lib/**"
],
"extraResources": [
{
"from": "res/img",
"to": "img"
},
"webapp.asar"
],
"linux": {
"target": "deb",
"category": "Network;InstantMessaging;Chat",
"maintainer": "support@element.io",
"desktop": {
"StartupWMClass": "element"
}
},
"license": "Apache-2.0",
"files": [],
"engines": {
"node": ">=16.0.0"
"mac": {
"category": "public.app-category.social-networking",
"darkModeSupport": true
},
"scripts": {
"i18n": "matrix-gen-i18n && yarn i18n:sort && yarn i18n:lint",
"i18n:sort": "jq --sort-keys '.' src/i18n/strings/en_EN.json > src/i18n/strings/en_EN.json.tmp && mv src/i18n/strings/en_EN.json.tmp src/i18n/strings/en_EN.json",
"i18n:lint": "prettier --write src/i18n/strings/ --ignore-path /dev/null",
"i18n:diff": "cp src/i18n/strings/en_EN.json src/i18n/strings/en_EN_orig.json && yarn i18n && matrix-compare-i18n-files src/i18n/strings/en_EN_orig.json src/i18n/strings/en_EN.json",
"mkdirs": "mkdirp packages deploys",
"fetch": "yarn run mkdirs && ts-node scripts/fetch-package.ts",
"asar-webapp": "asar p webapp webapp.asar",
"start": "yarn run build:ts && yarn run build:res && electron .",
"lint": "yarn lint:types && yarn lint:js",
"lint:js": "yarn lint:js:src && yarn lint:js:test && yarn lint:js:scripts && yarn lint:js:hak",
"lint:js:src": "eslint --max-warnings 0 src",
"lint:js:test": "eslint --max-warnings 0 --config .eslintrc-test.js test",
"lint:js:scripts": "eslint --max-warnings 0 --config .eslintrc-scripts.js scripts",
"lint:js:hak": "eslint --max-warnings 0 --config .eslintrc-hak.js hak",
"lint:js-fix": "yarn lint:js-fix:src &&yarn lint:js-fix:test && yarn lint:js-fix:scripts && yarn lint:js-fix:hak",
"lint:js-fix:src": "eslint --fix --max-warnings 0 src",
"lint:js-fix:test": "eslint --fix --max-warnings 0 --config .eslintrc-test.js test",
"lint:js-fix:scripts": "eslint --fix --max-warnings 0 --config .eslintrc-scripts.js scripts",
"lint:js-fix:hak": "eslint --fix --max-warnings 0 --config .eslintrc-hak.js hak",
"lint:types": "yarn lint:types:src && yarn lint:types:test && yarn lint:types:scripts && yarn lint:types:hak",
"lint:types:src": "tsc --noEmit",
"lint:types:test": "tsc --noEmit -p test/tsconfig.json",
"lint:types:scripts": "tsc --noEmit -p scripts/tsconfig.json",
"lint:types:hak": "tsc --noEmit -p hak/tsconfig.json",
"build:native": "yarn run hak",
"build:native:universal": "yarn run hak --target x86_64-apple-darwin fetchandbuild && yarn run hak --target aarch64-apple-darwin fetchandbuild && yarn run hak --target x86_64-apple-darwin --target aarch64-apple-darwin copyandlink",
"build:32": "yarn run build:ts && yarn run build:res && electron-builder --ia32",
"build:64": "yarn run build:ts && yarn run build:res && electron-builder --x64",
"build:universal": "yarn run build:ts && yarn run build:res && electron-builder --universal",
"build": "yarn run build:ts && yarn run build:res && electron-builder",
"build:ts": "tsc",
"build:res": "ts-node scripts/copy-res.ts",
"docker:setup": "docker build -t element-desktop-dockerbuild dockerbuild",
"docker:build:native": "scripts/in-docker.sh yarn run hak",
"docker:build": "scripts/in-docker.sh yarn run build",
"docker:install": "scripts/in-docker.sh yarn install",
"clean": "rimraf webapp.asar dist packages deploys lib",
"hak": "ts-node scripts/hak/index.ts",
"test": "jest"
"win": {
"target": [
"squirrel",
"msi"
],
"sign": "scripts/electron_winSign"
},
"dependencies": {
"@sentry/electron": "^4.3.0",
"auto-launch": "^5.0.5",
"counterpart": "^0.18.6",
"electron-clear-data": "^1.0.5",
"electron-store": "^8.0.2",
"electron-window-state": "^5.0.3",
"minimist": "^1.2.6",
"node-fetch": "^2",
"png-to-ico": "^2.1.1",
"uuid": "^9.0.0"
"msi": {
"perMachine": true
},
"devDependencies": {
"@babel/core": "^7.18.10",
"@babel/preset-env": "^7.18.10",
"@babel/preset-typescript": "^7.18.6",
"@electron/asar": "^3.2.3",
"@electron/notarize": "^2.0.0",
"@types/auto-launch": "^5.0.1",
"@types/counterpart": "^0.18.1",
"@types/detect-libc": "^1.0.0",
"@types/jest": "^29.0.0",
"@types/minimist": "^1.2.1",
"@types/mkdirp": "^1.0.2",
"@types/node": "16.18.52",
"@types/pacote": "^11.1.1",
"@types/tar": "^6.1.3",
"@types/uuid": "^9.0.2",
"@typescript-eslint/eslint-plugin": "^5.42.0",
"@typescript-eslint/parser": "^5.42.0",
"allchange": "^1.0.6",
"app-builder-lib": "24.6.5",
"babel-jest": "^29.0.0",
"chokidar": "^3.5.2",
"detect-libc": "^1.0.3",
"electron": "^26.2.1",
"electron-builder": "24.6.4",
"electron-builder-squirrel-windows": "24.6.5",
"electron-devtools-installer": "^3.2.0",
"eslint": "^8.26.0",
"eslint-config-google": "^0.14.0",
"eslint-config-prettier": "^9.0.0",
"eslint-plugin-import": "^2.25.4",
"eslint-plugin-matrix-org": "^1.0.0",
"eslint-plugin-unicorn": "^48.0.0",
"expect-playwright": "^0.8.0",
"find-npm-prefix": "^1.0.2",
"fs-extra": "^11.0.0",
"glob": "^10.0.0",
"jest": "^29.0.0",
"matrix-web-i18n": "^3.1.3",
"mkdirp": "^3.0.0",
"node-pre-gyp": "^0.17.0",
"pacote": "^17.0.0",
"playwright": "^1.25.0",
"prettier": "^2.8.1",
"rimraf": "^5.0.0",
"tar": "^6.1.2",
"ts-jest": "^29.0.0",
"ts-node": "^10.9.1",
"typescript": "5.1.6"
"directories": {
"output": "dist"
},
"hakDependencies": {
"matrix-seshat": "^3.0.1",
"keytar": "^7.9.0"
},
"resolutions": {
"@types/node": "16.18.52"
},
"build": {
"appId": "im.riot.app",
"asarUnpack": "**/*.node",
"files": [
"package.json",
{
"from": ".hak/hakModules",
"to": "node_modules"
},
"lib/**"
],
"extraResources": [
{
"from": "res/img",
"to": "img"
},
"webapp.asar"
],
"linux": {
"target": [
"tar.gz",
"deb"
],
"category": "Network;InstantMessaging;Chat",
"maintainer": "support@element.io",
"icon": "build/icons"
},
"deb": {
"packageCategory": "net",
"depends": [
"libgtk-3-0",
"libnotify4",
"libnss3",
"libxss1",
"libxtst6",
"xdg-utils",
"libatspi2.0-0",
"libuuid1",
"libsecret-1-0",
"libasound2",
"libgbm1"
],
"recommends": [
"libsqlcipher0",
"element-io-archive-keyring"
]
},
"mac": {
"category": "public.app-category.social-networking",
"darkModeSupport": true,
"hardenedRuntime": true,
"gatekeeperAssess": true,
"entitlements": "./build/entitlements.mac.plist",
"icon": "build/icons/icon.icns"
},
"win": {
"target": [
"squirrel"
],
"signingHashAlgorithms": [
"sha256"
],
"icon": "build/icons/icon.ico"
},
"directories": {
"output": "dist"
},
"protocols": [
{
"name": "element",
"schemes": [
"element"
]
}
"afterPack": "scripts/electron_afterPack",
"afterSign": "scripts/electron_afterSign",
"protocols": [
{
"name": "element",
"schemes": [
"element"
]
},
"jest": {
"testEnvironment": "node",
"testMatch": [
"<rootDir>/test/**/*-test.[jt]s?(x)"
],
"setupFilesAfterEnv": [
"expect-playwright"
]
}
}
]
},
"jest": {
"testEnvironment": "node",
"testMatch": [
"<rootDir>/test/**/*-test.[jt]s?(x)"
],
"setupFilesAfterEnv": [
"expect-playwright"
]
}
}

View File

@@ -1,19 +0,0 @@
/*
Copyright 2022 New Vector Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
declare module "find-npm-prefix" {
export default function findPrefix(dir: string): Promise<string>;
}

View File

@@ -1,20 +0,0 @@
/*
Copyright 2022 New Vector Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
declare module "node-pre-gyp/lib/util/versioning" {
// eslint-disable-next-line @typescript-eslint/naming-convention
export function get_runtime_abi(runtime: string, version: string): string;
}

127
scripts/copy-res.js Executable file
View File

@@ -0,0 +1,127 @@
#!/usr/bin/env node
// copies resources into the lib directory.
const parseArgs = require('minimist');
const chokidar = require('chokidar');
const path = require('path');
const fs = require('fs');
const argv = parseArgs(process.argv.slice(2), {});
const watch = argv.w;
const verbose = argv.v;
function errCheck(err) {
if (err) {
console.error(err.message);
process.exit(1);
}
}
const I18N_BASE_PATH = "src/i18n/strings/";
const INCLUDE_LANGS = fs.readdirSync(I18N_BASE_PATH).filter(fn => fn.endsWith(".json"));
// Ensure lib, lib/i18n and lib/i18n/strings all exist
fs.mkdirSync('lib/i18n/strings', { recursive: true });
function genLangFile(file, dest) {
let translations = {};
[file].forEach(function(f) {
if (fs.existsSync(f)) {
try {
Object.assign(
translations,
JSON.parse(fs.readFileSync(f).toString()),
);
} catch (e) {
console.error("Failed: " + f, e);
throw e;
}
}
});
translations = weblateToCounterpart(translations);
const json = JSON.stringify(translations, null, 4);
const filename = path.basename(file);
fs.writeFileSync(dest + filename, json);
if (verbose) {
console.log("Generated language file: " + filename);
}
}
/*
* Convert translation key from weblate format
* (which only supports a single level) to counterpart
* which requires object values for 'count' translations.
*
* eg.
* "there are %(count)s badgers|one": "a badger",
* "there are %(count)s badgers|other": "%(count)s badgers"
* becomes
* "there are %(count)s badgers": {
* "one": "a badger",
* "other": "%(count)s badgers"
* }
*/
function weblateToCounterpart(inTrs) {
const outTrs = {};
for (const key of Object.keys(inTrs)) {
const keyParts = key.split('|', 2);
if (keyParts.length === 2) {
let obj = outTrs[keyParts[0]];
if (obj === undefined) {
obj = outTrs[keyParts[0]] = {};
} else if (typeof obj === "string") {
// This is a transitional edge case if a string went from singular to pluralised and both still remain
// in the translation json file. Use the singular translation as `other` and merge pluralisation atop.
obj = outTrs[keyParts[0]] = {
"other": inTrs[key],
};
console.warn("Found entry in i18n file in both singular and pluralised form", keyParts[0]);
}
obj[keyParts[1]] = inTrs[key];
} else {
outTrs[key] = inTrs[key];
}
}
return outTrs;
}
/*
watch the input files for a given language,
regenerate the file, and regenerating languages.json with the new filename
*/
function watchLanguage(file, dest) {
// XXX: Use a debounce because for some reason if we read the language
// file immediately after the FS event is received, the file contents
// appear empty. Possibly https://github.com/nodejs/node/issues/6112
let makeLangDebouncer;
const makeLang = () => {
if (makeLangDebouncer) {
clearTimeout(makeLangDebouncer);
}
makeLangDebouncer = setTimeout(() => {
genLangFile(file, dest);
}, 500);
};
chokidar.watch(file)
.on('add', makeLang)
.on('change', makeLang)
.on('error', errCheck);
}
// language resources
const I18N_DEST = "lib/i18n/strings/";
INCLUDE_LANGS.forEach((file) => {
genLangFile(I18N_BASE_PATH + file, I18N_DEST);
}, {});
if (watch) {
INCLUDE_LANGS.forEach(file => watchLanguage(I18N_BASE_PATH + file, I18N_DEST));
}

View File

@@ -1,81 +0,0 @@
#!/usr/bin/env -S npx ts-node
// copies resources into the lib directory.
import parseArgs from "minimist";
import * as chokidar from "chokidar";
import * as path from "path";
import * as fs from "fs";
const argv = parseArgs(process.argv.slice(2), {});
const watch = argv.w;
const verbose = argv.v;
function errCheck(err?: Error): void {
if (err) {
console.error(err.message);
process.exit(1);
}
}
const I18N_BASE_PATH = "src/i18n/strings/";
const INCLUDE_LANGS = fs.readdirSync(I18N_BASE_PATH).filter((fn) => fn.endsWith(".json"));
// Ensure lib, lib/i18n and lib/i18n/strings all exist
fs.mkdirSync("lib/i18n/strings", { recursive: true });
type Translations = Record<string, Record<string, string> | string>;
function genLangFile(file: string, dest: string): void {
const translations: Translations = {};
[file].forEach(function (f) {
if (fs.existsSync(f)) {
try {
Object.assign(translations, JSON.parse(fs.readFileSync(f).toString()));
} catch (e) {
console.error("Failed: " + f, e);
throw e;
}
}
});
const json = JSON.stringify(translations, null, 4);
const filename = path.basename(file);
fs.writeFileSync(dest + filename, json);
if (verbose) {
console.log("Generated language file: " + filename);
}
}
/*
watch the input files for a given language,
regenerate the file, and regenerating languages.json with the new filename
*/
function watchLanguage(file: string, dest: string): void {
// XXX: Use a debounce because for some reason if we read the language
// file immediately after the FS event is received, the file contents
// appear empty. Possibly https://github.com/nodejs/node/issues/6112
let makeLangDebouncer: NodeJS.Timeout | undefined;
const makeLang = (): void => {
if (makeLangDebouncer) {
clearTimeout(makeLangDebouncer);
}
makeLangDebouncer = setTimeout(() => {
genLangFile(file, dest);
}, 500);
};
chokidar.watch(file).on("add", makeLang).on("change", makeLang).on("error", errCheck);
}
// language resources
const I18N_DEST = "lib/i18n/strings/";
INCLUDE_LANGS.forEach((file): void => {
genLangFile(I18N_BASE_PATH + file, I18N_DEST);
}, {});
if (watch) {
INCLUDE_LANGS.forEach((file) => watchLanguage(I18N_BASE_PATH + file, I18N_DEST));
}

View File

@@ -0,0 +1,15 @@
const fsProm = require('fs').promises;
const path = require('path');
exports.default = async function(context) {
const { electronPlatformName, appOutDir } = context;
// Squirrel windows will try to relaunch the app using an executable of the same name as
// before in the new version, so will fail if the executable is now called something else.
// We add a fake Riot.exe that it can run which runs the real one.
// This also gets signed automatically, presumably because electron-builder just looks for all
// exe files and signs them all...
if (electronPlatformName === 'win32') {
await fsProm.copyFile('build/rebrand_stub/rebrand_stub.exe', path.join(appOutDir, "Riot.exe"));
}
};

View File

@@ -0,0 +1,33 @@
const { notarize } = require('electron-notarize');
let warned = false;
exports.default = async function(context) {
const { electronPlatformName, appOutDir } = context;
const appId = context.packager.info.appInfo.id;
if (electronPlatformName === 'darwin') {
const appName = context.packager.appInfo.productFilename;
// We get the password from keychain. The keychain stores
// user IDs too, but apparently altool can't get the user ID
// from the keychain, so we need to get it from the environment.
const userId = process.env.NOTARIZE_APPLE_ID;
if (userId === undefined) {
if (!warned) {
console.log("*************************************");
console.log("* NOTARIZE_APPLE_ID is not set. *");
console.log("* This build will NOT be notarised. *");
console.log("*************************************");
warned = true;
}
return;
}
console.log("Notarising macOS app. This may be some time.");
return await notarize({
appBundleId: appId,
appPath: `${appOutDir}/${appName}.app`,
appleId: userId,
appleIdPassword: '@keychain:NOTARIZE_CREDS',
});
}
};

View File

@@ -0,0 +1,78 @@
const { execFile } = require('child_process');
// Loosely based on computeSignToolArgs from app-builder-lib/src/codeSign/windowsCodeSign.ts
function computeSignToolArgs(options, keyContainer) {
const args = [];
if (process.env.ELECTRON_BUILDER_OFFLINE !== "true") {
const timestampingServiceUrl = options.options.timeStampServer || "http://timestamp.digicert.com";
args.push(
options.isNest || options.hash === "sha256" ? "/tr" : "/t",
options.isNest || options.hash === "sha256" ? (
options.options.rfc3161TimeStampServer || "http://timestamp.comodoca.com/rfc3161"
) : timestampingServiceUrl,
);
}
args.push('/kc', keyContainer);
// To use the hardware token (this should probably be less hardcoded)
args.push('/csp', 'eToken Base Cryptographic Provider');
// The certificate file. Somehow this appears to be the only way to specify
// the cert that works. If you specify the subject name or hash, it will
// say it can't associate the private key to the certificate.
// TODO: Find a way to pass this through from the electron-builder config
// so we don't have to hard-code this here
// fwiw https://stackoverflow.com/questions/17927895/automate-extended-validation-ev-code-signing
// is about the most useful resource on automating code signing...
args.push('/f', 'element.io\\New_Vector_Ltd.pem');
if (options.hash !== "sha1") {
args.push("/fd", options.hash);
if (process.env.ELECTRON_BUILDER_OFFLINE !== "true") {
args.push("/td", "sha256");
}
}
// msi does not support dual-signing
if (options.isNest) {
args.push("/as");
}
// https://github.com/electron-userland/electron-builder/issues/2875#issuecomment-387233610
args.push("/debug");
// must be last argument
args.push(options.path);
return args;
}
let warned = false;
exports.default = async function(options) {
const keyContainer = process.env.SIGNING_KEY_CONTAINER;
if (keyContainer === undefined) {
if (!warned) {
console.warn(
"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n" +
"! Skipping Windows signing. !\n" +
"! SIGNING_KEY_CONTAINER not defined. !\n" +
"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!",
);
warned = true;
}
return;
}
return new Promise((resolve, reject) => {
const args = ['sign'].concat(computeSignToolArgs(options, keyContainer));
execFile('signtool', args, {}, (error, stdout) => {
if (error) {
console.error("signtool failed with code " + error);
reject("signtool failed with code " + error);
console.log(stdout);
} else {
resolve();
}
});
});
};

View File

@@ -1,41 +1,43 @@
#!/usr/bin/env -S npx ts-node --resolveJsonModule
#!/usr/bin/env node
import * as path from "path";
import { createWriteStream, promises as fs } from "fs";
import * as childProcess from "child_process";
import tar from "tar";
import * as asar from "@electron/asar";
import fetch from "node-fetch";
import { promises as stream } from "stream";
const process = require('process');
const path = require('path');
const fs = require('fs');
const fsPromises = require('fs').promises;
const childProcess = require('child_process');
const tar = require('tar');
const asar = require('asar');
const needle = require('needle');
import riotDesktopPackageJson from "../package.json";
import { setPackageVersion } from "./set-version";
const riotDesktopPackageJson = require('../package.json');
const { setPackageVersion } = require('./set-version.js');
const PUB_KEY_URL = "https://packages.riot.im/element-release-key.asc";
const PACKAGE_URL_PREFIX = "https://github.com/vector-im/element-web/releases/download/";
const DEVELOP_TGZ_URL = "https://develop.element.io/develop.tar.gz";
const ASAR_PATH = "webapp.asar";
const DEVELOP_TGZ_URL = "https://vector-im.github.io/element-web/develop.tar.gz";
const ASAR_PATH = 'webapp.asar';
async function downloadToFile(url: string, filename: string): Promise<void> {
async function downloadToFile(url, filename) {
console.log("Downloading " + url + "...");
try {
const resp = await fetch(url);
if (!resp.ok) throw new Error(`unexpected response ${resp.statusText}`);
if (!resp.body) throw new Error(`unexpected response has no body ${resp.statusText}`);
await stream.pipeline(resp.body, createWriteStream(filename));
await needle('get', url, null,
{
follow_max: 5,
output: filename,
},
);
} catch (e) {
console.error(e);
try {
await fs.unlink(filename);
await fsPromises.unlink(filename);
} catch (_) {}
throw e;
}
}
async function verifyFile(filename: string): Promise<void> {
return new Promise<void>((resolve, reject) => {
childProcess.execFile("gpg", ["--verify", filename + ".asc", filename], (error) => {
async function verifyFile(filename) {
return new Promise((resolve, reject) => {
childProcess.execFile('gpg', ['--verify', filename + '.asc', filename], (error) => {
if (error) {
reject(error);
} else {
@@ -45,35 +47,35 @@ async function verifyFile(filename: string): Promise<void> {
});
}
async function main(): Promise<number | undefined> {
async function main() {
let verify = true;
let importkey = false;
let pkgDir = "packages";
let deployDir = "deploys";
let cfgDir: string | undefined;
let targetVersion: string | undefined;
let filename: string | undefined;
let url: string | undefined;
let pkgDir = 'packages';
let deployDir = 'deploys';
let cfgDir;
let targetVersion;
let filename;
let url;
let setVersion = false;
while (process.argv.length > 2) {
switch (process.argv[2]) {
case "--noverify":
case '--noverify':
verify = false;
break;
case "--importkey":
case '--importkey':
importkey = true;
break;
case "--packages":
case '--packages':
process.argv.shift();
pkgDir = process.argv[2];
break;
case "--deploys":
case '--deploys':
process.argv.shift();
deployDir = process.argv[2];
break;
case "--cfgdir":
case "-d":
case '--cfgdir':
case '-d':
process.argv.shift();
cfgDir = process.argv[2];
break;
@@ -84,13 +86,13 @@ async function main(): Promise<number | undefined> {
}
if (targetVersion === undefined) {
targetVersion = "v" + riotDesktopPackageJson.version;
} else if (targetVersion !== "develop") {
targetVersion = 'v' + riotDesktopPackageJson.version;
} else if (targetVersion !== 'develop') {
setVersion = true; // version was specified
}
if (targetVersion === "develop") {
filename = "develop.tar.gz";
if (targetVersion === 'develop') {
filename = 'develop.tar.gz';
url = DEVELOP_TGZ_URL;
verify = false; // develop builds aren't signed
} else if (targetVersion.includes("://")) {
@@ -99,11 +101,11 @@ async function main(): Promise<number | undefined> {
verify = false; // manually verified
} else {
filename = `element-${targetVersion}.tar.gz`;
url = PACKAGE_URL_PREFIX + targetVersion + "/" + filename;
url = PACKAGE_URL_PREFIX + targetVersion + '/' + filename;
}
const haveGpg = await new Promise<boolean>((resolve) => {
childProcess.execFile("gpg", ["--version"], (error) => {
const haveGpg = await new Promise((resolve) => {
childProcess.execFile('gpg', ['--version'], (error) => {
resolve(!error);
});
});
@@ -114,8 +116,8 @@ async function main(): Promise<number | undefined> {
return 1;
}
await new Promise<boolean>((resolve) => {
const gpgProc = childProcess.execFile("gpg", ["--import"], (error) => {
await new Promise((resolve) => {
const gpgProc = childProcess.execFile('gpg', ['--import'], (error) => {
if (error) {
console.log("Failed to import key", error);
} else {
@@ -123,9 +125,7 @@ async function main(): Promise<number | undefined> {
}
resolve(!error);
});
fetch(PUB_KEY_URL).then((resp) => {
stream.pipeline(resp.body, gpgProc.stdin!);
});
needle.get(PUB_KEY_URL).pipe(gpgProc.stdin);
});
return 0;
}
@@ -143,17 +143,18 @@ async function main(): Promise<number | undefined> {
}
let haveDeploy = false;
let expectedDeployDir = path.join(deployDir, path.basename(filename).replace(/\.tar\.gz/, ""));
let expectedDeployDir = path.join(deployDir, path.basename(filename).replace(/\.tar\.gz/, ''));
try {
await fs.opendir(expectedDeployDir);
console.log(expectedDeployDir + " already exists");
haveDeploy = true;
} catch (e) {}
} catch (e) {
}
if (!haveDeploy) {
const outPath = path.join(pkgDir, filename);
try {
await fs.stat(outPath);
await fsPromises.stat(outPath);
console.log("Already have " + filename + ": not redownloading");
} catch (e) {
try {
@@ -166,11 +167,11 @@ async function main(): Promise<number | undefined> {
if (verify) {
try {
await fs.stat(outPath + ".asc");
await fsPromises.stat(outPath+'.asc');
console.log("Already have " + filename + ".asc: not redownloading");
} catch (e) {
try {
await downloadToFile(url + ".asc", outPath + ".asc");
await downloadToFile(url + '.asc', outPath + '.asc');
} catch (e) {
console.log("Failed to download " + url, e);
return 1;
@@ -191,7 +192,7 @@ async function main(): Promise<number | undefined> {
await tar.x({
file: outPath,
cwd: deployDir,
onentry: (entry) => {
onentry: entry => {
// Find the appropriate extraction path, only needed for `develop` where the dir name is unknown
if (entry.type === "Directory" && !path.join(deployDir, entry.path).startsWith(expectedDeployDir)) {
expectedDeployDir = path.join(deployDir, entry.path);
@@ -201,16 +202,17 @@ async function main(): Promise<number | undefined> {
}
try {
await fs.stat(ASAR_PATH);
await fsPromises.stat(ASAR_PATH);
console.log(ASAR_PATH + " already present: removing");
await fs.unlink(ASAR_PATH);
} catch (e) {}
await fsPromises.unlink(ASAR_PATH);
} catch (e) {
}
if (cfgDir.length) {
const configJsonSource = path.join(cfgDir, "config.json");
const configJsonDest = path.join(expectedDeployDir, "config.json");
console.log(configJsonSource + " -> " + configJsonDest);
await fs.copyFile(configJsonSource, configJsonDest);
const configJsonSource = path.join(cfgDir, 'config.json');
const configJsonDest = path.join(expectedDeployDir, 'config.json');
console.log(configJsonSource + ' -> ' + configJsonDest);
await fsPromises.copyFile(configJsonSource, configJsonDest);
} else {
console.log("Skipping config file");
}
@@ -219,7 +221,7 @@ async function main(): Promise<number | undefined> {
await asar.createPackage(expectedDeployDir, ASAR_PATH);
if (setVersion) {
const semVer = (await fs.readFile(path.join(expectedDeployDir, "version"), "utf-8")).trim();
const semVer = fs.readFileSync(path.join(expectedDeployDir, "version"), "utf-8").trim();
console.log("Updating version to " + semVer);
await setPackageVersion(semVer);
}
@@ -227,11 +229,9 @@ async function main(): Promise<number | undefined> {
console.log("Done!");
}
main()
.then((ret) => {
process.exit(ret);
})
.catch((e) => {
console.error(e);
process.exit(1);
});
main().then((ret) => {
process.exit(ret);
}).catch(e => {
console.error(e);
process.exit(1);
});

View File

@@ -1,129 +0,0 @@
#!/usr/bin/env -S npx ts-node
/**
* Script to generate electron-builder.json config files for builds which don't match package.json, e.g. nightlies
* This script has different outputs depending on your os platform.
*
* On Windows:
* Prefixes the nightly version with `0.0.1-nightly.` as it breaks if it is not semver
*
* On macOS:
* Passes --notarytool-team-id to build.mac.notarize.notarize if specified
*
* On Linux:
* Replaces spaces in the product name with dashes as spaces in paths can cause issues
* Passes --deb-custom-control to build.deb.fpm if specified
* Removes libsqlcipher0 recommended dependency if env SQLCIPHER_BUNDLED is asserted.
*/
import parseArgs from "minimist";
import fsProm from "fs/promises";
import * as os from "os";
import { Configuration } from "app-builder-lib";
const ELECTRON_BUILDER_CFG_FILE = "electron-builder.json";
const NIGHTLY_APP_ID = "im.riot.nightly";
const NIGHTLY_APP_NAME = "element-desktop-nightly";
const NIGHTLY_DEB_NAME = "element-nightly";
const argv = parseArgs<{
"nightly"?: string;
"signtool-thumbprint"?: string;
"signtool-subject-name"?: string;
"notarytool-team-id"?: string;
"deb-changelog"?: string;
}>(process.argv.slice(2), {
string: ["nightly", "deb-changelog", "signtool-thumbprint", "signtool-subject-name", "notarytool-team-id"],
});
type DeepWriteable<T> = { -readonly [P in keyof T]: DeepWriteable<T[P]> };
interface PackageBuild extends DeepWriteable<Omit<Configuration, "extraMetadata">> {
extraMetadata?: {
productName?: string;
name?: string;
version?: string;
description?: string;
};
}
interface Package {
build: PackageBuild;
productName: string;
description: string;
}
async function main(): Promise<number | void> {
// Electron builder doesn't overlay with the config in package.json, so load it here
const pkg: Package = JSON.parse(await fsProm.readFile("package.json", "utf8"));
const cfg: PackageBuild = {
...pkg.build,
extraMetadata: {
productName: pkg.productName,
description: pkg.description,
},
};
if (!cfg.deb!.fpm) cfg.deb!.fpm = [];
if (argv.nightly) {
cfg.appId = NIGHTLY_APP_ID;
cfg.extraMetadata!.productName += " Nightly";
cfg.extraMetadata!.name = NIGHTLY_APP_NAME;
cfg.extraMetadata!.description += " (nightly unstable build)";
cfg.deb!.fpm!.push("--name", NIGHTLY_DEB_NAME);
let version = argv.nightly;
if (os.platform() === "win32") {
// The windows packager relies on parsing this as semver, so we have to make it look like one.
// This will give our update packages really stupid names, but we probably can't change that either
// because squirrel windows parses them for the version too. We don't really care: nobody sees them.
// We just give the installer a static name, so you'll just see this in the 'about' dialog.
// Turns out if you use 0.0.0 here it makes Squirrel windows crash, so we use 0.0.1.
version = "0.0.1-nightly." + version;
}
cfg.extraMetadata!.version = version;
} else {
cfg.deb!.fpm!.push("--deb-field", "Replaces: riot-desktop (<< 1.7.0), riot-web (<< 1.7.0)");
cfg.deb!.fpm!.push("--deb-field", "Breaks: riot-desktop (<< 1.7.0), riot-web (<< 1.7.0)");
}
if (argv["signtool-thumbprint"] && argv["signtool-subject-name"]) {
cfg.win!.certificateSubjectName = argv["signtool-subject-name"];
cfg.win!.certificateSha1 = argv["signtool-thumbprint"];
}
if (argv["notarytool-team-id"]) {
cfg.mac!.notarize = {
teamId: argv["notarytool-team-id"],
};
}
if (os.platform() === "linux") {
// Electron crashes on debian if there's a space in the path.
// https://github.com/vector-im/element-web/issues/13171
cfg.extraMetadata!.productName = cfg.extraMetadata!.productName!.replace(/ /g, "-");
if (argv["deb-changelog"]) {
cfg.deb!.fpm!.push(`--deb-changelog=${argv["deb-changelog"]}`);
}
if (process.env.SQLCIPHER_BUNDLED) {
// Remove sqlcipher dependency when using bundled
cfg.deb!.recommends = cfg.deb!.recommends?.filter((d) => d !== "libsqlcipher0");
}
}
await fsProm.writeFile(ELECTRON_BUILDER_CFG_FILE, JSON.stringify(cfg, null, 4));
}
main()
.then((ret) => {
process.exit(ret!);
})
.catch((e) => {
console.error(e);
process.exit(1);
});

View File

@@ -1,41 +0,0 @@
#!/usr/bin/env -S npx ts-node
/**
* Script to generate incremental Nightly build versions, based on the latest Nightly build version of that kind.
* The version format is YYYYMMDDNN where NN is in case we need to do multiple versions in a day.
*
* NB. on windows, squirrel will try to parse the version number parts, including this string, into 32-bit integers,
* which is fine as long as we only add two digits to the end...
*/
import parseArgs from "minimist";
const argv = parseArgs<{
latest?: string;
}>(process.argv.slice(2), {
string: ["latest"],
});
function parseVersion(version: string): [Date, number] {
const year = parseInt(version.slice(0, 4), 10);
const month = parseInt(version.slice(4, 6), 10);
const day = parseInt(version.slice(6, 8), 10);
const num = parseInt(version.slice(8, 10), 10);
return [new Date(year, month - 1, day), num];
}
const [latestDate, latestNum] = argv.latest ? parseVersion(argv.latest) : [];
const now = new Date();
const month = (now.getMonth() + 1).toString().padStart(2, "0");
const date = now.getDate().toString().padStart(2, "0");
let buildNum = 1;
if (latestDate && new Date(latestDate).getDate().toString().padStart(2, "0") === date) {
buildNum = latestNum! + 1;
}
if (buildNum > 99) {
throw new Error("Maximum number of Nightlies exceeded on this day.");
}
console.log(now.getFullYear() + month + date + buildNum.toString().padStart(2, "0"));

View File

@@ -1,25 +0,0 @@
#!/usr/bin/env -S npx ts-node
/*
* Checks for the presence of a webapp, inspects its version and prints it
*/
import { versionFromAsar } from "./set-version";
async function main(): Promise<number> {
const version = await versionFromAsar();
console.log(version);
return 0;
}
if (require.main === module) {
main()
.then((ret) => {
process.exit(ret);
})
.catch((e) => {
console.error(e);
process.exit(1);
});
}

View File

@@ -1,40 +1,40 @@
# hak
hak
===
This tool builds native dependencies for element-desktop. Here follows some very minimal
documentation for it.
Goals:
- Must build compiled native node modules in a shippable state
(ie. only dynamically linked against libraries that will be on the
target system, all unnecessary files removed).
- Must be able to build any native module, no matter what build system
it uses (electron-rebuild is supposed to do this job but only works
for modules that use gyp).
* Must build compiled native node modules in a shippable state
(ie. only dynamically linked against libraries that will be on the
target system, all unnecessary files removed).
* Must be able to build any native module, no matter what build system
it uses (electron-rebuild is supposed to do this job but only works
for modules that use gyp).
It's also loosely designed to be a general tool and agnostic to what it's
actually building. It's used here to build modules for the electron app
but should work equally well for building modules for normal node.
# Running
Running
=======
Hak is invoked with a command and a dependency, eg. `yarn run hak fetch matrix-seshat`.
If no dependencies are given, hak runs the command on all dependencies.
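For illustration, the two invocation forms described above look like this (matrix-seshat is the example module named in this document):
yarn run hak fetch matrix-seshat   # run one command for a single dependency
yarn run hak fetch                 # run the same command for every hakDependency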
# Files
Files
=====
There are a lot of files involved:
- scripts/hak/... - The tool itself
- hak/[dependency] - Files provided by the app that tell hak how to build each of its native dependencies.
Contains a hak.json file and also some script files, each of which must be referenced in hak.json.
- .hak/ - Files generated by hak in the course of doing its job. Includes the dependency module itself and
any of the native dependency's native dependencies.
- .hak/[dependency]/build - An extracted copy of the dependency's node module used to build it.
- .hak/[dependency]/out - Another extracted copy of the dependency, this one contains only what will be shipped.
# Workings
* scripts/hak/... - The tool itself
* hak/[dependency] - Files provided by the app that tell hak how to build each of its native dependencies.
Contains a hak.json file and also some script files, each of which must be referenced in hak.json.
* .hak/ - Files generated by hak in the course of doing its job. Includes the dependency module itself and
any of the native dependency's native dependencies.
* .hak/[dependency]/build - An extracted copy of the dependency's node module used to build it.
* .hak/[dependency]/out - Another extracted copy of the dependency, this one contains only what will be shipped.
Workings
========
Hak works around native node modules that try to fetch or build their native component in
the npm 'install' phase - modules that do this will typically end up with native components
targeted to the build platform and the node that npm/yarn is using, which is no good for an
@@ -49,33 +49,33 @@ This also means that the dependencies cannot be listed in `dependencies` or
try to fetch their native parts. Instead, they are listed in `hakDependencies` which
hak reads to install them for you.
Hak will _not_ install dependencies for the copy of the module it links into your
Hak will *not* install dependencies for the copy of the module it links into your
project, so if your native module has javascript dependencies that are actually needed at
runtime (and not just to fetch / build the native parts), it won't work.
Hak will generate a `.yarnrc` in the project directory to set the link directory to its
own in the .hak directory (unless one already exists, in which case this is your problem).
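As a rough sketch of the hakDependencies arrangement described above (the version string is invented for illustration; matrix-seshat is simply the example module named earlier), the native module is declared in package.json like so, rather than under dependencies:
{
    "hakDependencies": {
        "matrix-seshat": "^2.3.0"
    }
}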
# Lifecycle
Lifecycle
=========
Hak is divided into lifecycle stages, in order:
* fetch - Download and extract the source of the dependency
* link - Link the copy of the dependency into your node_modules directory
* fetchDeps - Fetch & extract any native dependencies required to build the module.
* build - The Good Stuff. Configure and build any native dependencies, then the module itself.
* copy - Copy the built artifact from the module build directory to the module output directory.
- fetch - Download and extract the source of the dependency
- link - Link the copy of the dependency into your node_modules directory
- build - The Good Stuff. Configure and build any native dependencies, then the module itself.
- copy - Copy the built artifact from the module build directory to the module output directory.
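Run individually, a full pass over one module might look like the following sketch; the order mirrors the default command list in the hak entry point (check, fetch, build, copy, link, with fetchDeps between fetch and build in the variant of the list that still includes it):
yarn run hak check matrix-seshat
yarn run hak fetch matrix-seshat
yarn run hak build matrix-seshat
yarn run hak copy matrix-seshat
yarn run hak link matrix-seshat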
# hak.json
hak.json
========
The scripts section contains scripts used for lifecycle stages that need them (fetch, fetchDeps, build).
It also contains 'prune' and 'copy' which are globs of files to delete from the output module directory
and copy over from the module build directory to the output module directory, respectively.
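A hypothetical hak.json along those lines (every file name and glob below is invented purely for illustration; the real files live under hak/[dependency]):
{
    "scripts": {
        "check": "check.ts",
        "fetch": "fetch.ts",
        "build": "build.ts"
    },
    "prune": "node_modules",
    "copy": "*.node"
}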
# Shortcomings
Shortcomings
============
Hak doesn't know about dependencies between lifecycle stages, ie. it doesn't know that you need to
'fetch' and 'fetchDeps' before you can 'build', etc. You get to run each individually, and remember
the right order.
There is also a _lot_ of duplication in the command execution: we should abstract away
There is also a *lot* of duplication in the command execution: we should abstract away
some of the boilerplate required to run commands & so forth.

View File

@@ -14,14 +14,40 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
import path from "path";
import { rimraf } from "rimraf";
import path from 'path';
import rimraf from 'rimraf';
import { DependencyInfo } from "./dep";
import HakEnv from "./hakEnv";
import { DependencyInfo } from './dep';
import HakEnv from './hakEnv';
export default async function clean(hakEnv: HakEnv, moduleInfo: DependencyInfo): Promise<void> {
await rimraf(moduleInfo.moduleDotHakDir);
await rimraf(path.join(hakEnv.dotHakDir, "links", moduleInfo.name));
await rimraf(path.join(hakEnv.projectRoot, "node_modules", moduleInfo.name));
await new Promise<void>((resolve, reject) => {
rimraf(moduleInfo.moduleDotHakDir, (err: Error) => {
if (err) {
reject(err);
} else {
resolve();
}
});
});
await new Promise<void>((resolve, reject) => {
rimraf(path.join(hakEnv.dotHakDir, 'links', moduleInfo.name), (err: Error) => {
if (err) {
reject(err);
} else {
resolve();
}
});
});
await new Promise<void>((resolve, reject) => {
rimraf(path.join(hakEnv.projectRoot, 'node_modules', moduleInfo.name), (err: Error) => {
if (err) {
reject(err);
} else {
resolve();
}
});
});
}

View File

@@ -14,15 +14,15 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
import path from "path";
import fsProm from "fs/promises";
import childProcess from "child_process";
import { rimraf } from "rimraf";
import { glob } from "glob";
import { mkdirp } from "mkdirp";
import path from 'path';
import fsProm from 'fs/promises';
import childProcess from 'child_process';
import rimraf from 'rimraf';
import glob from 'glob';
import mkdirp from 'mkdirp';
import HakEnv from "./hakEnv";
import { DependencyInfo } from "./dep";
import HakEnv from './hakEnv';
import { DependencyInfo } from './dep';
export default async function copy(hakEnv: HakEnv, moduleInfo: DependencyInfo): Promise<void> {
if (moduleInfo.cfg.prune) {
@@ -33,7 +33,11 @@ export default async function copy(hakEnv: HakEnv, moduleInfo: DependencyInfo):
try {
await mkdirp(moduleInfo.moduleOutDir);
process.chdir(moduleInfo.moduleOutDir);
await rimraf(moduleInfo.cfg.prune);
await new Promise<void>((resolve, reject) => {
rimraf(moduleInfo.cfg.prune, {}, err => {
err ? reject(err) : resolve();
});
});
} finally {
process.chdir(oldCwd);
}
@@ -43,35 +47,47 @@ export default async function copy(hakEnv: HakEnv, moduleInfo: DependencyInfo):
// If there are multiple moduleBuildDirs, singular moduleBuildDir
// is the same as moduleBuildDirs[0], so we're just listing the contents
// of the first one.
const files = await glob(moduleInfo.cfg.copy, {
cwd: moduleInfo.moduleBuildDir,
const files = await new Promise<string[]>((resolve, reject) => {
glob(moduleInfo.cfg.copy, {
nosort: true,
silent: true,
cwd: moduleInfo.moduleBuildDir,
}, (err, files) => {
err ? reject(err) : resolve(files);
});
});
if (moduleInfo.moduleBuildDirs.length > 1) {
if (!hakEnv.isMac()) {
console.error(
"You asked me to copy multiple targets but I've only been taught " + "how to do that on macOS.",
"You asked me to copy multiple targets but I've only been taught " +
"how to do that on macOS.",
);
throw new Error("Can't copy multiple targets on this platform");
}
for (const f of files) {
const components = moduleInfo.moduleBuildDirs.map((dir) => path.join(dir, f));
const components = moduleInfo.moduleBuildDirs.map(dir => path.join(dir, f));
const dst = path.join(moduleInfo.moduleOutDir, f);
await mkdirp(path.dirname(dst));
await new Promise<void>((resolve, reject) => {
childProcess.execFile("lipo", ["-create", "-output", dst, ...components], (err) => {
if (err) {
reject(err);
} else {
resolve();
}
});
childProcess.execFile('lipo',
['-create', '-output', dst, ...components], (err) => {
if (err) {
reject(err);
} else {
resolve();
}
},
);
});
}
} else {
console.log("Copying files from " + moduleInfo.moduleBuildDir + " to " + moduleInfo.moduleOutDir);
console.log(
"Copying files from " +
moduleInfo.moduleBuildDir + " to " + moduleInfo.moduleOutDir,
);
for (const f of files) {
console.log("\t" + f);
const src = path.join(moduleInfo.moduleBuildDir, f);

View File

@@ -28,5 +28,5 @@ export interface DependencyInfo {
moduleOutDir: string;
nodeModuleBinDir: string;
depPrefix: string;
scripts: Record<string, (hakEnv: HakEnv, moduleInfo: DependencyInfo) => Promise<void>>;
scripts: Record<string, (hakEnv: HakEnv, moduleInfo: DependencyInfo) => Promise<void> >;
}

View File

@@ -14,12 +14,12 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
import fsProm from "fs/promises";
import childProcess from "child_process";
import pacote from "pacote";
import fsProm from 'fs/promises';
import childProcess from 'child_process';
import pacote from 'pacote';
import HakEnv from "./hakEnv";
import { DependencyInfo } from "./dep";
import HakEnv from './hakEnv';
import { DependencyInfo } from './dep';
export default async function fetch(hakEnv: HakEnv, moduleInfo: DependencyInfo): Promise<void> {
let haveModuleBuildDir;
@@ -41,11 +41,15 @@ export default async function fetch(hakEnv: HakEnv, moduleInfo: DependencyInfo):
console.log("Running yarn install in " + moduleInfo.moduleBuildDir);
await new Promise<void>((resolve, reject) => {
const proc = childProcess.spawn(hakEnv.isWin() ? "yarn.cmd" : "yarn", ["install", "--ignore-scripts"], {
stdio: "inherit",
cwd: moduleInfo.moduleBuildDir,
});
proc.on("exit", (code) => {
const proc = childProcess.spawn(
hakEnv.isWin() ? 'yarn.cmd' : 'yarn',
['install', '--ignore-scripts'],
{
stdio: 'inherit',
cwd: moduleInfo.moduleBuildDir,
},
);
proc.on('exit', code => {
code ? reject(code) : resolve();
});
});

View File

@@ -14,10 +14,10 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
import { mkdirp } from "mkdirp";
import mkdirp from 'mkdirp';
import { DependencyInfo } from "./dep";
import HakEnv from "./hakEnv";
import { DependencyInfo } from './dep';
import HakEnv from './hakEnv';
export default async function fetchDeps(hakEnv: HakEnv, moduleInfo: DependencyInfo): Promise<void> {
await mkdirp(moduleInfo.moduleDotHakDir);

View File

@@ -14,16 +14,16 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
import path from "path";
import os from "os";
import path from 'path';
import os from 'os';
import nodePreGypVersioning from "node-pre-gyp/lib/util/versioning";
import { getElectronVersion } from "app-builder-lib/out/electron/electronVersion";
import { Arch, Target, TARGETS, getHost, isHostId, TargetId } from "./target";
import { Arch, Target, TARGETS, getHost, isHostId, TargetId } from './target';
async function getRuntime(projectRoot: string): Promise<string> {
const electronVersion = await getElectronVersion(projectRoot);
return electronVersion ? "electron" : "node-webkit";
return electronVersion ? 'electron' : 'node-webkit';
}
async function getRuntimeVersion(projectRoot: string): Promise<string> {
@@ -37,32 +37,38 @@ async function getRuntimeVersion(projectRoot: string): Promise<string> {
export default class HakEnv {
public readonly target: Target;
public runtime?: string;
public runtimeVersion?: string;
public runtime: string;
public runtimeVersion: string;
public dotHakDir: string;
public constructor(public readonly projectRoot: string, targetId: TargetId | null) {
const target = targetId ? TARGETS[targetId] : getHost();
constructor(public readonly projectRoot: string, targetId: TargetId | null) {
if (targetId) {
this.target = TARGETS[targetId];
} else {
this.target = getHost();
}
if (!target) {
if (!this.target) {
throw new Error(`Unknown target ${targetId}!`);
}
this.target = target;
this.dotHakDir = path.join(this.projectRoot, ".hak");
this.dotHakDir = path.join(this.projectRoot, '.hak');
}
public async init(): Promise<void> {
public async init() {
this.runtime = await getRuntime(this.projectRoot);
this.runtimeVersion = await getRuntimeVersion(this.projectRoot);
}
public getRuntimeAbi(): string {
return nodePreGypVersioning.get_runtime_abi(this.runtime!, this.runtimeVersion!);
return nodePreGypVersioning.get_runtime_abi(
this.runtime,
this.runtimeVersion,
);
}
// {node_abi}-{platform}-{arch}
public getNodeTriple(): string {
return this.getRuntimeAbi() + "-" + this.target.platform + "-" + this.target.arch;
return this.getRuntimeAbi() + '-' + this.target.platform + '-' + this.target.arch;
}
public getTargetId(): TargetId {
@@ -70,19 +76,15 @@ export default class HakEnv {
}
public isWin(): boolean {
return this.target.platform === "win32";
return this.target.platform === 'win32';
}
public isMac(): boolean {
return this.target.platform === "darwin";
return this.target.platform === 'darwin';
}
public isLinux(): boolean {
return this.target.platform === "linux";
}
public isFreeBSD(): boolean {
return this.target.platform === "freebsd";
return this.target.platform === 'linux';
}
public getTargetArch(): Arch {
@@ -93,11 +95,11 @@ export default class HakEnv {
return isHostId(this.target.id);
}
public makeGypEnv(): Record<string, string | undefined> {
public makeGypEnv(): Record<string, string> {
return Object.assign({}, process.env, {
npm_config_arch: this.target.arch,
npm_config_target_arch: this.target.arch,
npm_config_disturl: "https://electronjs.org/headers",
npm_config_disturl: 'https://electronjs.org/headers',
npm_config_runtime: this.runtime,
npm_config_target: this.runtimeVersion,
npm_config_build_from_source: true,
@@ -105,7 +107,15 @@ export default class HakEnv {
});
}
public getNodeModuleBin(name: string): string {
return path.join(this.projectRoot, 'node_modules', '.bin', name);
}
public wantsStaticSqlCipherUnix(): boolean {
return this.isMac() || process.env.SQLCIPHER_STATIC == '1';
}
public wantsStaticSqlCipher(): boolean {
return !(this.isLinux() || this.isFreeBSD()) || process.env.SQLCIPHER_BUNDLED == "1";
return this.isWin() || this.wantsStaticSqlCipherUnix();
}
}

View File

@@ -14,29 +14,44 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
import path from "path";
import findNpmPrefix from "find-npm-prefix";
import path from 'path';
import findNpmPrefix from 'find-npm-prefix';
import HakEnv from "./hakEnv";
import { TargetId } from "./target";
import { DependencyInfo } from "./dep";
import HakEnv from './hakEnv';
import { TargetId } from './target';
import { DependencyInfo } from './dep';
const GENERALCOMMANDS = ["target"];
const GENERALCOMMANDS = [
'target',
];
// These can only be run on specific modules
const MODULECOMMANDS = ["check", "fetch", "link", "build", "copy", "clean"];
const MODULECOMMANDS = [
'check',
'fetch',
'link',
'fetchDeps',
'build',
'copy',
'clean',
];
// Shortcuts for multiple commands at once (useful for building universal binaries
// because you can run the fetch/fetchDeps/build for each arch and then copy/link once)
const METACOMMANDS: Record<string, string[]> = {
fetchandbuild: ["check", "fetch", "build"],
copyandlink: ["copy", "link"],
const METACOMMANDS = {
'fetchandbuild': ['check', 'fetch', 'fetchDeps', 'build'],
'copyandlink': ['copy', 'link'],
};
// Scripts valid in a hak.json 'scripts' section
const HAKSCRIPTS = ["check", "fetch", "build"];
const HAKSCRIPTS = [
'check',
'fetch',
'fetchDeps',
'build',
];
async function main(): Promise<void> {
async function main() {
const prefix = await findNpmPrefix(process.cwd());
let packageJson;
try {
@@ -50,12 +65,11 @@ async function main(): Promise<void> {
// Apply `--target <target>` option if specified
// Can be specified multiple times for the copy command to bundle
// multiple archs into a single universal output module)
for (;;) {
// eslint-disable-line no-constant-condition
const targetIndex = process.argv.indexOf("--target");
while (true) { // eslint-disable-line no-constant-condition
const targetIndex = process.argv.indexOf('--target');
if (targetIndex === -1) break;
if (targetIndex + 1 >= process.argv.length) {
if ((targetIndex + 1) >= process.argv.length) {
console.error("--target option specified without a target");
process.exit(1);
}
@@ -63,7 +77,7 @@ async function main(): Promise<void> {
targetIds.push(process.argv.splice(targetIndex, 2)[1] as TargetId);
}
const hakEnvs = targetIds.map((tid) => new HakEnv(prefix, tid));
const hakEnvs = targetIds.map(tid => new HakEnv(prefix, tid));
if (hakEnvs.length == 0) hakEnvs.push(new HakEnv(prefix, null));
for (const h of hakEnvs) {
await h.init();
@@ -75,7 +89,7 @@ async function main(): Promise<void> {
const hakDepsCfg = packageJson.hakDependencies || {};
for (const dep of Object.keys(hakDepsCfg)) {
const hakJsonPath = path.join(prefix, "hak", dep, "hak.json");
const hakJsonPath = path.join(prefix, 'hak', dep, 'hak.json');
let hakJson: Record<string, any>;
try {
hakJson = await require(hakJsonPath);
@@ -88,20 +102,20 @@ async function main(): Promise<void> {
name: dep,
version: hakDepsCfg[dep],
cfg: hakJson,
moduleHakDir: path.join(prefix, "hak", dep),
moduleHakDir: path.join(prefix, 'hak', dep),
moduleDotHakDir: path.join(hakEnv.dotHakDir, dep),
moduleTargetDotHakDir: path.join(hakEnv.dotHakDir, dep, hakEnv.getTargetId()),
moduleBuildDir: path.join(hakEnv.dotHakDir, dep, hakEnv.getTargetId(), "build"),
moduleBuildDirs: hakEnvs.map((h) => path.join(h.dotHakDir, dep, h.getTargetId(), "build")),
moduleOutDir: path.join(hakEnv.dotHakDir, "hakModules", dep),
nodeModuleBinDir: path.join(hakEnv.dotHakDir, dep, hakEnv.getTargetId(), "build", "node_modules", ".bin"),
depPrefix: path.join(hakEnv.dotHakDir, dep, hakEnv.getTargetId(), "opt"),
moduleBuildDir: path.join(hakEnv.dotHakDir, dep, hakEnv.getTargetId(), 'build'),
moduleBuildDirs: hakEnvs.map(h => path.join(h.dotHakDir, dep, h.getTargetId(), 'build')),
moduleOutDir: path.join(hakEnv.dotHakDir, 'hakModules', dep),
nodeModuleBinDir: path.join(hakEnv.dotHakDir, dep, hakEnv.getTargetId(), 'build', 'node_modules', '.bin'),
depPrefix: path.join(hakEnv.dotHakDir, dep, hakEnv.getTargetId(), 'opt'),
scripts: {},
};
for (const s of HAKSCRIPTS) {
if (hakJson.scripts && hakJson.scripts[s]) {
const scriptModule = await import(path.join(prefix, "hak", dep, hakJson.scripts[s]));
const scriptModule = await import(path.join(prefix, 'hak', dep, hakJson.scripts[s]));
if (scriptModule.__esModule) {
deps[dep].scripts[s] = scriptModule.default;
} else {
@@ -113,14 +127,14 @@ async function main(): Promise<void> {
let cmds: string[];
if (process.argv.length < 3) {
cmds = ["check", "fetch", "build", "copy", "link"];
cmds = ['check', 'fetch', 'fetchDeps', 'build', 'copy', 'link'];
} else if (METACOMMANDS[process.argv[2]]) {
cmds = METACOMMANDS[process.argv[2]];
} else {
cmds = [process.argv[2]];
}
if (hakEnvs.length > 1 && cmds.some((c) => !["copy", "link"].includes(c))) {
if (hakEnvs.length > 1 && cmds.some(c => !['copy', 'link'].includes(c))) {
// We allow link here too for convenience because it's completely arch independent
console.error("Multiple targets only supported with the copy command");
return;
@@ -131,7 +145,7 @@ async function main(): Promise<void> {
for (const cmd of cmds) {
if (GENERALCOMMANDS.includes(cmd)) {
if (cmd === "target") {
if (cmd === 'target') {
console.log(hakEnv.getNodeTriple());
}
return;
@@ -146,12 +160,15 @@ async function main(): Promise<void> {
process.exit(1);
}
const cmdFunc = (await import("./" + cmd)).default;
const cmdFunc = (await import('./' + cmd)).default;
for (const mod of modules) {
const depInfo = deps[mod];
if (depInfo === undefined) {
console.log("Module " + mod + " not found - is it in hakDependencies " + "in your package.json?");
console.log(
"Module " + mod + " not found - is it in hakDependencies " +
"in your package.json?",
);
process.exit(1);
}
console.log("hak " + cmd + ": " + mod);
@@ -160,7 +177,7 @@ async function main(): Promise<void> {
}
}
main().catch((err) => {
main().catch(err => {
console.error(err);
process.exit(1);
});

View File

@@ -14,16 +14,16 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
import path from "path";
import os from "os";
import fsProm from "fs/promises";
import childProcess from "child_process";
import path from 'path';
import os from 'os';
import fsProm from 'fs/promises';
import childProcess from 'child_process';
import HakEnv from "./hakEnv";
import { DependencyInfo } from "./dep";
import HakEnv from './hakEnv';
import { DependencyInfo } from './dep';
export default async function link(hakEnv: HakEnv, moduleInfo: DependencyInfo): Promise<void> {
const yarnrc = path.join(hakEnv.projectRoot, ".yarnrc");
const yarnrc = path.join(hakEnv.projectRoot, '.yarnrc');
// this is fairly terrible but it's reasonably clunky to either parse a yarnrc
// properly or get yarn to do it, so this will probably suffice for now.
// We just check to see if there is a local .yarnrc at all, and assume that
@@ -43,28 +43,28 @@ export default async function link(hakEnv: HakEnv, moduleInfo: DependencyInfo):
// (ie. Windows absolute paths) but strings in quotes get parsed as
// JSON so need to be valid JSON encoded strings (ie. have the
// backslashes escaped). JSON.stringify will add quotes and escape.
"--link-folder " + JSON.stringify(path.join(hakEnv.dotHakDir, "links")) + os.EOL,
'--link-folder ' + JSON.stringify(path.join(hakEnv.dotHakDir, 'links')) + os.EOL,
);
}
const yarnCmd = "yarn" + (hakEnv.isWin() ? ".cmd" : "");
const yarnCmd = 'yarn' + (hakEnv.isWin() ? '.cmd' : '');
await new Promise<void>((resolve, reject) => {
const proc = childProcess.spawn(yarnCmd, ["link"], {
const proc = childProcess.spawn(yarnCmd, ['link'], {
cwd: moduleInfo.moduleOutDir,
stdio: "inherit",
stdio: 'inherit',
});
proc.on("exit", (code) => {
proc.on('exit', code => {
code ? reject(code) : resolve();
});
});
await new Promise<void>((resolve, reject) => {
const proc = childProcess.spawn(yarnCmd, ["link", moduleInfo.name], {
const proc = childProcess.spawn(yarnCmd, ['link', moduleInfo.name], {
cwd: hakEnv.projectRoot,
stdio: "inherit",
stdio: 'inherit',
});
proc.on("exit", (code) => {
proc.on('exit', code => {
code ? reject(code) : resolve();
});
});

View File

@@ -20,33 +20,29 @@ import { GLIBC, MUSL, family as processLibC } from "detect-libc";
// details in a single string.
// See https://doc.rust-lang.org/rustc/platform-support.html.
export type TargetId =
| "aarch64-apple-darwin"
| "x86_64-apple-darwin"
| "universal-apple-darwin"
| "i686-pc-windows-msvc"
| "x86_64-pc-windows-msvc"
| "aarch64-pc-windows-msvc"
| "i686-unknown-freebsd"
| "x86_64-unknown-freebsd"
| "aarch64-unknown-freebsd"
| "i686-unknown-linux-musl"
| "i686-unknown-linux-gnu"
| "x86_64-unknown-linux-musl"
| "x86_64-unknown-linux-gnu"
| "aarch64-unknown-linux-musl"
| "aarch64-unknown-linux-gnu"
| "powerpc64le-unknown-linux-musl"
| "powerpc64le-unknown-linux-gnu";
'aarch64-apple-darwin' |
'x86_64-apple-darwin' |
'universal-apple-darwin' |
'i686-pc-windows-msvc' |
'x86_64-pc-windows-msvc' |
'i686-unknown-linux-musl' |
'i686-unknown-linux-gnu' |
'x86_64-unknown-linux-musl' |
'x86_64-unknown-linux-gnu' |
'aarch64-unknown-linux-musl' |
'aarch64-unknown-linux-gnu' |
'powerpc64le-unknown-linux-musl' |
'powerpc64le-unknown-linux-gnu';
// Values are expected to match those used in `process.platform`.
export type Platform = "darwin" | "freebsd" | "linux" | "win32";
export type Platform = 'darwin' | 'linux' | 'win32';
// Values are expected to match those used in `process.arch`.
export type Arch = "arm64" | "ia32" | "x64" | "ppc64" | "universal";
export type Arch = 'arm64' | 'ia32' | 'x64' | 'ppc64' | 'universal';
// Values are expected to match those used by Visual Studio's `vcvarsall.bat`.
// See https://docs.microsoft.com/cpp/build/building-on-the-command-line?view=msvc-160#vcvarsall-syntax
export type VcVarsArch = "amd64" | "arm64" | "x86";
export type VcVarsArch = 'amd64' | 'arm64' | 'x86';
export type Target = {
id: TargetId;
@@ -55,165 +51,140 @@ export type Target = {
};
export type WindowsTarget = Target & {
platform: "win32";
platform: 'win32';
vcVarsArch: VcVarsArch;
};
export type LinuxTarget = Target & {
platform: "linux";
platform: 'linux';
libC: typeof processLibC;
};
export type UniversalTarget = Target & {
arch: "universal";
arch: 'universal';
subtargets: Target[];
};
const aarch64AppleDarwin: Target = {
id: "aarch64-apple-darwin",
platform: "darwin",
arch: "arm64",
id: 'aarch64-apple-darwin',
platform: 'darwin',
arch: 'arm64',
};
const x8664AppleDarwin: Target = {
id: "x86_64-apple-darwin",
platform: "darwin",
arch: "x64",
id: 'x86_64-apple-darwin',
platform: 'darwin',
arch: 'x64',
};
const universalAppleDarwin: UniversalTarget = {
id: "universal-apple-darwin",
platform: "darwin",
arch: "universal",
subtargets: [aarch64AppleDarwin, x8664AppleDarwin],
id: 'universal-apple-darwin',
platform: 'darwin',
arch: 'universal',
subtargets: [
aarch64AppleDarwin,
x8664AppleDarwin,
],
};
const i686PcWindowsMsvc: WindowsTarget = {
id: "i686-pc-windows-msvc",
platform: "win32",
arch: "ia32",
vcVarsArch: "x86",
id: 'i686-pc-windows-msvc',
platform: 'win32',
arch: 'ia32',
vcVarsArch: 'x86',
};
const x8664PcWindowsMsvc: WindowsTarget = {
id: "x86_64-pc-windows-msvc",
platform: "win32",
arch: "x64",
vcVarsArch: "amd64",
};
const aarch64WindowsMsvc: WindowsTarget = {
id: "aarch64-pc-windows-msvc",
platform: "win32",
arch: "arm64",
vcVarsArch: "arm64",
};
const i686UnknownFreebsd: Target = {
id: "i686-unknown-freebsd",
platform: "freebsd",
arch: "ia32",
};
const x8664UnknownFreebsd: Target = {
id: "x86_64-unknown-freebsd",
platform: "freebsd",
arch: "x64",
};
const aarch64UnknownFreebsd: Target = {
id: "aarch64-unknown-freebsd",
platform: "freebsd",
arch: "arm64",
id: 'x86_64-pc-windows-msvc',
platform: 'win32',
arch: 'x64',
vcVarsArch: 'amd64',
};
const x8664UnknownLinuxGnu: LinuxTarget = {
id: "x86_64-unknown-linux-gnu",
platform: "linux",
arch: "x64",
id: 'x86_64-unknown-linux-gnu',
platform: 'linux',
arch: 'x64',
libC: GLIBC,
};
const x8664UnknownLinuxMusl: LinuxTarget = {
id: "x86_64-unknown-linux-musl",
platform: "linux",
arch: "x64",
id: 'x86_64-unknown-linux-musl',
platform: 'linux',
arch: 'x64',
libC: MUSL,
};
const i686UnknownLinuxGnu: LinuxTarget = {
id: "i686-unknown-linux-gnu",
platform: "linux",
arch: "ia32",
id: 'i686-unknown-linux-gnu',
platform: 'linux',
arch: 'ia32',
libC: GLIBC,
};
const i686UnknownLinuxMusl: LinuxTarget = {
id: "i686-unknown-linux-musl",
platform: "linux",
arch: "ia32",
id: 'i686-unknown-linux-musl',
platform: 'linux',
arch: 'ia32',
libC: MUSL,
};
const aarch64UnknownLinuxGnu: LinuxTarget = {
id: "aarch64-unknown-linux-gnu",
platform: "linux",
arch: "arm64",
id: 'aarch64-unknown-linux-gnu',
platform: 'linux',
arch: 'arm64',
libC: GLIBC,
};
const aarch64UnknownLinuxMusl: LinuxTarget = {
id: "aarch64-unknown-linux-musl",
platform: "linux",
arch: "arm64",
id: 'aarch64-unknown-linux-musl',
platform: 'linux',
arch: 'arm64',
libC: MUSL,
};
const powerpc64leUnknownLinuxGnu: LinuxTarget = {
id: "powerpc64le-unknown-linux-gnu",
platform: "linux",
arch: "ppc64",
id: 'powerpc64le-unknown-linux-gnu',
platform: 'linux',
arch: 'ppc64',
libC: GLIBC,
};
const powerpc64leUnknownLinuxMusl: LinuxTarget = {
id: "powerpc64le-unknown-linux-musl",
platform: "linux",
arch: "ppc64",
id: 'powerpc64le-unknown-linux-musl',
platform: 'linux',
arch: 'ppc64',
libC: MUSL,
};
export const TARGETS: Record<TargetId, Target> = {
// macOS
"aarch64-apple-darwin": aarch64AppleDarwin,
"x86_64-apple-darwin": x8664AppleDarwin,
"universal-apple-darwin": universalAppleDarwin,
'aarch64-apple-darwin': aarch64AppleDarwin,
'x86_64-apple-darwin': x8664AppleDarwin,
'universal-apple-darwin': universalAppleDarwin,
// Windows
"i686-pc-windows-msvc": i686PcWindowsMsvc,
"x86_64-pc-windows-msvc": x8664PcWindowsMsvc,
"aarch64-pc-windows-msvc": aarch64WindowsMsvc,
// FreeBSD
"i686-unknown-freebsd": i686UnknownFreebsd,
"x86_64-unknown-freebsd": x8664UnknownFreebsd,
"aarch64-unknown-freebsd": aarch64UnknownFreebsd,
'i686-pc-windows-msvc': i686PcWindowsMsvc,
'x86_64-pc-windows-msvc': x8664PcWindowsMsvc,
// Linux
"i686-unknown-linux-musl": i686UnknownLinuxMusl,
"i686-unknown-linux-gnu": i686UnknownLinuxGnu,
"x86_64-unknown-linux-musl": x8664UnknownLinuxMusl,
"x86_64-unknown-linux-gnu": x8664UnknownLinuxGnu,
"aarch64-unknown-linux-musl": aarch64UnknownLinuxMusl,
"aarch64-unknown-linux-gnu": aarch64UnknownLinuxGnu,
"powerpc64le-unknown-linux-musl": powerpc64leUnknownLinuxMusl,
"powerpc64le-unknown-linux-gnu": powerpc64leUnknownLinuxGnu,
'i686-unknown-linux-musl': i686UnknownLinuxMusl,
'i686-unknown-linux-gnu': i686UnknownLinuxGnu,
'x86_64-unknown-linux-musl': x8664UnknownLinuxMusl,
'x86_64-unknown-linux-gnu': x8664UnknownLinuxGnu,
'aarch64-unknown-linux-musl': aarch64UnknownLinuxMusl,
'aarch64-unknown-linux-gnu': aarch64UnknownLinuxGnu,
'powerpc64le-unknown-linux-musl': powerpc64leUnknownLinuxMusl,
'powerpc64le-unknown-linux-gnu': powerpc64leUnknownLinuxGnu,
};
export function getHost(): Target | undefined {
return Object.values(TARGETS).find(
(target) =>
target.platform === process.platform &&
target.arch === process.arch &&
(process.platform !== "linux" || (target as LinuxTarget).libC === processLibC),
);
return Object.values(TARGETS).find(target => (
target.platform === process.platform &&
target.arch === process.arch &&
(
process.platform !== 'linux' ||
(target as LinuxTarget).libC === processLibC
)
));
}
export function isHostId(id: TargetId): boolean {

18
scripts/hak/tsconfig.json Normal file
View File

@@ -0,0 +1,18 @@
{
"compilerOptions": {
"moduleResolution": "node",
"esModuleInterop": true,
"target": "es2017",
"module": "commonjs",
"sourceMap": false,
"lib": [
"es2019",
]
},
"include": [
"./**/*.ts"
],
"ts-node": {
"transpileOnly": true
}
}

32
scripts/mkrepo.sh Executable file
View File

@@ -0,0 +1,32 @@
#!/bin/sh
# Take the deb and bundle it into an apt repository
if [[ $# -lt 1 ]]
then
echo "Usage $0 <config dir>"
exit
fi
confdir=$1
set -ex
ver=`jq -r .version package.json`
distdir=$PWD/dist
confdir=$PWD/$confdir
repodir=`mktemp -d -t repo`
mkdir $repodir/conf
cp $confdir/conf_distributions $repodir/conf/distributions
pushd $repodir
for i in `cat conf/distributions | grep Codename | cut -d ' ' -f 2`
do
reprepro includedeb $i $distdir/element-desktop_${ver}_amd64.deb
done
tar cvzf $distdir/element-desktop_repo_$ver.tar.gz .
popd
rm -r $repodir

55
scripts/set-version.js Executable file
View File

@@ -0,0 +1,55 @@
#!/usr/bin/env node
/*
* Checks for the presence of a webapp, inspects its version and sets the
* version metadata of the package to match.
*/
const fs = require('fs').promises;
const asar = require('asar');
const childProcess = require('child_process');
async function versionFromAsar() {
try {
await fs.stat('webapp.asar');
} catch (e) {
console.log("No 'webapp.asar' found. Run 'yarn run fetch'");
return 1;
}
return asar.extractFile('webapp.asar', 'version').toString().trim();
}
async function setPackageVersion(ver) {
// set version in package.json: electron-builder will use this to populate
// all the various version fields
await new Promise((resolve, reject) => {
childProcess.execFile(process.platform === 'win32' ? 'yarn.cmd' : 'yarn', [
'version',
'-s',
'--no-git-tag-version', // This also means "don't commit to git" as it turns out
'--new-version',
ver,
], (err) => {
if (err) {
reject(err);
} else {
resolve();
}
});
});
}
async function main(args) {
let version = args[0];
if (version === undefined) version = await versionFromAsar();
await setPackageVersion(version);
}
if (require.main === module) {
main(process.argv.slice(2)).then((ret) => process.exit(ret));
}
module.exports = { versionFromAsar, setPackageVersion };

View File

@@ -1,64 +0,0 @@
#!/usr/bin/env -S npx ts-node
/*
* Checks for the presence of a webapp, inspects its version and sets the
* version metadata of the package to match.
*/
import { promises as fs } from "fs";
import * as asar from "@electron/asar";
import * as childProcess from "child_process";
export async function versionFromAsar(): Promise<string> {
try {
await fs.stat("webapp.asar");
} catch (e) {
throw new Error("No 'webapp.asar' found. Run 'yarn run fetch'");
}
return asar.extractFile("webapp.asar", "version").toString().trim();
}
export async function setPackageVersion(ver: string): Promise<void> {
// set version in package.json: electron-builder will use this to populate
// all the various version fields
await new Promise<void>((resolve, reject) => {
childProcess.execFile(
process.platform === "win32" ? "yarn.cmd" : "yarn",
[
"version",
"-s",
"--no-git-tag-version", // This also means "don't commit to git" as it turns out
"--new-version",
ver,
],
(err) => {
if (err) {
reject(err);
} else {
resolve();
}
},
);
});
}
async function main(args: string[]): Promise<number> {
let version = args[0];
if (version === undefined) version = await versionFromAsar();
await setPackageVersion(version);
return 0;
}
if (require.main === module) {
main(process.argv.slice(2))
.then((ret) => {
process.exit(ret);
})
.catch((e) => {
console.error(e);
process.exit(1);
});
}

View File

@@ -1,16 +0,0 @@
{
"compilerOptions": {
"resolveJsonModule": true,
"moduleResolution": "node",
"esModuleInterop": true,
"target": "es2017",
"module": "commonjs",
"sourceMap": false,
"strict": true,
"lib": ["es2020", "dom"]
},
"include": ["../src/@types", "./**/*.ts"],
"ts-node": {
"transpileOnly": true
}
}

View File

@@ -1,6 +0,0 @@
import { GlobOptions } from "glob";
declare module "glob" {
// Workaround for @electron/asar importing IOptions instead of GlobOptions
export type IOptions = GlobOptions;
}

View File

@@ -20,26 +20,27 @@ import AutoLaunch from "auto-launch";
import { AppLocalization } from "../language-helper";
// global type extensions need to use var for whatever reason
/* eslint-disable no-var */
declare global {
var mainWindow: BrowserWindow | null;
var appQuitting: boolean;
var appLocalization: AppLocalization;
var launcher: AutoLaunch;
var vectorConfig: Record<string, any>;
var trayConfig: {
// eslint-disable-next-line camelcase
icon_path: string;
brand: string;
};
var store: Store<{
warnBeforeExit?: boolean;
minimizeToTray?: boolean;
spellCheckerEnabled?: boolean;
autoHideMenuBar?: boolean;
locale?: string | string[];
disableHardwareAcceleration?: boolean;
}>;
namespace NodeJS {
interface Global {
mainWindow: BrowserWindow;
appQuitting: boolean;
appLocalization: AppLocalization;
launcher: AutoLaunch;
vectorConfig: Record<string, any>;
trayConfig: {
// eslint-disable-next-line camelcase
icon_path: string;
brand: string;
};
store: Store<{
warnBeforeExit?: boolean;
minimizeToTray?: boolean;
spellCheckerEnabled?: boolean;
autoHideMenuBar?: boolean;
locale?: string | string[];
disableHardwareAcceleration?: boolean;
}>;
}
}
}
/* eslint-enable no-var */

View File

@@ -50,5 +50,5 @@ declare module "keytar" {
*
* @returns A promise for the array of found credentials.
*/
export function findCredentials(service: string): Promise<Array<{ account: string; password: string }>>;
export function findCredentials(service: string): Promise<Array<{ account: string, password: string}>>;
}

View File

@@ -86,7 +86,7 @@ declare module "matrix-seshat" {
}
export class Seshat {
public constructor(path: string, config?: IConfig);
constructor(path: string, config?: IConfig);
public addEvent(matrixEvent: IMatrixEvent, profile?: IMatrixProfile): void;
public deleteEvent(eventId: string): Promise<boolean>;
public commit(force?: boolean): Promise<number>;
@@ -132,7 +132,7 @@ declare module "matrix-seshat" {
}
export class SeshatRecovery {
public constructor(path: string, config?: IConfig);
constructor(path: string, config?: IConfig);
public info(): IRecoveryInfo;
public getUserVersion(): Promise<number>;
public shutdown(): Promise<void>;
@@ -140,6 +140,6 @@ declare module "matrix-seshat" {
}
export class ReindexError extends Error {
public constructor(message?: string);
constructor(message?: string);
}
}

View File

@@ -1,29 +0,0 @@
/*
Copyright 2023 New Vector Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import { Streams } from "electron";
type DisplayMediaCallback = (streams: Streams) => void;
let displayMediaCallback: DisplayMediaCallback | null;
export const getDisplayMediaCallback = (): DisplayMediaCallback | null => {
return displayMediaCallback;
};
export const setDisplayMediaCallback = (callback: DisplayMediaCallback | null): void => {
displayMediaCallback = callback;
};

View File

@@ -19,12 +19,18 @@ limitations under the License.
// Squirrel on windows starts the app with various flags as hooks to tell us when we've been installed/uninstalled etc.
import "./squirrelhooks";
import { app, BrowserWindow, Menu, autoUpdater, protocol, dialog, Input, Event, session } from "electron";
import * as Sentry from "@sentry/electron/main";
import {
app,
BrowserWindow,
Menu,
autoUpdater,
protocol,
dialog,
} from "electron";
import AutoLaunch from "auto-launch";
import path from "path";
import windowStateKeeper from "electron-window-state";
import Store from "electron-store";
import windowStateKeeper from 'electron-window-state';
import Store from 'electron-store';
import fs, { promises as afs } from "fs";
import { URL } from "url";
import minimist from "minimist";
@@ -34,18 +40,23 @@ import "./keytar";
import "./seshat";
import "./settings";
import * as tray from "./tray";
import { buildMenuTemplate } from "./vectormenu";
import webContentsHandler from "./webcontents-handler";
import * as updater from "./updater";
import { getProfileFromDeeplink, protocolInit } from "./protocol";
import { _t, AppLocalization } from "./language-helper";
import { setDisplayMediaCallback } from "./displayMediaCallback";
import { setupMacosTitleBar } from "./macos-titlebar";
import { buildMenuTemplate } from './vectormenu';
import webContentsHandler from './webcontents-handler';
import * as updater from './updater';
import { getProfileFromDeeplink, protocolInit } from './protocol';
import { _t, AppLocalization } from './language-helper';
import Input = Electron.Input;
const argv = minimist(process.argv, {
alias: { help: "h" },
});
// Things we need throughout the file but which need to be created
// asynchronously are initialised in setupGlobals()
let asarPath: string;
let resPath: string;
let iconPath: string;
if (argv["help"]) {
console.log("Options:");
console.log(" --profile-dir {path}: Path to where to store the profile.");
@@ -54,7 +65,8 @@ if (argv["help"]) {
console.log(" --no-update: Disable automatic updating.");
console.log(" --hidden: Start the application hidden in the system tray.");
console.log(" --help: Displays this help message.");
console.log("And more such as --proxy, see:" + "https://electronjs.org/docs/api/command-line-switches");
console.log("And more such as --proxy, see:" +
"https://electronjs.org/docs/api/command-line-switches");
app.exit();
}
@@ -62,7 +74,7 @@ if (argv["help"]) {
// as soon as the app path is set, so pick a random path in it that must exist if it's a
// real user data directory.
function isRealUserDataDir(d: string): boolean {
return fs.existsSync(path.join(d, "IndexedDB"));
return fs.existsSync(path.join(d, 'IndexedDB'));
}
// check if we are passed a profile in the SSO callback url
@@ -71,22 +83,22 @@ let userDataPath: string;
const userDataPathInProtocol = getProfileFromDeeplink(argv["_"]);
if (userDataPathInProtocol) {
userDataPath = userDataPathInProtocol;
} else if (argv["profile-dir"]) {
userDataPath = argv["profile-dir"];
} else if (argv['profile-dir']) {
userDataPath = argv['profile-dir'];
} else {
let newUserDataPath = app.getPath("userData");
if (argv["profile"]) {
newUserDataPath += "-" + argv["profile"];
let newUserDataPath = app.getPath('userData');
if (argv['profile']) {
newUserDataPath += '-' + argv['profile'];
}
const newUserDataPathExists = isRealUserDataDir(newUserDataPath);
let oldUserDataPath = path.join(app.getPath("appData"), app.getName().replace("Element", "Riot"));
if (argv["profile"]) {
oldUserDataPath += "-" + argv["profile"];
let oldUserDataPath = path.join(app.getPath('appData'), app.getName().replace('Element', 'Riot'));
if (argv['profile']) {
oldUserDataPath += '-' + argv['profile'];
}
const oldUserDataPathExists = isRealUserDataDir(oldUserDataPath);
console.log(newUserDataPath + " exists: " + (newUserDataPathExists ? "yes" : "no"));
console.log(oldUserDataPath + " exists: " + (oldUserDataPathExists ? "yes" : "no"));
console.log(newUserDataPath + " exists: " + (newUserDataPathExists ? 'yes' : 'no'));
console.log(oldUserDataPath + " exists: " + (oldUserDataPathExists ? 'yes' : 'no'));
if (!newUserDataPathExists && oldUserDataPathExists) {
console.log("Using legacy user data path: " + oldUserDataPath);
userDataPath = oldUserDataPath;
@@ -94,57 +106,52 @@ if (userDataPathInProtocol) {
userDataPath = newUserDataPath;
}
}
app.setPath("userData", userDataPath);
app.setPath('userData', userDataPath);
async function tryPaths(name: string, root: string, rawPaths: string[]): Promise<string> {
// Make everything relative to root
const paths = rawPaths.map((p) => path.join(root, p));
const paths = rawPaths.map(p => path.join(root, p));
for (const p of paths) {
try {
await afs.stat(p);
return p + "/";
} catch (e) {}
return p + '/';
} catch (e) {
}
}
console.log(`Couldn't find ${name} files in any of: `);
for (const p of paths) {
console.log("\t" + path.resolve(p));
console.log("\t"+path.resolve(p));
}
throw new Error(`Failed to find ${name} files`);
}
const homeserverProps = ["default_is_url", "default_hs_url", "default_server_name", "default_server_config"] as const;
// Find the webapp resources and set up things that require them
async function setupGlobals(): Promise<void> {
// find the webapp asar.
asarPath = await tryPaths("webapp", __dirname, [
// If run from the source checkout, this will be in the directory above
'../webapp.asar',
// but if run from a packaged application, electron-main.js will be in
// a different asar file so it will be two levels above
'../../webapp.asar',
// also try without the 'asar' suffix to allow symlinking in a directory
'../webapp',
// from a packaged application
'../../webapp',
]);
let asarPathPromise: Promise<string> | undefined;
// Get the webapp resource file path, memoizes result
function getAsarPath(): Promise<string> {
if (!asarPathPromise) {
asarPathPromise = tryPaths("webapp", __dirname, [
// If run from the source checkout, this will be in the directory above
"../webapp.asar",
// but if run from a packaged application, electron-main.js will be in
// a different asar file, so it will be two levels above
"../../webapp.asar",
// also try without the 'asar' suffix to allow symlinking in a directory
"../webapp",
// from a packaged application
"../../webapp",
]);
}
return asarPathPromise;
}
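The rewritten getAsarPath memoises the promise itself rather than its resolved value, so every caller, including ones that arrive while the lookup is still running, shares a single tryPaths probe. The same pattern in isolation, with hypothetical names:

// Generic async memoisation: the first call starts the work, every later
// call (even a concurrent one) awaits the same promise.
function memoizeAsync<T>(compute: () => Promise<T>): () => Promise<T> {
    let cached: Promise<T> | undefined;
    return () => {
        if (!cached) cached = compute();
        return cached;
    };
}

// const getAsarPath = memoizeAsync(() => tryPaths("webapp", __dirname, [/* candidates */]));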
// Loads the config from asar, and applies a config.json from userData atop if one exists
// Writes config to `global.vectorConfig`. Does nothing if `global.vectorConfig` is already set.
async function loadConfig(): Promise<void> {
if (global.vectorConfig) return;
const asarPath = await getAsarPath();
// we assume the resources path is in the same place as the asar
resPath = await tryPaths("res", path.dirname(asarPath), [
// If run from the source checkout
'res',
// if run from packaged application
'',
]);
try {
// eslint-disable-next-line @typescript-eslint/no-var-requires
global.vectorConfig = require(asarPath + "config.json");
global.vectorConfig = require(asarPath + 'config.json');
} catch (e) {
// it would be nice to check the error code here and bail if the config
// is unparsable, but we get MODULE_NOT_FOUND in the case of a missing
@@ -156,19 +163,17 @@ async function loadConfig(): Promise<void> {
try {
// Load local config and use it to override values from the one baked with the build
// eslint-disable-next-line @typescript-eslint/no-var-requires
const localConfig = require(path.join(app.getPath("userData"), "config.json"));
const localConfig = require(path.join(app.getPath('userData'), 'config.json'));
// If the local config has a homeserver defined, don't use the homeserver from the build
// config. This is to avoid a problem where Riot thinks there are multiple homeservers
// defined, and panics as a result.
if (Object.keys(localConfig).find((k) => homeserverProps.includes(<any>k))) {
const homeserverProps = ['default_is_url', 'default_hs_url', 'default_server_name', 'default_server_config'];
if (Object.keys(localConfig).find(k => homeserverProps.includes(k))) {
// Rip out all the homeserver options from the vector config
global.vectorConfig = Object.keys(global.vectorConfig)
.filter((k) => !homeserverProps.includes(<any>k))
.reduce((obj, key) => {
obj[key] = global.vectorConfig[key];
return obj;
}, {} as Omit<Partial<(typeof global)["vectorConfig"]>, keyof typeof homeserverProps>);
.filter(k => !homeserverProps.includes(k))
.reduce((obj, key) => {obj[key] = global.vectorConfig[key]; return obj;}, {});
}
global.vectorConfig = Object.assign(global.vectorConfig, localConfig);
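The filter/reduce just above strips the homeserver-related keys from the baked-in config whenever the local config supplies its own; the same omission written as a stand-alone helper (a sketch with hypothetical names, not the code this file uses):

// Return a shallow copy of `obj` without the listed keys.
function omitKeys<T extends object>(obj: T, keys: readonly (keyof T)[]): Partial<T> {
    const out: Partial<T> = {};
    for (const key of Object.keys(obj) as (keyof T)[]) {
        if (!keys.includes(key)) out[key] = obj[key];
    }
    return out;
}

// e.g. config = omitKeys(config, homeserverProps);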
@@ -176,57 +181,28 @@ async function loadConfig(): Promise<void> {
if (e instanceof SyntaxError) {
dialog.showMessageBox({
type: "error",
title: `Your ${global.vectorConfig.brand || "Element"} is misconfigured`,
message:
`Your custom ${global.vectorConfig.brand || "Element"} configuration contains invalid JSON. ` +
`Please correct the problem and reopen ${global.vectorConfig.brand || "Element"}.`,
title: `Your ${global.vectorConfig.brand || 'Element'} is misconfigured`,
message: `Your custom ${global.vectorConfig.brand || 'Element'} configuration contains invalid JSON. ` +
`Please correct the problem and reopen ${global.vectorConfig.brand || 'Element'}.`,
detail: e.message || "",
});
}
// Could not load local config, this is expected in most cases.
}
}
// Configure Electron Sentry and crashReporter using sentry.dsn in config.json if one is present.
async function configureSentry(): Promise<void> {
await loadConfig();
const { dsn, environment } = global.vectorConfig.sentry || {};
if (dsn) {
console.log(`Enabling Sentry with dsn=${dsn} environment=${environment}`);
Sentry.init({
dsn,
environment,
// We don't actually use this IPC, but we do not want Sentry injecting preloads
ipcMode: Sentry.IPCMode.Classic,
});
}
}
// Set up globals for Tray and AutoLaunch
async function setupGlobals(): Promise<void> {
const asarPath = await getAsarPath();
await loadConfig();
// we assume the resources path is in the same place as the asar
const resPath = await tryPaths("res", path.dirname(asarPath), [
// If run from the source checkout
"res",
// if run from packaged application
"",
]);
// The tray icon
// It's important to call `path.join` so we don't end up with the packaged asar in the final path.
const iconFile = `element.${process.platform === "win32" ? "ico" : "png"}`;
const iconFile = `element.${process.platform === 'win32' ? 'ico' : 'png'}`;
iconPath = path.join(resPath, "img", iconFile);
global.trayConfig = {
icon_path: path.join(resPath, "img", iconFile),
brand: global.vectorConfig.brand || "Element",
icon_path: iconPath,
brand: global.vectorConfig.brand || 'Element',
};
// launcher
global.launcher = new AutoLaunch({
name: global.vectorConfig.brand || "Element",
name: global.vectorConfig.brand || 'Element',
isHidden: true,
mac: {
useLaunchAgent: true,
@@ -234,12 +210,12 @@ async function setupGlobals(): Promise<void> {
});
}
// Look for an auto-launcher under 'Riot' and if we find one,
// port its enabled/disabled-ness over to the new 'Element' launcher
async function moveAutoLauncher(): Promise<void> {
if (!global.vectorConfig.brand || global.vectorConfig.brand === "Element") {
// Look for an auto-launcher under 'Riot' and if we find one, port its
// enabled/disabled-ness over to the new 'Element' launcher
if (!global.vectorConfig.brand || global.vectorConfig.brand === 'Element') {
const oldLauncher = new AutoLaunch({
name: "Riot",
name: 'Riot',
isHidden: true,
mac: {
useLaunchAgent: true,
@@ -258,31 +234,24 @@ global.store = new Store({ name: "electron-config" });
global.appQuitting = false;
const exitShortcuts: Array<(input: Input, platform: string) => boolean> = [
(input, platform): boolean => platform !== "darwin" && input.alt && input.key.toUpperCase() === "F4",
(input, platform): boolean => platform !== "darwin" && input.control && input.key.toUpperCase() === "Q",
(input, platform): boolean =>
platform === "darwin" && input.meta && !input.control && input.key.toUpperCase() === "Q",
(input, platform) => platform !== 'darwin' && input.alt && input.key.toUpperCase() === 'F4',
(input, platform) => platform !== 'darwin' && input.control && input.key.toUpperCase() === 'Q',
(input, platform) => platform === 'darwin' && input.meta && input.key.toUpperCase() === 'Q',
];
const warnBeforeExit = (event: Event, input: Input): void => {
const shouldWarnBeforeExit = global.store.get("warnBeforeExit", true);
const shouldWarnBeforeExit = global.store.get('warnBeforeExit', true);
const exitShortcutPressed =
input.type === "keyDown" && exitShortcuts.some((shortcutFn) => shortcutFn(input, process.platform));
input.type === 'keyDown' && exitShortcuts.some(shortcutFn => shortcutFn(input, process.platform));
if (shouldWarnBeforeExit && exitShortcutPressed && global.mainWindow) {
const shouldCancelCloseRequest =
dialog.showMessageBoxSync(global.mainWindow, {
type: "question",
buttons: [
_t("action|cancel"),
_t("action|close_brand", {
brand: global.vectorConfig.brand || "Element",
}),
],
message: _t("confirm_quit"),
defaultId: 1,
cancelId: 0,
}) === 0;
if (shouldWarnBeforeExit && exitShortcutPressed) {
const shouldCancelCloseRequest = dialog.showMessageBoxSync(global.mainWindow, {
type: "question",
buttons: [_t("Cancel"), _t("Close Element")],
message: _t("Are you sure you want to quit?"),
defaultId: 1,
cancelId: 0,
}) === 0;
if (shouldCancelCloseRequest) {
event.preventDefault();
@@ -290,26 +259,24 @@ const warnBeforeExit = (event: Event, input: Input): void => {
}
};
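Expressing each quit shortcut as a predicate over (input, platform) keeps the key matching separate from the confirmation dialog; a purely illustrative way to exercise that list:

import { Input } from "electron";

// Evaluate the predicate list against a synthetic key event.
// The partial Input literal and the cast are for illustration only.
function matchesExitShortcut(input: Partial<Input>, platform: string): boolean {
    return exitShortcuts.some((shortcutFn) => shortcutFn(input as Input, platform));
}

// matchesExitShortcut({ key: "Q", control: true }, "linux")  -> true
// matchesExitShortcut({ key: "Q", control: true }, "darwin") -> false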
configureSentry();
// handle uncaught errors otherwise it displays
// stack traces in popup dialogs, which is terrible (which
// it will do any time the auto update poke fails, and there's
// no other way to catch this error).
// Assuming we generally run from the console when developing,
// this is far preferable.
process.on("uncaughtException", function (error: Error): void {
console.log("Unhandled exception", error);
process.on('uncaughtException', function(error: Error): void {
console.log('Unhandled exception', error);
});
app.commandLine.appendSwitch("--enable-usermedia-screen-capturing");
if (!app.commandLine.hasSwitch("enable-features")) {
app.commandLine.appendSwitch("enable-features", "WebRTCPipeWireCapturer");
app.commandLine.appendSwitch('--enable-usermedia-screen-capturing');
if (!app.commandLine.hasSwitch('enable-features')) {
app.commandLine.appendSwitch('enable-features', 'WebRTCPipeWireCapturer');
}
const gotLock = app.requestSingleInstanceLock();
if (!gotLock) {
console.log("Other instance detected: exiting");
console.log('Other instance detected: exiting');
app.exit();
}
@@ -321,16 +288,14 @@ protocolInit();
// work.
// Also mark it as secure (ie. accessing resources from this
// protocol and HTTPS won't trigger mixed content warnings).
protocol.registerSchemesAsPrivileged([
{
scheme: "vector",
privileges: {
standard: true,
secure: true,
supportFetchAPI: true,
},
protocol.registerSchemesAsPrivileged([{
scheme: 'vector',
privileges: {
standard: true,
secure: true,
supportFetchAPI: true,
},
]);
}]);
// Turn the sandbox on for *all* windows we might generate. Doing this means we don't
// have to specify a `sandbox: true` to each BrowserWindow.
@@ -344,19 +309,16 @@ protocol.registerSchemesAsPrivileged([
app.enableSandbox();
// We disable media controls here. We do this because calls use audio and video elements and they sometimes capture the media keys. See https://github.com/vector-im/element-web/issues/15704
app.commandLine.appendSwitch("disable-features", "HardwareMediaKeyHandling,MediaSessionService");
app.commandLine.appendSwitch('disable-features', 'HardwareMediaKeyHandling,MediaSessionService');
// Disable hardware acceleration if the setting has been set.
if (global.store.get("disableHardwareAcceleration", false) === true) {
if (global.store.get('disableHardwareAcceleration', false) === true) {
console.log("Disabling hardware acceleration.");
app.disableHardwareAcceleration();
}
app.on("ready", async () => {
let asarPath: string;
app.on('ready', async () => {
try {
asarPath = await getAsarPath();
await setupGlobals();
await moveAutoLauncher();
} catch (e) {
@@ -369,51 +331,51 @@ app.on("ready", async () => {
return;
}
if (argv["devtools"]) {
if (argv['devtools']) {
try {
// eslint-disable-next-line @typescript-eslint/no-var-requires
const { default: installExt, REACT_DEVELOPER_TOOLS, REACT_PERF } = require("electron-devtools-installer");
const { default: installExt, REACT_DEVELOPER_TOOLS, REACT_PERF } = require('electron-devtools-installer');
installExt(REACT_DEVELOPER_TOOLS)
.then((name: string) => console.log(`Added Extension: ${name}`))
.catch((err: unknown) => console.log("An error occurred: ", err));
.then((name) => console.log(`Added Extension: ${name}`))
.catch((err) => console.log('An error occurred: ', err));
installExt(REACT_PERF)
.then((name: string) => console.log(`Added Extension: ${name}`))
.catch((err: unknown) => console.log("An error occurred: ", err));
.then((name) => console.log(`Added Extension: ${name}`))
.catch((err) => console.log('An error occurred: ', err));
} catch (e) {
console.log(e);
}
}
protocol.registerFileProtocol("vector", (request, callback) => {
if (request.method !== "GET") {
protocol.registerFileProtocol('vector', (request, callback) => {
if (request.method !== 'GET') {
callback({ error: -322 }); // METHOD_NOT_SUPPORTED from chromium/src/net/base/net_error_list.h
return null;
}
const parsedUrl = new URL(request.url);
if (parsedUrl.protocol !== "vector:") {
if (parsedUrl.protocol !== 'vector:') {
callback({ error: -302 }); // UNKNOWN_URL_SCHEME
return;
}
if (parsedUrl.host !== "vector") {
if (parsedUrl.host !== 'vector') {
callback({ error: -105 }); // NAME_NOT_RESOLVED
return;
}
const target = parsedUrl.pathname.split("/");
const target = parsedUrl.pathname.split('/');
// path starts with a '/'
if (target[0] !== "") {
if (target[0] !== '') {
callback({ error: -6 }); // FILE_NOT_FOUND
return;
}
if (target[target.length - 1] == "") {
target[target.length - 1] = "index.html";
if (target[target.length - 1] == '') {
target[target.length - 1] = 'index.html';
}
let baseDir: string;
if (target[1] === "webapp") {
if (target[1] === 'webapp') {
baseDir = asarPath;
} else {
callback({ error: -6 }); // FILE_NOT_FOUND
@@ -425,7 +387,7 @@ app.on("ready", async () => {
baseDir = path.normalize(baseDir);
const relTarget = path.normalize(path.join(...target.slice(2)));
if (relTarget.startsWith("..")) {
if (relTarget.startsWith('..')) {
callback({ error: -6 }); // FILE_NOT_FOUND
return;
}
@@ -436,13 +398,13 @@ app.on("ready", async () => {
});
});
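The vector:// handler above normalises both the base directory and the requested path segments and rejects anything that would resolve upwards out of the webapp directory; reduced to a helper, that guard looks roughly like this (hypothetical names):

import path from "path";

// Resolve `segments` under `baseDir`, refusing any path that would escape it.
function resolveUnder(baseDir: string, segments: string[]): string | null {
    const relative = path.normalize(path.join(...segments));
    if (relative.startsWith("..")) return null; // would climb out of baseDir
    return path.join(path.normalize(baseDir), relative);
}

// resolveUnder("/app/webapp", ["index.html"])       -> "/app/webapp/index.html"
// resolveUnder("/app/webapp", ["..", "secret.txt"]) -> null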
if (argv["no-update"]) {
if (argv['no-update']) {
console.log('Auto update disabled via command line flag "--no-update"');
} else if (global.vectorConfig["update_base_url"]) {
console.log(`Starting auto update with base URL: ${global.vectorConfig["update_base_url"]}`);
updater.start(global.vectorConfig["update_base_url"]);
} else if (global.vectorConfig['update_base_url']) {
console.log(`Starting auto update with base URL: ${global.vectorConfig['update_base_url']}`);
updater.start(global.vectorConfig['update_base_url']);
} else {
console.log("No update_base_url is defined: auto update is disabled");
console.log('No update_base_url is defined: auto update is disabled');
}
// Load the previous window state with fallback to defaults
@@ -454,14 +416,11 @@ app.on("ready", async () => {
const preloadScript = path.normalize(`${__dirname}/preload.js`);
global.mainWindow = new BrowserWindow({
// https://www.electronjs.org/docs/faq#the-font-looks-blurry-what-is-this-and-what-can-i-do
backgroundColor: "#fff",
backgroundColor: '#fff',
titleBarStyle: process.platform === "darwin" ? "hidden" : "default",
trafficLightPosition: { x: 9, y: 8 },
icon: global.trayConfig.icon_path,
icon: iconPath,
show: false,
autoHideMenuBar: global.store.get("autoHideMenuBar", true),
autoHideMenuBar: global.store.get('autoHideMenuBar', true),
x: mainWindowState.x,
y: mainWindowState.y,
@@ -475,24 +434,19 @@ app.on("ready", async () => {
webgl: true,
},
});
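The mainWindowState object comes from electron-window-state, which restores the last saved window geometry, falls back to defaults on first run, and then keeps tracking the window it is told to manage; a minimal sketch of that library's documented usage (the sizes are arbitrary):

import windowStateKeeper from "electron-window-state";
import { BrowserWindow } from "electron";

// Restore the previous bounds (or the defaults), then let the keeper
// persist future move/resize events for this window.
const state = windowStateKeeper({ defaultWidth: 1024, defaultHeight: 768 });
const win = new BrowserWindow({ x: state.x, y: state.y, width: state.width, height: state.height });
state.manage(win);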
global.mainWindow.loadURL("vector://vector/webapp/");
if (process.platform === "darwin") {
setupMacosTitleBar(global.mainWindow);
}
global.mainWindow.loadURL('vector://vector/webapp/');
// Handle spellchecker
// For some reason spellCheckerEnabled isn't persisted, so we have to use the store here
global.mainWindow.webContents.session.setSpellCheckerEnabled(global.store.get("spellCheckerEnabled", true));
// Create trayIcon icon
if (global.store.get("minimizeToTray", true)) tray.create(global.trayConfig);
if (global.store.get('minimizeToTray', true)) tray.create(global.trayConfig);
global.mainWindow.once("ready-to-show", () => {
if (!global.mainWindow) return;
global.mainWindow.once('ready-to-show', () => {
mainWindowState.manage(global.mainWindow);
if (!argv["hidden"]) {
if (!argv['hidden']) {
global.mainWindow.show();
} else {
// hide here explicitly because window manage above sometimes shows it
@@ -500,37 +454,37 @@ app.on("ready", async () => {
}
});
global.mainWindow.webContents.on("before-input-event", warnBeforeExit);
global.mainWindow.webContents.on('before-input-event', warnBeforeExit);
global.mainWindow.on("closed", () => {
global.mainWindow.on('closed', () => {
global.mainWindow = null;
});
global.mainWindow.on("close", async (e) => {
global.mainWindow.on('close', async (e) => {
// If we are not quitting and have a tray icon then minimize to tray
if (!global.appQuitting && (tray.hasTray() || process.platform === "darwin")) {
if (!global.appQuitting && (tray.hasTray() || process.platform === 'darwin')) {
// On Mac, closing the window just hides it
// (this is generally how single-window Mac apps
// behave, eg. Mail.app)
e.preventDefault();
if (global.mainWindow?.isFullScreen()) {
global.mainWindow.once("leave-full-screen", () => global.mainWindow?.hide());
if (global.mainWindow.isFullScreen()) {
global.mainWindow.once('leave-full-screen', () => global.mainWindow.hide());
global.mainWindow.setFullScreen(false);
} else {
global.mainWindow?.hide();
global.mainWindow.hide();
}
return false;
}
});
if (process.platform === "win32") {
if (process.platform === 'win32') {
// Handle forward/backward mouse buttons in Windows
global.mainWindow.on("app-command", (e, cmd) => {
if (cmd === "browser-backward" && global.mainWindow?.webContents.canGoBack()) {
global.mainWindow.on('app-command', (e, cmd) => {
if (cmd === 'browser-backward' && global.mainWindow.webContents.canGoBack()) {
global.mainWindow.webContents.goBack();
} else if (cmd === "browser-forward" && global.mainWindow?.webContents.canGoForward()) {
} else if (cmd === 'browser-forward' && global.mainWindow.webContents.canGoForward()) {
global.mainWindow.webContents.goForward();
}
});
@@ -540,34 +494,32 @@ app.on("ready", async () => {
global.appLocalization = new AppLocalization({
store: global.store,
components: [(): void => tray.initApplicationMenu(), (): void => Menu.setApplicationMenu(buildMenuTemplate())],
});
session.defaultSession.setDisplayMediaRequestHandler((_, callback) => {
global.mainWindow?.webContents.send("openDesktopCapturerSourcePicker");
setDisplayMediaCallback(callback);
components: [
() => tray.initApplicationMenu(),
() => Menu.setApplicationMenu(buildMenuTemplate()),
],
});
});
app.on("window-all-closed", () => {
app.on('window-all-closed', () => {
app.quit();
});
app.on("activate", () => {
global.mainWindow?.show();
app.on('activate', () => {
global.mainWindow.show();
});
function beforeQuit(): void {
global.appQuitting = true;
global.mainWindow?.webContents.send("before-quit");
global.mainWindow?.webContents.send('before-quit');
}
app.on("before-quit", beforeQuit);
autoUpdater.on("before-quit-for-update", beforeQuit);
app.on('before-quit', beforeQuit);
autoUpdater.on('before-quit-for-update', beforeQuit);
app.on("second-instance", (ev, commandLine, workingDirectory) => {
app.on('second-instance', (ev, commandLine, workingDirectory) => {
// If other instance launched with --hidden then skip showing window
if (commandLine.includes("--hidden")) return;
if (commandLine.includes('--hidden')) return;
// Someone tried to run a second instance, we should focus our window.
if (global.mainWindow) {
@@ -581,4 +533,4 @@ app.on("second-instance", (ev, commandLine, workingDirectory) => {
// installer uses for the shortcut icon.
// This makes notifications work on windows 8.1 (and is
// a noop on other platforms).
app.setAppUserModelId("com.squirrel.element-desktop.Element");
app.setAppUserModelId('com.squirrel.element-desktop.Element');

46
src/i18n/strings/ar.json Normal file
View File

@@ -0,0 +1,46 @@
{
"File": "ملف",
"Close": "أغلِق",
"Actual Size": "المقاس الفعلي",
"View": "منظور",
"Select All": "حدّد الكل",
"Delete": "احذف",
"Copy": "انسخ",
"Edit": "تحرير",
"Close Element": "أغلِق Element",
"Cancel": "ألغِ",
"Bring All to Front": "ضَع الكل في المقدّمة",
"Speech": "نطق",
"Add to dictionary": "أضِف إلى القاموس",
"The image failed to save": "فشل حفظ الصورة",
"Failed to save image": "فشل حفظ الصورة",
"Save image as...": "احفظ الصورة كَ‍...",
"Copy link address": "انسخ عنوان الرابط",
"Copy email address": "انسخ عنوان البريد الإلكتروني",
"Copy image": "انسخ الصورة",
"Zoom": "تقريب",
"Stop Speaking": "أوقِف النطق",
"Start Speaking": "ابدأ النطق",
"Unhide": "اعرض",
"Hide Others": "أخفِ البقية",
"Hide": "أخفِ",
"Services": "الخدمات",
"About": "عن",
"Element Help": "مساعدة Element",
"Help": "مساعدة",
"Minimize": "صغّر",
"Window": "نافذة",
"Toggle Developer Tools": "فعّل/عطّل أدوات المطوّرين",
"Toggle Full Screen": "فعّل/عطّل ملء الشاشة",
"Preferences": "التفضيلات",
"Zoom In": "قرّب",
"Zoom Out": "بعّد",
"Paste and Match Style": "ألصِق وطابِق النمط",
"Paste": "ألصِق",
"Cut": "قصّ",
"Redo": "أعِد",
"Undo": "تراجَع",
"Quit": "غادِر",
"Show/Hide": "اعرض/أخفِ",
"Are you sure you want to quit?": "أمتأكّد من الإغلاق؟"
}

View File

@@ -0,0 +1 @@
{}

46
src/i18n/strings/be.json Normal file
View File

@@ -0,0 +1,46 @@
{
"Add to dictionary": "Дадаць у слоўнік",
"The image failed to save": "Не атрымалася захаваць малюнак",
"Failed to save image": "Не атрымалася захаваць малюнак",
"Save image as...": "Захаваць малюнак як...",
"Copy link address": "Скапіраваць спасылку",
"Copy email address": "Скапіраваць адрас пошты",
"Copy image": "Скапіраваць малюнак",
"File": "Файл",
"Bring All to Front": "Вынесці ўсё наперад",
"Zoom": "Маштаб",
"Stop Speaking": "Перастаць гаварыць",
"Start Speaking": "Гаварыць",
"Speech": "Голас",
"Unhide": "Паказаць",
"Hide Others": "Схаваць іншыя",
"Hide": "Схаваць",
"Services": "Сервісы",
"About": "Аб праграме",
"Element Help": "Даведка Element",
"Help": "Даведка",
"Close": "Зачыніць",
"Minimize": "Згарнуць",
"Window": "Акно",
"Toggle Developer Tools": "Пераключэнне інструментаў распрацоўніка",
"Toggle Full Screen": "Пераключэнне на ўвесь экран",
"Preferences": "Параметры",
"Zoom Out": "Паменшыць",
"Zoom In": "Павялічыць",
"Actual Size": "Фактычны Памер",
"View": "Прагляд",
"Select All": "Выбраць усё",
"Delete": "Выдаліць",
"Paste and Match Style": "Уставіць і супаставіць стыль",
"Paste": "Уставіць",
"Copy": "Капіяваць",
"Cut": "Выразаць",
"Redo": "Паўтарыць",
"Undo": "Адмяніць",
"Edit": "Змяніць",
"Quit": "Выйсці",
"Show/Hide": "Паказаць / схаваць",
"Are you sure you want to quit?": "Вы ўпэўненыя, што хочаце выйсці?",
"Close Element": "Зачыніць Element",
"Cancel": "Адмена"
}

47
src/i18n/strings/bg.json Normal file
View File

@@ -0,0 +1,47 @@
{
"Add to dictionary": "Добави към речника",
"The image failed to save": "Изображението не успя да се запази",
"Failed to save image": "Неуспешно запазване на изображението",
"Save image as...": "Запази изображението като...",
"Copy link address": "Копирай линка",
"Copy image address": "Копирай адреса на изображението",
"Copy email address": "Копирай имейл адрес",
"Copy image": "Копирай изображение",
"File": "Файл",
"Bring All to Front": "Покажи всички най-отгоре",
"Zoom": "Мащабирай",
"Stop Speaking": "Спри да говориш",
"Start Speaking": "Започни да говориш",
"Speech": "Говор",
"Unhide": "Покажи",
"Hide Others": "Скрий Останалите",
"Hide": "Скрий",
"Services": "Услуги",
"About": "Относно",
"Element Help": "Помощ за Елемент",
"Help": "Помощ",
"Close": "Затвори",
"Minimize": "Минимизирай",
"Window": "Прозорец",
"Toggle Developer Tools": "Превключи инструментите за разработчици",
"Toggle Full Screen": "Превключи на Цял екран",
"Preferences": "Предпочитания",
"Zoom Out": "Намали",
"Zoom In": "Увеличи",
"Actual Size": "Действителен Размер",
"View": "Преглед",
"Select All": "Избери Всичко",
"Delete": "Изтрий",
"Paste and Match Style": "Постави и Използвай текущия стил",
"Paste": "Постави",
"Copy": "Копирай",
"Cut": "Изрежи",
"Redo": "Върни",
"Undo": "Отмени",
"Edit": "Редактирай",
"Quit": "Напусни",
"Show/Hide": "Покажи/Скрий",
"Are you sure you want to quit?": "Сигурен ли си че искаш да напуснеш?",
"Close Element": "Затвори Елемент",
"Cancel": "Отказ"
}

47
src/i18n/strings/bn.json Normal file
View File

@@ -0,0 +1,47 @@
{
"Are you sure you want to quit?": "তুমি কি আসলেই বের হতে চাও?",
"Close Element": "এলিমেন্ট বন্ধ করো",
"Cancel": "বাতিল",
"Save image as...": "ছবি সংরক্ষণের ধরন...",
"Failed to save image": "ছবি সংরক্ষণ ব্যর্থ",
"The image failed to save": "ছবি সংরক্ষণ ব্যর্থ",
"Add to dictionary": "অভিধানে যোগ করি",
"Copy link address": "সংযোগের ঠিকানা অনুলিপি করো",
"Copy image address": "ছবির ঠিকানা অনুলিপি করো",
"Copy email address": "ইমেইল ঠিকানা অনুলিপি করো",
"Copy image": "ছবি অনুলিপি করো",
"File": "নথি",
"Bring All to Front": "সবকিছু সামনে আনো",
"Zoom": "বড় করা",
"Stop Speaking": "কথা বন্ধ করো",
"Start Speaking": "কথা শুরু করো",
"Speech": "বাচন",
"Unhide": "দেখাও",
"Hide Others": "অন্যগুলো লুকাও",
"Hide": "লুকাও",
"Services": "সেবা",
"About": "আমাদের সম্পর্কে",
"Element Help": "এলিমেন্ট সাহায্য",
"Help": "সাহায্য",
"Close": "বন্ধ",
"Minimize": "সংকোচন",
"Window": "জানালা",
"Toggle Developer Tools": "ডেভেলপার সরঞ্জামাদি",
"Toggle Full Screen": "পূর্ণ পর্দা করো/বের হও",
"Preferences": "পছন্দসমূহ",
"Zoom Out": "ছোট করো",
"Zoom In": "বড়ো করো",
"Actual Size": "আসল আকার",
"View": "দেখো",
"Select All": "সব নির্বাচন",
"Delete": "অপসারণ",
"Paste and Match Style": "লেপন ও একই ধরনে",
"Paste": "লেপন",
"Copy": "অনুলিপি",
"Cut": "কাটো",
"Redo": "পুন",
"Undo": "ফিরত",
"Edit": "সম্পাদনা",
"Quit": "প্রস্থান",
"Show/Hide": "দেখাও/লুকাও"
}

46
src/i18n/strings/ca.json Normal file
View File

@@ -0,0 +1,46 @@
{
"Add to dictionary": "Afegeix al diccionari",
"The image failed to save": "S'ha fallat en desar la imatge",
"Failed to save image": "S'ha fallat en desar la imatge",
"Save image as...": "Anomena i desa la imatge...",
"Copy link address": "Copia l'adreça de l'enllaç",
"Copy email address": "Copia l'adreça de correu electrònic",
"Copy image": "Copia la imatge",
"File": "Fitxer",
"Bring All to Front": "Porta-ho tot al davant",
"Zoom": "Escala",
"Stop Speaking": "Para la veu",
"Start Speaking": "Comença la veu",
"Speech": "Veu",
"Unhide": "Deixa d'amagar",
"Hide Others": "Amaga les altres",
"Hide": "Amaga",
"Services": "Serveis",
"About": "Quant a",
"Element Help": "Ajuda sobre l'Element",
"Help": "Ajuda",
"Close": "Tanca",
"Minimize": "Minimitza",
"Window": "Finestra",
"Toggle Developer Tools": "Commuta les eines per a desenvolupadors",
"Toggle Full Screen": "Commuta la pantalla completa",
"Preferences": "Preferències",
"Zoom Out": "Allunya",
"Zoom In": "Apropia",
"Actual Size": "Mida real",
"View": "Visualitza",
"Select All": "Selecciona-ho tot",
"Delete": "Suprimeix",
"Paste and Match Style": "Enganxa i fes coincidir l'estil",
"Paste": "Enganxa",
"Copy": "Copia",
"Cut": "Retalla",
"Redo": "Refés",
"Undo": "Desfés",
"Edit": "Edita",
"Quit": "Surt",
"Show/Hide": "Mostra/Amaga",
"Are you sure you want to quit?": "Esteu segur que voleu sortir?",
"Close Element": "Tanca l'Element",
"Cancel": "Cancel·la"
}

View File

@@ -1,63 +0,0 @@
{
"action": {
"cancel": "Storno",
"close": "Zavřít",
"close_brand": "Zavřít %(brand)s",
"copy": "Zkopírovat",
"cut": "Vyjmout",
"delete": "Smazat",
"edit": "Upravit",
"minimise": "Minimalizovat",
"paste": "Vložit",
"paste_match_style": "Vložit a přizpůsobit styl",
"quit": "Ukončit",
"redo": "Znovu",
"select_all": "Vybrat vše",
"show_hide": "Zobrazit/Skrýt",
"undo": "Zpět",
"zoom_in": "Přiblížit",
"zoom_out": "Oddálit"
},
"common": {
"about": "O",
"brand_help": "%(brand)s nápověda",
"help": "Nápověda",
"preferences": "Předvolby"
},
"confirm_quit": "Opravdu chcete ukončit aplikaci?",
"edit_menu": {
"speech": "Řeč",
"speech_start_speaking": "Spustit nahrávání hlasu",
"speech_stop_speaking": "Zastavit nahrávání hlasu"
},
"file_menu": {
"label": "Soubor"
},
"menu": {
"hide": "Skrýt",
"hide_others": "Skrýt ostatní",
"services": "Služby",
"unhide": "Zrušit skrytí"
},
"right_click_menu": {
"add_to_dictionary": "Přidat do slovníku",
"copy_email": "Kopírovat e-mailovou adresu",
"copy_image": "Kopírovat obrázek",
"copy_image_url": "Kopírovat adresu obrázku",
"copy_link_url": "Kopírovat adresu odkazu",
"save_image_as": "Uložit obrázek jako...",
"save_image_as_error_description": "Obrázek se nepodařilo uložit",
"save_image_as_error_title": "Chyba při ukládání obrázku"
},
"view_menu": {
"actual_size": "Aktuální velikost",
"toggle_developer_tools": "Přepnout zobrazení nástrojů pro vývojáře",
"toggle_full_screen": "Přepnout zobrazení celé obrazovky",
"view": "Zobrazit"
},
"window_menu": {
"bring_all_to_front": "Přenést vše do popředí",
"label": "Okno",
"zoom": "Lupa"
}
}

47
src/i18n/strings/de.json Normal file
View File

@@ -0,0 +1,47 @@
{
"Speech": "Sprache",
"Paste and Match Style": "Einfügen und Formatierung beibehalten",
"Stop Speaking": "Aufnahme beenden",
"Start Speaking": "Aufnahme starten",
"Services": "Dienste",
"Are you sure you want to quit?": "Wirklich beenden?",
"Add to dictionary": "Wörterbuch hinzufügen",
"The image failed to save": "Das Bild konnte nicht gespeichert werden",
"Failed to save image": "Bild kann nicht gespeichert werden",
"Save image as...": "Bild speichern unter...",
"Copy link address": "Link-Adresse kopieren",
"Copy email address": "Email-Adresse kopieren",
"Copy image": "Bild kopieren",
"File": "Datei",
"Bring All to Front": "Alles in den Vordergrund",
"Zoom": "Zoom",
"Unhide": "Wieder anzeigen",
"Hide Others": "Andere verstecken",
"Hide": "Verstecken",
"About": "Über",
"Element Help": "Hilfe zu Element",
"Help": "Hilfe",
"Close": "Schließen",
"Minimize": "Minimieren",
"Window": "Fenster",
"Toggle Developer Tools": "Developer-Tools an/aus",
"Toggle Full Screen": "Vollbildschirm an/aus",
"Preferences": "Einstellungen",
"Zoom Out": "Verkleinern",
"Zoom In": "Vergrößern",
"Actual Size": "Tatsächliche Größe",
"View": "Ansicht",
"Select All": "Alles auswählen",
"Delete": "Löschen",
"Paste": "Einfügen",
"Copy": "Kopieren",
"Cut": "Ausschneiden",
"Redo": "Wiederherstellen",
"Undo": "Rückgängig",
"Edit": "Bearbeiten",
"Quit": "Beenden",
"Show/Hide": "Anzeigen/Ausblenden",
"Close Element": "Element schließen",
"Cancel": "Abbrechen",
"Copy image address": "Bild-Adresse kopieren"
}

View File

@@ -1,63 +0,0 @@
{
"action": {
"cancel": "Abbrechen",
"close": "Schließen",
"close_brand": "%(brand)s schließen",
"copy": "Kopieren",
"cut": "Ausschneiden",
"delete": "Löschen",
"edit": "Bearbeiten",
"minimise": "Minimieren",
"paste": "Einfügen",
"paste_match_style": "Einfügen und Formatierung beibehalten",
"quit": "Beenden",
"redo": "Wiederherstellen",
"select_all": "Alles auswählen",
"show_hide": "Anzeigen/Ausblenden",
"undo": "Rückgängig",
"zoom_in": "Vergrößern",
"zoom_out": "Verkleinern"
},
"common": {
"about": "Über",
"brand_help": "%(brand)s Hilfe",
"help": "Hilfe",
"preferences": "Einstellungen"
},
"confirm_quit": "Wirklich beenden?",
"edit_menu": {
"speech": "Sprache",
"speech_start_speaking": "Aufnahme starten",
"speech_stop_speaking": "Aufnahme beenden"
},
"file_menu": {
"label": "Datei"
},
"menu": {
"hide": "Verstecken",
"hide_others": "Andere verstecken",
"services": "Dienste",
"unhide": "Wieder anzeigen"
},
"right_click_menu": {
"add_to_dictionary": "Wörterbuch hinzufügen",
"copy_email": "Email-Adresse kopieren",
"copy_image": "Bild kopieren",
"copy_image_url": "Bild-Adresse kopieren",
"copy_link_url": "Link-Adresse kopieren",
"save_image_as": "Bild speichern unter...",
"save_image_as_error_description": "Das Bild konnte nicht gespeichert werden",
"save_image_as_error_title": "Bild kann nicht gespeichert werden"
},
"view_menu": {
"actual_size": "Tatsächliche Größe",
"toggle_developer_tools": "Developer-Tools an/aus",
"toggle_full_screen": "Vollbildschirm an/aus",
"view": "Ansicht"
},
"window_menu": {
"bring_all_to_front": "Alles in den Vordergrund",
"label": "Fenster",
"zoom": "Zoomen"
}
}

View File

@@ -1,63 +1,47 @@
{
"action": {
"cancel": "Ακύρωση",
"close": "Κλείσιμο",
"close_brand": "Κλείσιμο %(brand)s",
"copy": "Αντιγραφή",
"cut": "Αποκοπή",
"delete": "Διαγραφή",
"edit": "Επεξεργασία",
"minimise": "Ελαχιστοποίηση",
"paste": "Επικόλληση",
"paste_match_style": "Επικόλληση και Ταίριασμα Στυλ",
"quit": "Κλείσιμο",
"redo": "Επανάληψη",
"select_all": "Επιλογή Όλων",
"show_hide": "Eμφάνιση/Απόκρυψη",
"undo": "Αναίρεση",
"zoom_in": "Μεγέθυνση",
"zoom_out": "Σμίκρυνση"
},
"common": {
"about": "Σχετικά με",
"brand_help": "%(brand)s Υποστήριξη",
"help": "Βοήθεια",
"preferences": "Προτιμήσεις"
},
"confirm_quit": "Είστε βέβαιος ότι θέλετε να εγκαταλείψετε;",
"edit_menu": {
"speech": "Ομιλία",
"speech_start_speaking": "Ξεκινήστε να μιλάτε",
"speech_stop_speaking": "Τερματίστε να μιλάτε"
},
"file_menu": {
"label": "Αρχείο"
},
"menu": {
"hide": "Απόκρυψη",
"hide_others": "Απόκρυψη Άλλων",
"services": "Υπηρεσίες",
"unhide": "Εμφάνιση"
},
"right_click_menu": {
"add_to_dictionary": "Προσθήκη στο λεξικό",
"copy_email": "Αντιγραφή διεύθυνσης email",
"copy_image": "Αντιγραφή εικόνας",
"copy_image_url": "Αντιγραφή διεύθυνσης εικόνας",
"copy_link_url": "Αντιγραφή διεύθυνσης συνδέσμου",
"save_image_as": "Αποθήκευση εικόνας ως...",
"save_image_as_error_description": "Η αποθήκευση της εικόνας απέτυχε",
"save_image_as_error_title": "Αποτυχία αποθήκευσης εικόνας"
},
"view_menu": {
"actual_size": "Πραγματικό Μέγεθος",
"toggle_developer_tools": "Άνοιγμα Εργαλείων Προγραμματιστή",
"toggle_full_screen": "Εναλλαγή σε Πλήρη Οθόνη",
"view": "Προβολή"
},
"window_menu": {
"bring_all_to_front": "Μεταφορά Όλων στο Προσκήνιο",
"label": "Παράθυρο",
"zoom": "Ζουμ"
}
"Are you sure you want to quit?": "Είστε βέβαιος ότι θέλετε να εγκαταλείψετε;",
"Zoom": "Ζουμ",
"Unhide": "Εμφάνιση",
"Window": "Παράθυρο",
"Toggle Developer Tools": "Άνοιγμα Εργαλείων Προγραμματιστή",
"Toggle Full Screen": "Εναλλαγή σε Πλήρη Οθόνη",
"Copy email address": "Αντιγραφή διεύθυνσης email",
"File": "Αρχείο",
"Bring All to Front": "Μεταφορά Όλων στο Προσκήνιο",
"Stop Speaking": "Τερματίστε να μιλάτε",
"Start Speaking": "Ξεκινήστε να μιλάτε",
"Speech": "Ομιλία",
"Hide Others": "Απόκρυψη Άλλων",
"Hide": "Απόκρυψη",
"Services": "Υπηρεσίες",
"About": "Σχετικά με",
"Element Help": "Βοήθεια για το Element",
"Help": "Βοήθεια",
"Close": "Κλείσιμο",
"Minimize": "Ελαχιστοποίηση",
"Preferences": "Προτιμήσεις",
"Zoom Out": "Σμίκρυνση",
"Zoom In": "Μεγέθυνση",
"Actual Size": "Πραγματικό Μέγεθος",
"View": "Προβολή",
"Select All": "Επιλογή Όλων",
"Delete": "Διαγραφή",
"Paste and Match Style": "Επικόλληση και Ταίριασμα Στυλ",
"Paste": "Επικόλληση",
"Copy": "Αντιγραφή",
"Cut": "Αποκοπή",
"Redo": "Επανάληψη",
"Undo": "Αναίρεση",
"Edit": "Επεξεργασία",
"Quit": "Κλείσιμο",
"Show/Hide": "Eμφάνιση/Απόκρυψη",
"Close Element": "Κλείστε το Element",
"Cancel": "Ακύρωση",
"Add to dictionary": "Προσθήκη στο λεξικό",
"The image failed to save": "Η αποθήκευση της εικόνας απέτυχε",
"Failed to save image": "Αποτυχία αποθήκευσης εικόνας",
"Save image as...": "Αποθήκευση εικόνας ως...",
"Copy link address": "Αντιγραφή διεύθυνσης συνδέσμου",
"Copy image address": "Αντιγραφή διεύθυνσης εικόνας",
"Copy image": "Αντιγραφή εικόνας"
}

Some files were not shown because too many files have changed in this diff.