Mirror of https://github.com/penpot/penpot.git (synced 2026-02-19 15:48:04 -05:00)

Compare commits: 1 commit (branch eva-replac...)

| Author | SHA1 | Date |
|---|---|---|
| elenatorro | 4baa894ee4 | |
.circleci/config.yml (new file, 351 lines)

@@ -0,0 +1,351 @@
version: 2.1

jobs:
  lint:
    docker:
      - image: penpotapp/devenv:latest

    working_directory: ~/repo
    resource_class: medium+

    steps:
      - checkout

      - run:
          name: "fmt check"
          working_directory: "."
          command: |
            yarn install
            yarn run fmt:clj:check

      - run:
          name: "lint clj common"
          working_directory: "."
          command: |
            yarn run lint:clj:common

      - run:
          name: "lint clj frontend"
          working_directory: "."
          command: |
            yarn run lint:clj:frontend

      - run:
          name: "lint clj backend"
          working_directory: "."
          command: |
            yarn run lint:clj:backend

      - run:
          name: "lint clj exporter"
          working_directory: "."
          command: |
            yarn run lint:clj:exporter

      - run:
          name: "lint clj library"
          working_directory: "."
          command: |
            yarn run lint:clj:library

  test-common:
    docker:
      - image: penpotapp/devenv:latest

    working_directory: ~/repo
    resource_class: medium+

    environment:
      JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
      NODE_OPTIONS: --max-old-space-size=4096

    steps:
      - checkout

      # Download and cache dependencies
      - restore_cache:
          keys:
            - v1-dependencies-{{ checksum "common/deps.edn"}}-{{ checksum "common/yarn.lock" }}

      - run:
          name: "JVM tests"
          working_directory: "./common"
          command: |
            clojure -M:dev:test

      - run:
          name: "NODE tests"
          working_directory: "./common"
          command: |
            yarn install
            yarn run test

      - save_cache:
          paths:
            - ~/.m2
            - ~/.yarn
            - ~/.gitlibs
            - ~/.cache/ms-playwright
          key: v1-dependencies-{{ checksum "common/deps.edn"}}-{{ checksum "common/yarn.lock" }}

  test-frontend:
    docker:
      - image: penpotapp/devenv:latest

    working_directory: ~/repo
    resource_class: medium+

    environment:
      JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
      NODE_OPTIONS: --max-old-space-size=4096

    steps:
      - checkout

      # Download and cache dependencies
      - restore_cache:
          keys:
            - v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}
      - run:
          name: "install dependencies"
          working_directory: "./frontend"
          # We install playwright here because the dependent tasks
          # use the same cache as this task, so we prepopulate it
          command: |
            yarn install
            yarn run playwright install chromium

      - run:
          name: "lint scss on frontend"
          working_directory: "./frontend"
          command: |
            yarn run lint:scss

      - run:
          name: "unit tests"
          working_directory: "./frontend"
          command: |
            yarn run test

      - save_cache:
          paths:
            - ~/.m2
            - ~/.yarn
            - ~/.gitlibs
            - ~/.cache/ms-playwright
          key: v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}

  test-library:
    docker:
      - image: penpotapp/devenv:latest

    working_directory: ~/repo
    resource_class: medium+

    environment:
      JAVA_OPTS: -Xmx6g
      NODE_OPTIONS: --max-old-space-size=4096

    steps:
      - checkout

      # Download and cache dependencies
      - restore_cache:
          keys:
            - v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}

      - run:
          name: Install dependencies and build
          working_directory: "./library"
          command: |
            yarn install

      - run:
          name: Build and Test
          working_directory: "./library"
          command: |
            ./scripts/build
            yarn run test

  test-components:
    docker:
      - image: penpotapp/devenv:latest

    working_directory: ~/repo
    resource_class: medium+

    environment:
      JAVA_OPTS: -Xmx6g -Xms2g
      NODE_OPTIONS: --max-old-space-size=4096

    steps:
      - checkout

      # Download and cache dependencies
      - restore_cache:
          keys:
            - v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}

      - run:
          name: Install dependencies
          working_directory: "./frontend"
          command: |
            yarn install
            yarn run playwright install chromium

      - run:
          name: Build Storybook
          working_directory: "./frontend"
          command: yarn run build:storybook

      - run:
          name: Serve Storybook and run tests
          working_directory: "./frontend"
          command: |
            npx concurrently -k -s first -n "SB,TEST" -c "magenta,blue" \
              "npx http-server storybook-static --port 6006 --silent" \
              "npx wait-on tcp:6006 && yarn test:storybook"

  test-integration:
    docker:
      - image: penpotapp/devenv:latest

    working_directory: ~/repo
    resource_class: large

    environment:
      JAVA_OPTS: -Xmx6g -Xms2g
      NODE_OPTIONS: --max-old-space-size=4096

    steps:
      - checkout

      # Download and cache dependencies
      - restore_cache:
          keys:
            - v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}

      # Build frontend
      - run:
          name: "frontend build"
          working_directory: "./frontend"
          command: |
            yarn install
            yarn run build:app:assets
            yarn run build:app
            yarn run build:app:libs

      # Build the wasm bundle
      - run:
          name: "wasm build"
          working_directory: "./render-wasm"
          command: |
            EMSDK_QUIET=1 . /opt/emsdk/emsdk_env.sh
            ./build release

      # Run integration tests
      - run:
          name: "integration tests"
          working_directory: "./frontend"
          command: |
            yarn run playwright install chromium
            yarn run test:e2e -x --workers=4

  test-backend:
    docker:
      - image: penpotapp/devenv:latest
      - image: cimg/postgres:14.5
        environment:
          POSTGRES_USER: penpot_test
          POSTGRES_PASSWORD: penpot_test
          POSTGRES_DB: penpot_test
      - image: cimg/redis:7.0.5

    working_directory: ~/repo
    resource_class: medium+

    environment:
      JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
      NODE_OPTIONS: --max-old-space-size=4096

    steps:
      - checkout

      - restore_cache:
          keys:
            - v1-dependencies-{{ checksum "backend/deps.edn" }}

      - run:
          name: "tests"
          working_directory: "./backend"
          command: |
            clojure -M:dev:test --reporter kaocha.report/documentation

          environment:
            PENPOT_TEST_DATABASE_URI: "postgresql://localhost/penpot_test"
            PENPOT_TEST_DATABASE_USERNAME: penpot_test
            PENPOT_TEST_DATABASE_PASSWORD: penpot_test
            PENPOT_TEST_REDIS_URI: "redis://localhost/1"

      - save_cache:
          paths:
            - ~/.m2
            - ~/.gitlibs
          key: v1-dependencies-{{ checksum "backend/deps.edn" }}

  test-render-wasm:
    docker:
      - image: penpotapp/devenv:latest

    working_directory: ~/repo
    resource_class: medium+
    environment:

    steps:
      - checkout

      - run:
          name: "fmt check"
          working_directory: "./render-wasm"
          command: |
            cargo fmt --check

      - run:
          name: "lint"
          working_directory: "./render-wasm"
          command: |
            ./lint

      - run:
          name: "cargo tests"
          working_directory: "./render-wasm"
          command: |
            ./test

workflows:
  penpot:
    jobs:
      - test-frontend:
          requires:
            - lint: success

      - test-library:
          requires:
            - lint: success

      - test-components:
          requires:
            - lint: success

      - test-backend:
          requires:
            - lint: success

      - test-common:
          requires:
            - lint: success

      - lint
      - test-integration
      - test-render-wasm
.github/workflows/build-tag.yml (vendored, 15 lines changed)

@@ -21,21 +21,6 @@ jobs:
    with:
      gh_ref: ${{ github.ref_name }}

  notify:
    name: Notifications
    needs: build-docker

    steps:
      - name: Notify Mattermost
        uses: mattermost/action-mattermost-notify@master
        with:
          MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
          MATTERMOST_CHANNEL: bot-alerts-cicd
          TEXT: |
            🐳 *[PENPOT] Docker image available.*
            🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
            @infra

  publish-final-tag:
    if: ${{ !contains(github.ref_name, '-RC') && !contains(github.ref_name, '-alpha') && !contains(github.ref_name, '-beta') && contains(github.ref_name, '.') }}
    needs: build-docker
.github/workflows/tests.yml (vendored, deleted, 289 lines)

@@ -1,289 +0,0 @@
name: "CI"

defaults:
  run:
    shell: bash

on:
  pull_request:
    types:
      - opened
      - synchronize
  push:
    branches:
      - develop
      - staging

concurrency:
  group: ${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  lint:
    name: "Linter"
    runs-on: ubuntu-24.04
    container: penpotapp/devenv:latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Check clojure code format
        run: |
          ./scripts/lint

  test-common:
    name: "Common Tests"
    runs-on: ubuntu-24.04
    container: penpotapp/devenv:latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Run tests on JVM
        working-directory: ./common
        run: |
          clojure -M:dev:test

      - name: Run tests on NODE
        working-directory: ./common
        run: |
          ./scripts/test

  test-frontend:
    name: "Frontend Tests"
    runs-on: ubuntu-24.04
    container: penpotapp/devenv:latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Unit Tests
        working-directory: ./frontend
        run: |
          ./scripts/test

      - name: Component Tests
        working-directory: ./frontend
        run: |
          ./scripts/test-components

  test-render-wasm:
    name: "Render WASM Tests"
    runs-on: ubuntu-24.04
    container: penpotapp/devenv:latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Format
        working-directory: ./render-wasm
        run: |
          cargo fmt --check

      - name: Lint
        working-directory: ./render-wasm
        run: |
          ./lint

      - name: Test
        working-directory: ./render-wasm
        run: |
          ./test

  test-backend:
    name: "Backend Tests"
    runs-on: ubuntu-24.04
    container: penpotapp/devenv:latest

    services:
      postgres:
        image: postgres:17
        # Provide the password for postgres
        env:
          POSTGRES_USER: penpot_test
          POSTGRES_PASSWORD: penpot_test
          POSTGRES_DB: penpot_test

        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

      redis:
        image: valkey/valkey:9

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Run tests
        working-directory: ./backend
        env:
          PENPOT_TEST_DATABASE_URI: "postgresql://postgres/penpot_test"
          PENPOT_TEST_DATABASE_USERNAME: penpot_test
          PENPOT_TEST_DATABASE_PASSWORD: penpot_test
          PENPOT_TEST_REDIS_URI: "redis://redis/1"

        run: |
          clojure -M:dev:test --reporter kaocha.report/documentation

  test-library:
    name: "Library Tests"
    runs-on: ubuntu-24.04
    container: penpotapp/devenv:latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Run tests
        working-directory: ./library
        run: |
          ./scripts/test

  build-integration:
    name: "Build Integration Bundle"
    runs-on: ubuntu-24.04
    container: penpotapp/devenv:latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Build Bundle
        working-directory: ./frontend
        run: |
          ./scripts/build 0.0.0

      - name: Store Bundle Cache
        uses: actions/cache@v4
        with:
          key: "integration-bundle-${{ github.sha }}"
          path: frontend/resources/public


  test-integration-1:
    name: "Integration Tests 1/4"
    runs-on: ubuntu-24.04
    container: penpotapp/devenv:latest
    needs: build-integration

    steps:
      - name: Checkout Repository
        uses: actions/checkout@v4

      - name: Restore Cache
        uses: actions/cache/restore@v4
        with:
          key: "integration-bundle-${{ github.sha }}"
          path: frontend/resources/public

      - name: Run Tests
        working-directory: ./frontend
        run: |
          ./scripts/test-e2e --shard="1/4";

      - name: Upload test result
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: integration-tests-result-1
          path: frontend/test-results/
          overwrite: true
          retention-days: 3

  test-integration-2:
    name: "Integration Tests 2/4"
    runs-on: ubuntu-24.04
    container: penpotapp/devenv:latest
    needs: build-integration

    steps:
      - name: Checkout Repository
        uses: actions/checkout@v4

      - name: Restore Cache
        uses: actions/cache/restore@v4
        with:
          key: "integration-bundle-${{ github.sha }}"
          path: frontend/resources/public

      - name: Run Tests
        working-directory: ./frontend
        run: |
          ./scripts/test-e2e --shard="2/4";

      - name: Upload test result
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: integration-tests-result-2
          path: frontend/test-results/
          overwrite: true
          retention-days: 3

  test-integration-3:
    name: "Integration Tests 3/4"
    runs-on: ubuntu-24.04
    container: penpotapp/devenv:latest
    needs: build-integration

    steps:
      - name: Checkout Repository
        uses: actions/checkout@v4

      - name: Restore Cache
        uses: actions/cache/restore@v4
        with:
          key: "integration-bundle-${{ github.sha }}"
          path: frontend/resources/public

      - name: Run Tests
        working-directory: ./frontend
        run: |
          ./scripts/test-e2e --shard="3/4";

      - name: Upload test result
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: integration-tests-result-3
          path: frontend/test-results/
          overwrite: true
          retention-days: 3

  test-integration-4:
    name: "Integration Tests 4/4"
    runs-on: ubuntu-24.04
    container: penpotapp/devenv:latest
    needs: build-integration

    steps:
      - name: Checkout Repository
        uses: actions/checkout@v4

      - name: Restore Cache
        uses: actions/cache/restore@v4
        with:
          key: "integration-bundle-${{ github.sha }}"
          path: frontend/resources/public

      - name: Run Tests
        working-directory: ./frontend
        run: |
          ./scripts/test-e2e --shard="4/4";

      - name: Upload test result
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: integration-tests-result-4
          path: frontend/test-results/
          overwrite: true
          retention-days: 3
.gitignore (vendored, 1 line changed)

@@ -80,4 +80,3 @@ node_modules
/playwright/.cache/
/render-wasm/target/
/**/.yarn/*
/.pnpm-store
CHANGES.md (44 lines changed)

@@ -1,21 +1,5 @@
# CHANGELOG

## 2.13.0 (Unreleased)

### :boom: Breaking changes & Deprecations

### :rocket: Epics and highlights

### :heart: Community contributions (Thank you!)

### :sparkles: New features & Enhancements

### :bug: Bugs fixed

- Fix problem when drag+duplicate a full grid [Taiga #12565](https://tree.taiga.io/project/penpot/issue/12565)
- Fix problem when pasting elements in reverse flex layout [Taiga #12460](https://tree.taiga.io/project/penpot/issue/12460)


## 2.12.0 (Unreleased)

### :boom: Breaking changes & Deprecations

@@ -23,7 +7,7 @@

#### Backend RPC API changes

The backend RPC API URLs are changed from `/api/rpc/command/<name>` to
`/api/main/methods/<name>`. The previous PATH is preserved for backward
`/api/main/methods/<name>` (the previous PATH is preserved for backward
compatibility; however, if you are a user of this API, it is strongly
recommended that you adapt your code to use the new PATH.
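For illustration only, a minimal sketch of what this looks like from an API client's perspective; the host, the method name (`get-profile`), and the token value are placeholders, not taken from this changelog:

```bash
# Hypothetical client calls; host, method name, and token are placeholders.
HOST="https://penpot.example.com"

# Old path, still served for backward compatibility:
curl -s -X POST "$HOST/api/rpc/command/get-profile" --cookie "auth-token=$TOKEN"

# New path, recommended going forward:
curl -s -X POST "$HOST/api/main/methods/get-profile" --cookie "auth-token=$TOKEN"
```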
@@ -51,7 +35,7 @@ If you have SSO/Social-Auth configured on your on-premise instance,
the following actions are required before update:

Update your OAuth or SSO provider configuration (e.g., Okta, Google,
Azure AD, etc.) to use the new callback URL. Failure to update may
result in authentication failures after upgrading.

**Reason for change:**

@@ -61,33 +45,15 @@ and makes it more modular, enabling the ability to configure SSO auth
provider dynamically.

#### Changes on default docker compose

We have updated `docker/images/docker-compose.yaml` with a small change
related to `PENPOT_SECRET_KEY`: since this version, this environment
variable is also required by the exporter. If you are running Penpot
on-premise, you will need to apply the same change to your own
`docker-compose.yaml` file.
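As a hedged sketch of what "the same change" could look like (the service name `penpot-exporter` and the override-file approach are assumptions, not taken from this changelog; check your own compose file):

```bash
# Hypothetical: mirror PENPOT_SECRET_KEY into the exporter service via an
# override file, so the base docker-compose.yaml stays untouched.
# The service name "penpot-exporter" is an assumption.
cat > docker-compose.override.yml <<'EOF'
services:
  penpot-exporter:
    environment:
      PENPOT_SECRET_KEY: change-me-to-a-long-random-string  # same value as the backend
EOF
docker compose up -d
```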
We have removed the Minio server from the `docker/images/docker-compose.yml`
example. It's still usable as before; we just removed the example.

### :rocket: Epics and highlights

### :heart: Community contributions (Thank you!)

- Ensure consistent snap behavior across all zoom levels [Github #7774](https://github.com/penpot/penpot/pull/7774) by [@Tokytome](https://github.com/Tokytome)

### :sparkles: New features & Enhancements

- Add the ability to select boards to export as PDF [Taiga #12320](https://tree.taiga.io/project/penpot/issue/12320)
- Add toggle for switching boolean property values [Taiga #12341](https://tree.taiga.io/project/penpot/us/12341)
- Make the file export process more reliable [Taiga #12555](https://tree.taiga.io/project/penpot/us/12555)
- Select boards to export as PDF [Taiga #12320](https://tree.taiga.io/project/penpot/issue/12320)
- Toggle for switching boolean property values [Taiga #12341](https://tree.taiga.io/project/penpot/us/12341)
- Add auth flow changes [Taiga #12333](https://tree.taiga.io/project/penpot/us/12333)
- Add new shape validation mechanism for shapes [Github #7696](https://github.com/penpot/penpot/pull/7696)
- Apply color tokens from sidebar [Taiga #11353](https://tree.taiga.io/project/penpot/us/11353)
- Display tokens in the inspect tab [Taiga #9313](https://tree.taiga.io/project/penpot/us/9313)
- Refactor clipboard behavior to address some minor inconsistencies and make pasting binary data faster. [Taiga #12571](https://tree.taiga.io/project/penpot/task/12571)

### :bug: Bugs fixed

@@ -102,8 +68,6 @@ example. It's still usable as before, we just removed the example.
- Fix shortcut conflict in text editor (increase/decrease font size vs word selection)
- Fix problem with plugins generating code for pages different than current one [Taiga #12312](https://tree.taiga.io/project/penpot/issue/12312)
- Fix non-uniform input confirmation behavior [Taiga #12294](https://tree.taiga.io/project/penpot/issue/12294)
- Fix copy/pasting application/transit+json [Taiga #12721](https://tree.taiga.io/project/penpot/issue/12721)
- Fix problem with plugins content attribute [Plugins #209](https://github.com/penpot/penpot-plugins/issues/209)

## 2.11.1
@@ -1,8 +1,7 @@
From: {{profile.fullname}} <{{profile.email}}> / {{profile.id}}
Subject: {{feedback-subject}}
Type: {{feedback-type}}

{% if feedback-error-href %}
{%- if feedback-error-href %}
HREF: {{feedback-error-href}}
{% endif -%}


@@ -25,7 +25,8 @@
<Logger name="app.storage.tmp" level="info" />
<Logger name="app.worker" level="trace" />
<Logger name="app.msgbus" level="info" />
<Logger name="app.http" level="info" />
<Logger name="app.http.websocket" level="info" />
<Logger name="app.http.sse" level="info" />
<Logger name="app.util.websocket" level="info" />
<Logger name="app.redis" level="info" />
<Logger name="app.rpc.rlimit" level="info" />

@@ -25,7 +25,8 @@
<Logger name="app.storage.tmp" level="info" />
<Logger name="app.worker" level="trace" />
<Logger name="app.msgbus" level="info" />
<Logger name="app.http" level="info" />
<Logger name="app.http.websocket" level="info" />
<Logger name="app.http.sse" level="info" />
<Logger name="app.util.websocket" level="info" />
<Logger name="app.redis" level="info" />
<Logger name="app.rpc.rlimit" level="info" />

@@ -1,6 +1,7 @@
#!/usr/bin/env bash

export PENPOT_MANAGEMENT_API_KEY=super-secret-management-api-key

export PENPOT_MANAGEMENT_API_SHARED_KEY=super-secret-management-api-key
export PENPOT_SECRET_KEY=super-secret-devenv-key
export PENPOT_HOST=devenv
@@ -255,8 +255,6 @@

(write-entry! output path params)

(events/tap :progress {:section :storage-object :id id})

(with-open [input (sto/get-object-data storage sobject)]
  (.putNextEntry ^ZipOutputStream output (ZipEntry. (str "objects/" id ext)))
  (io/copy input output :size (:size sobject))

@@ -281,8 +279,6 @@

thumbnails (bfc/get-file-object-thumbnails cfg file-id)]

(events/tap :progress {:section :file :id file-id})

(vswap! bfc/*state* update :files assoc file-id
        {:id file-id
         :name (:name file)

@@ -5,6 +5,7 @@
;; Copyright (c) KALEIDOS INC

(ns app.config
  "A configuration management."
  (:refer-clojure :exclude [get])
  (:require
   [app.common.data :as d]

@@ -102,7 +103,7 @@
[:http-server-io-threads {:optional true} ::sm/int]
[:http-server-max-worker-threads {:optional true} ::sm/int]

[:management-api-key {:optional true} :string]
[:management-api-shared-key {:optional true} :string]

[:telemetry-uri {:optional true} :string]
[:telemetry-with-taiga {:optional true} ::sm/boolean] ;; DELETE

@@ -50,27 +50,23 @@
(db/tx-run! cfg handler request)))))})

(defmethod ig/init-key ::routes
  [_ {:keys [::setup/props] :as cfg}]
  [_ cfg]
  ["" {:middleware [[mw/shared-key-auth (cf/get :management-api-shared-key)]
                    [default-system cfg]
                    [transaction]]}
   ["/authenticate"
    {:handler authenticate
     :allowed-methods #{:post}}]

  (let [management-key (or (cf/get :management-api-key)
                           (get props :management-key))]
   ["/get-customer"
    {:handler get-customer
     :transaction true
     :allowed-methods #{:post}}]

    ["" {:middleware [[mw/shared-key-auth management-key]
                      [default-system cfg]
                      [transaction]]}
     ["/authenticate"
      {:handler authenticate
       :allowed-methods #{:post}}]

     ["/get-customer"
      {:handler get-customer
       :transaction true
       :allowed-methods #{:post}}]

   ["/update-customer"
    {:handler update-customer
     :allowed-methods #{:post}
     :transaction true}]]))
   ["/update-customer"
    {:handler update-customer
     :allowed-methods #{:post}
     :transaction true}]])

;; ---- HELPERS
@@ -14,9 +14,7 @@
   [app.config :as cf]
   [app.http :as-alias http]
   [app.http.errors :as errors]
   [app.tokens :as tokens]
   [app.util.pointer-map :as pmap]
   [buddy.core.codecs :as bc]
   [cuerdas.core :as str]
   [yetti.adapter :as yt]
   [yetti.middleware :as ymw]

@@ -244,6 +242,7 @@
        (handler request)
        {::yres/status 405}))))))})


(defn- wrap-auth
  [handler decoders]
  (let [token-re

@@ -273,24 +272,9 @@
        process-request
        (fn [request]
          (if-let [{:keys [type token] :as auth} (get-token request)]
            (let [decode-fn (get decoders type)]
            (if (or (= type :cookie) (= type :bearer))
              (let [metadata (tokens/decode-header token)]
                ;; NOTE: we only proceed to decode claims on new
                ;; cookie tokens. The old cookies don't need to be
                ;; decoded because they use the token string as ID
                (if (and (= (:kid metadata) 1)
                         (= (:ver metadata) 1)
                         (some? decode-fn))
                  (assoc request ::http/auth-data (assoc auth
                                                         :claims (decode-fn token)
                                                         :metadata metadata))
                  (assoc request ::http/auth-data (assoc auth :metadata {:ver 0}))))

              (if decode-fn
                (assoc request ::http/auth-data (assoc auth :claims (decode-fn token)))
                (assoc request ::http/auth-data auth))))

            (if-let [decode-fn (get decoders type)]
              (assoc request ::http/auth-data (assoc auth :claims (decode-fn token)))
              (assoc request ::http/auth-data auth))
            request))]

    (fn [request]
@@ -303,14 +287,11 @@
(defn- wrap-shared-key-auth
  [handler shared-key]
  (if shared-key
    (let [shared-key (if (string? shared-key)
                       shared-key
                       (bc/bytes->b64-str shared-key true))]
      (fn [request]
        (let [key (yreq/get-header request "x-shared-key")]
          (if (= key shared-key)
            (handler request)
            {::yres/status 403}))))
    (fn [request]
      (let [key (yreq/get-header request "x-shared-key")]
        (if (= key shared-key)
          (handler request)
          {::yres/status 403})))
    (fn [_ _]
      {::yres/status 403})))


@@ -93,15 +93,15 @@
(update-session [_ session]
  (let [modified-at (ct/now)]
    (if (string? (:id session))
      (db/insert! pool :http-session-v2
                  (-> session
                      (assoc :id (uuid/next))
                      (assoc :created-at modified-at)
                      (assoc :modified-at modified-at)))
      (let [params (-> session
                       (assoc :id (uuid/next))
                       (assoc :created-at modified-at)
                       (assoc :modified-at modified-at))]
        (db/insert! pool :http-session-v2 params))

      (db/update! pool :http-session-v2
                  {:modified-at modified-at}
                  {:id (:id session)}
                  {::db/return-keys true}))))
                  {:id (:id session)}))))

(delete-session [_ id]
  (if (string? id)

@@ -158,15 +158,14 @@

(defn- assign-token
  [cfg session]
  (let [claims {:iss "authentication"
                :aud "penpot"
                :sid (:id session)
                :iat (:modified-at session)
                :uid (:profile-id session)
                :sso-provider-id (:sso-provider-id session)
                :sso-session-id (:sso-session-id session)}
        header {:kid 1 :ver 1}
        token (tokens/generate cfg claims header)]
  (let [token (tokens/generate cfg
                               {:iss "authentication"
                                :aud "penpot"
                                :sid (:id session)
                                :iat (:modified-at session)
                                :uid (:profile-id session)
                                :sso-provider-id (:sso-provider-id session)
                                :sso-session-id (:sso-session-id session)})]
    (assoc session :token token)))

(defn create-fn

@@ -226,14 +225,13 @@
  [handler {:keys [::manager] :as cfg}]
  (assert (manager? manager) "expected valid session manager")
  (fn [request]
    (let [{:keys [type token claims metadata]} (get request ::http/auth-data)]
    (let [{:keys [type token claims]} (get request ::http/auth-data)]
      (cond
        (= type :cookie)
        (let [session (case (:ver metadata)
        (let [session (if-let [sid (:sid claims)]
                        (read-session manager sid)
                        ;; BACKWARD COMPATIBILITY WITH OLD TOKENS
                        0 (read-session manager token)
                        1 (some->> (:sid claims) (read-session manager))
                        nil)
                        (read-session manager token))

              request (cond-> request
                        (some? session)

@@ -242,7 +240,7 @@

              response (handler request)]

          (if (and session (renew-session? session))
          (if (renew-session? session)
            (let [session (->> session
                               (update-session manager)
                               (assign-token cfg))]

@@ -250,11 +248,11 @@
              response))

        (= type :bearer)
        (let [session (case (:ver metadata)
        (let [session (if-let [sid (:sid claims)]
                        (read-session manager sid)
                        ;; BACKWARD COMPATIBILITY WITH OLD TOKENS
                        0 (read-session manager token)
                        1 (some->> (:sid claims) (read-session manager))
                        nil)
                        (read-session manager token))

              request (cond-> request
                        (some? session)
                        (-> (assoc ::profile-id (:profile-id session))
@@ -57,7 +57,7 @@
         :uid uuid/zero})
 body (t/encode {:events events})
 headers {"content-type" "application/transit+json"
          "origin" (str (cf/get :public-uri))
          "origin" (cf/get :public-uri)
          "cookie" (u/map->query-string {:auth-token token})}
 params {:uri uri
         :timeout 12000

@@ -49,7 +49,7 @@
 ctx (-> context
         (assoc :tenant (cf/get :tenant))
         (assoc :host (cf/get :host))
         (assoc :public-uri (str (cf/get :public-uri)))
         (assoc :public-uri (cf/get :public-uri))
         (assoc :logger/name logger)
         (assoc :logger/level level)
         (dissoc :request/params :value :params :data))]

@@ -295,8 +295,7 @@
[cfg]
(let [cfg (assoc cfg ::type "management" ::metrics-id :rpc-management-timing)]
  (->> (sv/scan-ns
        'app.rpc.management.subscription
        'app.rpc.management.exporter)
        'app.rpc.management.subscription)
       (map (partial process-method cfg "management" wrap-management))
       (into {}))))

@@ -347,16 +346,14 @@
(assert (valid-methods? (::management-methods params)) "expect valid methods map"))

(defmethod ig/init-key ::routes
  [_ {:keys [::methods ::management-methods ::setup/props] :as cfg}]

  (let [public-uri (cf/get :public-uri)
        management-key (or (cf/get :management-api-key)
                           (get props :management-key))]
  [_ {:keys [::methods ::management-methods] :as cfg}]

  (let [public-uri (cf/get :public-uri)]
    ["/api"

     ["/management"
      ["/methods/:type"
       {:middleware [[mw/shared-key-auth management-key]
       {:middleware [[mw/shared-key-auth (cf/get :management-api-shared-key)]
                     [session/authz cfg]]
        :handler (make-rpc-handler management-methods)}]


@@ -11,9 +11,9 @@
  [app.binfile.v1 :as bf.v1]
  [app.binfile.v3 :as bf.v3]
  [app.common.features :as cfeat]
  [app.common.logging :as l]
  [app.common.schema :as sm]
  [app.common.time :as ct]
  [app.common.uri :as u]
  [app.config :as cf]
  [app.db :as db]
  [app.http.sse :as sse]

@@ -25,12 +25,10 @@
  [app.rpc.commands.projects :as projects]
  [app.rpc.commands.teams :as teams]
  [app.rpc.doc :as-alias doc]
  [app.storage :as sto]
  [app.storage.tmp :as tmp]
  [app.rpc.helpers :as rph]
  [app.tasks.file-gc]
  [app.util.services :as sv]
  [app.worker :as-alias wrk]
  [datoteka.fs :as fs]))
  [app.worker :as-alias wrk]))

(set! *warn-on-reflection* true)
@@ -40,42 +38,52 @@
schema:export-binfile
[:map {:title "export-binfile"}
 [:file-id ::sm/uuid]
 [:version {:optional true} ::sm/int]
 [:include-libraries ::sm/boolean]
 [:embed-assets ::sm/boolean]])

(defn- export-binfile
  [{:keys [::sto/storage] :as cfg} {:keys [file-id include-libraries embed-assets]}]
  (let [output (tmp/tempfile*)]
    (try
      (-> cfg
          (assoc ::bfc/ids #{file-id})
          (assoc ::bfc/embed-assets embed-assets)
          (assoc ::bfc/include-libraries include-libraries)
          (bf.v3/export-files! output))
(defn stream-export-v1
  [cfg {:keys [file-id include-libraries embed-assets] :as params}]
  (rph/stream
   (fn [_ output-stream]
     (try
       (-> cfg
           (assoc ::bfc/ids #{file-id})
           (assoc ::bfc/embed-assets embed-assets)
           (assoc ::bfc/include-libraries include-libraries)
           (bf.v1/export-files! output-stream))
       (catch Throwable cause
         (l/err :hint "exception on exporting file"
                :file-id (str file-id)
                :cause cause))))))

      (let [data (sto/content output)
            object (sto/put-object! storage
                                    {::sto/content data
                                     ::sto/touched-at (ct/in-future {:minutes 60})
                                     :content-type "application/zip"
                                     :bucket "tempfile"})]

        (-> (cf/get :public-uri)
            (u/join "/assets/by-id/")
            (u/join (str (:id object)))))

      (finally
        (fs/delete output)))))
(defn stream-export-v3
  [cfg {:keys [file-id include-libraries embed-assets] :as params}]
  (rph/stream
   (fn [_ output-stream]
     (try
       (-> cfg
           (assoc ::bfc/ids #{file-id})
           (assoc ::bfc/embed-assets embed-assets)
           (assoc ::bfc/include-libraries include-libraries)
           (bf.v3/export-files! output-stream))
       (catch Throwable cause
         (l/err :hint "exception on exporting file"
                :file-id (str file-id)
                :cause cause))))))

(sv/defmethod ::export-binfile
  "Export a penpot file in a binary format."
  {::doc/added "1.15"
   ::doc/changes [["2.12" "Remove version parameter, only one version is supported"]]
   ::webhooks/event? true
   ::sm/params schema:export-binfile}
  [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id] :as params}]
  [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id version file-id] :as params}]
  (files/check-read-permissions! pool profile-id file-id)
  (sse/response (partial export-binfile cfg params)))
  (let [version (or version 1)]
    (case (int version)
      1 (stream-export-v1 cfg params)
      2 (throw (ex-info "not-implemented" {}))
      3 (stream-export-v3 cfg params))))

;; --- Command: import-binfile
@@ -39,7 +39,7 @@
 fullname (str "Demo User " sem)

 password (-> (bn/random-bytes 16)
              (bc/bytes->b64 true)
              (bc/bytes->b64u)
              (bc/bytes->str))

 params {:email email

@@ -1209,7 +1209,7 @@
;; --- MUTATION COMMAND: restore-files-immediatelly

(def ^:private sql:resolve-editable-files
  "SELECT f.id, f.project_id
  "SELECT f.id
     FROM file AS f
     JOIN project AS p ON (p.id = f.project_id)
     JOIN team AS t ON (t.id = p.team_id)

@@ -1250,38 +1250,18 @@
            {:file-id file-id}
            {::db/return-keys false}))

(def ^:private sql:restore-projects
  "UPDATE project SET deleted_at = null WHERE id = ANY(?::uuid[])")

(defn- restore-projects
  [conn project-ids]
  (let [project-ids (db/create-array conn "uuid" project-ids)]
    (->> (db/exec-one! conn [sql:restore-projects project-ids])
         (db/get-update-count))))

(defn- restore-deleted-team-files
  [{:keys [::db/conn]} {:keys [::rpc/profile-id team-id ids]}]
  (teams/check-edition-permissions! conn profile-id team-id)
  (let [total-files
        (count ids)

        {:keys [files projects]}
        (reduce (fn [result {:keys [id project-id]}]
                  (let [index (-> result :files count)]
                    (events/tap :progress {:file-id id :index index :total total-files})
                    (restore-file conn id)

                    (-> result
                        (update :files conj id)
                        (update :projects conj project-id))))

                {:files #{} :projectes #{}}
                (db/plan conn [sql:resolve-editable-files team-id
                               (db/create-array conn "uuid" ids)]))]

    (restore-projects conn projects)

    files))
  (reduce (fn [affected {:keys [id]}]
            (let [index (inc (count affected))]
              (events/tap :progress {:file-id id :index index :total (count ids)})
              (restore-file conn id)
              (conj affected id)))
          #{}
          (db/plan conn [sql:resolve-editable-files team-id
                         (db/create-array conn "uuid" ids)])))

(def ^:private schema:restore-deleted-team-files
  [:map {:title "restore-deleted-team-files"}

@@ -1289,8 +1269,8 @@
   [:ids [::sm/set ::sm/uuid]]])

(sv/defmethod ::restore-deleted-team-files
  "Removes the deletion mark from the specified files (and respective
  projects) on the specified team."
  "Removes the deletion mark from the specified files (and respective projects)."

  {::doc/added "2.12"
   ::sse/stream? true
   ::sm/params schema:restore-deleted-team-files}

@@ -96,7 +96,7 @@
;; loading all pages into memory for find the frame set for thumbnail.

(defn get-file-data-for-thumbnail
  [{:keys [::db/conn] :as cfg} {:keys [data id] :as file} strip-frames-with-thumbnails]
  [{:keys [::db/conn] :as cfg} {:keys [data id] :as file}]
  (letfn [;; function responsible on finding the frame marked to be
          ;; used as thumbnail; the returned frame always have
          ;; the :page-id set to the page that it belongs.

@@ -173,7 +173,7 @@

      ;; Assoc the available thumbnails and prune not visible shapes
      ;; for avoid transfer unnecessary data.
      strip-frames-with-thumbnails
      :always
      (update :objects assoc-thumbnails page-id thumbs)))))

(def ^:private

@@ -186,8 +186,7 @@
 [:map {:title "PartialFile"}
  [:id ::sm/uuid]
  [:revn {:min 0} ::sm/int]
  [:page [:map-of :keyword ::sm/any]]
  [:strip-frames-with-thumbnails {:optional true} ::sm/boolean]])
  [:page [:map-of :keyword ::sm/any]]])

(sv/defmethod ::get-file-data-for-thumbnail
  "Retrieves the data for generate the thumbnail of the file. Used

@@ -196,7 +195,7 @@
   ::doc/module :files
   ::sm/params schema:get-file-data-for-thumbnail
   ::sm/result schema:partial-file}
  [cfg {:keys [::rpc/profile-id file-id strip-frames-with-thumbnails] :as params}]
  [cfg {:keys [::rpc/profile-id file-id] :as params}]
  (db/run! cfg (fn [{:keys [::db/conn] :as cfg}]
                 (files/check-read-permissions! conn profile-id file-id)

@@ -206,18 +205,14 @@

        file (bfc/get-file cfg file-id
                           :realize? true
                           :read-only? true)

        strip-frames-with-thumbnails
        (or (nil? strip-frames-with-thumbnails) ;; if not present, default to true
            (true? strip-frames-with-thumbnails))]
                           :read-only? true)]

    (-> (cfeat/get-team-enabled-features cf/flags team)
        (cfeat/check-file-features! (:features file)))

    {:file-id file-id
     :revn (:revn file)
     :page (get-file-data-for-thumbnail cfg file strip-frames-with-thumbnails)}))))
     :page (get-file-data-for-thumbnail cfg file)}))))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; MUTATION COMMANDS

@@ -169,19 +169,12 @@
;; --- MUTATION: Create Project

(defn- create-project
  [{:keys [::db/conn] :as cfg} {:keys [::rpc/request-at profile-id team-id] :as params}]
  (assert (ct/inst? request-at) "expect request-at assigned")
  (let [params (-> params
                   (assoc :created-at request-at)
                   (assoc :modified-at request-at))
        project (teams/create-project conn params)
        timestamp (::rpc/request-at params)]
  [{:keys [::db/conn] :as cfg} {:keys [profile-id team-id] :as params}]
  (let [project (teams/create-project conn params)]
    (teams/create-project-role conn profile-id (:id project) :owner)
    (db/insert! conn :team-project-profile-rel
                {:project-id (:id project)
                 :profile-id profile-id
                 :created-at timestamp
                 :modified-at timestamp
                 :team-id team-id
                 :is-pinned false})
    (assoc project :is-pinned false)))

@@ -39,8 +39,9 @@
(defn- encode
  [s]
  (-> s
      (bh/blake2b-256)
      (bc/bytes->b64-str true)))
      bh/blake2b-256
      bc/bytes->b64u
      bc/bytes->str))

(defn- fmt-key
  [s]
@@ -1,49 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC

(ns app.rpc.management.exporter
  (:require
   [app.common.schema :as sm]
   [app.common.time :as ct]
   [app.common.uri :as u]
   [app.config :as cf]
   [app.media :refer [schema:upload]]
   [app.rpc :as-alias rpc]
   [app.rpc.doc :as doc]
   [app.storage :as sto]
   [app.util.services :as sv]))

;; ---- RPC METHOD: UPLOAD-TEMPFILE

(def ^:private
  schema:upload-tempfile-params
  [:map {:title "upload-templfile-params"}
   [:content schema:upload]])

(def ^:private
  schema:upload-tempfile-result
  [:map {:title "upload-templfile-result"}])

(sv/defmethod ::upload-tempfile
  {::doc/added "2.12"
   ::sm/params schema:upload-tempfile-params
   ::sm/result schema:upload-tempfile-result}
  [cfg {:keys [::rpc/profile-id content]}]
  (let [storage (sto/resolve cfg)
        hash (sto/calculate-hash (:path content))
        data (-> (sto/content (:path content))
                 (sto/wrap-with-hash hash))
        content {::sto/content data
                 ::sto/deduplicate? true
                 ::sto/touched-at (ct/in-future {:minutes 10})
                 :profile-id profile-id
                 :content-type (:mtype content)
                 :bucket "tempfile"}
        object (sto/put-object! storage content)]
    {:id (:id object)
     :uri (-> (cf/get :public-uri)
              (u/join "/assets/by-id/")
              (u/join (str (:id object))))}))

@@ -22,7 +22,8 @@
(defn- generate-random-key
  []
  (-> (bn/random-bytes 64)
      (bc/bytes->b64-str true)))
      (bc/bytes->b64u)
      (bc/bytes->str)))

(defn- get-all-props
  [conn]

@@ -84,11 +85,12 @@
  (l/warn :hint (str "using autogenerated secret-key, it will change on each restart and will invalidate "
                     "all sessions on each restart, it is highly recommended setting up the "
                     "PENPOT_SECRET_KEY environment variable")))

(let [secret (or key (generate-random-key))]
  (-> (get-all-props conn)
      (assoc :secret-key secret)
      (assoc :tokens-key (keys/derive secret :salt "tokens"))
      (assoc :management-key (keys/derive secret :salt "management"))
      (update :instance-id handle-instance-id conn (db/read-only? pool)))))))

(sm/register! ::props [:map-of :keyword ::sm/any])
;; FIXME
(sm/register! ::props :any)
@@ -8,13 +8,13 @@
  "Keys derivation service."
  (:refer-clojure :exclude [derive])
  (:require
   [app.common.spec :as us]
   [buddy.core.kdf :as bk]))

(defn derive
  "Derive a key from secret-key"
  [secret-key & {:keys [salt size] :or {size 32}}]
  (assert (string? secret-key) "expect string")
  (assert (seq secret-key) "expect string")
  (us/assert! ::us/not-empty-string secret-key)
  (let [engine (bk/engine {:key secret-key
                           :salt salt
                           :alg :hkdf

@@ -41,7 +41,6 @@
  "file-object-thumbnail"
  "file-thumbnail"
  "profile"
  "tempfile"
  "file-data"
  "file-data-fragment"
  "file-change"})

@@ -164,6 +163,9 @@
   backend
   (:metadata result))))

(def ^:private sql:retrieve-storage-object
  "select * from storage_object where id = ? and (deleted_at is null or deleted_at > now())")

(defn row->storage-object [res]
  (let [mdata (or (some-> (:metadata res) (db/decode-transit-pgobject)) {})]
    (impl/storage-object

@@ -175,15 +177,9 @@
     (keyword (:backend res))
     mdata)))

(def ^:private sql:get-storage-object
  "SELECT *
     FROM storage_object
    WHERE id = ?
      AND (deleted_at IS NULL)")

(defn- get-database-object
(defn- retrieve-database-object
  [conn id]
  (some-> (db/exec-one! conn [sql:get-storage-object id])
  (some-> (db/exec-one! conn [sql:retrieve-storage-object id])
          (row->storage-object)))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

@@ -206,7 +202,7 @@
(defn get-object
  [{:keys [::db/connectable] :as storage} id]
  (assert (valid-storage? storage))
  (get-database-object connectable id))
  (retrieve-database-object connectable id))

(defn put-object!
  "Creates a new object with the provided content."

@@ -37,6 +37,7 @@
  (into #{} (map :id))
  (not-empty))))


(def ^:private sql:delete-sobjects
  "DELETE FROM storage_object
    WHERE id = ANY(?::uuid[])")

@@ -76,37 +77,47 @@
  (d/group-by (comp keyword :backend) :id #{} items))

(def ^:private sql:get-deleted-sobjects
  "SELECT s.*
     FROM storage_object AS s
  "SELECT s.* FROM storage_object AS s
    WHERE s.deleted_at IS NOT NULL
      AND s.deleted_at <= ?
      AND s.deleted_at < now() - ?::interval
    ORDER BY s.deleted_at ASC")

(defn- get-buckets
  [conn]
  (let [now (ct/now)]
  [conn min-age]
  (let [age (db/interval min-age)]
    (sequence
     (comp (partition-all 25)
           (mapcat group-by-backend))
     (db/cursor conn [sql:get-deleted-sobjects now]))))
     (db/cursor conn [sql:get-deleted-sobjects age]))))


(defn- clean-deleted!
  [{:keys [::db/conn] :as cfg}]
  [{:keys [::db/conn ::min-age] :as cfg}]
  (reduce (fn [total [backend-id ids]]
            (let [deleted (delete-in-bulk! cfg backend-id ids)]
              (+ total (or deleted 0))))
          0
          (get-buckets conn)))
          (get-buckets conn min-age)))

(defmethod ig/assert-key ::handler
  [_ params]
  (assert (sto/valid-storage? (::sto/storage params)) "expect valid storage")
  (assert (db/pool? (::db/pool params)) "expect valid storage"))

(defmethod ig/expand-key ::handler
  [k v]
  {k (assoc v ::min-age (ct/duration {:hours 2}))})

(defmethod ig/init-key ::handler
  [_ cfg]
  (fn [_]
    (db/tx-run! cfg (fn [cfg]
                      (let [total (clean-deleted! cfg)]
                        (l/inf :hint "task finished" :total total)
                        {:deleted total})))))
  [_ {:keys [::min-age] :as cfg}]
  (fn [{:keys [props] :as task}]
    (let [min-age (ct/duration (or (:min-age props) min-age))]
      (db/tx-run! cfg (fn [cfg]
                        (let [cfg (assoc cfg ::min-age min-age)
                              total (clean-deleted! cfg)]

                          (l/inf :hint "task finished"
                                 :min-age (ct/format-duration min-age)
                                 :total total)

                          {:deleted total}))))))
@@ -22,8 +22,6 @@
  [app.common.data.macros :as dm]
  [app.common.exceptions :as ex]
  [app.common.logging :as l]
  [app.common.time :as ct]
  [app.config :as cf]
  [app.db :as db]
  [app.storage :as-alias sto]
  [app.storage.impl :as impl]

@@ -103,15 +101,14 @@

(def ^:private sql:mark-delete-in-bulk
  "UPDATE storage_object
      SET deleted_at = ?,
      SET deleted_at = now(),
          touched_at = NULL
    WHERE id = ANY(?::uuid[])")

(defn- mark-delete-in-bulk!
  [conn deletion-delay ids]
  (let [ids (db/create-array conn "uuid" ids)
        now (ct/plus (ct/now) deletion-delay)]
    (db/exec-one! conn [sql:mark-delete-in-bulk now ids])))
  [conn ids]
  (let [ids (db/create-array conn "uuid" ids)]
    (db/exec-one! conn [sql:mark-delete-in-bulk ids])))

;; NOTE: A getter that retrieves the key which will be used for group
;; ids; previously we have no value, then we introduced the

@@ -140,20 +137,18 @@
(if-let [{:keys [id] :as object} (first objects)]
  (if (has-refs? conn object)
    (do
      (l/dbg :id (str id)
             :status "freeze"
             :bucket bucket)
      (l/debug :id (str id)
               :status "freeze"
               :bucket bucket)
      (recur (conj to-freeze id) to-delete (rest objects)))
    (do
      (l/dbg :id (str id)
             :status "delete"
             :bucket bucket)
      (l/debug :id (str id)
               :status "delete"
               :bucket bucket)
      (recur to-freeze (conj to-delete id) (rest objects))))
  (let [deletion-delay (if (= bucket "tempfile")
                         (ct/duration {:hours 2})
                         (cf/get-deletion-delay))]
  (do
    (some->> (seq to-freeze) (mark-freeze-in-bulk! conn))
    (some->> (seq to-delete) (mark-delete-in-bulk! conn deletion-delay))
    (some->> (seq to-delete) (mark-delete-in-bulk! conn))
    [(count to-freeze) (count to-delete)]))))

(defn- process-bucket!

@@ -165,7 +160,6 @@
    "file-thumbnail" (process-objects! conn has-file-thumbnails-refs? bucket objects)
    "profile" (process-objects! conn has-profile-refs? bucket objects)
    "file-data" (process-objects! conn has-file-data-refs? bucket objects)
    "tempfile" (process-objects! conn (constantly false) bucket objects)
    (ex/raise :type :internal
              :code :unexpected-unknown-reference
              :hint (dm/fmt "unknown reference '%'" bucket))))

@@ -179,27 +173,27 @@
          [0 0]
          (d/group-by lookup-bucket identity #{} chunk)))

(def ^:private sql:get-touched-storage-objects
(def ^:private
  sql:get-touched-storage-objects
  "SELECT so.*
     FROM storage_object AS so
    WHERE so.touched_at IS NOT NULL
      AND so.touched_at <= ?
    ORDER BY touched_at ASC
      FOR UPDATE
     SKIP LOCKED
    LIMIT 10")

(defn get-chunk
  [conn timestamp]
  (->> (db/exec! conn [sql:get-touched-storage-objects timestamp])
  [conn]
  (->> (db/exec! conn [sql:get-touched-storage-objects])
       (map impl/decode-row)
       (not-empty)))

(defn- process-touched!
  [{:keys [::db/pool ::timestamp] :as cfg}]
  [{:keys [::db/pool] :as cfg}]
  (loop [freezed 0
         deleted 0]
    (if-let [chunk (get-chunk pool timestamp)]
    (if-let [chunk (get-chunk pool)]
      (let [[nfo ndo] (db/tx-run! cfg process-chunk! chunk)]
        (recur (long (+ freezed nfo))
               (long (+ deleted ndo))))

@@ -215,6 +209,5 @@

(defmethod ig/init-key ::handler
  [_ cfg]
  (fn [_]
    (process-touched! (assoc cfg ::timestamp (ct/now)))))
  (fn [_] (process-touched! cfg)))
@@ -79,17 +79,14 @@
;; API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn tempfile*
  [& {:keys [suffix prefix]
(defn tempfile
  [& {:keys [suffix prefix min-age]
      :or {prefix "penpot."
           suffix ".tmp"}}]
  (let [attrs (fs/make-permissions "rw-r--r--")
        path (fs/join default-tmp-dir (str prefix (uuid/next) suffix))]
    (Files/createFile path attrs)))

(defn tempfile
  [& {:keys [min-age] :as opts}]
  (let [path (tempfile* opts)]
        path (fs/join default-tmp-dir (str prefix (uuid/next) suffix))
        path (Files/createFile path attrs)]
    (fs/delete-on-exit! path)
    (sp/offer! queue [path (some-> min-age ct/duration)])
    path))
@@ -18,15 +18,15 @@
(def ^:private sql:get-profiles
  "SELECT id, photo_id FROM profile
    WHERE deleted_at IS NOT NULL
      AND deleted_at <= ?
      AND deleted_at < now() + ?::interval
    ORDER BY deleted_at ASC
    LIMIT ?
      FOR UPDATE
     SKIP LOCKED")

(defn- delete-profiles!
  [{:keys [::db/conn ::timestamp ::chunk-size ::sto/storage] :as cfg}]
  (->> (db/plan conn [sql:get-profiles timestamp chunk-size] {:fetch-size 5})
  [{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
  (->> (db/plan conn [sql:get-profiles deletion-threshold chunk-size] {:fetch-size 5})
       (reduce (fn [total {:keys [id photo-id]}]
                 (l/trc :obj "profile" :id (str id))

@@ -41,15 +41,15 @@
(def ^:private sql:get-teams
  "SELECT deleted_at, id, photo_id FROM team
    WHERE deleted_at IS NOT NULL
      AND deleted_at <= ?
      AND deleted_at < now() + ?::interval
    ORDER BY deleted_at ASC
    LIMIT ?
      FOR UPDATE
     SKIP LOCKED")

(defn- delete-teams!
  [{:keys [::db/conn ::timestamp ::chunk-size ::sto/storage] :as cfg}]
  (->> (db/plan conn [sql:get-teams timestamp chunk-size] {:fetch-size 5})
  [{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
  (->> (db/plan conn [sql:get-teams deletion-threshold chunk-size] {:fetch-size 5})
       (reduce (fn [total {:keys [id photo-id deleted-at]}]
                 (l/trc :obj "team"
                        :id (str id)

@@ -68,15 +68,15 @@
  "SELECT id, team_id, deleted_at, woff1_file_id, woff2_file_id, otf_file_id, ttf_file_id
     FROM team_font_variant
    WHERE deleted_at IS NOT NULL
      AND deleted_at <= ?
      AND deleted_at < now() + ?::interval
    ORDER BY deleted_at ASC
    LIMIT ?
      FOR UPDATE
     SKIP LOCKED")

(defn- delete-fonts!
  [{:keys [::db/conn ::timestamp ::chunk-size ::sto/storage] :as cfg}]
  (->> (db/plan conn [sql:get-fonts timestamp chunk-size] {:fetch-size 5})
  [{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
  (->> (db/plan conn [sql:get-fonts deletion-threshold chunk-size] {:fetch-size 5})
       (reduce (fn [total {:keys [id team-id deleted-at] :as font}]
                 (l/trc :obj "font-variant"
                        :id (str id)

@@ -98,15 +98,15 @@
  "SELECT id, deleted_at, team_id
     FROM project
    WHERE deleted_at IS NOT NULL
      AND deleted_at <= ?
      AND deleted_at < now() + ?::interval
    ORDER BY deleted_at ASC
    LIMIT ?
      FOR UPDATE
     SKIP LOCKED")

(defn- delete-projects!
  [{:keys [::db/conn ::timestamp ::chunk-size] :as cfg}]
  (->> (db/plan conn [sql:get-projects timestamp chunk-size] {:fetch-size 5})
  [{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg}]
  (->> (db/plan conn [sql:get-projects deletion-threshold chunk-size] {:fetch-size 5})
       (reduce (fn [total {:keys [id team-id deleted-at]}]
                 (l/trc :obj "project"
                        :id (str id)

@@ -124,15 +124,15 @@
         f.project_id
    FROM file AS f
   WHERE f.deleted_at IS NOT NULL
     AND f.deleted_at <= ?
     AND f.deleted_at < now() + ?::interval
   ORDER BY f.deleted_at ASC
   LIMIT ?
     FOR UPDATE
    SKIP LOCKED")

(defn- delete-files!
  [{:keys [::db/conn ::timestamp ::chunk-size] :as cfg}]
  (->> (db/plan conn [sql:get-files timestamp chunk-size] {:fetch-size 5})
  [{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg}]
  (->> (db/plan conn [sql:get-files deletion-threshold chunk-size] {:fetch-size 5})
       (reduce (fn [total {:keys [id deleted-at project-id] :as file}]
                 (l/trc :obj "file"
                        :id (str id)

@@ -148,15 +148,15 @@
  "SELECT file_id, revn, media_id, deleted_at
     FROM file_thumbnail
    WHERE deleted_at IS NOT NULL
      AND deleted_at <= ?
      AND deleted_at < now() + ?::interval
    ORDER BY deleted_at ASC
    LIMIT ?
      FOR UPDATE
     SKIP LOCKED")

(defn delete-file-thumbnails!
  [{:keys [::db/conn ::timestamp ::chunk-size ::sto/storage] :as cfg}]
  (->> (db/plan conn [sql:get-file-thumbnails timestamp chunk-size] {:fetch-size 5})
  [{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
  (->> (db/plan conn [sql:get-file-thumbnails deletion-threshold chunk-size] {:fetch-size 5})
       (reduce (fn [total {:keys [file-id revn media-id deleted-at]}]
                 (l/trc :obj "file-thumbnail"
                        :file-id (str file-id)

@@ -175,15 +175,15 @@
  "SELECT file_id, object_id, media_id, deleted_at
     FROM file_tagged_object_thumbnail
    WHERE deleted_at IS NOT NULL
      AND deleted_at <= ?
      AND deleted_at < now() + ?::interval
    ORDER BY deleted_at ASC
    LIMIT ?
      FOR UPDATE
     SKIP LOCKED")

(defn delete-file-object-thumbnails!
  [{:keys [::db/conn ::timestamp ::chunk-size ::sto/storage] :as cfg}]
  (->> (db/plan conn [sql:get-file-object-thumbnails timestamp chunk-size] {:fetch-size 5})
  [{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
  (->> (db/plan conn [sql:get-file-object-thumbnails deletion-threshold chunk-size] {:fetch-size 5})
       (reduce (fn [total {:keys [file-id object-id media-id deleted-at]}]
                 (l/trc :obj "file-object-thumbnail"
                        :file-id (str file-id)

@@ -203,15 +203,15 @@
  "SELECT id, file_id, media_id, thumbnail_id, deleted_at
     FROM file_media_object
    WHERE deleted_at IS NOT NULL
      AND deleted_at <= ?
      AND deleted_at < now() + ?::interval
    ORDER BY deleted_at ASC
    LIMIT ?
      FOR UPDATE
     SKIP LOCKED")

(defn- delete-file-media-objects!
  [{:keys [::db/conn ::timestamp ::chunk-size ::sto/storage] :as cfg}]
  (->> (db/plan conn [sql:get-file-media-objects timestamp chunk-size] {:fetch-size 5})
  [{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
  (->> (db/plan conn [sql:get-file-media-objects deletion-threshold chunk-size] {:fetch-size 5})
       (reduce (fn [total {:keys [id file-id deleted-at] :as fmo}]
                 (l/trc :obj "file-media-object"
                        :id (str id)

@@ -231,15 +231,16 @@
  "SELECT file_id, id, type, deleted_at, metadata, backend
     FROM file_data
    WHERE deleted_at IS NOT NULL
      AND deleted_at <= ?
      AND deleted_at < now() + ?::interval
    ORDER BY deleted_at ASC
    LIMIT ?
      FOR UPDATE
     SKIP LOCKED")

(defn- delete-file-data!
  [{:keys [::db/conn ::timestamp ::chunk-size] :as cfg}]
  (->> (db/plan conn [sql:get-file-data timestamp chunk-size] {:fetch-size 5})
  [{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg}]

  (->> (db/plan conn [sql:get-file-data deletion-threshold chunk-size] {:fetch-size 5})
       (reduce (fn [total {:keys [file-id id type deleted-at metadata backend]}]

                 (some->> metadata

@@ -265,15 +266,15 @@
  "SELECT id, file_id, deleted_at
     FROM file_change
    WHERE deleted_at IS NOT NULL
      AND deleted_at <= ?
      AND deleted_at < now() + ?::interval
    ORDER BY deleted_at ASC
    LIMIT ?
      FOR UPDATE
     SKIP LOCKED")

(defn- delete-file-changes!
  [{:keys [::db/conn ::timestamp ::chunk-size] :as cfg}]
  (->> (db/plan conn [sql:get-file-change timestamp chunk-size] {:fetch-size 5})
  [{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg}]
  (->> (db/plan conn [sql:get-file-change deletion-threshold chunk-size] {:fetch-size 5})
       (reduce (fn [total {:keys [id file-id deleted-at] :as xlog}]
                 (l/trc :obj "file-change"
                        :id (str id)

@@ -321,8 +322,9 @@

(defmethod ig/init-key ::handler
  [_ cfg]
  (fn [_]
    (let [cfg (assoc cfg ::timestamp (ct/now))]
  (fn [{:keys [props] :as task}]
    (let [threshold (ct/duration (get props :deletion-threshold 0))
          cfg       (assoc cfg ::deletion-threshold (db/interval threshold))]
      (loop [procs (map deref deletion-proc-vars)
             total 0]
        (if-let [proc-fn (first procs)]

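;; A minimal sketch of the chunked-deletion pattern used by the
;; `delete-*!` functions above, reusing the `db/plan` helper shown in
;; this diff; `delete-row!` is a hypothetical per-row action. The
;; FOR UPDATE SKIP LOCKED clause in the queries lets concurrent workers
;; claim disjoint batches without blocking on each other's row locks.
(defn- delete-chunk!
  [{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg} sql]
  (->> (db/plan conn [sql deletion-threshold chunk-size] {:fetch-size 5})
       (reduce (fn [total row]
                 (delete-row! cfg row)   ; hypothetical per-row action
                 (inc total))
               0)))
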
@@ -15,25 +15,19 @@
   [buddy.sign.jwe :as jwe]))

(defn generate
  ([cfg claims] (generate cfg claims nil))
  ([{:keys [::setup/props] :as cfg} claims header]
   (assert (contains? props :tokens-key) "expect props to have tokens-key")
  [{:keys [::setup/props] :as cfg} claims]
  (assert (contains? cfg ::setup/props))

   (let [tokens-key
         (get props :tokens-key)
  (let [tokens-key
        (get props :tokens-key)

         payload
         (-> claims
             (update :iat (fn [v] (or v (ct/now))))
             (d/without-nils)
             (t/encode))]
        payload
        (-> claims
            (update :iat (fn [v] (or v (ct/now))))
            (d/without-nils)
            (t/encode))]

     (jwe/encrypt payload tokens-key {:alg :a256kw :enc :a256gcm :header header}))))

(defn decode-header
  [token]
  (ex/ignoring
   (jwe/decode-header token)))
    (jwe/encrypt payload tokens-key {:alg :a256kw :enc :a256gcm})))

(defn decode
  [{:keys [::setup/props] :as cfg} token]

@@ -27,7 +27,7 @@
  (throw (IllegalArgumentException. "Missing arguments on `defmethod` macro.")))

(let [mdata (assoc mdata
             ::docstring (some-> docs str/unindent)
             ::docstring (some-> docs str/<<-)
             ::spec sname
             ::name (name sname))

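;; A hedged sketch of round-tripping a token with buddy-sign JWE, the
;; library used by `generate`/`decode` above; `tokens-key` is assumed to
;; be a 256-bit key suitable for :a256kw key wrapping.
(require '[buddy.sign.jwe :as jwe])

(let [payload (.getBytes "hello" "UTF-8")
      token   (jwe/encrypt payload tokens-key {:alg :a256kw :enc :a256gcm})]
  ;; decrypt returns the original payload bytes
  (String. ^bytes (jwe/decrypt token tokens-key {:alg :a256kw :enc :a256gcm}) "UTF-8"))
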
@@ -9,7 +9,7 @@
   [app.common.exceptions :as ex]
   [selmer.parser :as sp]))

;; (sp/cache-off!)
(sp/cache-off!)

(defn render
  [path context]

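;; A sketch of what the cache toggle above changes, assuming the selmer
;; API: with the cache off, every render re-parses the template (handy
;; while editing templates, costly in production). The template path
;; below is hypothetical.
(require '[selmer.parser :as sp])
(sp/cache-off!)
(sp/render-file "templates/example.html" {:name "Ada"})
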
@@ -30,7 +30,6 @@
   [app.rpc.commands.files :as files]
   [app.rpc.commands.files-create :as files.create]
   [app.rpc.commands.files-update :as files.update]
   [app.rpc.commands.projects :as projects]
   [app.rpc.commands.teams :as teams]
   [app.rpc.helpers :as rph]
   [app.util.blob :as blob]

@@ -186,17 +185,15 @@
(defn create-project*
  ([i params] (create-project* *system* i params))
  ([system i {:keys [profile-id team-id] :as params}]
   (us/assert uuid? profile-id)
   (us/assert uuid? team-id)

   (assert (uuid? profile-id))
   (assert (uuid? team-id))
   (let [timestamp (ct/now)]
     (db/run! system
              (fn [cfg]
                (->> (merge {:id (mk-uuid "project" i)
                             :name (str "project" i)}
                            params
                            {::rpc/request-at timestamp})
                     (#'projects/create-project cfg)))))))
   (db/run! system
            (fn [{:keys [::db/conn]}]
              (->> (merge {:id (mk-uuid "project" i)
                           :name (str "project" i)}
                          params)
                   (#'teams/create-project conn))))))

(defn create-file*
  ([i params]

@@ -9,7 +9,6 @@
   [app.common.features :as cfeat]
   [app.common.pprint :as pp]
   [app.common.thumbnails :as thc]
   [app.common.time :as ct]
   [app.common.types.shape :as cts]
   [app.common.uuid :as uuid]
   [app.config :as cf]
@@ -17,7 +16,6 @@
   [app.db.sql :as sql]
   [app.http :as http]
   [app.rpc :as-alias rpc]
   [app.setup.clock :as clock]
   [app.storage :as sto]
   [backend-tests.helpers :as th]
   [clojure.test :as t]
@@ -134,10 +132,9 @@
    ;; this will run the pending tasks triggered by deleting the user snapshot
    (th/run-pending-tasks!)

    (binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
      (let [res (th/run-task! :objects-gc {})]
        ;; delete 2 snapshots and 2 file data entries
        (t/is (= 4 (:processed res)))))))))
    (let [res (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
      ;; delete 2 snapshots and 2 file data entries
      (t/is (= 4 (:processed res))))))))

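;; The tests above pin time with a fixed clock instead of sleeping; a
;; minimal sketch of that pattern, assuming the `app.setup.clock/fixed`
;; helper and the dynamic `ct/*clock*` var shown in this diff:
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
  ;; every (ct/now) call inside this scope observes the shifted clock,
  ;; so deletion thresholds computed from "now" are already in the past
  (th/run-task! :objects-gc {}))
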
(t/deftest snapshots-locking
  (let [profile-1 (th/create-profile* 1 {:is-active true})

@@ -313,7 +313,7 @@
    ;; freeze because of the deduplication (we have uploaded the same
    ;; files twice).

    (let [res (th/run-task! :storage-gc-touched {})]
    (let [res (th/run-task! :storage-gc-touched {:min-age 0})]
      (t/is (= 2 (:freeze res)))
      (t/is (= 0 (:delete res))))

@@ -372,14 +372,14 @@
    (th/db-exec! ["update file_change set deleted_at = now() where file_id = ? and label is not null" (:id file)])
    (th/db-exec! ["update file set has_media_trimmed = false where id = ?" (:id file)])

    (let [res (th/run-task! :objects-gc {})]
    (let [res (th/run-task! :objects-gc {:deletion-threshold 0})]
      ;; this will remove the file change and file data entries for two snapshots
      (t/is (= 4 (:processed res))))

    ;; Rerun the file-gc and objects-gc
    (t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))

    (let [res (th/run-task! :objects-gc {})]
    (let [res (th/run-task! :objects-gc {:deletion-threshold 0})]
      ;; this will remove the file media objects marked as deleted
      ;; on the previous file-gc
      (t/is (= 2 (:processed res))))
@@ -387,7 +387,7 @@
    ;; Now that file-gc has deleted the file-media-object usage,
    ;; let's execute the touched-gc task; we should see that two of
    ;; them are marked to be deleted
    (let [res (th/run-task! :storage-gc-touched {})]
    (let [res (th/run-task! :storage-gc-touched {:min-age 0})]
      (t/is (= 0 (:freeze res)))
      (t/is (= 2 (:delete res))))

@@ -572,7 +572,7 @@
    ;; Now that file-gc has deleted the file-media-object usage,
    ;; let's execute the touched-gc task; we should see that two of
    ;; them are marked to be deleted.
    (let [res (th/run-task! :storage-gc-touched {})]
    (let [res (th/run-task! :storage-gc-touched {:min-age 0})]
      (t/is (= 0 (:freeze res)))
      (t/is (= 2 (:delete res))))

@@ -665,7 +665,7 @@
    ;; because of the deduplication (we have uploaded the same files
    ;; twice).

    (let [res (th/run-task! :storage-gc-touched {})]
    (let [res (th/run-task! :storage-gc-touched {:min-age 0})]
      (t/is (= 1 (:freeze res)))
      (t/is (= 0 (:delete res))))

@@ -715,7 +715,7 @@

    ;; Now that objects-gc has deleted the object thumbnail, let's
    ;; execute the touched-gc task
    (let [res (th/run-task! "storage-gc-touched" {})]
    (let [res (th/run-task! "storage-gc-touched" {:min-age 0})]
      (t/is (= 1 (:freeze res))))

    ;; check file media objects
@@ -750,7 +750,7 @@

    ;; Now that file-gc has deleted the object thumbnail, let's
    ;; execute the touched-gc task
    (let [res (th/run-task! :storage-gc-touched {})]
    (let [res (th/run-task! :storage-gc-touched {:min-age 0})]
      (t/is (= 1 (:delete res))))

    ;; check file media objects
@@ -922,9 +922,8 @@
    (t/is (= 0 (:processed result))))

    ;; run permanent deletion
    (binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
      (let [result (th/run-task! :objects-gc {})]
        (t/is (= 3 (:processed result)))))
    (let [result (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
      (t/is (= 3 (:processed result))))

    ;; query the list of file libraries after hard deletion
    (let [data {::th/type :get-file-libraries
@@ -1135,7 +1134,7 @@
    (th/sleep 300)

    ;; run the task
    (t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
    (t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))

    ;; check that object thumbnails are still here
    (let [rows (th/db-query :file-tagged-object-thumbnail {:file-id (:id file)})]
@@ -1164,7 +1163,7 @@
      (t/is (= 2 (count rows))))

    ;; run the task again
    (t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
    (t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))

    ;; check that we have all object thumbnails
    (let [rows (th/db-query :file-tagged-object-thumbnail {:file-id (:id file)})]
@@ -1227,7 +1226,7 @@
      (t/is (= 2 (count rows)))))

    (t/testing "gc task"
      (t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
      (t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))

      (let [rows (th/db-query :file-thumbnail {:file-id (:id file)})]
        (t/is (= 2 (count rows)))
@@ -1274,7 +1273,7 @@
    ;; The FileGC task will schedule an inner task
    (th/run-pending-tasks!)

    (let [res (th/run-task! :storage-gc-touched {})]
    (let [res (th/run-task! :storage-gc-touched {:min-age 0})]
      (t/is (= 2 (:freeze res)))
      (t/is (= 0 (:delete res))))

@@ -1368,7 +1367,7 @@

    ;; we ensure that objects-gc has passed and marked two storage
    ;; objects to delete
    (let [res (th/run-task! :storage-gc-touched {})]
    (let [res (th/run-task! :storage-gc-touched {:min-age 0})]
      (t/is (= 0 (:freeze res)))
      (t/is (= 2 (:delete res))))

@@ -1490,7 +1489,7 @@
      (t/is (some? (not-empty (:objects component))))))

    ;; Re-run the file-gc task
    (t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
    (t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
    (let [row (th/db-get :file {:id (:id file)})]
      (t/is (true? (:has-media-trimmed row))))

@@ -1520,7 +1519,7 @@

    ;; Now that we have deleted the usage of the component, if we run
    ;; file-gc that component should be deleted
    (t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
    (t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))

    ;; Check that component is properly removed
    (let [data {::th/type :get-file
@@ -1611,8 +1610,8 @@
                 :component-id c-id})}])

    ;; Run the file-gc on file and library
    (t/is (true? (th/run-task! :file-gc {:file-id (:id file-1)})))
    (t/is (true? (th/run-task! :file-gc {:file-id (:id file-2)})))
    (t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file-1)})))
    (t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file-2)})))

    ;; Check that component exists
    (let [data {::th/type :get-file
@@ -1685,7 +1684,7 @@

    ;; Now that we have deleted the usage of the component, if we run
    ;; file-gc that component should be deleted
    (t/is (true? (th/run-task! :file-gc {:file-id (:id file-1)})))
    (t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file-1)})))

    ;; Check that component is properly removed
    (let [data {::th/type :get-file
@@ -1834,8 +1833,8 @@
      (t/is (not= (:id fill) (:id fmedia)))))

    ;; Run the file-gc on file and library
    (t/is (true? (th/run-task! :file-gc {:file-id (:id file-1)})))
    (t/is (true? (th/run-task! :file-gc {:file-id (:id file-2)})))
    (t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file-1)})))
    (t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file-2)})))

    ;; Now proceed to delete file and absorb it
    (let [data {::th/type :delete-file
@@ -1926,7 +1925,7 @@
    (let [row (th/db-exec-one! ["select * from file where id = ?" file-id])]
      (t/is (= (:deleted-at row) now)))))))

(t/deftest restore-deleted-files
(t/deftest deleted-files-restore
  (let [prof    (th/create-profile* 1 {:is-active true})
        team-id (:default-team-id prof)
        proj-id (:default-project-id prof)
@@ -1989,78 +1988,3 @@

    (let [row (th/db-exec-one! ["select * from file where id = ?" file-id])]
      (t/is (nil? (:deleted-at row)))))))

(t/deftest restore-deleted-files-and-projets
  (let [profile (th/create-profile* 1 {:is-active true})
        team-id (:default-team-id profile)
        now     (ct/inst "2025-10-31T00:00:00Z")]

    (binding [ct/*clock* (clock/fixed now)]
      (let [project (th/create-project* 1 {:profile-id (:id profile)
                                           :team-id team-id})
            file    (th/create-file* 1 {:profile-id (:id profile)
                                        :project-id (:id project)})

            data    {::th/type :delete-project
                     :id (:id project)
                     ::rpc/profile-id (:id profile)}
            out     (th/command! data)]

        ;; (th/print-result! out)
        (t/is (nil? (:error out)))
        (t/is (nil? (:result out)))

        (th/run-pending-tasks!)

        ;; get deleted files
        (let [data {::th/type :get-team-deleted-files
                    ::rpc/profile-id (:id profile)
                    :team-id team-id}
              out  (th/command! data)]
          ;; (th/print-result! out)
          (t/is (nil? (:error out)))
          (let [[row1 :as result] (:result out)]
            (t/is (= 1 (count result)))
            (t/is (= (:will-be-deleted-at row1) #penpot/inst "2025-11-07T00:00:00Z"))
            (t/is (= (:created-at row1) #penpot/inst "2025-10-31T00:00:00Z"))
            (t/is (= (:modified-at row1) #penpot/inst "2025-10-31T00:00:00Z"))))

        ;; Check if project is deleted
        (let [[row1 :as rows] (th/db-query :project {:id (:id project)})]
          ;; (pp/pprint rows)
          (t/is (= 1 (count rows)))
          (t/is (= (:deleted-at row1) #penpot/inst "2025-11-07T00:00:00Z"))
          (t/is (= (:created-at row1) #penpot/inst "2025-10-31T00:00:00Z"))
          (t/is (= (:modified-at row1) #penpot/inst "2025-10-31T00:00:00Z")))

        ;; Restore files
        (let [data {::th/type :restore-deleted-team-files
                    ::rpc/profile-id (:id profile)
                    :team-id team-id
                    :ids #{(:id file)}}
              out  (th/command! data)]
          ;; (th/print-result! out)
          (t/is (nil? (:error out)))
          (let [result (:result out)]
            (t/is (fn? result))
            (let [events (th/consume-sse result)]
              ;; (pp/pprint events)
              (t/is (= 2 (count events)))
              (t/is (= :end (first (last events))))
              (t/is (= (:ids data) (last (last events)))))))

        (let [[row1 :as rows] (th/db-query :file {:project-id (:id project)})]
          ;; (pp/pprint rows)
          (t/is (= 1 (count rows)))
          (t/is (= (:created-at row1) #penpot/inst "2025-10-31T00:00:00Z"))
          (t/is (nil? (:deleted-at row1))))

        ;; Check if project is restored
        (let [[row1 :as rows] (th/db-query :project {:id (:id project)})]
          ;; (pp/pprint rows)
          (t/is (= 1 (count rows)))
          (t/is (= (:created-at row1) #penpot/inst "2025-10-31T00:00:00Z"))
          (t/is (nil? (:deleted-at row1))))))))

@@ -8,14 +8,12 @@
  (:require
   [app.common.pprint :as pp]
   [app.common.thumbnails :as thc]
   [app.common.time :as ct]
   [app.common.types.shape :as cts]
   [app.common.uuid :as uuid]
   [app.config :as cf]
   [app.db :as db]
   [app.rpc :as-alias rpc]
   [app.rpc.commands.auth :as cauth]
   [app.setup.clock :as clock]
   [app.storage :as sto]
   [app.tokens :as tokens]
   [backend-tests.helpers :as th]
@@ -85,8 +83,7 @@
    (t/is (map? (:result out))))

    ;; run the task again
    (let [res (binding [ct/*clock* (clock/fixed (ct/in-future {:minutes 31}))]
                (th/run-task! "storage-gc-touched" {}))]
    (let [res (th/run-task! "storage-gc-touched" {:min-age 0})]
      (t/is (= 2 (:freeze res))))

    (let [[row1 row2 :as rows] (th/db-query :file-tagged-object-thumbnail
@@ -117,9 +114,9 @@

    ;; Run the File GC task that should remove unused file object
    ;; thumbnails
    (th/run-task! :file-gc {:file-id (:id file)})
    (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})

    (let [result (th/run-task! :objects-gc {})]
    (let [result (th/run-task! :objects-gc {:min-age 0})]
      (t/is (= 3 (:processed result))))

    ;; check if the thumbnail row related to row2 still exists
@@ -136,8 +133,7 @@
    (t/is (some? (sto/get-object storage (:media-id row2))))

    ;; run the task again
    (let [res (binding [ct/*clock* (clock/fixed (ct/in-future {:minutes 31}))]
                (th/run-task! :storage-gc-touched {}))]
    (let [res (th/run-task! :storage-gc-touched {:min-age 0})]
      (t/is (= 1 (:delete res)))
      (t/is (= 0 (:freeze res))))

@@ -147,9 +143,8 @@

    ;; Run the storage gc deleted task; it should permanently delete
    ;; all storage objects related to the deleted thumbnails
    (binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
      (let [res (th/run-task! :storage-gc-deleted {})]
        (t/is (= 1 (:deleted res)))))
    (let [result (th/run-task! :storage-gc-deleted {:min-age 0})]
      (t/is (= 1 (:deleted result))))

    (t/is (nil? (sto/get-object storage (:media-id row1))))
    (t/is (some? (sto/get-object storage (:media-id row2))))
@@ -221,9 +216,9 @@

    ;; Run the File GC task that should remove unused file object
    ;; thumbnails
    (t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
    (t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))

    (let [result (th/run-task! :objects-gc {})]
    (let [result (th/run-task! :objects-gc {:min-age 0})]
      (t/is (= 2 (:processed result))))

    ;; check if the thumbnail row related to row1 still exists
@@ -235,7 +230,7 @@
      (t/is (= (:object-id data1) (:object-id row)))
      (t/is (uuid? (:media-id row1))))

    (let [result (th/run-task! :storage-gc-touched {})]
    (let [result (th/run-task! :storage-gc-touched {:min-age 0})]
      (t/is (= 1 (:delete result))))

    ;; Check if storage objects still exist after file-gc
@@ -247,9 +242,8 @@

    ;; Run the storage gc deleted task; it should permanently delete
    ;; all storage objects related to the deleted thumbnails
    (binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
      (let [result (th/run-task! :storage-gc-deleted {})]
        (t/is (= 1 (:deleted result)))))
    (let [result (th/run-task! :storage-gc-deleted {:min-age 0})]
      (t/is (= 1 (:deleted result))))

    (t/is (some? (sto/get-object storage (:media-id row2)))))))

@@ -6,13 +6,11 @@

(ns backend-tests.rpc-font-test
  (:require
   [app.common.time :as ct]
   [app.common.uuid :as uuid]
   [app.config :as cf]
   [app.db :as db]
   [app.http :as http]
   [app.rpc :as-alias rpc]
   [app.setup.clock :as clock]
   [app.storage :as sto]
   [backend-tests.helpers :as th]
   [clojure.test :as t]
@@ -131,7 +129,7 @@
    ;; (th/print-result! out)
    (t/is (nil? (:error out))))

    (let [res (th/run-task! :storage-gc-touched {})]
    (let [res (th/run-task! :storage-gc-touched {:min-age 0})]
      (t/is (= 6 (:freeze res))))

    (let [params {::th/type :delete-font
@@ -143,17 +141,16 @@
      (t/is (nil? (:error out)))
      (t/is (nil? (:result out))))

    (let [res (th/run-task! :storage-gc-touched {})]
    (let [res (th/run-task! :storage-gc-touched {:min-age 0})]
      (t/is (= 0 (:freeze res)))
      (t/is (= 0 (:delete res))))

    (binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
      (let [res (th/run-task! :objects-gc {})]
        (t/is (= 2 (:processed res))))
    (let [res (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
      (t/is (= 2 (:processed res))))

      (let [res (th/run-task! :storage-gc-touched {})]
        (t/is (= 0 (:freeze res)))
        (t/is (= 6 (:delete res)))))))
    (let [res (th/run-task! :storage-gc-touched {:min-age 0})]
      (t/is (= 0 (:freeze res)))
      (t/is (= 6 (:delete res))))))

(t/deftest font-deletion-2
  (let [prof (th/create-profile* 1 {:is-active true})
@@ -192,7 +189,7 @@
    ;; (th/print-result! out)
    (t/is (nil? (:error out))))

    (let [res (th/run-task! :storage-gc-touched {})]
    (let [res (th/run-task! :storage-gc-touched {:min-age 0})]
      (t/is (= 6 (:freeze res))))

    (let [params {::th/type :delete-font
@@ -204,17 +201,16 @@
      (t/is (nil? (:error out)))
      (t/is (nil? (:result out))))

    (let [res (th/run-task! :storage-gc-touched {})]
    (let [res (th/run-task! :storage-gc-touched {:min-age 0})]
      (t/is (= 0 (:freeze res)))
      (t/is (= 0 (:delete res))))

    (binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
      (let [res (th/run-task! :objects-gc {})]
        (t/is (= 1 (:processed res))))
    (let [res (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
      (t/is (= 1 (:processed res))))

      (let [res (th/run-task! :storage-gc-touched {})]
        (t/is (= 0 (:freeze res)))
        (t/is (= 3 (:delete res)))))))
    (let [res (th/run-task! :storage-gc-touched {:min-age 0})]
      (t/is (= 0 (:freeze res)))
      (t/is (= 3 (:delete res))))))

(t/deftest font-deletion-3
  (let [prof (th/create-profile* 1 {:is-active true})
@@ -252,7 +248,7 @@
    (t/is (nil? (:error out1)))
    (t/is (nil? (:error out2)))

    (let [res (th/run-task! :storage-gc-touched {})]
    (let [res (th/run-task! :storage-gc-touched {:min-age 0})]
      (t/is (= 6 (:freeze res))))

    (let [params {::th/type :delete-font-variant
@@ -264,14 +260,13 @@
      (t/is (nil? (:error out)))
      (t/is (nil? (:result out))))

    (let [res (th/run-task! :storage-gc-touched {})]
    (let [res (th/run-task! :storage-gc-touched {:min-age 0})]
      (t/is (= 0 (:freeze res)))
      (t/is (= 0 (:delete res))))

    (binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
      (let [res (th/run-task! :objects-gc {})]
        (t/is (= 1 (:processed res))))
    (let [res (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
      (t/is (= 1 (:processed res))))

      (let [res (th/run-task! :storage-gc-touched {})]
        (t/is (= 0 (:freeze res)))
        (t/is (= 3 (:delete res)))))))
    (let [res (th/run-task! :storage-gc-touched {:min-age 0})]
      (t/is (= 0 (:freeze res)))
      (t/is (= 3 (:delete res))))))

@@ -6,13 +6,11 @@

(ns backend-tests.rpc-project-test
  (:require
   [app.common.time :as ct]
   [app.common.uuid :as uuid]
   [app.config :as cf]
   [app.db :as db]
   [app.http :as http]
   [app.rpc :as-alias rpc]
   [app.setup.clock :as clock]
   [backend-tests.helpers :as th]
   [clojure.test :as t]))

@@ -228,9 +226,8 @@
    (t/is (= 0 (count result)))))

    ;; run permanent deletion
    (binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
      (let [result (th/run-task! :objects-gc {})]
        (t/is (= 1 (:processed result)))))
    (let [result (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
      (t/is (= 1 (:processed result))))

    ;; query the list of files after hard deletion
    (let [data {::th/type :get-project-files

@@ -13,7 +13,6 @@
   [app.db :as db]
   [app.http :as http]
   [app.rpc :as-alias rpc]
   [app.setup.clock :as clock]
   [app.storage :as sto]
   [app.tokens :as tokens]
   [backend-tests.helpers :as th]
@@ -526,9 +525,8 @@
    (t/is (= :not-found (:type edata)))))

    ;; run permanent deletion
    (binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
      (let [result (th/run-task! :objects-gc {})]
        (t/is (= 2 (:processed result)))))
    (let [result (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
      (t/is (= 2 (:processed result))))

    ;; query the list of projects after hard deletion
    (let [data {::th/type :get-projects
@@ -583,9 +581,8 @@
      (t/is (= 1 (count rows)))
      (t/is (ct/inst? (:deleted-at (first rows)))))

    (binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
      (let [result (th/run-task! :objects-gc {})]
        (t/is (= 7 (:processed result)))))))
    (let [result (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
      (t/is (= 7 (:processed result))))))

(t/deftest create-team-access-request
  (with-mocks [mock {:target 'app.email/send! :return nil}]

@@ -11,7 +11,6 @@
   [app.common.uuid :as uuid]
   [app.db :as db]
   [app.rpc :as-alias rpc]
   [app.setup.clock :as clock]
   [app.storage :as sto]
   [backend-tests.helpers :as th]
   [clojure.test :as t]
@@ -54,13 +53,19 @@
                     (configure-storage-backend))
        content (sto/content "content")
        object  (sto/put-object! storage {::sto/content content
                                          ::sto/expired-at (ct/in-future {:hours 1})
                                          ::sto/expired-at (ct/in-future {:seconds 1})
                                          :content-type "text/plain"})]

    (t/is (sto/object? object))
    (t/is (ct/inst? (:expired-at object)))
    (t/is (ct/is-after? (:expired-at object) (ct/now)))
    (t/is (nil? (sto/get-object storage (:id object))))))
    (t/is (= object (sto/get-object storage (:id object))))

    (th/sleep 1000)
    (t/is (nil? (sto/get-object storage (:id object))))
    (t/is (nil? (sto/get-object-data storage object)))
    (t/is (nil? (sto/get-object-url storage object)))
    (t/is (nil? (sto/get-object-path storage object)))))

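;; A minimal sketch of the expiration semantics exercised above, assuming
;; the `sto/put-object!`/`sto/get-object` API from this diff: an object
;; whose ::sto/expired-at lies in the past is already invisible to reads,
;; even before the storage-gc-deleted task physically removes it.
(let [object (sto/put-object! storage {::sto/content (sto/content "x")
                                       ::sto/expired-at (ct/in-past {:hours 1})
                                       :content-type "text/plain"})]
  (t/is (nil? (sto/get-object storage (:id object)))))
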
(t/deftest put-and-delete-object
  (let [storage (-> (:app.storage/storage th/*system*)
@@ -93,25 +98,20 @@
                                          ::sto/expired-at (ct/now)
                                          :content-type "text/plain"})
        object2 (sto/put-object! storage {::sto/content content2
                                          ::sto/expired-at (ct/in-future {:hours 2})
                                          ::sto/expired-at (ct/in-past {:hours 2})
                                          :content-type "text/plain"})
        object3 (sto/put-object! storage {::sto/content content3
                                          ::sto/expired-at (ct/in-future {:hours 1})
                                          ::sto/expired-at (ct/in-past {:hours 1})
                                          :content-type "text/plain"})]

    (binding [ct/*clock* (clock/fixed (ct/in-future {:minutes 0}))]
      (let [res (th/run-task! :storage-gc-deleted {})]
        (t/is (= 1 (:deleted res)))))

    (th/sleep 200)

    (let [res (th/run-task! :storage-gc-deleted {})]
      (t/is (= 1 (:deleted res))))

    (let [res (th/db-exec-one! ["select count(*) from storage_object;"])]
      (t/is (= 2 (:count res))))

    (binding [ct/*clock* (clock/fixed (ct/in-future {:minutes 61}))]
      (let [res (th/run-task! :storage-gc-deleted {})]
        (t/is (= 1 (:deleted res)))))

    (let [res (th/db-exec-one! ["select count(*) from storage_object;"])]
      (t/is (= 1 (:count res))))))
      (t/is (= 2 (:count res))))))

(t/deftest touched-gc-task-1
  (let [storage (-> (:app.storage/storage th/*system*)
@@ -158,7 +158,7 @@
                  {:id (:id result-1)})

    ;; run the objects gc task for permanent deletion
    (let [res (th/run-task! :objects-gc {})]
    (let [res (th/run-task! :objects-gc {:min-age 0})]
      (t/is (= 1 (:processed res))))

    ;; check that we still have all the storage objects
@@ -182,6 +182,7 @@
    (let [res (th/db-exec-one! ["select count(*) from storage_object where deleted_at is not null"])]
      (t/is (= 0 (:count res)))))))

(t/deftest touched-gc-task-2
  (let [storage (-> (:app.storage/storage th/*system*)
                    (configure-storage-backend))
@@ -242,12 +243,11 @@
                  {:id (:id result-2)})

    ;; run the objects gc task for permanent deletion
    (let [res (th/run-task! :objects-gc {})]
    (let [res (th/run-task! :objects-gc {:min-age 0})]
      (t/is (= 1 (:processed res))))

    ;; revert the touched state on all storage objects
    (th/db-exec-one! ["update storage_object set touched_at=?" (ct/now)])
    (th/db-exec-one! ["update storage_object set touched_at=now()"])

    ;; Run the task again
    (let [res (th/run-task! :storage-gc-touched {})]
@@ -293,10 +293,10 @@
        result-2 (:result out2)]

    ;; now we proceed to manually mark all storage objects touched
    (th/db-exec! ["update storage_object set touched_at=?" (ct/now)])
    (th/db-exec! ["update storage_object set touched_at=now()"])

    ;; run the touched gc task
    (let [res (th/run-task! :storage-gc-touched {})]
    (let [res (th/run-task! "storage-gc-touched" {:min-age 0})]
      (t/is (= 2 (:freeze res)))
      (t/is (= 0 (:delete res))))

@@ -305,48 +305,16 @@
      (t/is (= 2 (count rows)))))

    ;; now we proceed to manually delete all file_media_object rows
    (th/db-exec! ["update file_media_object set deleted_at = ?" (ct/now)])
    (th/db-exec! ["update file_media_object set deleted_at = now()"])

    (let [res (th/run-task! :objects-gc {})]
    (let [res (th/run-task! "objects-gc" {:min-age 0})]
      (t/is (= 2 (:processed res))))

    ;; run the touched gc task
    (let [res (th/run-task! :storage-gc-touched {})]
    (let [res (th/run-task! "storage-gc-touched" {:min-age 0})]
      (t/is (= 0 (:freeze res)))
      (t/is (= 2 (:delete res))))

    ;; check that we have no live objects left
    (let [rows (th/db-exec! ["select * from storage_object where deleted_at is null"])]
      (t/is (= 0 (count rows))))))

(t/deftest tempfile-bucket-test
  (let [storage  (-> (:app.storage/storage th/*system*)
                     (configure-storage-backend))
        content1 (sto/content "content1")
        now      (ct/now)

        object1  (sto/put-object! storage {::sto/content content1
                                           ::sto/touched-at (ct/plus now {:minutes 1})
                                           :bucket "tempfile"
                                           :content-type "text/plain"})]

    (binding [ct/*clock* (clock/fixed now)]
      (let [res (th/run-task! :storage-gc-touched {})]
        (t/is (= 0 (:freeze res)))
        (t/is (= 0 (:delete res)))))

    (binding [ct/*clock* (clock/fixed (ct/plus now {:minutes 1}))]
      (let [res (th/run-task! :storage-gc-touched {})]
        (t/is (= 0 (:freeze res)))
        (t/is (= 1 (:delete res)))))

    (binding [ct/*clock* (clock/fixed (ct/plus now {:hours 1}))]
      (let [res (th/run-task! :storage-gc-deleted {})]
        (t/is (= 0 (:deleted res)))))

    (binding [ct/*clock* (clock/fixed (ct/plus now {:hours 2}))]
      (let [res (th/run-task! :storage-gc-deleted {})]
        (t/is (= 0 (:deleted res)))))))

@@ -17,7 +17,7 @@
        org.slf4j/slf4j-api {:mvn/version "2.0.17"}
        pl.tkowalcz.tjahzi/log4j2-appender {:mvn/version "0.9.40"}

        selmer/selmer {:mvn/version "1.12.69"}
        selmer/selmer {:mvn/version "1.12.62"}
        criterium/criterium {:mvn/version "0.4.6"}

        metosin/jsonista {:mvn/version "0.3.13"}
@@ -48,8 +48,12 @@
        com.sun.mail/jakarta.mail {:mvn/version "2.0.2"}
        org.la4j/la4j {:mvn/version "0.6.0"}

        ;; exception printing
        fipp/fipp {:mvn/version "0.6.29"}

        me.flowthing/pp {:mvn/version "2024-11-13.77"}

        io.aviso/pretty {:mvn/version "1.4.4"}
        environ/environ {:mvn/version "1.2.0"}}
 :paths ["src" "vendor" "target/classes"]

@@ -1,7 +0,0 @@
#!/usr/bin/env bash

set -ex
corepack enable;
corepack install;
yarn install;
yarn run test;

@@ -9,10 +9,10 @@
  (:refer-clojure :exclude [get-in select-keys str with-open max])
  #?(:cljs (:require-macros [app.common.data.macros]))
  (:require
   #?(:clj [cljs.analyzer.api :as aapi])
   #?(:clj [clojure.core :as c]
      :cljs [cljs.core :as c])
   [app.common.data :as d]
   [cljs.analyzer.api :as aapi]
   [cuerdas.core :as str]))

(defmacro select-keys
@@ -44,43 +44,42 @@
  [& params]
  `(str/concat ~@params))

#?(:clj
   (defmacro export
     "A helper macro that allows reexport a var in a current namespace."
     [v]
     (if (boolean (:ns &env))
(defmacro export
  "A helper macro that allows reexport a var in a current namespace."
  [v]
  (if (boolean (:ns &env))

       ;; Code for ClojureScript
       (let [mdata    (aapi/resolve &env v)
             arglists (second (get-in mdata [:meta :arglists]))
             sym      (symbol (c/name v))
             andsym   (symbol "&")
             procarg  #(if (= % andsym) % (gensym "param"))]
         (if (pos? (count arglists))
           `(def
              ~(with-meta sym (:meta mdata))
              (fn ~@(for [args arglists]
                      (let [args (map procarg args)]
                        (if (some #(= andsym %) args)
                          (let [[sargs dargs] (split-with #(not= andsym %) args)]
                            `([~@sargs ~@dargs] (apply ~v ~@sargs ~@(rest dargs))))
                          `([~@args] (~v ~@args)))))))
           `(def ~(with-meta sym (:meta mdata)) ~v)))
    ;; Code for ClojureScript
    (let [mdata    (aapi/resolve &env v)
          arglists (second (get-in mdata [:meta :arglists]))
          sym      (symbol (c/name v))
          andsym   (symbol "&")
          procarg  #(if (= % andsym) % (gensym "param"))]
      (if (pos? (count arglists))
        `(def
           ~(with-meta sym (:meta mdata))
           (fn ~@(for [args arglists]
                   (let [args (map procarg args)]
                     (if (some #(= andsym %) args)
                       (let [[sargs dargs] (split-with #(not= andsym %) args)]
                         `([~@sargs ~@dargs] (apply ~v ~@sargs ~@(rest dargs))))
                       `([~@args] (~v ~@args)))))))
        `(def ~(with-meta sym (:meta mdata)) ~v)))

       ;; Code for Clojure
       (let [vr (resolve v)
             m  (meta vr)
             n  (:name m)
             n  (with-meta n
                  (cond-> {}
                    (:dynamic m) (assoc :dynamic true)
                    (:protocol m) (assoc :protocol (:protocol m))))]
         `(let [m# (meta ~vr)]
            (def ~n (deref ~vr))
            (alter-meta! (var ~n) merge (dissoc m# :name))
            ;; (when (:macro m#)
            ;;   (.setMacro (var ~n)))
            ~vr)))))
    ;; Code for Clojure
    (let [vr (resolve v)
          m  (meta vr)
          n  (:name m)
          n  (with-meta n
               (cond-> {}
                 (:dynamic m) (assoc :dynamic true)
                 (:protocol m) (assoc :protocol (:protocol m))))]
      `(let [m# (meta ~vr)]
         (def ~n (deref ~vr))
         (alter-meta! (var ~n) merge (dissoc m# :name))
         ;; (when (:macro m#)
         ;;   (.setMacro (var ~n)))
         ~vr))))

(defmacro fmt
  "String interpolation helper. Can only be used with strings known at

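;; A hedged usage sketch for the `export` macro above: it re-defines a var
;; from another namespace in the current one, preserving its metadata; the
;; consuming namespace here is hypothetical.
(ns my.app.api
  (:require [app.common.data :as d]
            [app.common.data.macros :as dm]))

(dm/export d/index-of)
;; my.app.api/index-of now delegates to app.common.data/index-of
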
@@ -14,7 +14,8 @@
   [app.common.schema :as sm]
   [clojure.core :as c]
   [clojure.spec.alpha :as s]
   [cuerdas.core :as str])
   [cuerdas.core :as str]
   [expound.alpha :as expound])
  #?(:clj
     (:import
      clojure.lang.IPersistentMap)))
@@ -109,6 +110,13 @@
       (contains? data :explain))
  (explain (:explain data) opts)

  (and (contains? data ::s/problems)
       (contains? data ::s/value)
       (contains? data ::s/spec))
  (binding [s/*explain-out* expound/printer]
    (with-out-str
      (s/explain-out (update data ::s/problems #(take (:length opts 10) %)))))

  (contains? data ::sm/explain)
  (sm/humanize-explain (::sm/explain data) opts)))

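;; A minimal, self-contained sketch of the expound-based rendering added
;; above: bind s/*explain-out* to expound's printer and capture the
;; human-readable report as a string.
(require '[clojure.spec.alpha :as s]
         '[expound.alpha :as expound])

(s/def ::age pos-int?)
(binding [s/*explain-out* expound/printer]
  (with-out-str
    (s/explain ::age -1)))
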
@@ -53,7 +53,6 @@
    "plugins/runtime"
    "tokens/numeric-input"
    "design-tokens/v1"
    "text-editor/v2-html-paste"
    "text-editor/v2"
    "render-wasm/v1"
    "variants/v1"})
@@ -76,7 +75,6 @@
(def frontend-only-features
  #{"styles/v2"
    "plugins/runtime"
    "text-editor/v2-html-paste"
    "text-editor/v2"
    "tokens/numeric-input"
    "render-wasm/v1"})
@@ -126,7 +124,6 @@
   :feature-plugins "plugins/runtime"
   :feature-design-tokens "design-tokens/v1"
   :feature-text-editor-v2 "text-editor/v2"
   :feature-text-editor-v2-html-paste "text-editor/v2-html-paste"
   :feature-render-wasm "render-wasm/v1"
   :feature-variants "variants/v1"
   :feature-token-input "tokens/numeric-input"

@@ -371,7 +371,7 @@
   [:set-tokens-lib
    [:map {:title "SetTokensLib"}
     [:type [:= :set-tokens-lib]]
     [:tokens-lib [:maybe ctob/schema:tokens-lib]]]]
     [:tokens-lib ctob/schema:tokens-lib]]]

   [:set-token
    [:map {:title "SetTokenChange"}
@@ -463,16 +463,35 @@

;; Changes Processing Impl

#_:clj-kondo/ignore
(defn- validate-shape
  [{:keys [id] :as shape} page-id]
  (when-not (cts/valid-shape? shape)
    (ex/raise :type :assertion
              :code :data-validation
              :hint (str "invalid shape found '" id "'")
              :page-id page-id
              :shape-id id
              ::sm/explain (cts/explain-shape shape))))
(defn validate-shapes!
  [data-old data-new items]
  (letfn [(validate-shape! [[page-id id]]
            (let [shape-old (dm/get-in data-old [:pages-index page-id :objects id])
                  shape-new (dm/get-in data-new [:pages-index page-id :objects id])]

              ;; If the object has changed or is new, verify it is correct
              (when (and (some? shape-new)
                         (not= shape-old shape-new))
                (when-not (and (cts/valid-shape? shape-new)
                               (cts/shape? shape-new))
                  (ex/raise :type :assertion
                            :code :data-validation
                            :hint (str "invalid shape found after applying changes on file "
                                       (:id data-new))
                            :file-id (:id data-new)
                            ::sm/explain (cts/explain-shape shape-new))))))]

    (->> (into #{} (map :page-id) items)
         (mapcat (fn [page-id]
                   (filter #(= page-id (:page-id %)) items)))
         (mapcat (fn [{:keys [type id page-id] :as item}]
                   (sequence
                    (map (partial vector page-id))
                    (case type
                      (:add-obj :mod-obj :del-obj) (cons id nil)
                      (:mov-objects :reg-objects) (:shapes item)
                      nil))))
         (run! validate-shape!))))

(defn- process-touched-change
  [data {:keys [id page-id component-id]}]
@@ -499,8 +518,14 @@
    (check-changes items))

  (binding [*touched-changes* (volatile! #{})]
    (let [result (reduce #(or (process-change %1 %2) %1) data items)]
      (reduce process-touched-change result @*touched-changes*)))))
    (let [result (reduce #(or (process-change %1 %2) %1) data items)
          result (reduce process-touched-change result @*touched-changes*)]
      ;; Validate the resulting shapes (only on the backend)
      ;;
      ;; TODO: (PERF) add changed-shapes tracking and only validate
      ;; the tracked changes instead of iterating over all shapes
      #?(:clj (validate-shapes! data result items))
      result))))

;; --- Comment Threads

@@ -588,10 +613,9 @@

(defmethod process-change :add-obj
  [data {:keys [id obj page-id component-id frame-id parent-id index ignore-touched]}]
  ;; NOTE: we only perform hard validation on backend
  #?(:clj (validate-shape obj page-id))

  (let [update-container #(ctst/add-shape id obj % frame-id parent-id index ignore-touched)]
  (let [update-container
        (fn [container]
          (ctst/add-shape id obj container frame-id parent-id index ignore-touched))]

    (when *state*
      (swap! *state* collect-shape-media-refs obj page-id))
@@ -614,9 +638,6 @@
              (when (and *state* page-id)
                (swap! *state* collect-shape-media-refs shape page-id))

              ;; NOTE: we only perform hard validation on backend
              #?(:clj (validate-shape shape page-id))

              (assoc objects id shape))

            objects))
@@ -671,6 +692,8 @@
      (d/update-in-when data [:pages-index page-id] fix-container)
      (d/update-in-when data [:components component-id] fix-container))))

;; FIXME: remove, seems like this method is already unused
;; reg-objects operation "regenerates" the geometry and selrect of the parent groups
(defmethod process-change :reg-objects
  [data {:keys [page-id component-id shapes]}]
  ;; FIXME: Improve performance
@@ -699,60 +722,48 @@

         (update-group [group objects]
           (let [lookup   (d/getf objects)
                 children (get group :shapes)
                 group    (cond
                            ;; If the group is empty we don't make any changes. Will be removed by a later process
                            (empty? children)
                            group
                 children (get group :shapes)]
             (cond
               ;; If the group is empty we don't make any changes. Will be removed by a later process
               (empty? children)
               group

                            (= :bool (:type group))
                            (path/update-bool-shape group objects)
               (= :bool (:type group))
               (path/update-bool-shape group objects)

                            (:masked-group group)
                            (->> (map lookup children)
                                 (set-mask-selrect group))
               (:masked-group group)
               (->> (map lookup children)
                    (set-mask-selrect group))

                            :else
                            (->> (map lookup children)
                                 (gsh/update-group-selrect group)))]
             #?(:clj (validate-shape group page-id))
             group))]
               :else
               (->> (map lookup children)
                    (gsh/update-group-selrect group)))))]

    (if page-id
      (d/update-in-when data [:pages-index page-id :objects] reg-objects)
      (d/update-in-when data [:components component-id :objects] reg-objects))))

(defmethod process-change :mov-objects
  ;; FIXME: ignore-touched is no longer used, so we can consider it deprecated
  [data {:keys [parent-id shapes index page-id component-id #_ignore-touched after-shape allow-altering-copies syncing]}]
  [data {:keys [parent-id shapes index page-id component-id ignore-touched after-shape allow-altering-copies syncing]}]
  (letfn [(calculate-invalid-targets [objects shape-id]
            (let [reduce-fn #(into %1 (calculate-invalid-targets objects %2))]
              (->> (get-in objects [shape-id :shapes])
                   (reduce reduce-fn #{shape-id}))))

          ;; Avoid placing a shape as a direct or indirect child of itself,
          ;; or inside its main component if it's in a copy,
          ;; or inside a copy, or from a copy
          (is-valid-move? [objects shape-id]
            (let [invalid-targets (calculate-invalid-targets objects shape-id)
                  shape           (get objects shape-id)]
              (and shape
                   (not (invalid-targets parent-id))
                   (not (cfh/components-nesting-loop? objects shape-id parent-id))
                   (or
                    ;; In some cases (like a component swap) it's allowed
                    ;; to change the structure of a copy
                    allow-altering-copies

                    ;; DEPRECATED, remove once v2.12 released
                    syncing

                    (and
                     ;; We don't want to change the structure of component copies
                     (not (ctk/in-component-copy? (get objects (:parent-id shape))))
                     ;; We need to check the origin and target frames
                     (not (ctk/in-component-copy? (get objects parent-id))))))))
                   (or allow-altering-copies ;; In some cases (like a component swap) it's allowed to change the structure of a copy
                       syncing ;; If we are syncing the changes of a main component, it's allowed to change the structure of a copy
                       (and
                        (not (ctk/in-component-copy? (get objects (:parent-id shape)))) ;; We don't want to change the structure of component copies
                        (not (ctk/in-component-copy? (get objects parent-id)))))))) ;; We need to check the origin and target frames

          (insert-items [prev-shapes index shapes]
            (let [prev-shapes (or prev-shapes [])]
@@ -761,13 +772,17 @@
                (cfh/append-at-the-end prev-shapes shapes))))

          (add-to-parent [parent index shapes]
            (update parent :shapes
                    (fn [parent-shapes]
                      (-> parent-shapes
                          (insert-items index shapes)
                          ;; We need to ensure that there is no `nil` in the shapes
                          ;; list after adding all the incoming shapes to the parent.
                          (d/vec-without-nils)))))
            (let [parent (-> parent
                             (update :shapes insert-items index shapes)
                             ;; We need to ensure that there is no `nil`
                             ;; in the shapes list after adding all the
                             ;; incoming shapes to the parent.
                             (update :shapes d/vec-without-nils))]
              (cond-> parent
                (and (:shape-ref parent)
                     (#{:group :frame} (:type parent))
                     (not ignore-touched))
                (dissoc :remote-synced))))

          (remove-from-old-parent [old-objects objects shape-id]
            (let [prev-parent-id (dm/get-in old-objects [shape-id :parent-id])]
@@ -775,63 +790,58 @@
              ;; the new destination target parent id.
              (if (= prev-parent-id parent-id)
                objects
                (d/update-in-when objects [prev-parent-id :shapes]
                                  (fn [shapes]
                                    (-> shapes
                                        (d/without-obj shape-id)
                                        (d/vec-without-nils)))))))
                (let [sid        shape-id
                      pid        prev-parent-id
                      obj        (get objects pid)
                      component? (and (:shape-ref obj)
                                      (= (:type obj) :group)
                                      (not ignore-touched))]
                  (-> objects
                      (d/update-in-when [pid :shapes] d/without-obj sid)
                      (d/update-in-when [pid :shapes] d/vec-without-nils)
                      (cond-> component? (d/update-when pid #(dissoc % :remote-synced))))))))

          (update-parent-id [objects id]
            (d/update-when objects id assoc :parent-id parent-id))
            (-> objects
                (d/update-when id assoc :parent-id parent-id)))

          ;; Updates the frame-id references that might be outdated
          (update-frame-id [frame-id objects id]
            (let [obj (some-> (get objects id)
                              (assoc :frame-id frame-id))]
          (assign-frame-id [frame-id objects id]
            (let [objects (d/update-when objects id assoc :frame-id frame-id)
                  obj     (get objects id)]
              (cond-> objects
                (some? obj)
                (assoc id obj)

                ;; If we are moving a frame, we do not need to update its
                ;; children because they already point correctly to the
                ;; frame we are currently moving
                (not (cfh/frame-shape? obj))
                (as-> $$ (reduce (partial update-frame-id frame-id) $$ (:shapes obj))))))

          (validate-shape [objects #_:clj-kondo/ignore shape-id]
            #?(:clj (when-let [shape (get objects shape-id)]
                      (validate-shape shape page-id)))
            objects)
                ;; If we are moving a frame, the parent frame is the root
                ;; and we do not need to update its children because they
                ;; already point correctly to the frame we are currently
                ;; moving
                (not= :frame (:type obj))
                (as-> $$ (reduce (partial assign-frame-id frame-id) $$ (:shapes obj))))))

          (move-objects [objects]
            (let [parent (get objects parent-id)]
              ;; Do not proceed with the move if the parent does not
              ;; exist; this can happen on a race condition when an
              ;; inflight move operation lands after the parent is deleted
              (if (and (seq shapes) (every? (partial is-valid-move? objects) shapes) parent)
                (let [index    (or (some-> (d/index-of (:shapes parent) after-shape) inc) index)
                      frame-id (if (cfh/frame-shape? parent)
                                 (:id parent)
                                 (:frame-id parent))]
                  (as-> objects $
                    ;; Add the new shapes to the parent object.
                    (d/update-when $ parent-id #(add-to-parent % index shapes))
            (let [valid?            (every? (partial is-valid-move? objects) shapes)
                  parent            (get objects parent-id)
                  after-shape-index (d/index-of (:shapes parent) after-shape)
                  index             (if (nil? after-shape-index) index (inc after-shape-index))
                  frame-id          (if (= :frame (:type parent))
                                      (:id parent)
                                      (:frame-id parent))]

                    ;; Update each individual shape link to the new parent
                    (reduce update-parent-id $ shapes)
              (if (and valid? (seq shapes))
                (as-> objects $
                  ;; Add the new shapes to the parent object.
                  (d/update-when $ parent-id #(add-to-parent % index shapes))

                    ;; Analyze the old parents and clear the old links
                    ;; only if the new parent is different from the old
                    ;; parent.
                    (reduce (partial remove-from-old-parent objects) $ shapes)
                  ;; Update each individual shape link to the new parent
                  (reduce update-parent-id $ shapes)

                    ;; Ensure that all shapes of the new parent have a
                    ;; correct link to the topside frame.
                    (reduce (partial update-frame-id frame-id) $ shapes)

                    ;; Perform validation of the affected shapes
                    (reduce validate-shape $ shapes)))
                  ;; Analyze the old parents and clear the old links
                  ;; only if the new parent is different from the old
                  ;; parent.
                  (reduce (partial remove-from-old-parent objects) $ shapes)

                  ;; Ensure that all shapes of the new parent have a
                  ;; correct link to the topside frame.
                  (reduce (partial assign-frame-id frame-id) $ shapes))
                objects)))]

    (if page-id

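;; The "insert after a given sibling" computation in `move-objects` above
;; reduces to this small, plain-data sketch: when `after-shape` is found
;; among the parent's children the target index is one past it, otherwise
;; the explicit fallback index is used.
(defn target-index
  [siblings after-shape fallback]
  (let [pos (.indexOf ^java.util.List siblings after-shape)]
    (if (neg? pos) fallback (inc pos))))

(target-index [:a :b :c] :b 0) ;; => 2
(target-index [:a :b :c] :x 0) ;; => 0
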
@@ -120,8 +120,11 @@
   :terms-and-privacy-checkbox
   ;; Only for development.
   :tiered-file-data-storage
   :token-units
   :token-base-font-size
   :token-color
   :token-typography-types
   :token-typography-composite
   :token-shadow
   :transit-readable-response
   :user-feedback
@@ -129,6 +132,7 @@
   :v2-migration
   :webhooks
   ;; TODO: deprecate this flag and consolidate the code
   :export-file-v3
   :render-wasm-dpr
   :hide-release-modal
   :subscriptions
@@ -168,8 +172,9 @@
   :enable-google-fonts-provider
   :enable-component-thumbnails
   :enable-render-wasm-dpr
   :enable-token-color
   :enable-inspect-styles
   :enable-token-units
   :enable-token-typography-types
   :enable-token-typography-composite
   :enable-feature-fdata-objects-map])

(defn parse

@@ -43,6 +43,8 @@
  "
  #?(:cljs (:require-macros [app.common.logging :as l]))
  (:require
   #?(:clj [clojure.edn :as edn]
      :cljs [cljs.reader :as edn])
   [app.common.data :as d]
   [app.common.exceptions :as ex]
   [app.common.pprint :as pp]

@@ -18,6 +18,7 @@
   [app.common.logic.shapes :as cls]
   [app.common.logic.variant-properties :as clvp]
   [app.common.path-names :as cpn]
   [app.common.spec :as us]
   [app.common.types.component :as ctk]
   [app.common.types.components-list :as ctkl]
   [app.common.types.container :as ctn]
@@ -34,7 +35,8 @@
   [app.common.types.typography :as cty]
   [app.common.types.variant :as ctv]
   [app.common.uuid :as uuid]
   [clojure.set :as set]))
   [clojure.set :as set]
   [clojure.spec.alpha :as s]))

;; Change this to :info :debug or :trace to debug this module, or :warn to reset to default
|
||||
(log/set-level! :warn)
|
||||
@@ -471,10 +473,10 @@
|
||||
If an asset id is given, only shapes linked to this particular asset will
|
||||
be synchronized."
|
||||
[changes file-id asset-type asset-id library-id libraries current-file-id]
|
||||
(assert (contains? #{:colors :components :typographies} asset-type))
|
||||
(assert (or (nil? asset-id) (uuid? asset-id)))
|
||||
(assert (uuid? file-id))
|
||||
(assert (uuid? library-id))
|
||||
(s/assert #{:colors :components :typographies} asset-type)
|
||||
(s/assert (s/nilable ::us/uuid) asset-id)
|
||||
(s/assert ::us/uuid file-id)
|
||||
(s/assert ::us/uuid library-id)
|
||||
|
||||
(container-log :info asset-id
|
||||
:msg "Sync file with library"
|
||||
@@ -508,10 +510,10 @@
|
||||
If an asset id is given, only shapes linked to this particular asset will
|
||||
be synchronized."
|
||||
[changes file-id asset-type asset-id library-id libraries current-file-id]
|
||||
(assert (contains? #{:colors :components :typographies} asset-type))
|
||||
(assert (or (nil? asset-id) (uuid? asset-id)))
|
||||
(assert (uuid? file-id))
|
||||
(assert (uuid? library-id))
|
||||
(s/assert #{:colors :components :typographies} asset-type)
|
||||
(s/assert (s/nilable ::us/uuid) asset-id)
|
||||
(s/assert ::us/uuid file-id)
|
||||
(s/assert ::us/uuid library-id)
|
||||
|
||||
(container-log :info asset-id
|
||||
:msg "Sync local components with library"
|
||||
@@ -1510,7 +1512,7 @@
|
||||
:shapes [(:id shape)]
|
||||
:index index-after
|
||||
:ignore-touched true
|
||||
:allow-altering-copies true}))
|
||||
:syncing true}))
|
||||
(update :undo-changes conj (make-change
|
||||
container
|
||||
{:type :mov-objects
|
||||
@@ -1518,7 +1520,7 @@
|
||||
:shapes [(:id shape)]
|
||||
:index index-before
|
||||
:ignore-touched true
|
||||
:allow-altering-copies true})))]
|
||||
:syncing true})))]
|
||||
|
||||
(if (and (ctk/touched-group? parent :shapes-group) omit-touched?)
|
||||
changes
|
||||
@@ -2813,15 +2815,13 @@
|
||||
ids-map (into {} (map #(vector % (uuid/next))) all-ids)
|
||||
|
||||
|
||||
;; If there is an alt-duplication we change to root
|
||||
;; For variants so the copy is made as a child of root
|
||||
;; If there is an alt-duplication of a variant, change its parent to root
|
||||
;; so the copy is made as a child of root
|
||||
;; This is because inside a variant-container can't be a copy
|
||||
;; For other shape this way the layout won't be changed when duplicated
|
||||
;; and if you move outside the layout will not change
|
||||
shapes (map (fn [shape]
|
||||
(cond-> shape
|
||||
alt-duplication?
|
||||
(assoc :parent-id uuid/zero :frame-id uuid/zero)))
|
||||
(if (and alt-duplication? (ctk/is-variant? shape))
|
||||
(assoc shape :parent-id uuid/zero :frame-id nil)
|
||||
shape))
|
||||
shapes)
|
||||
|
||||
|
||||
|
||||
@@ -8,8 +8,6 @@
(:refer-clojure :exclude [deref merge parse-uuid parse-long parse-double parse-boolean type keys])
#?(:cljs (:require-macros [app.common.schema :refer [ignoring]]))
(:require
#?(:clj [malli.dev.pretty :as mdp])
#?(:clj [malli.dev.virhe :as v])
[app.common.data :as d]
[app.common.math :as mth]
[app.common.pprint :as pp]
@@ -21,6 +19,8 @@
[clojure.core :as c]
[cuerdas.core :as str]
[malli.core :as m]
[malli.dev.pretty :as mdp]
[malli.dev.virhe :as v]
[malli.error :as me]
[malli.generator :as mg]
[malli.registry :as mr]
@@ -245,30 +245,27 @@
:level (d/nilv level 8)
:length (d/nilv length 12)})))))

#?(:clj
(defmethod v/-format ::schemaless-explain
[_ explanation printer]
{:body [:group
(v/-block "Value" (v/-visit (me/error-value explanation printer) printer) printer) :break :break
(v/-block "Errors" (v/-visit (me/humanize (me/with-spell-checking explanation)) printer) printer)]}))
(defmethod v/-format ::schemaless-explain
[_ explanation printer]
{:body [:group
(v/-block "Value" (v/-visit (me/error-value explanation printer) printer) printer) :break :break
(v/-block "Errors" (v/-visit (me/humanize (me/with-spell-checking explanation)) printer) printer)]})

#?(:clj
(defmethod v/-format ::explain
[_ {:keys [schema] :as explanation} printer]
{:body [:group
(v/-block "Value" (v/-visit (me/error-value explanation printer) printer) printer) :break :break
(v/-block "Errors" (v/-visit (me/humanize (me/with-spell-checking explanation)) printer) printer) :break :break
(v/-block "Schema" (v/-visit schema printer) printer)]}))
(defmethod v/-format ::explain
[_ {:keys [schema] :as explanation} printer]
{:body [:group
(v/-block "Value" (v/-visit (me/error-value explanation printer) printer) printer) :break :break
(v/-block "Errors" (v/-visit (me/humanize (me/with-spell-checking explanation)) printer) printer) :break :break
(v/-block "Schema" (v/-visit schema printer) printer)]})

#?(:clj
(defn pretty-explain
"A helper that allows printing a console-friendly output for the explain;
should not be used for other purposes"
[explain & {:keys [variant message]
:or {variant ::explain
message "Validation Error"}}]
(let [explain (fn [] (me/with-error-messages explain))]
((mdp/prettifier variant message explain default-options)))))
(defn pretty-explain
"A helper that allows printing a console-friendly output for the
explain; should not be used for other purposes"
[explain & {:keys [variant message]
:or {variant ::explain
message "Validation Error"}}]
(let [explain (fn [] (me/with-error-messages explain))]
((mdp/prettifier variant message explain default-options))))
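
A minimal usage sketch for the helper above (the schema and data are invented examples; `m/explain` is the standard malli entry point):

;; Hedged sketch, not part of the diff: print a console-friendly
;; report for a failing value.
(when-let [explanation (m/explain [:map [:id :uuid]] {:id "not-a-uuid"})]
  (pretty-explain explanation :message "Validation Error"))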
(defmacro ignoring
[expr]
@@ -302,13 +299,6 @@
::explain explain}))))
value))))

(defn coercer
[schema & {:as opts}]
(let [decode-fn (decoder schema json-transformer)
check-fn (check-fn schema opts)]
(fn [data]
(-> data decode-fn check-fn))))
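
A sketch of how `coercer` might be used (the schema is a hypothetical example): it composes the JSON-transformer decoder with the check function, so decoding and validation happen in one call.

;; Hedged sketch, not part of the diff:
(def coerce-point (coercer [:map [:x :double] [:y :double]]))
(coerce-point {:x 1.0 :y 2.0}) ;; => {:x 1.0 :y 2.0}; invalid data raises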

(defn check
"A helper intended to be used on assertions for validate/check the
schema over provided data. Raises an assertion exception.
@@ -852,6 +842,38 @@
choices))]
{:pred pred}))})

;; (register!
;; {:type ::inst
;; :pred tm/instant?
;; :type-properties
;; {:title "inst"
;; :description "Satisfies Inst protocol"
;; :error/message "should be an instant"
;; :gen/gen (->> (sg/small-int :min 0 :max 100000)
;; (sg/fmap (fn [v] (tm/parse-inst v))))

;; :decode/string tm/parse-inst
;; :encode/string tm/format-inst
;; :decode/json tm/parse-inst
;; :encode/json tm/format-inst
;; ::oapi/type "string"
;; ::oapi/format "iso"}})

;; (register!
;; {:type ::timestamp
;; :pred tm/instant?
;; :type-properties
;; {:title "inst"
;; :description "Satisfies Inst protocol, the same as ::inst but encodes to epoch"
;; :error/message "should be an instant"
;; :gen/gen (->> (sg/small-int)
;; (sg/fmap (fn [v] (tm/parse-inst v))))
;; :decode/string tm/parse-inst
;; :encode/string inst-ms
;; :decode/json tm/parse-inst
;; :encode/json inst-ms
;; ::oapi/type "string"
;; ::oapi/format "number"}})

#?(:clj
(register!
@@ -929,7 +951,7 @@
:pred #(and (string? %) (not (str/blank? %)))
:property-pred
(fn [{:keys [min max] :as props}]
(if (or min max)
(if (seq props)
(fn [value]
(let [size (count value)]
(cond
@@ -1003,9 +1025,6 @@
(def valid-safe-number?
(lazy-validator ::safe-number))

(def valid-safe-int?
(lazy-validator ::safe-int))

(def valid-text?
(validator ::text))

@@ -1082,35 +1082,33 @@

detach-shape
(fn [objects shape]
(let [shape' (cond-> shape
(not= file-id (:fill-color-ref-file shape))
(dissoc :fill-color-ref-id :fill-color-ref-file)
(l/debug :hint "detach-shape"
:file-id file-id
:component-ref-file (get-component-ref-file objects shape)
::l/sync? true)
(cond-> shape
(not= file-id (:fill-color-ref-file shape))
(dissoc :fill-color-ref-id :fill-color-ref-file)

(not= file-id (:stroke-color-ref-file shape))
(dissoc :stroke-color-ref-id :stroke-color-ref-file)
(not= file-id (:stroke-color-ref-file shape))
(dissoc :stroke-color-ref-id :stroke-color-ref-file)

(not= file-id (get-component-ref-file objects shape))
(dissoc :component-id :component-file :shape-ref :component-root)
(not= file-id (get-component-ref-file objects shape))
(dissoc :component-id :component-file :shape-ref :component-root)

(= :text (:type shape))
(update :content detach-text))]

(when (not= shape shape')
(l/dbg :hint "detach shape"
:file-id (str file-id)
:shape-id (str (:id shape))))

shape'))
(= :text (:type shape))
(update :content detach-text)))

detach-objects
(fn [objects]
(d/update-vals objects #(detach-shape objects %)))
(update-vals objects #(detach-shape objects %)))

detach-pages
(fn [pages-index]
(d/update-vals pages-index #(update % :objects detach-objects)))]
(update-vals pages-index #(update % :objects detach-objects)))]

(update-in file [:data :pages-index] detach-pages)))
(-> file
(update-in [:data :pages-index] detach-pages))))

;; Base font size

@@ -11,7 +11,6 @@
[app.common.exceptions :as ex]
[app.common.flags :as flags]
[app.common.schema :as sm]
[app.common.schema.generators :as sg]
[app.common.types.color :as types.color]
[app.common.types.fills.impl :as impl]
[clojure.core :as c]
@@ -50,19 +49,12 @@
(= 1 (count result))))

(def schema:fill
[:and schema:fill-attrs [:fn has-valid-fill-attrs?]])
[:and schema:fill-attrs
[:fn has-valid-fill-attrs?]])

(def check-fill
(sm/check-fn schema:fill))

(def ^:private schema:fills-as-vector
[:vector {:gen/max 2} schema:fill])

(def schema:fills
[:or {:gen/gen (sg/generator schema:fills-as-vector)}
schema:fills-as-vector
[:fn impl/fills?]])

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; CONSTRUCTORS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -267,4 +267,3 @@
(-> (stp/convert-to-path shape objects)
(update :content impl/path-data))))

(dm/export impl/decode-segments)

@@ -565,9 +565,6 @@
(def check-content
(sm/check-fn schema:content))

(def decode-segments
(sm/lazy-decoder schema:segments sm/json-transformer))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; CONSTRUCTORS & PREDICATES
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -19,7 +19,7 @@
[app.common.schema.generators :as sg]
[app.common.transit :as t]
[app.common.types.color :as clr]
[app.common.types.fills :refer [schema:fills fill->color]]
[app.common.types.fills :refer [schema:fill fill->color]]
[app.common.types.grid :as ctg]
[app.common.types.path :as path]
[app.common.types.plugins :as ctpg]
@@ -192,7 +192,8 @@
[:locked {:optional true} :boolean]
[:hidden {:optional true} :boolean]
[:masked-group {:optional true} :boolean]
[:fills {:optional true} schema:fills]
[:fills {:optional true}
[:vector {:gen/max 2} schema:fill]]
[:proportion {:optional true} ::sm/safe-number]
[:proportion-lock {:optional true} :boolean]
[:constraints-h {:optional true}
@@ -7,7 +7,7 @@
(ns app.common.types.shape.text
(:require
[app.common.schema :as sm]
[app.common.types.fills :refer [schema:fills]]))
[app.common.types.fills :refer [schema:fill]]))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; SCHEMA
@@ -32,7 +32,8 @@
[:type [:= "paragraph"]]
[:key {:optional true} :string]
[:fills {:optional true}
[:maybe schema:fills]]
[:maybe
[:vector {:gen/max 2} schema:fill]]]
[:font-family {:optional true} ::sm/text]
[:font-size {:optional true} ::sm/text]
[:font-style {:optional true} ::sm/text]
@@ -48,7 +49,8 @@
[:text :string]
[:key {:optional true} :string]
[:fills {:optional true}
[:maybe schema:fills]]
[:maybe
[:vector {:gen/max 2} schema:fill]]]
[:font-family {:optional true} ::sm/text]
[:font-size {:optional true} ::sm/text]
[:font-style {:optional true} ::sm/text]
@@ -69,7 +71,7 @@
[:y ::sm/safe-number]
[:width ::sm/safe-number]
[:height ::sm/safe-number]
[:fills schema:fills]
[:fills [:vector {:gen/max 2} schema:fill]]
[:font-family {:optional true} ::sm/text]
[:font-size {:optional true} ::sm/text]
[:font-style {:optional true} ::sm/text]
@@ -90,10 +90,6 @@
[{:fill-color clr/black
:fill-opacity 1}])

(def default-paragraph-attrs
{:text-align "left"
:text-direction "ltr"})

(def default-text-attrs
{:font-id "sourcesanspro"
:font-family "sourcesanspro"

@@ -266,6 +266,10 @@
typography-token-keys
#{:line-height}))

;; TODO: Created to extract the font-size feature from the typography feature flag.
;; Delete this once the typography feature flag is removed.
(def ff-typography-keys (set/difference typography-keys font-size-keys))

(def ^:private schema:number
(-> (reduce mu/union [[:map [:line-height {:optional true} token-name-ref]]
schema:rotation])
@@ -1575,10 +1575,10 @@ Will return a value that matches this schema:
(if (map? shadow)
(let [legacy-shadow-type (get "type" shadow)]
(-> shadow
(set/rename-keys {"x" :offset-x
"offsetX" :offset-x
"y" :offset-y
"offsetY" :offset-y
(set/rename-keys {"x" :offsetX
"offsetX" :offsetX
"y" :offsetY
"offsetY" :offsetY
"blur" :blur
"spread" :spread
"color" :color
@@ -1589,7 +1589,7 @@ Will return a value that matches this schema:
(= "false" %) false
(= legacy-shadow-type "innerShadow") true
:else false))
(select-keys [:offset-x :offset-y :blur :spread :color :inset])))
(select-keys [:offsetX :offsetY :blur :spread :color :inset])))
shadow))]
(cond
;; Reference value - keep as string
@@ -1679,7 +1679,7 @@ Will return a value that matches this schema:
["id" {:optional true} :string]
["name" :string]
["description" :string]
["isSource" {:optional true} :boolean]
["isSource" :boolean]
["selectedTokenSets"
[:map-of :string [:enum "enabled" "disabled"]]]]]]
["$metadata" {:optional true}
@@ -1794,19 +1794,17 @@ Will return a value that matches this schema:
data (without any case transformation). Used as schema decoder and
in the SDK."
[data]
(if (instance? TokensLib data)
data
(let [data (if (string? data)
(json/decode data :key-fn identity)
data)
data #?(:cljs (if (object? data)
(json/->clj data :key-fn identity)
data)
:clj data)
(let [data (if (string? data)
(json/decode data :key-fn identity)
data)
data #?(:cljs (if (object? data)
(json/->clj data :key-fn identity)
data)
:clj data)

data (decode-multi-set-dtcg-data data)]
(-> (check-multi-set-dtcg-data data)
(parse-multi-set-dtcg-json)))))
data (decode-multi-set-dtcg-data data)]
(-> (check-multi-set-dtcg-data data)
(parse-multi-set-dtcg-json))))

(defn- parse-multi-set-legacy-json
"Parse a decoded json file with multi sets in legacy format into a TokensLib."
@@ -1860,8 +1858,8 @@ Will return a value that matches this schema:
(mapv (fn [shadow]
(if (map? shadow)
(-> shadow
(set/rename-keys {:offset-x "offsetX"
:offset-y "offsetY"
(set/rename-keys {:offsetX "offsetX"
:offsetY "offsetY"
:blur "blur"
:spread "spread"
:color "color"
@@ -311,22 +311,16 @@
[variant]
(cpn/merge-path-item (:name variant) (str/replace (:variant-name variant) #", " " / ")))

(def ^:private boolean-pairs
[["on" "off"]
["yes" "no"]
["true" "false"]])

(defn find-boolean-pair
"Given a vector, return a map that contains the boolean equivalency if the values match
with any of the boolean pairs. Returns nil if none match."
[[a b :as v]]
(let [a' (-> a str/trim str/lower)
b' (-> b str/trim str/lower)]
"Given a vector, return the map from 'bool-values' that contains both as keys.
Returns nil if none match."
[v]
(let [bool-values [{"on" true "off" false}
{"yes" true "no" false}
{"true" true "false" false}]]
(when (= (count v) 2)
(some (fn [[t f]]
(cond (and (= a' t)
(= b' f)) {a true b false}
(and (= b' t)
(= a' f)) {b true a false}
:else nil))
boolean-pairs))))
(some (fn [b]
(when (and (contains? b (first v))
(contains? b (last v)))
b))
bool-values))))
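
A short REPL sketch of the rewritten helper, consistent with the tests further below; the returned map is the matching entry of `bool-values`, so the order of the two values does not matter:

;; Hedged sketch, not part of the diff:
(find-boolean-pair ["on" "off"])    ;; => {"on" true "off" false}
(find-boolean-pair ["no" "yes"])    ;; => {"yes" true "no" false}
(find-boolean-pair ["hello" "bye"]) ;; => nil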
@@ -1897,15 +1897,15 @@
(let [token (ctob/get-token-by-name lib "shadow-test" "test.shadow-single")]
(t/is (some? token))
(t/is (= :shadow (:type token)))
(t/is (= [{:offset-x "0", :offset-y "2px", :blur "4px", :spread "0", :color "#000", :inset false}]
(t/is (= [{:offsetX "0", :offsetY "2px", :blur "4px", :spread "0", :color "#000", :inset false}]
(:value token)))))

(t/testing "multiple shadow token"
(let [token (ctob/get-token-by-name lib "shadow-test" "test.shadow-multiple")]
(t/is (some? token))
(t/is (= :shadow (:type token)))
(t/is (= [{:offset-x "0", :offset-y "2px", :blur "4px", :spread "0", :color "#000", :inset true}
{:offset-x "0", :offset-y "8px", :blur "16px", :spread "0", :color "#000", :inset true}]
(t/is (= [{:offsetX "0", :offsetY "2px", :blur "4px", :spread "0", :color "#000", :inset true}
{:offsetX "0", :offsetY "8px", :blur "16px", :spread "0", :color "#000", :inset true}]
(:value token)))))

(t/testing "shadow token with reference"
@@ -1918,7 +1918,7 @@
(let [token (ctob/get-token-by-name lib "shadow-test" "test.shadow-with-type")]
(t/is (some? token))
(t/is (= :shadow (:type token)))
(t/is (= [{:offset-x "0", :offset-y "4px", :blur "8px", :spread "0", :color "rgba(0,0,0,0.2)", :inset false}]
(t/is (= [{:offsetX "0", :offsetY "4px", :blur "8px", :spread "0", :color "rgba(0,0,0,0.2)", :inset false}]
(:value token)))))

(t/testing "shadow token with description"
@@ -1937,14 +1937,14 @@
(ctob/make-token
{:name "shadow.single"
:type :shadow
:value [{:offset-x "0" :offset-y "2px" :blur "4px" :spread "0" :color "#0000001A"}]
:value [{:offsetX "0" :offsetY "2px" :blur "4px" :spread "0" :color "#0000001A"}]
:description "A single shadow"})
"shadow.multiple"
(ctob/make-token
{:name "shadow.multiple"
:type :shadow
:value [{:offset-x "0" :offset-y "2px" :blur "4px" :spread "0" :color "#0000001A"}
{:offset-x "0" :offset-y "8px" :blur "16px" :spread "0" :color "#0000001A"}]})
:value [{:offsetX "0" :offsetY "2px" :blur "4px" :spread "0" :color "#0000001A"}
{:offsetX "0" :offsetY "8px" :blur "16px" :spread "0" :color "#0000001A"}]})
"shadow.ref"
(ctob/make-token
{:name "shadow.ref"
@@ -1991,7 +1991,7 @@
(ctob/make-token
{:name "shadow.test"
:type :shadow
:value [{:offset-x "1" :offset-y "1" :blur "1" :spread "1" :color "red" :inset true}]
:value [{:offsetX "1" :offsetY "1" :blur "1" :spread "1" :color "red" :inset true}]
:description "Round trip test"})
"shadow.ref"
(ctob/make-token

@@ -163,11 +163,9 @@

(t/deftest find-boolean-pair
(t/testing "find-boolean-pair"
(t/is (= (ctv/find-boolean-pair ["off" "on"]) {"on" true "off" false}))
(t/is (= (ctv/find-boolean-pair ["OfF" "oN"]) {"oN" true "OfF" false}))
(t/is (= (ctv/find-boolean-pair [" ofF" "oN "]) {"oN " true " ofF" false}))
(t/is (= (ctv/find-boolean-pair ["on" "off"]) {"on" true "off" false}))
(t/is (= (ctv/find-boolean-pair ["yes" "no"]) {"yes" true "no" false}))
(t/is (= (ctv/find-boolean-pair ["false" "true"]) {"true" true "false" false}))
(t/is (= (ctv/find-boolean-pair ["off" "on"]) {"on" true "off" false}))
(t/is (= (ctv/find-boolean-pair ["on" "off"]) {"on" true "off" false}))
(t/is (= (ctv/find-boolean-pair ["off" "on" "other"]) nil))
(t/is (= (ctv/find-boolean-pair ["hello" "bye"]) nil))))
(t/is (= (ctv/find-boolean-pair ["yes" "no"]) {"yes" true "no" false}))
(t/is (= (ctv/find-boolean-pair ["false" "true"]) {"true" true "false" false}))
(t/is (= (ctv/find-boolean-pair ["hello" "bye"]) nil))))
@@ -25,6 +25,48 @@ RUN set -ex; \
binutils \
build-essential autoconf libtool pkg-config

################################################################################
## IMAGE MAGICK
################################################################################

FROM base AS build-imagemagick

ENV IMAGEMAGICK_VERSION=7.1.1-47 \
DEBIAN_FRONTEND=noninteractive

RUN set -ex; \
apt-get -qq update; \
apt-get -qq upgrade; \
apt-get -qqy --no-install-recommends install \
libltdl-dev \
libpng-dev \
libjpeg-dev \
libtiff-dev \
libwebp-dev \
libopenexr-dev \
libfftw3-dev \
libzip-dev \
liblcms2-dev \
liblzma-dev \
libzstd-dev \
libheif-dev \
librsvg2-dev \
; \
rm -rf /var/lib/apt/lists/*

RUN set -eux; \
curl -LfsSo /tmp/magick.tar.gz https://github.com/ImageMagick/ImageMagick/archive/refs/tags/${IMAGEMAGICK_VERSION}.tar.gz; \
mkdir -p /tmp/magick; \
cd /tmp/magick; \
tar -xf /tmp/magick.tar.gz --strip-components=1; \
./configure --prefix=/opt/imagick; \
make -j 2; \
make install; \
rm -rf /opt/imagick/lib/libMagick++*; \
rm -rf /opt/imagick/include; \
rm -rf /opt/imagick/share;

################################################################################
## NODE SETUP
################################################################################
@@ -59,45 +101,13 @@ RUN set -eux; \
corepack enable; \
rm -rf /tmp/nodejs.tar.gz;

################################################################################
## CADDYSERVER SETUP
################################################################################

FROM base AS setup-caddy

ENV CADDY_VERSION=2.10.2

RUN set -eux; \
ARCH="$(dpkg --print-architecture)"; \
case "${ARCH}" in \
aarch64|arm64) \
BINARY_URL="https://github.com/caddyserver/caddy/releases/download/v${CADDY_VERSION}/caddy_${CADDY_VERSION}_linux_arm64.tar.gz"; \
;; \
amd64|x86_64) \
BINARY_URL="https://github.com/caddyserver/caddy/releases/download/v${CADDY_VERSION}/caddy_${CADDY_VERSION}_linux_amd64.tar.gz"; \
;; \
*) \
echo "Unsupported arch: ${ARCH}"; \
exit 1; \
;; \
esac; \
curl -LfsSo /tmp/caddy.tar.gz ${BINARY_URL}; \
mkdir -p /tmp/caddy; \
cd /tmp/caddy; \
tar -xf /tmp/caddy.tar.gz; \
chown -R root /tmp/caddy; \
mv /tmp/caddy/caddy /usr/bin/; \
rm -rf /tmp/caddy.tar.gz; \
rm -rf /tmp/caddy;

################################################################################
## JVM SETUP
################################################################################

FROM base AS setup-jvm

ENV CLOJURE_VERSION=1.12.3.1577
ENV CLOJURE_VERSION=1.12.2.1565

RUN set -eux; \
ARCH="$(dpkg --print-architecture)"; \
@@ -375,7 +385,7 @@ ENV LANG='C.UTF-8' \
RUSTUP_HOME="/opt/rustup" \
PATH="/opt/jdk/bin:/opt/utils/bin:/opt/clojure/bin:/opt/node/bin:/opt/imagick/bin:/opt/cargo/bin:$PATH"

COPY --from=penpotapp/imagemagick:7.1.2-0 /opt/imagick /opt/imagick
COPY --from=build-imagemagick /opt/imagick /opt/imagick
COPY --from=setup-jvm /opt/jdk /opt/jdk
COPY --from=setup-jvm /opt/clojure /opt/clojure
COPY --from=setup-node /opt/node /opt/node
@@ -383,7 +393,6 @@ COPY --from=setup-utils /opt/utils /opt/utils
COPY --from=setup-rust /opt/cargo /opt/cargo
COPY --from=setup-rust /opt/rustup /opt/rustup
COPY --from=setup-rust /opt/emsdk /opt/emsdk
COPY --from=setup-caddy /usr/bin/caddy /usr/bin/caddy

COPY files/nginx.conf /etc/nginx/nginx.conf
COPY files/nginx-mime.types /etc/nginx/mime.types
@@ -394,7 +403,6 @@ COPY files/vimrc /root/.vimrc
COPY files/tmux.conf /root/.tmux.conf
COPY files/sudoers /etc/sudoers

COPY files/Caddyfile /home/
COPY files/start-tmux.sh /home/start-tmux.sh
COPY files/start-tmux-back.sh /home/start-tmux-back.sh
COPY files/entrypoint.sh /home/entrypoint.sh

@@ -67,11 +67,6 @@ services:
- PENPOT_LDAP_ATTRS_FULLNAME=cn
- PENPOT_LDAP_ATTRS_PHOTO=jpegPhoto

networks:
default:
aliases:
- main

minio:
image: "minio/minio:RELEASE.2025-04-03T14-56-28Z"
command: minio server /mnt/data --console-address ":9001"
@@ -83,10 +78,9 @@ services:
- MINIO_ROOT_USER=minioadmin
- MINIO_ROOT_PASSWORD=minioadmin

networks:
default:
aliases:
- minio
ports:
- 9000:9000
- 9001:9001

postgres:
image: postgres:16.8
@@ -116,11 +110,6 @@ services:
volumes:
- "valkey_data:/data"

networks:
default:
aliases:
- redis

mailer:
image: sj26/mailcatcher:latest
restart: always
@@ -129,12 +118,6 @@ services:
ports:
- "1080:1080"

networks:
default:
aliases:
- mailer

# https://github.com/rroemhild/docker-test-openldap
ldap:
image: rroemhild/test-openldap:2.1
@@ -148,9 +131,3 @@ services:
nofile:
soft: 1024
hard: 1024

networks:
default:
aliases:
- ldap

@@ -1,4 +0,0 @@
localhost:3449 {
tls internal
reverse_proxy localhost:4449
}
@@ -1,5 +1,5 @@
#!/usr/bin/env bash

set -e
nginx;
caddy run -c /home/Caddyfile;
nginx
tail -f /dev/null

@@ -12,7 +12,7 @@ http {
sendfile on;
tcp_nopush on;
tcp_nodelay on;
keepalive_timeout 100;
keepalive_timeout 65;
types_hash_max_size 2048;
server_tokens off;

@@ -55,7 +55,7 @@ http {
proxy_cache_key "$host$request_uri";

server {
listen 4449 default_server;
listen 3449 default_server;
server_name _;

client_max_body_size 300M;
@@ -141,10 +141,6 @@ http {
proxy_pass http://127.0.0.1:5000;
}

location /nitrate/ {
proxy_pass http://127.0.0.1:3000/;
}

location /playground {
alias /home/penpot/penpot/experiments/;
add_header Cache-Control "no-cache, max-age=0";
@@ -223,6 +219,16 @@ http {
add_header X-Cache-Status $upstream_cache_status;
}

location ~ ^/js/config.js$ {
add_header Cache-Control "no-store, no-cache, max-age=0" always;
}

location ~* \.(js|css|jpg|svg|png|mjs|map)$ {
# We set no cache only on devenv
add_header Cache-Control "no-store, no-cache, max-age=0" always;
# add_header Cache-Control "max-age=604800" always; # 7 days
}

location ~ ^/(/|css|fonts|images|js|wasm|mjs|map) {
}

@@ -230,9 +236,9 @@ http {
return 301 " /404";
}

add_header Cache-Control "no-store";
# This header is what we need to use on prod
# add_header Cache-Control "public, must-revalidate, max-age=0";
add_header Last-Modified $date_gmt;
add_header Cache-Control "no-store, no-cache, max-age=0" always;
if_modified_since off;
try_files $uri /index.html$is_args$args /index.html =404;
}
}

@@ -19,7 +19,7 @@
##
## You can read more about all available flags and other
## environment variables here:
## https://help.penpot.app/technical-guide/configuration/#penpot-configuration
## https://help.penpot.app/technical-guide/configuration/#advanced-configuration
#
# WARNING: if you're exposing Penpot to the internet, you should remove the flags
# 'disable-secure-session-cookies' and 'disable-email-verification'
@@ -37,15 +37,6 @@ x-body-size: &penpot-http-body-size
# Max multipart body size (350MiB)
PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE: 367001600

## Penpot SECRET KEY. It serves as a master key from which other keys for subsystems
## (eg http sessions, or invitations) are derived.
##
## We recommend using a truly randomly generated
## 512 bits base64 encoded string here. You can generate one with:
##
## python3 -c "import secrets; print(secrets.token_urlsafe(64))"
x-secret-key: &penpot-secret-key
PENPOT_SECRET_KEY: change-this-insecure-key

networks:
penpot:
@@ -54,6 +45,7 @@ volumes:
penpot_postgres_v15:
penpot_assets:
# penpot_traefik:
# penpot_minio:

services:
## Traefik service declaration example. Consider using it if you are going to expose
@@ -128,7 +120,20 @@ services:
## Configuration environment variables for the backend container.

environment:
<< : [*penpot-flags, *penpot-public-uri, *penpot-http-body-size, *penpot-secret-key]
<< : [*penpot-flags, *penpot-public-uri, *penpot-http-body-size]

## Penpot SECRET KEY. It serves as a master key from which other keys for subsystems
## (eg http sessions, or invitations) are derived.
##
## If you leave it commented, all created sessions and invitations will
## become invalid on container restart.
##
## If you are going to uncomment this, we recommend using a truly randomly generated
## 512 bits base64 encoded string here. You can generate one with:
##
## python3 -c "import secrets; print(secrets.token_urlsafe(64))"

# PENPOT_SECRET_KEY: my-insecure-key

## The PREPL host. Mainly used for external programmatic access to penpot backend
## (example: admin). By default it will listen on `localhost` but if you are going to use
@@ -154,12 +159,13 @@ services:
PENPOT_ASSETS_STORAGE_BACKEND: assets-fs
PENPOT_STORAGE_ASSETS_FS_DIRECTORY: /opt/data/assets

## Can also be configured to use an S3 compatible storage.
## Can also be configured to use an S3 compatible storage
## service like MinIO. Look below for the minio service setup.

# AWS_ACCESS_KEY_ID: <KEY_ID>
# AWS_SECRET_ACCESS_KEY: <ACCESS_KEY>
# PENPOT_ASSETS_STORAGE_BACKEND: assets-s3
# PENPOT_STORAGE_ASSETS_S3_ENDPOINT: <ENDPOINT>
# PENPOT_STORAGE_ASSETS_S3_ENDPOINT: http://penpot-minio:9000
# PENPOT_STORAGE_ASSETS_S3_BUCKET: <BUCKET_NAME>

## Telemetry. When enabled, a periodical process will send anonymous data about this
@@ -196,7 +202,6 @@ services:
- penpot

environment:
<< : [*penpot-secret-key]
# Don't touch it; this uses an internal docker network to
# communicate with the frontend.
PENPOT_PUBLIC_URI: http://penpot-frontend:8080
@@ -260,3 +265,22 @@ services:
- "1080:1080"
networks:
- penpot

## Example configuration of MinIO (S3 compatible object storage service); if you don't
## have a preference, then just use the filesystem; this is here just for completeness.

# minio:
# image: "minio/minio:latest"
# command: minio server /mnt/data --console-address ":9001"
# restart: always
#
# volumes:
# - "penpot_minio:/mnt/data"
#
# environment:
# - MINIO_ROOT_USER=minioadmin
# - MINIO_ROOT_PASSWORD=minioadmin
#
# ports:
# - 9000:9000
# - 9001:9001

@@ -120,7 +120,7 @@
</ul>
</div>
<div class="footer-bottom">
<div class="footer-text"><span>Kaleidos © 2025 | Made with LOVE and Open Source</span></div>
<div class="footer-text"><span>Kaleidos © 2024 | Made with LOVE and Open Source</span></div>
<div class="github-widget">
<a class="github-link" href="https://github.com/penpot/penpot" rel="noopener" target="_blank" aria-label="Star penpot/penpot on GitHub">
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 58.208 58.208" version="1.1">

Binary file not shown (before: 16 KiB).
@@ -314,7 +314,7 @@ If you're using the official <code class="language-bash">docker-compose.yml</code>

## Email configuration

By default, <code class="language-bash">smtp</code> flag is disabled, the email will be
By default, <code class="language-bash">smpt</code> flag is disabled, the email will be
printed to the console, which means that the emails will be shown in stdout.

Note that if you plan to invite members to a team, it is recommended that you enable SMTP
@@ -1,6 +1,5 @@
---
title: 3.07. Abstraction levels
desc: "Penpot Technical Guide: organize data and logic in clear abstraction layers—ADTs, file ops, event-sourced changes, business rules, and data events."
---

# Code organization in abstraction levels

@@ -1,6 +1,5 @@
---
title: 3.06. Backend Guide
desc: "Penpot Technical Guide: Backend basics - REPL setup, loading fixtures, database migrations, and clj-kondo linting to speed development workflows."
---

# Backend guide #

@@ -1,6 +1,5 @@
---
title: 1.2 Install with Elestio
desc: "Step-by-step guide to deploy a self-hosted Penpot on Elestio: 3-minute setup, managed DNS/SMTP/SSL/backups, Docker Compose config, updates & support."
---

# Install with Elestio

@@ -1,7 +1,6 @@
---
title: Design Tokens
order: 5
desc: Learn how to create, manage and apply Penpot Design Tokens using W3C DTCG format, with sets, themes, aliases, equations and JSON import/export.
---

<h1 id="design-tokens">Design Tokens</h1>

@@ -107,25 +107,6 @@ desc: Streamline your design workflow with Penpot's Components guide! Learn to c
<li>Select the variant copy, press right-click, and select the menu option <strong>Restore variant</strong> (will show if the main component still exists).</li>
</ul>

<h3 id="component-variants-toggle">Toggle for boolean variants</h3>
<p>When a variant property represents a boolean state, Penpot can display it as a toggle instead of a dropdown. This offers a quicker and more visual way to switch between two opposite values when working with copies.</p>
<p>The toggle appears in place of the property values dropdown, <strong>only when a copy is selected</strong>.</p>
<figure>
<img src="/img/variants/07-variants-boolean.webp" alt="Boolean variant option" />
</figure>
<h4>Accepted value pairs</h4>
<p>For Penpot to recognize the property as a boolean and display the toggle, the property must be defined with exactly two opposing values. These can be any of the following pairs:</p>
<ul>
<li><code>true</code> / <code>false</code></li>
<li><code>on</code> / <code>off</code></li>
<li><code>yes</code> / <code>no</code></li>
</ul>
<p>The order of the values does not matter. Penpot automatically maps them to ON and OFF states (see the sketch after this list):</p>
<ul>
<li><strong>ON state:</strong> <code>true</code>, <code>yes</code>, <code>on</code></li>
<li><strong>OFF state:</strong> <code>false</code>, <code>no</code>, <code>off</code></li>
</ul>
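
The mapping documented above is implemented by the `find-boolean-pair` helper rewritten earlier in this diff; a hedged sketch of the documented behavior:

;; Hedged sketch, not part of the diff: any accepted pair resolves to
;; the same ON/OFF semantics regardless of order.
(ctv/find-boolean-pair ["yes" "no"]) ;; => {"yes" true "no" false}
(ctv/find-boolean-pair ["no" "yes"]) ;; => {"yes" true "no" false}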
<h3 id="component-use-variants">Use variants</h3>
<p>Once you have created your variants, you can place a copy of a component with variants into your design and then switch between its different versions.</p>

@@ -18,15 +18,4 @@ cp ../.yarnrc.yml target/;
cp yarn.lock target/;
cp package.json target/;

cat <<EOF | tee target/setup
#!/usr/bin/env bash
set -e;
corepack enable;
corepack install;
yarn install
yarn run playwright install chromium;
EOF

chmod +x target/setup;

sed -i -re "s/\%version\%/$CURRENT_VERSION/g" ./target/app.js;

@@ -1,6 +0,0 @@
#!/usr/bin/env bash

SCRIPT_DIR=$(dirname $0);
source $SCRIPT_DIR/../../backend/scripts/_env;

exec node target/app.js
@@ -1,10 +1,6 @@
#!/usr/bin/env bash

SCRIPT_DIR=$(dirname $0);
source $SCRIPT_DIR/../../backend/scripts/_env;

bb -i '(babashka.wait/wait-for-port "localhost" 9630)';
bb -i '(babashka.wait/wait-for-path "target/app.js")';
sleep 2;

exec node target/app.js
node target/app.js

@@ -7,9 +7,7 @@
(ns app.config
(:refer-clojure :exclude [get])
(:require
["node:buffer" :as buffer]
["node:crypto" :as crypto]
["node:process" :as process]
["process" :as process]
[app.common.data :as d]
[app.common.flags :as flags]
[app.common.schema :as sm]
@@ -23,14 +21,13 @@
:host "localhost"
:http-server-port 6061
:http-server-host "0.0.0.0"
:tempdir "/tmp/penpot"
:tempdir "/tmp/penpot-exporter"
:redis-uri "redis://redis/0"})

(def ^:private schema:config
(def ^:private
schema:config
[:map {:title "config"}
[:secret-key :string]
[:public-uri {:optional true} ::sm/uri]
[:management-api-key {:optional true} :string]
[:host {:optional true} :string]
[:tenant {:optional true} :string]
[:flags {:optional true} [::sm/set :keyword]]
@@ -96,10 +93,3 @@
(c/get config key))
([key default]
(c/get config key default)))

(def management-key
(or (c/get config :management-api-key)
(let [secret-key (c/get config :secret-key)
derived-key (crypto/hkdfSync "blake2b512" secret-key "management" "" 32)]
(-> (.from buffer/Buffer derived-key)
(.toString "base64url")))))
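
The derivation above means the exporter and backend only need to share `PENPOT_SECRET_KEY`; the management key falls out deterministically. A hedged REPL sketch mirroring the same call with an invented secret:

;; Hedged sketch, not part of the diff: HKDF derives 32 bytes from the
;; secret key, which are then base64url-encoded for transport.
(let [derived (crypto/hkdfSync "blake2b512" "my-secret" "management" "" 32)]
  (-> (.from buffer/Buffer derived)
      (.toString "base64url")))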
@@ -12,6 +12,7 @@
[app.common.spec :as us]
[app.handlers.export-frames :as export-frames]
[app.handlers.export-shapes :as export-shapes]
[app.handlers.resources :as resources]
[app.util.transit :as t]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]))
@@ -53,7 +54,7 @@

:else
(let [data {:type :server-error
:code code
:code type
:hint (ex-message error)
:data data}]
(l/error :hint "unexpected internal error" :cause error)
@@ -70,6 +71,7 @@

(defmethod command-spec :export-shapes [_] ::export-shapes/params)
(defmethod command-spec :export-frames [_] ::export-frames/params)
(defmethod command-spec :get-resource [_] (s/keys :req-un [::id]))

(s/def ::params
(s/and (s/keys :req-un [::cmd]
@@ -81,6 +83,7 @@
(let [{:keys [cmd] :as params} (us/conform ::params params)]
(l/debug :hint "process-request" :cmd cmd)
(case cmd
:get-resource (resources/handler exchange)
:export-shapes (export-shapes/handler exchange params)
:export-frames (export-frames/handler exchange params)
(ex/raise :type :internal

@@ -43,78 +43,90 @@
;; datastructure preparation uses it for creating the groups.
(let [exports (-> (map #(assoc % :type :pdf :scale 1 :suffix "") exports)
(prepare-exports auth-token))]

(handle-export exchange (assoc params :exports exports))))

(defn handle-export
[{:keys [:request/auth-token] :as exchange} {:keys [exports name profile-id] :as params}]
(let [topic (str profile-id)
file-id (-> exports first :file-id)
[exchange {:keys [exports wait name profile-id] :as params}]
(let [total (count exports)
topic (str profile-id)
resource (rsc/create :pdf (or name (-> exports first :name)))

resource
(rsc/create :pdf (or name (-> exports first :name)))
on-progress (fn [{:keys [done]}]
(when-not wait
(let [data {:type :export-update
:resource-id (:id resource)
:name (:name resource)
:filename (:filename resource)
:status "running"
:total total
:done done}]
(redis/pub! topic data))))

on-progress
(fn [done]
(let [data {:type :export-update
:resource-id (:id resource)
:status "running"
:done done}]
(redis/pub! topic data)))
on-complete (fn []
(when-not wait
(let [data {:type :export-update
:resource-id (:id resource)
:name (:name resource)
:filename (:filename resource)
:status "ended"}]
(redis/pub! topic data))))

on-complete
(fn [resource]
(let [data {:type :export-update
:resource-id (:id resource)
:resource-uri (:uri resource)
:name (:name resource)
:filename (:filename resource)
:mtype (:mtype resource)
:status "ended"}]
(redis/pub! topic data)))
on-error (fn [cause]
(l/error :hint "unexpected error on frames exportation" :cause cause)
(if wait
(p/rejected cause)
(let [data {:type :export-update
:resource-id (:id resource)
:name (:name resource)
:filename (:filename resource)
:status "error"
:cause (ex-message cause)}]
(redis/pub! topic data))))

on-error
(fn [cause]
(l/error :hint "unexpected error on frames exportation" :cause cause)
(let [data {:type :export-update
:resource-id (:id resource)
:name (:name resource)
:filename (:filename resource)
:status "error"
:cause (ex-message cause)}]
(redis/pub! topic data)))
proc (create-pdf :resource resource
:exports exports
:on-progress on-progress
:on-complete on-complete
:on-error on-error)]
(if wait
(p/then proc #(assoc exchange :response/body (dissoc % :path)))
(assoc exchange :response/body (dissoc resource :path)))))

result-cache
(atom [])
(defn create-pdf
[& {:keys [resource exports on-progress on-complete on-error]
:or {on-progress (constantly nil)
on-complete (constantly nil)
on-error p/rejected}}]

(let [file-id (-> exports first :file-id)
result (atom [])

on-object
(fn [{:keys [path] :as object}]
(let [res (swap! result-cache conj path)]
(on-progress (count res))))
(let [res (swap! result conj path)]
(on-progress {:done (count res)})))]

procs
(->> (seq exports)
(map #(rd/render % on-object)))]
(-> (p/loop [exports (seq exports)]
(when-let [export (first exports)]
(p/do
(rd/render export on-object)
(p/recur (rest exports)))))

(->> (p/all procs)
(p/fmap (fn [] @result-cache))
(p/mcat (partial join-pdf file-id))
(p/mcat (partial move-file resource))
(p/fmap (constantly resource))
(p/mcat (partial rsc/upload-resource auth-token))
(p/mcat (fn [resource]
(->> (sh/stat (:path resource))
(p/fmap #(merge resource %)))))
(p/merr on-error)
(p/fnly (fn [resource cause]
(when-not cause
(on-complete resource)))))

(assoc exchange :response/body (dissoc resource :path))))
(p/then (fn [_] (deref result)))
(p/then (partial join-pdf file-id))
(p/then (partial move-file resource))
(p/then (constantly resource))
(p/then (fn [resource]
(-> (sh/stat (:path resource))
(p/then #(merge resource %)))))
(p/catch on-error)
(p/finally (fn [_ cause]
(when-not cause
(on-complete)))))))

(defn- join-pdf
[file-id paths]
(p/let [prefix (str/concat "penpot.pdfunite." file-id ".")
(p/let [prefix (str/concat "penpot.tmp.pdfunite." file-id ".")
path (sh/tempfile :prefix prefix :suffix ".pdf")]
(sh/run-cmd! (str "pdfunite " (str/join " " paths) " " path))
path))

@@ -60,26 +60,46 @@
(handle-multiple-export exchange (assoc params :exports exports)))))

(defn- handle-single-export
[{:keys [:request/auth-token] :as exchange} {:keys [export name skip-children] :as params}]
(let [resource (rsc/create (:type export) (or name (:name export)))
export (assoc export :skip-children skip-children)]
[exchange {:keys [export wait profile-id name skip-children] :as params}]
(let [topic (str profile-id)
resource (rsc/create (:type export) (or name (:name export)))

(->> (rd/render export
(fn [{:keys [path] :as object}]
(sh/move! path (:path resource))))
(p/fmap (constantly resource))
(p/mcat (partial rsc/upload-resource auth-token))
(p/fmap (fn [resource]
(dissoc resource :path)))
(p/fmap (fn [resource]
(assoc exchange :response/body resource)))
(p/merr (fn [cause]
(l/error :hint "unexpected error on export multiple"
:cause cause)
(p/rejected cause)))))
on-progress (fn [{:keys [path] :as object}]
(p/do
;; Move the generated path to the resource
;; path destination.
(sh/move! path (:path resource))

(when-not wait
(redis/pub! topic {:type :export-update
:resource-id (:id resource)
:status "running"
:total 1
:done 1})
(redis/pub! topic {:type :export-update
:resource-id (:id resource)
:filename (:filename resource)
:name (:name resource)
:status "ended"}))))
on-error (fn [cause]
(l/error :hint "unexpected error on export multiple"
:cause cause)
(if wait
(p/rejected cause)
(redis/pub! topic {:type :export-update
:resource-id (:id resource)
:status "error"
:cause (ex-message cause)})))
export (assoc export :skip-children skip-children)
proc (-> (rd/render export on-progress)
(p/then (constantly resource))
(p/catch on-error))]
(if wait
(p/then proc #(assoc exchange :response/body (dissoc % :path)))
(assoc exchange :response/body (dissoc resource :path)))))

(defn- handle-multiple-export
[{:keys [:request/auth-token] :as exchange} {:keys [exports wait profile-id name] :as params}]
[exchange {:keys [exports wait profile-id name skip-children] :as params}]
(let [resource (rsc/create :zip (or name (-> exports first :name)))
total (count exports)
topic (str profile-id)
@@ -93,6 +113,15 @@
:done done}]
(redis/pub! topic data))))

on-complete (fn []
(when-not wait
(let [data {:type :export-update
:name (:name resource)
:filename (:filename resource)
:resource-id (:id resource)
:status "ended"}]
(redis/pub! topic data))))

on-error (fn [cause]
(l/error :hint "unexpected error on multiple exportation" :cause cause)
(if wait
@@ -103,35 +132,30 @@
:cause (ex-message cause)})))

zip (rsc/create-zip :resource resource
:on-complete on-complete
:on-error on-error
:on-progress on-progress)

append (fn [{:keys [filename path] :as resource}]
append (fn [{:keys [filename path] :as object}]
(rsc/add-to-zip! zip path (str/replace filename sanitize-file-regex "_")))

proc (->> exports
(map (fn [export] (rd/render export append)))
(p/all)
(p/fnly (fn [_] (.finalize zip)))
(p/fmap (constantly resource))
(p/mcat (partial rsc/upload-resource auth-token))
(p/fmap (fn [resource]
(let [data {:type :export-update
:name (:name resource)
:filename (:filename resource)
:resource-id (:id resource)
:resource-uri (:uri resource)
:mtype (:mtype resource)
:status "ended"}]
(p/do (redis/pub! topic data)
(assoc exchange :response/body resource)))))
(p/merr on-error))]
proc (-> (p/do
(p/loop [exports (seq exports)]
(when-let [export (some-> (first exports)
(assoc :skip-children skip-children))]
(p/do
(rd/render export append)
(p/recur (rest exports)))))
(.finalize zip))
(p/then (constantly resource))
(p/catch on-error))]
(if wait
(p/then proc #(assoc exchange :response/body (dissoc % :path)))
(assoc exchange :response/body (dissoc resource :path)))))

(defn- assoc-file-name
"A transducer that assocs a candidate filename and avoids duplicates"
"A transducer that assocs a candidate filename and avoids duplicates."
[]
(letfn [(find-candidate [params used]
(loop [index 0]

@@ -9,13 +9,9 @@
(:require
["archiver$default" :as arc]
["node:fs" :as fs]
["node:fs/promises" :as fsp]
["node:path" :as path]
[app.common.exceptions :as ex]
[app.common.transit :as t]
[app.common.uri :as u]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.util.mime :as mime]
[app.util.shell :as sh]
[cljs.core :as c]
@@ -29,20 +25,44 @@
(defn create
"Generates an ephemeral resource object."
[type name]
(let [task-id (uuid/next)
path (-> (get-path type task-id)
(sh/schedule-deletion))]
{:path path
(let [task-id (uuid/next)]
{:path (get-path type task-id)
:mtype (mime/get type)
:name name
:filename (str/concat name (mime/get-extension type))
:id task-id}))
:id (str/concat (c/name type) "." task-id)}))

(defn- lookup
[id]
(p/let [[type task-id] (str/split id "." 2)
path (get-path type task-id)
mtype (mime/get (keyword type))
stat (sh/stat path)]

(when-not stat
(ex/raise :type :not-found))

{:stream (fs/createReadStream path)
:headers {"content-type" mtype
"content-length" (:size stat)}}))

(defn handler
[{:keys [:request/params] :as exchange}]
(when-not (contains? params :id)
(ex/raise :type :validation
:code :missing-id))

(-> (lookup (get params :id))
(p/then (fn [{:keys [stream headers] :as resource}]
(-> exchange
(assoc :response/status 200)
(assoc :response/body stream)
(assoc :response/headers headers))))))
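
The id produced by `create` now embeds the resource type, and `lookup` recovers both halves by splitting on the first dot. A hedged round-trip sketch (`:png` is an arbitrary example type):

;; Hedged sketch, not part of the diff:
(let [{:keys [id]} (create :png "my-export")]
  (str/split id "." 2)) ;; => ["png" "<task-id>"]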
(defn create-zip
[& {:keys [resource on-complete on-progress on-error]}]
(let [^js zip (arc/create "zip")
^js out (fs/createWriteStream (:path resource))
on-complete (or on-complete (constantly nil))
progress (atom 0)]
(.on zip "error" on-error)
(.on zip "end" on-complete)
@@ -60,29 +80,3 @@
(defn close-zip!
[zip]
(.finalize ^js zip))

(defn upload-resource
[auth-token resource]
(->> (fsp/readFile (:path resource))
(p/fmap (fn [buffer]
(new js/Blob #js [buffer] #js {:type (:mtype resource)})))
(p/mcat (fn [blob]
(let [fdata (new js/FormData)
uri (-> (cf/get :public-uri)
(u/ensure-path-slash)
(u/join "api/management/methods/upload-tempfile")
(str))]
(.append fdata "content" blob (:filename resource))
(js/fetch uri #js {:headers #js {"X-Shared-Key" cf/management-key
"Authorization" (str "Bearer " auth-token)}
:method "POST"
:body fdata}))))
(p/mcat (fn [response]
(if (not= (.-status response) 200)
(ex/raise :type :internal
:code :unable-to-upload-resource
:response-status (.-status response))
(.text response))))
(p/fmap t/decode-str)
(p/fmap (fn [result]
(merge resource (dissoc result :id))))))

@@ -29,13 +29,13 @@
:userAgent bw/default-user-agent})

(render-object [page {:keys [id] :as object}]
(p/let [path (sh/tempfile :prefix "penpot.tmp.bitmap." :suffix (mime/get-extension type))
(p/let [path (sh/tempfile :prefix "penpot.tmp.render.bitmap." :suffix (mime/get-extension type))
node (bw/select page (str/concat "#screenshot-" id))]
(bw/wait-for node)
(case type
:png (bw/screenshot node {:omit-background? true :type type :path path})
:jpeg (bw/screenshot node {:omit-background? false :type type :path path})
:webp (p/let [png-path (sh/tempfile :prefix "penpot.tmp.bitmap." :suffix ".png")]
:webp (p/let [png-path (sh/tempfile :prefix "penpot.tmp.render.bitmap." :suffix ".png")]
;; playwright only supports jpg and png, we need to convert it afterwards
(bw/screenshot node {:omit-background? true :type :png :path png-path})
(sh/run-cmd! (str "convert " png-path " -quality 100 WEBP:" path))))
@@ -52,7 +52,6 @@
;; take the screenshot of requested objects, one by one
(p/run (partial render-object page) objects)
nil))]

(p/let [params {:file-id file-id
:page-id page-id
:share-id share-id
@@ -40,7 +40,7 @@

 (render-object [page base-uri {:keys [id] :as object}]
   (p/let [uri (prepare-uri base-uri id)
-          path (sh/tempfile :prefix "penpot.tmp.pdf." :suffix (mime/get-extension type))]
+          path (sh/tempfile :prefix "penpot.tmp.render.pdf." :suffix (mime/get-extension type))]
     (l/info :uri uri)
     (bw/nav! page uri)
     (p/let [dom (bw/select page (dm/str "#screenshot-" id))]

@@ -7,12 +7,11 @@
(ns app.util.shell
  "Shell & FS utilities."
  (:require
-  ["node:child_process" :as proc]
-  ["node:fs" :as fs]
-  ["node:path" :as path]
+  ["child_process" :as proc]
+  ["fs" :as fs]
+  ["path" :as path]
   [app.common.exceptions :as ex]
   [app.common.logging :as l]
-  [app.common.time :as ct]
   [app.common.uuid :as uuid]
   [app.config :as cf]
   [cuerdas.core :as str]

@@ -20,8 +19,7 @@

(l/set-level! :trace)

-(def ^:const default-deletion-delay
-  (* 60 60 1)) ;; 1h
+(def tempfile-minage (* 1000 60 60 1)) ;; 1h

(def tmpdir
  (let [path (cf/get :tempdir)]
@@ -30,28 +28,16 @@
      (fs/mkdirSync path #js {:recursive true}))
    path))

-(defn schedule-deletion
-  ([path] (schedule-deletion path default-deletion-delay))
-  ([path delay]
-   (let [remove-path
-         (fn []
-           (try
-             (when (fs/existsSync path)
-               (fs/rmSync path #js {:recursive true})
-               (l/trc :hint "tempfile permanently deleted" :path path))
-             (catch :default cause
-               (l/err :hint "error on deleting temporal file"
-                      :path path
-                      :cause cause))))
-         scheduled-at
-         (-> (ct/now) (ct/plus #js {:seconds delay}))]
-
-     (l/trc :hint "schedule tempfile deletion"
-            :path path
-            :scheduled-at (ct/format-inst scheduled-at))
-
-     (js/setTimeout remove-path (* delay 1000))
-     path)))
+(defn- schedule-deletion!
+  [path]
+  (letfn [(remote-tempfile []
+            (when (fs/existsSync path)
+              (l/trace :hint "permanently remove tempfile" :path path)
+              (fs/rmSync path #js {:recursive true})))]
+    (l/trace :hint "schedule tempfile deletion"
+             :path path
+             :scheduled-at (.. (js/Date. (+ (js/Date.now) tempfile-minage)) toString))
+    (js/setTimeout remote-tempfile tempfile-minage)))

(defn tempfile
  [& {:keys [prefix suffix]
@@ -62,7 +48,9 @@
      (let [path (path/join tmpdir (str/concat prefix (uuid/next) "-" i suffix))]
        (if (fs/existsSync path)
          (recur (inc i))
-         (schedule-deletion path)))
+         (do
+           (schedule-deletion! path)
+           path)))
      (ex/raise :type :internal
                :code :unable-to-locate-temporal-file
                :hint "unable to find a tempfile candidate"))))

@@ -73,12 +61,11 @@

(defn stat
  [path]
- (->> (.stat fs/promises path)
-      (p/fmap (fn [data]
-                {:created-at (inst-ms (.-ctime ^js data))
-                 :size (.-size data)}))
-      (p/merr (fn [_cause]
-                (p/resolved nil)))))
+ (-> (.stat fs/promises path)
+     (p/then (fn [data]
+               {:created-at (inst-ms (.-ctime ^js data))
+                :size (.-size data)}))
+     (p/catch (constantly nil))))
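
Both variants resolve to nil when the underlying fs call rejects; only the promise combinators change. A hedged illustration of the equivalence, assuming promesa:

;; (->> p (p/fmap f) (p/merr (fn [_] (p/resolved nil))))
;; behaves like
;; (-> p (p/then f) (p/catch (constantly nil)))
(-> (stat "/path/that/does/not/exist")
    (p/then (fn [info]
              ;; info is nil here rather than a rejected promise
              (assert (nil? info)))))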

(defn rmdir!
  [path]

@@ -20,8 +20,8 @@
    :git/url "https://github.com/funcool/beicon.git"}

   funcool/rumext
-  {:git/tag "v2.25"
-   :git/sha "27e5a1a"
+  {:git/tag "v2.24"
+   :git/sha "17a0c94"
    :git/url "https://github.com/funcool/rumext.git"}

   instaparse/instaparse {:mvn/version "1.5.0"}

@@ -42,7 +42,7 @@
  :dev
  {:extra-paths ["dev"]
   :extra-deps
-  {thheller/shadow-cljs {:mvn/version "3.2.2"}
+  {thheller/shadow-cljs {:mvn/version "3.2.0"}
   com.bhauman/rebel-readline {:mvn/version "RELEASE"}
   org.clojure/tools.namespace {:mvn/version "RELEASE"}
   criterium/criterium {:mvn/version "RELEASE"}

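For readers unfamiliar with the deps.edn git-coordinate syntax in the hunk above: a dependency pinned by :git/tag must also carry the matching :git/sha (a short prefix is enough), which is why both lines change together. The shape, with values taken from the hunk:

;; deps.edn git dependency (illustrative placement inside :deps)
{:deps
 {funcool/rumext {:git/tag "v2.24"
                  :git/sha "17a0c94"
                  :git/url "https://github.com/funcool/rumext.git"}}}
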
@@ -27,7 +27,6 @@
   "build:storybook:cljs": "clojure -M:dev:shadow-cljs compile storybook",
   "build:app:libs": "node ./scripts/build-libs.js",
   "build:app:main": "clojure -M:dev:shadow-cljs release main worker",
-  "build:app:worker": "clojure -M:dev:shadow-cljs release worker",
   "build:app": "yarn run clear:shadow-cache && yarn run build:app:main && yarn run build:app:libs",
   "e2e:server": "node ./scripts/e2e-server.js",
   "fmt:clj": "cljfmt fix --parallel=true src/ test/",

@@ -83,7 +82,7 @@
   "nodemon": "^3.1.10",
   "npm-run-all": "^4.1.5",
   "p-limit": "^6.2.0",
-  "playwright": "1.56.1",
+  "playwright": "1.52.0",
   "postcss": "^8.5.4",
   "postcss-clean": "^1.2.2",
   "prettier": "3.5.3",

@@ -11,7 +11,6 @@ import { defineConfig, devices } from "@playwright/test";
 */
export default defineConfig({
  testDir: "./playwright",
- outputDir: './test-results',
  /* Run tests in files in parallel */
  fullyParallel: true,
  /* Fail the build on CI if you accidentally left test.only in the source code. */

@@ -21,19 +20,20 @@ export default defineConfig({
  /* Opt out of parallel tests by default; can be overridden with --workers */
  workers: 1,
  /* Timeout for expects (longer in CI) */

  timeout: 80000,
  expect: {
-   timeout: process.env.CI ? 40000 : 5000,
+   timeout: process.env.CI ? 20000 : 5000,
  },

  /* Reporter to use. See https://playwright.dev/docs/test-reporters */
- reporter: "list",
+ reporter: "html",
  /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
  use: {
    /* Base URL to use in actions like `await page.goto('/')`. */
    baseURL: "http://localhost:3000",

    /* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */
    trace: "on-first-retry",

    locale: "en-US",

    permissions: ["clipboard-write", "clipboard-read"],

@@ -45,10 +45,6 @@
      name: "default",
      use: { ...devices["Desktop Chrome"] },
      testDir: "./playwright/ui/specs",
-     use: {
-       video: 'retain-on-failure',
-       trace: 'retain-on-failure',
-     }
    },
    {
      name: "ds",

File diff suppressed because it is too large
@@ -887,4 +887,4 @@
        "~:base-font-size": "16px"
      }
    }
  }
}
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -0,0 +1,39 @@
{
  "~:id": "~ue179d9df-de35-80bf-8005-2861e849b3f7",
  "~:file-id": "~ue179d9df-de35-80bf-8005-283bbd5516b0",
  "~:created-at": "~m1729604566293",
  "~:data": {
    "~u6ad3e6b9-c5a0-80cf-8005-283bbe38dba8": {
      "~:id": "~u6ad3e6b9-c5a0-80cf-8005-283bbe38dba8",
      "~:name": "F",
      "~:path": "",
      "~:modified-at": "~m1729604566311",
      "~:main-instance-id": "~u6ad3e6b9-c5a0-80cf-8005-283bbe378bcc",
      "~:main-instance-page": "~ue179d9df-de35-80bf-8005-283bbd5516b1"
    },
    "~u6ad3e6b9-c5a0-80cf-8005-283bbe39bb51": {
      "~:id": "~u6ad3e6b9-c5a0-80cf-8005-283bbe39bb51",
      "~:name": "E",
      "~:path": "",
      "~:modified-at": "~m1729604566311",
      "~:main-instance-id": "~u6ad3e6b9-c5a0-80cf-8005-283bbe378bcd",
      "~:main-instance-page": "~ue179d9df-de35-80bf-8005-283bbd5516b1"
    },
    "~u6ad3e6b9-c5a0-80cf-8005-283bbe3a9014": {
      "~:id": "~u6ad3e6b9-c5a0-80cf-8005-283bbe3a9014",
      "~:name": "C",
      "~:path": "",
      "~:modified-at": "~m1729604566311",
      "~:main-instance-id": "~u6ad3e6b9-c5a0-80cf-8005-283bbe378bcf",
      "~:main-instance-page": "~ue179d9df-de35-80bf-8005-283bbd5516b1"
    },
    "~u6ad3e6b9-c5a0-80cf-8005-283bbe3b1793": {
      "~:id": "~u6ad3e6b9-c5a0-80cf-8005-283bbe3b1793",
      "~:name": "B",
      "~:path": "",
      "~:modified-at": "~m1729604566311",
      "~:main-instance-id": "~u6ad3e6b9-c5a0-80cf-8005-283bbe378bd0",
      "~:main-instance-page": "~ue179d9df-de35-80bf-8005-283bbd5516b1"
    }
  }
}
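
Note (illustrative, not part of the diff): the `~:`, `~u` and `~m` prefixes in this fixture are transit-JSON tags for keywords, uuids and instants in ms since epoch. The codebase decodes such payloads through its own wrapper (`t/decode-str`, used in `upload-resource` above); a hedged sketch with plain transit-cljs:

(require '[cognitect.transit :as transit])

;; Decode a transit+json string into ClojureScript data; keys such as
;; "~:name" come back as keywords.
(def r (transit/reader :json))
(transit/read r "{\"~:name\": \"F\", \"~:path\": \"\"}")
;; => {:name "F", :path ""}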
frontend/playwright/data/workspace/get-file-fragment-9066-2.json (new file, 1867 lines)
File diff suppressed because it is too large
@@ -5947,8 +5947,8 @@
        "~:spread": "10",
        "~:color": "rgb(160, 73, 73)",
        "~:inset": true,
-       "~:offset-x": "10",
-       "~:offset-y": "10"
+       "~:offsetX": "10",
+       "~:offsetY": "10"
      }
    ],
    "~:description": "",

@@ -5,6 +5,6 @@
    "~:revn": 2,
    "~:created-at": "~m1730199694953",
    "~:created-by": "user",
-   "~:profile-id": "~uc7ce0794-0992-8105-8004-38e630f29a9b"
+   "~:profile-id": "~u4678a621-b446-818a-8004-e7b734def799"
  }
]

Some files were not shown because too many files have changed in this diff