Compare commits


2 Commits

Author         SHA1        Message  Date
Andrey Antukh  347a758831  WIP      2025-12-11 12:22:28 +01:00
Andrey Antukh  4aea00a76f  WIP      2025-12-11 12:16:39 +01:00
967 changed files with 71220 additions and 127958 deletions

.circleci/config.yml (new file, 305 changes)

@@ -0,0 +1,305 @@
version: 2.1
jobs:
lint:
docker:
- image: penpotapp/devenv:latest
working_directory: ~/repo
resource_class: medium+
steps:
- checkout
- run:
name: "fmt check"
working_directory: "."
command: |
yarn install
yarn run fmt:clj:check
- run:
name: "lint clj common"
working_directory: "."
command: |
yarn run lint:clj:common
- run:
name: "lint clj frontend"
working_directory: "."
command: |
yarn run lint:clj:frontend
- run:
name: "lint clj backend"
working_directory: "."
command: |
yarn run lint:clj:backend
- run:
name: "lint clj exporter"
working_directory: "."
command: |
yarn run lint:clj:exporter
- run:
name: "lint clj library"
working_directory: "."
command: |
yarn run lint:clj:library
test-common:
docker:
- image: penpotapp/devenv:latest
working_directory: ~/repo
resource_class: medium+
environment:
JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
NODE_OPTIONS: --max-old-space-size=4096
steps:
- checkout
# Download and cache dependencies
- restore_cache:
keys:
- v1-dependencies-{{ checksum "common/deps.edn"}}-{{ checksum "common/yarn.lock" }}
- run:
name: "JVM tests"
working_directory: "./common"
command: |
clojure -M:dev:test
- run:
name: "NODE tests"
working_directory: "./common"
command: |
yarn install
yarn run test
- save_cache:
paths:
- ~/.m2
- ~/.yarn
- ~/.gitlibs
- ~/.cache/ms-playwright
key: v1-dependencies-{{ checksum "common/deps.edn"}}-{{ checksum "common/yarn.lock" }}
test-frontend:
docker:
- image: penpotapp/devenv:latest
working_directory: ~/repo
resource_class: medium+
environment:
JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
NODE_OPTIONS: --max-old-space-size=4096
steps:
- checkout
# Download and cache dependencies
- restore_cache:
keys:
- v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}
- run:
name: "install dependencies"
working_directory: "./frontend"
# We install playwright here because the dependent tasks
# uses the same cache as this task so we prepopulate it
command: |
yarn install
yarn run playwright install chromium --with-deps
- run:
name: "lint scss on frontend"
working_directory: "./frontend"
command: |
yarn run lint:scss
- run:
name: "unit tests"
working_directory: "./frontend"
command: |
yarn run test
- save_cache:
paths:
- ~/.m2
- ~/.yarn
- ~/.gitlibs
- ~/.cache/ms-playwright
key: v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}
test-library:
docker:
- image: penpotapp/devenv:latest
working_directory: ~/repo
resource_class: medium+
environment:
JAVA_OPTS: -Xmx6g
NODE_OPTIONS: --max-old-space-size=4096
steps:
- checkout
# Download and cache dependencies
- restore_cache:
keys:
- v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}
- run:
name: Install dependencies and build
working_directory: "./library"
command: |
yarn install
- run:
name: Build and Test
working_directory: "./library"
command: |
./scripts/build
yarn run test
test-components:
docker:
- image: penpotapp/devenv:latest
working_directory: ~/repo
resource_class: medium+
environment:
JAVA_OPTS: -Xmx6g -Xms2g
NODE_OPTIONS: --max-old-space-size=4096
steps:
- checkout
# Download and cache dependencies
- restore_cache:
keys:
- v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}
- run:
name: Install dependencies
working_directory: "./frontend"
command: |
yarn install
yarn run playwright install chromium
- run:
name: Build Storybook
working_directory: "./frontend"
command: yarn run build:storybook
- run:
name: Serve Storybook and run tests
working_directory: "./frontend"
command: |
npx concurrently -k -s first -n "SB,TEST" -c "magenta,blue" \
"npx http-server storybook-static --port 6006 --silent" \
"npx wait-on tcp:6006 && yarn test:storybook"
test-backend:
docker:
- image: penpotapp/devenv:latest
- image: cimg/postgres:14.5
environment:
POSTGRES_USER: penpot_test
POSTGRES_PASSWORD: penpot_test
POSTGRES_DB: penpot_test
- image: cimg/redis:7.0.5
working_directory: ~/repo
resource_class: medium+
environment:
JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
NODE_OPTIONS: --max-old-space-size=4096
steps:
- checkout
- restore_cache:
keys:
- v1-dependencies-{{ checksum "backend/deps.edn" }}
- run:
name: "tests"
working_directory: "./backend"
command: |
clojure -M:dev:test --reporter kaocha.report/documentation
environment:
PENPOT_TEST_DATABASE_URI: "postgresql://localhost/penpot_test"
PENPOT_TEST_DATABASE_USERNAME: penpot_test
PENPOT_TEST_DATABASE_PASSWORD: penpot_test
PENPOT_TEST_REDIS_URI: "redis://localhost/1"
- save_cache:
paths:
- ~/.m2
- ~/.gitlibs
key: v1-dependencies-{{ checksum "backend/deps.edn" }}
test-render-wasm:
docker:
- image: penpotapp/devenv:latest
working_directory: ~/repo
resource_class: medium+
environment:
steps:
- checkout
- run:
name: "fmt check"
working_directory: "./render-wasm"
command: |
cargo fmt --check
- run:
name: "lint"
working_directory: "./render-wasm"
command: |
./lint
- run:
name: "cargo tests"
working_directory: "./render-wasm"
command: |
./test
workflows:
penpot:
jobs:
- test-frontend:
requires:
- lint: success
- test-library:
requires:
- lint: success
- test-components:
requires:
- lint: success
- test-backend:
requires:
- lint: success
- test-common:
requires:
- lint: success
- lint
- test-render-wasm


@@ -1,38 +0,0 @@
---
name: New Render Bug Report
about: Create a report about the bugs you have found in the new render
title: ''
labels: new render
assignees: claragvinola
---
**Describe the bug**
A clear and concise description of what the bug is.
**Steps to Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots or screen recordings**
If applicable, add screenshots or screen recording to help illustrate your problem.
**Desktop (please complete the following information):**
- OS: [e.g. iOS]
- Browser [e.g. chrome, safari]
- Version [e.g. 22]
**Smartphone (please complete the following information):**
- Device: [e.g. iPhone6]
- OS: [e.g. iOS8.1]
- Browser [e.g. stock browser, safari]
- Version [e.g. 22]
**Additional context**
Add any other context about the problem here.


@@ -40,7 +40,7 @@ on:
jobs:
build-bundle:
name: Build and Upload Penpot Bundle
runs-on: penpot-runner-01
runs-on: ubuntu-24.04
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}


@@ -7,14 +7,9 @@ jobs:
build-and-push:
name: Build and push DevEnv Docker image
environment: release-admins
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
steps:
- name: Set common environment variables
run: |
# Each job execution will use its own docker configuration.
echo "DOCKER_CONFIG=${{ runner.temp }}/.docker-${{ github.run_id }}-${{ github.job }}" >> $GITHUB_ENV
- name: Checkout code
uses: actions/checkout@v4


@@ -19,14 +19,9 @@ on:
jobs:
build-and-push:
name: Build and Push Penpot Docker Images
runs-on: penpot-runner-02
runs-on: ubuntu-24.04-arm
steps:
- name: Set common environment variables
run: |
# Each job execution will use its own docker configuration.
echo "DOCKER_CONFIG=${{ runner.temp }}/.docker-${{ github.run_id }}-${{ github.job }}" >> $GITHUB_ENV
- name: Checkout code
uses: actions/checkout@v4
with:
@@ -71,15 +66,6 @@ jobs:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
# To avoid the “429 Too Many Requests” error when downloading
# images from DockerHub for unregistered users.
# https://docs.docker.com/docker-hub/usage/
- name: Login to DockerHub Registry
uses: docker/login-action@v3
with:
username: ${{ secrets.PUB_DOCKER_USERNAME }}
password: ${{ secrets.PUB_DOCKER_PASSWORD }}
- name: Extract metadata (tags, labels)
id: meta
uses: docker/metadata-action@v5


@@ -1,21 +0,0 @@
name: _NITRATE MODULE
on:
schedule:
- cron: '36 5-20 * * 1-5'
jobs:
build-bundle:
uses: ./.github/workflows/build-bundle.yml
secrets: inherit
with:
gh_ref: "nitrate-module"
build_wasm: "yes"
build_storybook: "yes"
build-docker:
needs: build-bundle
uses: ./.github/workflows/build-docker.yml
secrets: inherit
with:
gh_ref: "nitrate-module"


@@ -1,14 +0,0 @@
name: _STAGING RENDER
on:
schedule:
- cron: '36 5-20 * * 1-5'
jobs:
build-bundle:
uses: ./.github/workflows/build-bundle.yml
secrets: inherit
with:
gh_ref: "staging-render"
build_wasm: "yes"
build_storybook: "yes"


@@ -33,7 +33,7 @@ jobs:
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
MATTERMOST_CHANNEL: bot-alerts-cicd
TEXT: |
🐳 *[PENPOT] Docker image available: ${{ github.ref_name }}*
🐳 *[PENPOT] Docker image available.*
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
@infra


@@ -26,7 +26,7 @@ jobs:
- name: Check Commit Type
uses: gsactions/commit-message-checker@v2
with:
pattern: '^(((:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle|rewind|construction_worker):)\s[A-Z].*[^.])|(Merge|Revert|Reapply).+[^.])$'
pattern: '^(((:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle|rewind|construction_worker):)\s[A-Z].*[^.])|(Merge|Revert).+[^.])$'
flags: 'gm'
error: 'Commit should match CONTRIBUTING.md guideline'
checkAllCommitMessages: 'true' # optional: this checks all commits associated with a pull request


@@ -1,101 +0,0 @@
name: Plugins/api-doc deployer
on:
push:
branches:
- develop
- staging
- main
paths:
- "plugins/libs/plugin-types/index.d.ts"
- "plugins/libs/plugin-types/REAME.md"
- "plugins/tools/typedoc.css"
- "plugins/CHANGELOG.md"
- "plugins/wrangle-penpot-plugins-api-doc.toml"
workflow_dispatch:
inputs:
gh_ref:
description: 'Name of the branch'
type: choice
required: true
default: 'develop'
options:
- develop
- staging
- main
permissions:
contents: read
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- name: Extract some useful variables
id: vars
run: |
echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
ref: ${{ steps.vars.outputs.gh_ref }}
# START: Setup Node and PNPM enabling cache
- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version-file: .nvmrc
- name: Enable PNPM
working-directory: ./plugins
shell: bash
run: |
corepack enable;
corepack install;
- name: Get pnpm store path
id: pnpm-store
working-directory: ./plugins
shell: bash
run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT
- name: Cache pnpm store
uses: actions/cache@v4
with:
path: ${{ steps.pnpm-store.outputs.STORE_PATH }}
key: ${{ runner.os }}-pnpm-${{ hashFiles('plugins/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-
# END: Setup Node and PNPM enabling cache
- name: Install deps
working-directory: ./plugins
shell: bash
run: |
pnpm install --no-frozen-lockfile;
pnpm add -D -w wrangler@latest;
- name: Build docs
working-directory: plugins
shell: bash
run: pnpm run build:doc
- name: Select Worker name
run: |
REF="${{ steps.vars.outputs.gh_ref }}"
case "$REF" in
main) echo "WORKER_NAME=penpot-plugins-api-doc-pro" >> $GITHUB_ENV ;;
staging) echo "WORKER_NAME=penpot-plugins-api-doc-pre" >> $GITHUB_ENV ;;
develop) echo "WORKER_NAME=penpot-plugins-api-doc-hourly" >> $GITHUB_ENV ;;
*) echo "Unsupported branch ${REF}" && exit 1 ;;
esac
- name: Deploy to Cloudflare Workers
uses: cloudflare/wrangler-action@v3
with:
workingDirectory: plugins
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: deploy --config wrangle-penpot-plugins-api-doc.toml --name ${{ env.WORKER_NAME }}


@@ -21,7 +21,7 @@ concurrency:
jobs:
lint:
name: "Linter"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
@@ -34,7 +34,7 @@ jobs:
test-common:
name: "Common Tests"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
@@ -51,54 +51,9 @@ jobs:
run: |
./scripts/test
test-plugins:
name: Plugins Runtime Linter & Tests
runs-on: penpot-runner-02
steps:
- uses: actions/checkout@v4
- name: Setup Node
id: setup-node
uses: actions/setup-node@v6
with:
node-version-file: .nvmrc
- name: Install deps
working-directory: ./plugins
shell: bash
run: |
corepack enable;
corepack install;
pnpm install;
- name: Run Lint
working-directory: ./plugins
run: pnpm run lint
- name: Run Format Check
working-directory: ./plugins
run: pnpm run format:check
- name: Run Test
working-directory: ./plugins
run: pnpm run test
- name: Build runtime
working-directory: ./plugins
run: pnpm run build
- name: Build plugins
working-directory: ./plugins
run: pnpm run build:plugins
- name: Build styles
working-directory: ./plugins
run: pnpm run build:styles-example
test-frontend:
name: "Frontend Tests"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
@@ -112,14 +67,12 @@ jobs:
- name: Component Tests
working-directory: ./frontend
env:
VITEST_BROWSER_TIMEOUT: 120000
run: |
./scripts/test-components
test-render-wasm:
name: "Render WASM Tests"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
@@ -143,7 +96,7 @@ jobs:
test-backend:
name: "Backend Tests"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
services:
@@ -182,7 +135,7 @@ jobs:
test-library:
name: "Library Tests"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
@@ -196,7 +149,7 @@ jobs:
build-integration:
name: "Build Integration Bundle"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
@@ -206,7 +159,17 @@ jobs:
- name: Build Bundle
working-directory: ./frontend
run: |
./scripts/build 0.0.0
corepack enable;
corepack install;
yarn install
yarn run build:app:assets
yarn run build:app
yarn run build:app:libs
- name: Build WASM
working-directory: "./render-wasm"
run: |
./build release
- name: Store Bundle Cache
uses: actions/cache@v4
@@ -214,10 +177,9 @@ jobs:
key: "integration-bundle-${{ github.sha }}"
path: frontend/resources/public
test-integration-1:
name: "Integration Tests 1/4"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
needs: build-integration
@@ -247,7 +209,7 @@ jobs:
test-integration-2:
name: "Integration Tests 2/4"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
needs: build-integration
@@ -277,7 +239,7 @@ jobs:
test-integration-3:
name: "Integration Tests 3/4"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
needs: build-integration
@@ -307,7 +269,7 @@ jobs:
test-integration-4:
name: "Integration Tests 4/4"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
needs: build-integration

.gitignore (vendored, 3 changes)

@@ -5,7 +5,6 @@
!.yarn/releases
!.yarn/sdks
!.yarn/versions
.pnpm-store
*-init.clj
*.css.json
*.jar
@@ -54,8 +53,6 @@
/exporter/target
/frontend/.storybook/preview-body.html
/frontend/.storybook/preview-head.html
/frontend/playwright-report/
/frontend/text-editor/src/wasm/
/frontend/dist/
/frontend/npm-debug.log
/frontend/out/

.gitpod.yml (new file, 105 changes)

@@ -0,0 +1,105 @@
image:
file: docker/gitpod/Dockerfile
ports:
# nginx
- port: 3449
onOpen: open-preview
# frontend nREPL
- port: 3447
onOpen: ignore
visibility: private
# frontend shadow server
- port: 3448
onOpen: ignore
visibility: private
# backend
- port: 6060
onOpen: ignore
# exporter shadow server
- port: 9630
onOpen: ignore
visibility: private
# exporter http server
- port: 6061
onOpen: ignore
# mailhog web interface
- port: 8025
onOpen: ignore
# mailhog postfix
- port: 1025
onOpen: ignore
# postgres
- port: 5432
onOpen: ignore
# redis
- port: 6379
onOpen: ignore
# openldap
- port: 389
onOpen: ignore
tasks:
# https://github.com/gitpod-io/gitpod/issues/666#issuecomment-534347856
- name: gulp
command: >
cd $GITPOD_REPO_ROOT/frontend/;
yarn && gp sync-done 'frontend-yarn';
npx gulp --theme=${PENPOT_THEME} watch
- name: frontend shadow watch
command: >
cd $GITPOD_REPO_ROOT/frontend/;
gp sync-await 'frontend-yarn';
npx shadow-cljs watch main
- init: gp await-port 5432 && psql -f $GITPOD_REPO_ROOT/docker/gitpod/files/postgresql_init.sql
name: backend
command: >
cd $GITPOD_REPO_ROOT/backend/;
./scripts/start-dev
- name: exporter shadow watch
command:
cd $GITPOD_REPO_ROOT/exporter/;
gp sync-await 'frontend-yarn';
yarn && npx shadow-cljs watch main
- name: exporter web server
command: >
cd $GITPOD_REPO_ROOT/exporter/;
./scripts/wait-and-start.sh
- name: signed terminal
before: >
[[ ! -z ${GNUGPG} ]] &&
cd ~ &&
rm -rf .gnupg &&
echo ${GNUGPG} | base64 -d | tar --no-same-owner -xzvf -
init: >
[[ ! -z ${GNUGPG_KEY} ]] &&
git config --global commit.gpgsign true &&
git config --global user.signingkey ${GNUGPG_KEY}
command: cd $GITPOD_REPO_ROOT
- name: redis
command: redis-server
- before: go get github.com/mailhog/MailHog
name: mailhog
command: MailHog
- name: Nginx
command: >
nginx &&
multitail /var/log/nginx/access.log -I /var/log/nginx/error.log

.nvmrc (2 changes)

@@ -1 +1 @@
v22.21.1
v22.19.0

.travis.yml (new file, 40 changes)

@@ -0,0 +1,40 @@
dist: xenial
language: generic
sudo: required
cache:
directories:
- $HOME/.m2
services:
- docker
branches:
only:
- master
- develop
install:
- curl -O https://download.clojure.org/install/linux-install-1.10.1.447.sh
- chmod +x linux-install-1.10.1.447.sh
- sudo ./linux-install-1.10.1.447.sh
before_script:
- env | sort
script:
- ./manage.sh build-devenv
- ./manage.sh run-frontend-tests
- ./manage.sh run-backend-tests
- ./manage.sh build-images
- ./manage.sh run
after_script:
- docker images
notifications:
email: false
env:
- NODE_VERSION=10.16.0

.yarnrc.yml (new file, 11 changes)

@@ -0,0 +1,11 @@
enableGlobalCache: true
enableImmutableCache: false
enableImmutableInstalls: false
enableTelemetry: false
httpTimeout: 600000
nodeLinker: node-modules


@@ -1,77 +1,6 @@
# CHANGELOG
## 2.14.0 (Unreleased)
### :boom: Breaking changes & Deprecations
### :rocket: Epics and highlights
### :heart: Community contributions (Thank you!)
### :sparkles: New features & Enhancements
- Remap references when renaming tokens [Taiga #10202](https://tree.taiga.io/project/penpot/us/10202)
- Tokens panel nested path view [Taiga #9966](https://tree.taiga.io/project/penpot/us/9966)
- Improve usability of lock and hide buttons in the layer panel. [Taiga #12916](https://tree.taiga.io/project/penpot/issue/12916)
- Optimize sidebar performance for deeply nested shapes [Taiga #13017](https://tree.taiga.io/project/penpot/task/13017)
- Remove tokens path node and bulk remove tokens [Taiga #13007](https://tree.taiga.io/project/penpot/us/13007)
- Replace themes management modal radio buttons for switches [Taiga #9215](https://tree.taiga.io/project/penpot/us/9215)
### :bug: Bugs fixed
- Fix prototype connections lost when switching between variants [Taiga #12812](https://tree.taiga.io/project/penpot/issue/12812)
- Fix wrong image in the onboarding invitation block [Taiga #13040](https://tree.taiga.io/project/penpot/issue/13040)
- Fix wrong register image [Taiga #12955](https://tree.taiga.io/project/penpot/task/12955)
- Fix error message on components doesn't close automatically [Taiga #12012](https://tree.taiga.io/project/penpot/issue/12012)
- Fix incorrect default option on tokens import dialog [Github #8051](https://github.com/penpot/penpot/pull/8051)
- Fix unhandled exception tokens creation dialog [Github #8110](https://github.com/penpot/penpot/issues/8110)
- Fix displaying a hidden user avatar when there is only one more [Taiga #13058](https://tree.taiga.io/project/penpot/issue/13058)
- Fix boolean operators in menu for boards [Taiga #13174](https://tree.taiga.io/project/penpot/issue/13174)
## 2.13.0 (Unreleased)
### :boom: Breaking changes & Deprecations
### :rocket: Epics and highlights
### :heart: Community contributions (Thank you!)
- Fix mask issues with component swap (by @dfelinto) [Github #7675](https://github.com/penpot/penpot/issues/7675)
### :sparkles: New features & Enhancements
- Add new Box Shadow Tokens [Taiga #10201](https://tree.taiga.io/project/penpot/us/10201)
- Make i18n translation files load on-demand [Taiga #11474](https://tree.taiga.io/project/penpot/us/11474)
- Add deleted files to dashboard [Taiga #8149](https://tree.taiga.io/project/penpot/us/8149)
### :bug: Bugs fixed
- Fix problem when drag+duplicate a full grid [Taiga #12565](https://tree.taiga.io/project/penpot/issue/12565)
- Fix problem when pasting elements in reverse flex layout [Taiga #12460](https://tree.taiga.io/project/penpot/issue/12460)
- Fix wrong board size presets in Android [Taiga #12339](https://tree.taiga.io/project/penpot/issue/12339)
- Fix problem with grid layout components and auto sizing [Github #7797](https://github.com/penpot/penpot/issues/7797)
- Fix some alignments on inspect tab [Taiga #12915](https://tree.taiga.io/project/penpot/issue/12915)
- Fix problem with text editor maintaining previous styles [Taiga #12835](https://tree.taiga.io/project/penpot/issue/12835)
- Fix color assets from shared libraries not appearing as assets in Selected colors panel [Taiga #12957](https://tree.taiga.io/project/penpot/issue/12957)
- Fix CSS generated box-shadow property [Taiga #12997](https://tree.taiga.io/project/penpot/issue/12997)
- Fix inner shadow selector on shadow token [Taiga #12951](https://tree.taiga.io/project/penpot/issue/12951)
- Fix missing text color token from selected shapes in selected colors list [Taiga #12956](https://tree.taiga.io/project/penpot/issue/12956)
- Fix dropdown option width in Guides columns dropdown [Taiga #12959](https://tree.taiga.io/project/penpot/issue/12959)
- Fix typos on download modal [Taiga #12865](https://tree.taiga.io/project/penpot/issue/12865)
- Fix problem with text editor maintaining previous styles [Taiga #12835](https://tree.taiga.io/project/penpot/issue/12835)
- Fix unhandled exception tokens creation dialog [Github #8110](https://github.com/penpot/penpot/issues/8110)
- Fix allow negative spread values on shadow token creation [Taiga #13167](https://tree.taiga.io/project/penpot/issue/13167)
- Fix spanish translations on import export token modal [Taiga #13171](https://tree.taiga.io/project/penpot/issue/13171)
## 2.12.1
### :bug: Bugs fixed
- Fix setting a portion of text as bold or underline messes things up [Github #7980](https://github.com/penpot/penpot/issues/7980)
- Fix problem with style in fonts input [Taiga #12935](https://tree.taiga.io/project/penpot/issue/12935)
- Fix problem with path editor and right click [Github #7917](https://github.com/penpot/penpot/issues/7917)
## 2.12.0
## 2.12.0 (Unreleased)
### :boom: Breaking changes & Deprecations
@@ -82,6 +11,7 @@ The backend RPC API URLS are changed from `/api/rpc/command/<name>` to
compatibility; however, if you are a user of this API, it is strongly
recommended that you adapt your code to use the new PATH.
#### Updated SSO Callback URL
The OAuth / Single Sign-On (SSO) callback endpoint has changed to
@@ -114,6 +44,7 @@ This update standardizes all authentication flows under the single URL
and makis it more modular, enabling the ability to configure SSO auth
provider dinamically.
#### Changes on default docker compose
We have updated the `docker/images/docker-compose.yaml` with a small
@@ -131,7 +62,6 @@ example. It's still usable as before, we just removed the example.
- Ensure consistent snap behavior across all zoom levels [Github #7774](https://github.com/penpot/penpot/pull/7774) by [@Tokytome](https://github.com/Tokytome)
- Fix crash in token grid view due to tooltip validation (by @dfelinto) [Github #7887](https://github.com/penpot/penpot/pull/7887)
- Enable Hindi translations on the application
### :sparkles: New features & Enhancements
@@ -164,8 +94,6 @@ example. It's still usable as before, we just removed the example.
- Fix incorrect interaction betwen hower and scroll on assets sidebar [Taiga #12389](https://tree.taiga.io/project/penpot/issue/12389)
- Fix switch variants with paths [Taiga #12841](https://tree.taiga.io/project/penpot/issue/12841)
- Fix referencing typography tokens on font-family tokens [Taiga #12492](https://tree.taiga.io/project/penpot/issue/12492)
- Fix horizontal scroll on layer panel [Taiga #12843](https://tree.taiga.io/project/penpot/issue/12843)
- Fix unicode handling on email template abbreviation filter [Github #7966](https://github.com/penpot/penpot/pull/7966)
## 2.11.1
@@ -177,6 +105,7 @@ example. It's still usable as before, we just removed the example.
- Deprecated configuration variables with the prefix `PENPOT_ASSETS_*`, and will be
removed in future versions:
- The `PENPOT_ASSETS_STORAGE_BACKEND` becomes `PENPOT_OBJECTS_STORAGE_BACKEND` and its
values passes from (`assets-fs` or `assets-s3`) to (`fs` or `s3`)
- The `PENPOT_STORAGE_ASSETS_FS_DIRECTORY` becomes `PENPOT_OBJECTS_STORAGE_FS_DIRECTORY`


@@ -120,12 +120,17 @@ them on your system, you can run them with:
```bash
# Check formatting
./scripts/fmt
yarn fmt:clj:check
# Lint
./scripts/lint
# Check and fix formatting
yarn fmt:clj
# Run the linter
yarn lint:clj
```
There are more choices in `package.json`.
Ideally, you should run these commands as git pre-commit hooks. A convenient way
of defining them is to use [Husky](https://typicode.github.io/husky/#/).

backend/.gitignore (vendored, new file, 7 changes)

@@ -0,0 +1,7 @@
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions


@@ -97,8 +97,8 @@
:jmx-remote
{:jvm-opts ["-Dcom.sun.management.jmxremote"
"-Dcom.sun.management.jmxremote.port=9000"
"-Dcom.sun.management.jmxremote.rmi.port=9000"
"-Dcom.sun.management.jmxremote.port=9090"
"-Dcom.sun.management.jmxremote.rmi.port=9090"
"-Dcom.sun.management.jmxremote.local.only=false"
"-Dcom.sun.management.jmxremote.authenticate=false"
"-Dcom.sun.management.jmxremote.ssl=false"


@@ -4,7 +4,7 @@
"license": "MPL-2.0",
"author": "Kaleidos INC",
"private": true,
"packageManager": "pnpm@10.26.2+sha512.0e308ff2005fc7410366f154f625f6631ab2b16b1d2e70238444dd6ae9d630a8482d92a451144debc492416896ed16f7b114a86ec68b8404b2443869e68ffda6",
"packageManager": "yarn@4.9.2+sha512.1fc009bc09d13cfd0e19efa44cbfc2b9cf6ca61482725eb35bbc5e257e093ebf4130db6dfe15d604ff4b79efd8e1e8e99b25fa7d0a6197c9f9826358d4d65c3c",
"repository": {
"type": "git",
"url": "https://github.com/penpot/penpot"

backend/pnpm-lock.yaml (generated, 306 changes)

@@ -1,306 +0,0 @@
lockfileVersion: '9.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
importers:
.:
dependencies:
luxon:
specifier: ^3.4.4
version: 3.7.2
sax:
specifier: ^1.4.1
version: 1.4.3
devDependencies:
nodemon:
specifier: ^3.1.2
version: 3.1.11
source-map-support:
specifier: ^0.5.21
version: 0.5.21
ws:
specifier: ^8.17.0
version: 8.18.3
packages:
anymatch@3.1.3:
resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==}
engines: {node: '>= 8'}
balanced-match@1.0.2:
resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==}
binary-extensions@2.3.0:
resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==}
engines: {node: '>=8'}
brace-expansion@1.1.12:
resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==}
braces@3.0.3:
resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==}
engines: {node: '>=8'}
buffer-from@1.1.2:
resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==}
chokidar@3.6.0:
resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==}
engines: {node: '>= 8.10.0'}
concat-map@0.0.1:
resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==}
debug@4.4.3:
resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==}
engines: {node: '>=6.0'}
peerDependencies:
supports-color: '*'
peerDependenciesMeta:
supports-color:
optional: true
fill-range@7.1.1:
resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==}
engines: {node: '>=8'}
fsevents@2.3.3:
resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
os: [darwin]
glob-parent@5.1.2:
resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==}
engines: {node: '>= 6'}
has-flag@3.0.0:
resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==}
engines: {node: '>=4'}
ignore-by-default@1.0.1:
resolution: {integrity: sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==}
is-binary-path@2.1.0:
resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==}
engines: {node: '>=8'}
is-extglob@2.1.1:
resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==}
engines: {node: '>=0.10.0'}
is-glob@4.0.3:
resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
engines: {node: '>=0.10.0'}
is-number@7.0.0:
resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
engines: {node: '>=0.12.0'}
luxon@3.7.2:
resolution: {integrity: sha512-vtEhXh/gNjI9Yg1u4jX/0YVPMvxzHuGgCm6tC5kZyb08yjGWGnqAjGJvcXbqQR2P3MyMEFnRbpcdFS6PBcLqew==}
engines: {node: '>=12'}
minimatch@3.1.2:
resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==}
ms@2.1.3:
resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
nodemon@3.1.11:
resolution: {integrity: sha512-is96t8F/1//UHAjNPHpbsNY46ELPpftGUoSVNXwUfMk/qdjSylYrWSu1XavVTBOn526kFiOR733ATgNBCQyH0g==}
engines: {node: '>=10'}
hasBin: true
normalize-path@3.0.0:
resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==}
engines: {node: '>=0.10.0'}
picomatch@2.3.1:
resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==}
engines: {node: '>=8.6'}
pstree.remy@1.1.8:
resolution: {integrity: sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==}
readdirp@3.6.0:
resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==}
engines: {node: '>=8.10.0'}
sax@1.4.3:
resolution: {integrity: sha512-yqYn1JhPczigF94DMS+shiDMjDowYO6y9+wB/4WgO0Y19jWYk0lQ4tuG5KI7kj4FTp1wxPj5IFfcrz/s1c3jjQ==}
semver@7.7.3:
resolution: {integrity: sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==}
engines: {node: '>=10'}
hasBin: true
simple-update-notifier@2.0.0:
resolution: {integrity: sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==}
engines: {node: '>=10'}
source-map-support@0.5.21:
resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==}
source-map@0.6.1:
resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==}
engines: {node: '>=0.10.0'}
supports-color@5.5.0:
resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==}
engines: {node: '>=4'}
to-regex-range@5.0.1:
resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==}
engines: {node: '>=8.0'}
touch@3.1.1:
resolution: {integrity: sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA==}
hasBin: true
undefsafe@2.0.5:
resolution: {integrity: sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==}
ws@8.18.3:
resolution: {integrity: sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==}
engines: {node: '>=10.0.0'}
peerDependencies:
bufferutil: ^4.0.1
utf-8-validate: '>=5.0.2'
peerDependenciesMeta:
bufferutil:
optional: true
utf-8-validate:
optional: true
snapshots:
anymatch@3.1.3:
dependencies:
normalize-path: 3.0.0
picomatch: 2.3.1
balanced-match@1.0.2: {}
binary-extensions@2.3.0: {}
brace-expansion@1.1.12:
dependencies:
balanced-match: 1.0.2
concat-map: 0.0.1
braces@3.0.3:
dependencies:
fill-range: 7.1.1
buffer-from@1.1.2: {}
chokidar@3.6.0:
dependencies:
anymatch: 3.1.3
braces: 3.0.3
glob-parent: 5.1.2
is-binary-path: 2.1.0
is-glob: 4.0.3
normalize-path: 3.0.0
readdirp: 3.6.0
optionalDependencies:
fsevents: 2.3.3
concat-map@0.0.1: {}
debug@4.4.3(supports-color@5.5.0):
dependencies:
ms: 2.1.3
optionalDependencies:
supports-color: 5.5.0
fill-range@7.1.1:
dependencies:
to-regex-range: 5.0.1
fsevents@2.3.3:
optional: true
glob-parent@5.1.2:
dependencies:
is-glob: 4.0.3
has-flag@3.0.0: {}
ignore-by-default@1.0.1: {}
is-binary-path@2.1.0:
dependencies:
binary-extensions: 2.3.0
is-extglob@2.1.1: {}
is-glob@4.0.3:
dependencies:
is-extglob: 2.1.1
is-number@7.0.0: {}
luxon@3.7.2: {}
minimatch@3.1.2:
dependencies:
brace-expansion: 1.1.12
ms@2.1.3: {}
nodemon@3.1.11:
dependencies:
chokidar: 3.6.0
debug: 4.4.3(supports-color@5.5.0)
ignore-by-default: 1.0.1
minimatch: 3.1.2
pstree.remy: 1.1.8
semver: 7.7.3
simple-update-notifier: 2.0.0
supports-color: 5.5.0
touch: 3.1.1
undefsafe: 2.0.5
normalize-path@3.0.0: {}
picomatch@2.3.1: {}
pstree.remy@1.1.8: {}
readdirp@3.6.0:
dependencies:
picomatch: 2.3.1
sax@1.4.3: {}
semver@7.7.3: {}
simple-update-notifier@2.0.0:
dependencies:
semver: 7.7.3
source-map-support@0.5.21:
dependencies:
buffer-from: 1.1.2
source-map: 0.6.1
source-map@0.6.1: {}
supports-color@5.5.0:
dependencies:
has-flag: 3.0.0
to-regex-range@5.0.1:
dependencies:
is-number: 7.0.0
touch@3.1.1: {}
undefsafe@2.0.5: {}
ws@8.18.3: {}


@@ -240,4 +240,4 @@
</div>
</body>
</html>
</html>


@@ -36,6 +36,17 @@
[integrant.core :as ig]
[yetti.response :as-alias yres]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; HELPERS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn obfuscate-string
[s]
(if (< (count s) 10)
(apply str (take (count s) (repeat "*")))
(str (subs s 0 5)
(apply str (take (- (count s) 5) (repeat "*"))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; OIDC PROVIDER (GENERIC)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -166,7 +177,7 @@
(l/inf :hint "provider initialized"
:provider (:id provider)
:client-id (:client-id provider)
:client-secret (d/obfuscate-string (:client-secret provider)))
:client-secret (obfuscate-string (:client-secret provider)))
provider)
(catch Throwable cause
@@ -211,7 +222,7 @@
(l/inf :hint "provider initialized"
:provider (:id provider)
:client-id (:client-id provider)
:client-secret (d/obfuscate-string (:client-secret provider)))
:client-secret (obfuscate-string (:client-secret provider)))
provider)
(catch Throwable cause
@@ -288,7 +299,7 @@
(l/inf :hint "provider initialized"
:provider (:id provider)
:client-id (:client-id provider)
:client-secret (d/obfuscate-string (:client-secret provider)))
:client-secret (obfuscate-string (:client-secret provider)))
provider)
(catch Throwable cause
@@ -330,7 +341,7 @@
:provider "gitlab"
:base-uri (:base-uri provider)
:client-id (:client-id provider)
:client-secret (d/obfuscate-string (:client-secret provider)))
:client-secret (obfuscate-string (:client-secret provider)))
provider)
(catch Throwable cause
(ex/raise :type ::internal
@@ -350,7 +361,7 @@
(l/inf :hint "provider initialized"
:provider (:id provider)
:client-id (:client-id provider)
:client-secret (d/obfuscate-string (:client-secret provider)))
:client-secret (obfuscate-string (:client-secret provider)))
provider)
(catch Throwable cause
@@ -448,7 +459,7 @@
(l/trc :hint "fetch access token"
:provider (:id provider)
:client-id (:client-id provider)
:client-secret (d/obfuscate-string (:client-secret provider))
:client-secret (obfuscate-string (:client-secret provider))
:grant-type (:grant_type params)
:redirect-uri (:redirect_uri params))
@@ -501,7 +512,7 @@
[cfg provider tdata]
(l/trc :hint "fetch user info"
:uri (:user-uri provider)
:token (d/obfuscate-string (:token/access tdata)))
:token (obfuscate-string (:token/access tdata)))
(let [params {:uri (:user-uri provider)
:headers {"Authorization" (str (:token/type tdata) " " (:token/access tdata))}
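
Note: the obfuscate-string helper used throughout this hunk fully masks short secrets and keeps only the first five characters of longer ones. A minimal sketch of that behaviour, with hypothetical input values:

```clojure
;; The helper as shown in the hunk above, plus two illustrative calls
;; (the input strings are made up for the example).
(defn obfuscate-string
  [s]
  (if (< (count s) 10)
    (apply str (take (count s) (repeat "*")))
    (str (subs s 0 5)
         (apply str (take (- (count s) 5) (repeat "*"))))))

(obfuscate-string "short")            ;; => "*****"            (fully masked)
(obfuscate-string "0123456789abcdef") ;; => "01234***********" (first 5 chars kept)
```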


@@ -331,81 +331,6 @@
(set/difference cfeat/backend-only-features))
#{}))))
(defn check-file-exists
[cfg id & {:keys [include-deleted?]
:or {include-deleted? false}
:as options}]
(db/get-with-sql cfg [sql:get-minimal-file id]
{:db/remove-deleted (not include-deleted?)}))
(def ^:private sql:file-permissions
"select fpr.is_owner,
fpr.is_admin,
fpr.can_edit
from file_profile_rel as fpr
inner join file as f on (f.id = fpr.file_id)
where fpr.file_id = ?
and fpr.profile_id = ?
union all
select tpr.is_owner,
tpr.is_admin,
tpr.can_edit
from team_profile_rel as tpr
inner join project as p on (p.team_id = tpr.team_id)
inner join file as f on (p.id = f.project_id)
where f.id = ?
and tpr.profile_id = ?
union all
select ppr.is_owner,
ppr.is_admin,
ppr.can_edit
from project_profile_rel as ppr
inner join file as f on (f.project_id = ppr.project_id)
where f.id = ?
and ppr.profile_id = ?")
(defn- get-file-permissions*
[conn profile-id file-id]
(when (and profile-id file-id)
(db/exec! conn [sql:file-permissions
file-id profile-id
file-id profile-id
file-id profile-id])))
(defn get-file-permissions
([conn profile-id file-id]
(let [rows (get-file-permissions* conn profile-id file-id)
is-owner (boolean (some :is-owner rows))
is-admin (boolean (some :is-admin rows))
can-edit (boolean (some :can-edit rows))]
(when (seq rows)
{:type :membership
:is-owner is-owner
:is-admin (or is-owner is-admin)
:can-edit (or is-owner is-admin can-edit)
:can-read true
:is-logged (some? profile-id)})))
([conn profile-id file-id share-id]
(let [perms (get-file-permissions conn profile-id file-id)
ldata (some-> (db/get* conn :share-link {:id share-id :file-id file-id})
(dissoc :flags)
(update :pages db/decode-pgarray #{}))]
;; NOTE: in a future when share-link becomes more powerful and
;; will allow us specify which parts of the app is available, we
;; will probably need to tweak this function in order to expose
;; this flags to the frontend.
(cond
(some? perms) perms
(some? ldata) {:type :share-link
:can-read true
:pages (:pages ldata)
:is-logged (some? profile-id)
:who-comment (:who-comment ldata)
:who-inspect (:who-inspect ldata)}))))
(defn get-project
[cfg project-id]
(db/get cfg :project {:id project-id}))


@@ -821,10 +821,9 @@
entries (keep (match-storage-entry-fn) entries)]
(doseq [{:keys [id entry]} entries]
(let [object (-> (read-entry input entry)
(decode-storage-object)
(update :bucket d/nilv sto/default-bucket)
(validate-storage-object))
(let [object (->> (read-entry input entry)
(decode-storage-object)
(validate-storage-object))
ext (cmedia/mtype->extension (:content-type object))
path (str "objects/" id ext)


@@ -124,6 +124,8 @@
(throw (IllegalArgumentException. "invalid email body provided")))
(doseq [[name content] attachments]
(prn "attachment" name)
(let [attachment-part (MimeBodyPart.)]
(.setFileName attachment-part ^String name)
(.setContent attachment-part ^String content (str "text/plain; charset=" charset))


@@ -30,7 +30,7 @@
(defn- get-file-media-object
[pool id]
(db/get pool :file-media-object {:id id} {::db/remove-deleted false}))
(db/get pool :file-media-object {:id id}))
(defn- serve-object-from-s3
[{:keys [::sto/storage] :as cfg} obj]


@@ -309,7 +309,7 @@
(fn [request]
(let [key (yreq/get-header request "x-shared-key")]
(if (= key shared-key)
(handler (assoc request ::http/auth-with-shared-key true))
(handler request)
{::yres/status 403}))))
(fn [_ _]
{::yres/status 403})))


@@ -14,7 +14,6 @@
[app.common.spec :as us]
[app.common.time :as ct]
[app.common.uri :as u]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.http :as-alias http]
@@ -93,11 +92,7 @@
(let [handler-name (:type path-params)
etag (yreq/get-header request "if-none-match")
profile-id (or (::session/profile-id request)
(::actoken/profile-id request)
(if (::http/auth-with-shared-key request)
uuid/zero
nil))
(::actoken/profile-id request))
ip-addr (inet/parse-request request)
data (-> params


@@ -307,8 +307,7 @@
:content-type (:mtype input)})]
(:id sobject))
(catch Throwable cause
(l/wrn :hint "unable to import profile picture"
:uri uri
(l/err :hint "unable to import profile picture"
:cause cause)
nil)))


@@ -79,14 +79,85 @@
;; --- FILE PERMISSIONS
(def ^:private sql:file-permissions
"select fpr.is_owner,
fpr.is_admin,
fpr.can_edit
from file_profile_rel as fpr
inner join file as f on (f.id = fpr.file_id)
where fpr.file_id = ?
and fpr.profile_id = ?
and f.deleted_at is null
union all
select tpr.is_owner,
tpr.is_admin,
tpr.can_edit
from team_profile_rel as tpr
inner join project as p on (p.team_id = tpr.team_id)
inner join file as f on (p.id = f.project_id)
where f.id = ?
and tpr.profile_id = ?
and f.deleted_at is null
union all
select ppr.is_owner,
ppr.is_admin,
ppr.can_edit
from project_profile_rel as ppr
inner join file as f on (f.project_id = ppr.project_id)
where f.id = ?
and ppr.profile_id = ?
and f.deleted_at is null")
(defn get-file-permissions
[conn profile-id file-id]
(when (and profile-id file-id)
(db/exec! conn [sql:file-permissions
file-id profile-id
file-id profile-id
file-id profile-id])))
(defn get-permissions
([conn profile-id file-id]
(let [rows (get-file-permissions conn profile-id file-id)
is-owner (boolean (some :is-owner rows))
is-admin (boolean (some :is-admin rows))
can-edit (boolean (some :can-edit rows))]
(when (seq rows)
{:type :membership
:is-owner is-owner
:is-admin (or is-owner is-admin)
:can-edit (or is-owner is-admin can-edit)
:can-read true
:is-logged (some? profile-id)})))
([conn profile-id file-id share-id]
(let [perms (get-permissions conn profile-id file-id)
ldata (some-> (db/get* conn :share-link {:id share-id :file-id file-id})
(dissoc :flags)
(update :pages db/decode-pgarray #{}))]
;; NOTE: in a future when share-link becomes more powerful and
;; will allow us specify which parts of the app is available, we
;; will probably need to tweak this function in order to expose
;; this flags to the frontend.
(cond
(some? perms) perms
(some? ldata) {:type :share-link
:can-read true
:pages (:pages ldata)
:is-logged (some? profile-id)
:who-comment (:who-comment ldata)
:who-inspect (:who-inspect ldata)}))))
(def has-edit-permissions?
(perms/make-edition-predicate-fn bfc/get-file-permissions))
(perms/make-edition-predicate-fn get-permissions))
(def has-read-permissions?
(perms/make-read-predicate-fn bfc/get-file-permissions))
(perms/make-read-predicate-fn get-permissions))
(def has-comment-permissions?
(perms/make-comment-predicate-fn bfc/get-file-permissions))
(perms/make-comment-predicate-fn get-permissions))
(def check-edition-permissions!
(perms/make-check-fn has-edit-permissions?))
@@ -99,7 +170,7 @@
(defn check-comment-permissions!
[conn profile-id file-id share-id]
(let [perms (bfc/get-file-permissions conn profile-id file-id share-id)
(let [perms (get-permissions conn profile-id file-id share-id)
can-read (has-read-permissions? perms)
can-comment (has-comment-permissions? perms)]
(when-not (or can-read can-comment)
@@ -151,7 +222,7 @@
(defn- get-minimal-file-with-perms
[cfg {:keys [:id ::rpc/profile-id]}]
(let [mfile (get-minimal-file cfg id)
perms (bfc/get-file-permissions cfg profile-id id)]
perms (get-permissions cfg profile-id id)]
(assoc mfile :permissions perms)))
(defn get-file-etag
@@ -177,7 +248,7 @@
;; will be already prefetched and we just reuse them instead
;; of making an additional database queries.
(let [perms (or (:permissions (::cond/object params))
(bfc/get-file-permissions conn profile-id id))]
(get-permissions conn profile-id id))]
(check-read-permissions! perms)
(let [team (teams/get-team conn
@@ -240,7 +311,7 @@
::sm/result schema:file-fragment}
[cfg {:keys [::rpc/profile-id file-id fragment-id share-id]}]
(db/run! cfg (fn [cfg]
(let [perms (bfc/get-file-permissions cfg profile-id file-id share-id)]
(let [perms (get-permissions cfg profile-id file-id share-id)]
(check-read-permissions! perms)
(-> (get-file-fragment cfg file-id fragment-id)
(rph/with-http-cache long-cache-duration))))))
@@ -385,7 +456,8 @@
:code :params-validation
:hint "page-id is required when object-id is provided"))
(let [perms (bfc/get-file-permissions conn profile-id file-id share-id)
(let [perms (get-permissions conn profile-id file-id share-id)
file (bfc/get-file cfg file-id :read-only? true)
proj (db/get conn :project {:id (:project-id file)})
@@ -616,10 +688,11 @@
"Get libraries used by the specified file."
{::doc/added "1.17"
::sm/params schema:get-file-libraries}
[cfg {:keys [::rpc/profile-id file-id]}]
(bfc/check-file-exists cfg file-id)
(check-read-permissions! cfg profile-id file-id)
(bfc/get-file-libraries cfg file-id))
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id]}]
(dm/with-open [conn (db/open pool)]
(check-read-permissions! conn profile-id file-id)
(bfc/get-file-libraries conn file-id)))
;; --- COMMAND QUERY: Files that use this File library
@@ -704,6 +777,7 @@
f.created_at,
f.modified_at,
f.name,
f.is_shared,
f.deleted_at AS will_be_deleted_at,
ft.media_id AS thumbnail_id,
row_number() OVER w AS row_num,
@@ -711,7 +785,8 @@
FROM file AS f
INNER JOIN project AS p ON (p.id = f.project_id)
LEFT JOIN file_thumbnail AS ft on (ft.file_id = f.id
AND ft.revn = f.revn)
AND ft.revn = f.revn
AND ft.deleted_at is null)
WHERE p.team_id = ?
AND (p.deleted_at > ?::timestamptz OR
f.deleted_at > ?::timestamptz)
@@ -813,7 +888,7 @@
AND (f.deleted_at IS NULL OR f.deleted_at > now())
ORDER BY f.created_at ASC;")
(defn- absorb-library-by-file
(defn- absorb-library-by-file!
[cfg ldata file-id]
(assert (db/connection-map? cfg)
@@ -837,7 +912,7 @@
:modified-at (ct/now)
:has-media-trimmed false}))))
(defn- absorb-library*
(defn- absorb-library
"Find all files using a shared library, and absorb all library assets
into the file local libraries"
[cfg {:keys [id data] :as library}]
@@ -852,10 +927,10 @@
:library-id (str id)
:files (str/join "," (map str ids)))
(run! (partial absorb-library-by-file cfg data) ids)
(run! (partial absorb-library-by-file! cfg data) ids)
library))
(defn absorb-library
(defn absorb-library!
[{:keys [::db/conn] :as cfg} id]
(let [file (-> (bfc/get-file cfg id
:realize? true
@@ -872,7 +947,7 @@
(-> (cfeat/get-team-enabled-features cf/flags team)
(cfeat/check-file-features! (:features file)))
(absorb-library* cfg file)))
(absorb-library cfg file)))
(defn- set-file-shared
[{:keys [::db/conn] :as cfg} {:keys [profile-id id] :as params}]
@@ -885,14 +960,14 @@
;; file, we need to perform more complex operation,
;; so in this case we retrieve the complete file and
;; perform all required validations.
(let [file (-> (absorb-library cfg id)
(let [file (-> (absorb-library! cfg id)
(assoc :is-shared false))]
(db/delete! conn :file-library-rel {:library-file-id id})
(db/update! conn :file
{:is-shared false
:modified-at (ct/now)}
{:id id})
file)
(select-keys file [:id :name :is-shared]))
(and (false? (:is-shared file))
(true? (:is-shared params)))
@@ -939,11 +1014,6 @@
{:id file-id}
{::db/return-keys [:id :name :is-shared :deleted-at
:project-id :created-at :modified-at]})]
;; Remove all possible relations for that file
(db/delete! conn :file-library-rel
{:library-file-id file-id})
(wrk/submit! {::db/conn conn
::wrk/task :delete-object
::wrk/params {:object :file
@@ -1094,53 +1164,47 @@
;; --- MUTATION COMMAND: delete-files-immediatelly
(def ^:private sql:get-delete-team-files-candidates
"SELECT f.id
FROM file AS f
JOIN project AS p ON (p.id = f.project_id)
JOIN team AS t ON (t.id = p.team_id)
WHERE t.deleted_at IS NULL
AND t.id = ?
AND f.id = ANY(?::uuid[])")
(def ^:private sql:delete-team-files
"UPDATE file AS uf SET deleted_at = ?::timestamptz
FROM (
SELECT f.id
FROM file AS f
JOIN project AS p ON (p.id = f.project_id)
JOIN team AS t ON (t.id = p.team_id)
WHERE t.deleted_at IS NULL
AND t.id = ?
AND f.id = ANY(?::uuid[])
) AS subquery
WHERE uf.id = subquery.id
RETURNING uf.id, uf.deleted_at;")
(def ^:private schema:permanently-delete-team-files
[:map {:title "permanently-delete-team-files"}
[:team-id ::sm/uuid]
[:ids [::sm/set ::sm/uuid]]])
(defn- permanently-delete-team-files
[{:keys [::db/conn]} {:keys [::rpc/request-at team-id ids]}]
(let [ids (into #{}
d/xf:map-id
(db/exec! conn [sql:get-delete-team-files-candidates team-id
(db/create-array conn "uuid" ids)]))]
(reduce (fn [acc id]
(events/tap :progress {:file-id id :index (inc (count acc)) :total (count ids)})
(db/update! conn :file
{:deleted-at request-at}
{:id id}
{::db/return-keys false})
(wrk/submit! {::db/conn conn
::wrk/task :delete-object
::wrk/params {:object :file
:deleted-at request-at
:id id}})
(conj acc id))
#{}
ids)))
(sv/defmethod ::permanently-delete-team-files
"Mark the specified files to be deleted immediatelly on the
specified team. The team-id on params will be used to filter and
check writable permissons on team."
{::doc/added "2.13"
::sm/params schema:permanently-delete-team-files}
{::doc/added "2.12"
::sm/params schema:permanently-delete-team-files
::db/transaction true}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id] :as params}]
(teams/check-edition-permissions! pool profile-id team-id)
(sse/response #(db/tx-run! cfg permanently-delete-team-files params)))
[{:keys [::db/conn]} {:keys [::rpc/profile-id ::rpc/request-at team-id ids]}]
(teams/check-edition-permissions! conn profile-id team-id)
(reduce (fn [acc {:keys [id deleted-at]}]
(wrk/submit! {::db/conn conn
::wrk/task :delete-object
::wrk/params {:object :file
:deleted-at deleted-at
:id id}})
(conj acc id))
#{}
(db/plan conn [sql:delete-team-files request-at team-id
(db/create-array conn "uuid" ids)])))
;; --- MUTATION COMMAND: restore-files-immediatelly
@@ -1204,7 +1268,7 @@
{:keys [files projects]}
(reduce (fn [result {:keys [id project-id]}]
(let [index (-> result :files count)]
(events/tap :progress {:file-id id :index (inc index) :total total-files})
(events/tap :progress {:file-id id :index index :total total-files})
(restore-file conn id)
(-> result
@@ -1227,7 +1291,7 @@
(sv/defmethod ::restore-deleted-team-files
"Removes the deletion mark from the specified files (and respective
projects) on the specified team."
{::doc/added "2.13"
{::doc/added "2.12"
::sse/stream? true
::sm/params schema:restore-deleted-team-files}
[cfg params]
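
Note on the permission helpers in this file: get-permissions folds the rows returned by the sql:file-permissions UNION ALL query (file, team and project relations) into a single permissions map, where ownership implies admin and admin implies edit rights. A minimal sketch of that fold, using hypothetical rows and assuming an authenticated profile:

```clojure
;; Hypothetical result rows: edit granted through file_profile_rel,
;; admin granted through team_profile_rel.
(let [rows     [{:is-owner false :is-admin false :can-edit true}
                {:is-owner false :is-admin true  :can-edit false}]
      is-owner (boolean (some :is-owner rows))
      is-admin (boolean (some :is-admin rows))
      can-edit (boolean (some :can-edit rows))]
  {:type :membership
   :is-owner is-owner                        ;; => false
   :is-admin (or is-owner is-admin)          ;; => true  (any admin row suffices)
   :can-edit (or is-owner is-admin can-edit) ;; => true
   :can-read true
   :is-logged true})                         ;; assuming an authenticated profile
```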


@@ -199,13 +199,15 @@
[cfg {:keys [::rpc/profile-id file-id strip-frames-with-thumbnails] :as params}]
(db/run! cfg (fn [{:keys [::db/conn] :as cfg}]
(files/check-read-permissions! conn profile-id file-id)
(let [team (teams/get-team conn
:profile-id profile-id
:file-id file-id)
file (bfc/get-file cfg file-id
:include-deleted? true
:realize? true
:read-only? true)
strip-frames-with-thumbnails
(or (nil? strip-frames-with-thumbnails) ;; if not present, default to true
(true? strip-frames-with-thumbnails))]
@@ -331,16 +333,12 @@
;; --- MUTATION COMMAND: create-file-thumbnail
(defn- create-file-thumbnail
[{:keys [::db/conn ::sto/storage] :as cfg} {:keys [file-id revn props media] :as params}]
(defn- create-file-thumbnail!
[{:keys [::db/conn ::sto/storage]} {:keys [file-id revn props media] :as params}]
(media/validate-media-type! media)
(media/validate-media-size! media)
(let [file (bfc/get-file cfg file-id
:include-deleted? true
:load-data? false)
props (db/tjson (or props {}))
(let [props (db/tjson (or props {}))
path (:path media)
mtype (:mtype media)
hash (sto/calculate-hash path)
@@ -369,7 +367,7 @@
(db/update! conn :file-thumbnail
{:media-id (:id media)
:deleted-at (:deleted-at file)
:deleted-at nil
:updated-at tnow
:props props}
{:file-id file-id
@@ -380,7 +378,6 @@
:revn revn
:created-at tnow
:updated-at tnow
:deleted-at (:deleted-at file)
:props props
:media-id (:id media)}))
@@ -405,8 +402,6 @@
::rtry/when rtry/conflict-exception?
::sm/params schema:create-file-thumbnail}
;; FIXME: do not run the thumbnail upload inside a transaction
[cfg {:keys [::rpc/profile-id file-id] :as params}]
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
;; TODO For now we check read permissions instead of write,
@@ -414,6 +409,6 @@
;; review this approach on the future.
(files/check-read-permissions! conn profile-id file-id)
(when-not (db/read-only? conn)
(let [media (create-file-thumbnail cfg params)]
(let [media (create-file-thumbnail! cfg params)]
{:uri (files/resolve-public-uri (:id media))
:id (:id media)})))))


@@ -6,7 +6,6 @@
(ns app.rpc.commands.fonts
(:require
[app.binfile.common :as bfc]
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.schema :as sm]
@@ -67,7 +66,7 @@
(uuid? file-id)
(let [file (db/get-by-id conn :file file-id {:columns [:id :project-id]})
project (db/get-by-id conn :project (:project-id file) {:columns [:id :team-id]})
perms (bfc/get-file-permissions conn profile-id file-id share-id)]
perms (files/get-permissions conn profile-id file-id share-id)]
(files/check-read-permissions! perms)
(db/query conn :team-font-variant
{:team-id (:team-id project)


@@ -19,7 +19,7 @@
inner join team_profile_rel as tpr on (tpr.team_id = p.team_id)
where tpr.profile_id = ?
and p.team_id = ?
and (p.deleted_at is null)
and (p.deleted_at is null or p.deleted_at > now())
and (tpr.is_admin = true or
tpr.is_owner = true or
tpr.can_edit = true)
@@ -29,7 +29,7 @@
inner join project_profile_rel as ppr on (ppr.project_id = p.id)
where ppr.profile_id = ?
and p.team_id = ?
and (p.deleted_at is null)
and (p.deleted_at is null or p.deleted_at > now())
and (ppr.is_admin = true or
ppr.is_owner = true or
ppr.can_edit = true)
@@ -47,7 +47,7 @@
left join file_thumbnail as ft on (ft.file_id = f.id and ft.revn = f.revn)
inner join projects as pr on (f.project_id = pr.id)
where f.name ilike ('%' || ? || '%')
and (f.deleted_at is null)
and (f.deleted_at is null or f.deleted_at > now())
order by f.created_at asc")
(defn search-files


@@ -13,6 +13,7 @@
[app.config :as cf]
[app.db :as db]
[app.rpc :as-alias rpc]
[app.rpc.commands.files :as files]
[app.rpc.commands.teams :as teams]
[app.rpc.cond :as-alias cond]
[app.rpc.doc :as-alias doc]
@@ -120,7 +121,7 @@
[system {:keys [::rpc/profile-id file-id share-id] :as params}]
(db/run! system
(fn [{:keys [::db/conn] :as system}]
(let [perms (bfc/get-file-permissions conn profile-id file-id share-id)
(let [perms (files/get-permissions conn profile-id file-id share-id)
params (-> params
(assoc ::perms perms)
(assoc :profile-id profile-id))]


@@ -104,29 +104,28 @@
(def ^:private schema:limit
[:and
[:map
[::name :keyword]
[::name :any]
[::strategy schema:strategy]
[::key :string]
[::opts :string]
[::capacity {:optional true} ::sm/int]
[::rate {:optional true} ::sm/int]
[::interval {:optional true} ::ct/duration]
[::params {:optional true} [::sm/vec :any]]
[::permits {:optional true} ::sm/int]
[::unit {:optional true} [:enum :days :hours :minutes :seconds :weeks]]]
[:fn (fn [attrs]
(let [contains-fn (partial contains? attrs)]
(or (every? contains-fn [::capacity ::rate ::interval])
(every? contains-fn [::permits ::unit]))))]])
[::opts :string]]
[:or
[:map
[::capacity ::sm/int]
[::rate ::sm/int]
[::internal ::ct/duration]
[::params [::sm/vec :any]]]
[:map
[::nreq ::sm/int]
[::unit [:enum :days :hours :minutes :seconds :weeks]]]]])
(def ^:private schema:limits
[:map-of :keyword [::sm/vec schema:limit]])
(def ^:private valid-limit-tuple?
(sm/validator schema:limit-tuple))
(sm/lazy-validator schema:limit-tuple))
(def ^:private valid-rlimit-instance?
(sm/validator ::rpc/rlimit))
(sm/lazy-validator ::rpc/rlimit))
(defmethod parse-limit :window
[[name strategy opts :as vlimit]]
@@ -135,16 +134,16 @@
(merge
{::name name
::strategy strategy}
(if-let [[_ permits unit] (re-find window-opts-re opts)]
(let [permits (parse-long permits)]
{::permits permits
(if-let [[_ nreq unit] (re-find window-opts-re opts)]
(let [nreq (parse-long nreq)]
{::nreq nreq
::unit (case unit
"d" :days
"h" :hours
"m" :minutes
"s" :seconds
"w" :weeks)
::key (str "penpot.rlimit." (cf/get :tenant) ".window." (d/name name))
::key (str "ratelimit.window." (d/name name))
::opts opts})
(ex/raise :type :validation
:code :invalid-window-limit-opts
@@ -165,15 +164,15 @@
::interval interval
::opts opts
::params [(->seconds interval) rate capacity]
::key (str "penpot.rlimit." (cf/get :tenant) ".bucket." (d/name name))})
::key (str "ratelimit.bucket." (d/name name))})
(ex/raise :type :validation
:code :invalid-bucket-limit-opts
:hint (str/ffmt "looks like '%' does not have a valid format" opts))))
(defmethod process-limit :bucket
[rconn profile-id now {:keys [::key ::params ::service ::capacity ::interval ::rate] :as limit}]
[rconn user-id now {:keys [::key ::params ::service ::capacity ::interval ::rate] :as limit}]
(let [script (-> bucket-rate-limit-script
(assoc ::rscript/keys [(str key "." service "." profile-id)])
(assoc ::rscript/keys [(str key "." service "." user-id)])
(assoc ::rscript/vals (conj params (->seconds now))))
result (rds/eval rconn script)
allowed? (boolean (nth result 0))
@@ -193,18 +192,18 @@
(assoc ::lresult/remaining remaining))))
(defmethod process-limit :window
[rconn profile-id now {:keys [::permits ::unit ::key ::service] :as limit}]
[rconn user-id now {:keys [::nreq ::unit ::key ::service] :as limit}]
(let [ts (ct/truncate now unit)
ttl (ct/diff now (ct/plus ts {unit 1}))
script (-> window-rate-limit-script
(assoc ::rscript/keys [(str key "." service "." profile-id "." (ct/format-inst ts))])
(assoc ::rscript/vals [permits (->seconds ttl)]))
(assoc ::rscript/keys [(str key "." service "." user-id "." (ct/format-inst ts))])
(assoc ::rscript/vals [nreq (->seconds ttl)]))
result (rds/eval rconn script)
allowed? (boolean (nth result 0))
remaining (nth result 1)]
(l/trace :hint "limit processed"
:service service
:name (name (::name limit))
:limit (name (::name limit))
:strategy (name (::strategy limit))
:opts (::opts limit)
:allowed allowed?
@@ -215,8 +214,8 @@
(assoc ::lresult/reset (ct/plus ts {unit 1})))))
(defn- process-limits
[rconn profile-id limits now]
(let [results (into [] (map (partial process-limit rconn profile-id now)) limits)
[rconn user-id limits now]
(let [results (into [] (map (partial process-limit rconn user-id now)) limits)
remaining (->> results
(d/index-by ::name ::lresult/remaining)
(uri/map->query-string))
@@ -228,7 +227,7 @@
(when rejected
(l/warn :hint "rejected rate limit"
:profile-id (str profile-id)
:user-id (str user-id)
:limit-service (-> rejected ::service name)
:limit-name (-> rejected ::name name)
:limit-strategy (-> rejected ::strategy name)))
@@ -372,9 +371,12 @@
(defn- on-refresh-error
[_ cause]
(when-not (instance? java.util.concurrent.RejectedExecutionException cause)
(l/warn :hint "unexpected exception on loading config"
:cause cause
::l/sync? true)))
(if-let [explain (-> cause ex-data ex/explain)]
(l/warn ::l/raw (str "unable to refresh config, invalid format:\n" explain)
::l/sync? true)
(l/warn :hint "unexpected exception on loading config"
:cause cause
::l/sync? true))))
(defn- get-config-path
[]

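A minimal sketch of how the :window opts string above appears to be parsed, for readability; window-opts-re is not shown in this diff, so the "<count>/<unit letter>" format (e.g. "100/h") and the regex below are illustrative assumptions, not the repository's actual definition.

;; Illustrative only; the real window-opts-re is defined outside this hunk.
(def ^:private window-opts-re #"^(\d+)/([dhmsw])$")

(defn parse-window-opts
  "Parse an opts string such as \"100/h\" into the count/unit pair used
  by the :window strategy in the hunk above."
  [opts]
  (when-let [[_ nreq unit] (re-find window-opts-re opts)]
    {:nreq (parse-long nreq)
     :unit (case unit
             "d" :days
             "h" :hours
             "m" :minutes
             "s" :seconds
             "w" :weeks)}))

;; (parse-window-opts "100/h") => {:nreq 100, :unit :hours}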

@@ -25,9 +25,9 @@ local allowed = filled >= requested
local newTokens = filled
if allowed then
newTokens = filled - requested
redis.call("hset", tokensKey, "tokens", newTokens, "timestamp", timestamp)
end
redis.call("hset", tokensKey, "tokens", newTokens, "timestamp", timestamp)
redis.call("expire", tokensKey, ttl)
return { allowed, newTokens }

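A pure-Clojure restatement of the decision step in the Lua fragment above, only to make its semantics easier to scan; filled and requested are assumed to be computed by the earlier, unshown part of the script.

(defn take-tokens
  "Mirrors the Lua logic above: grant the request only when enough tokens
  are available, and report the resulting token count either way."
  [filled requested]
  (let [allowed?   (>= filled requested)
        new-tokens (if allowed? (- filled requested) filled)]
    {:allowed? allowed?
     :tokens   new-tokens}))

;; (take-tokens 5 2) => {:allowed? true, :tokens 3}
;; (take-tokens 1 2) => {:allowed? false, :tokens 1}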

@@ -35,9 +35,6 @@
:assets-s3 :s3
nil)))
(def default-bucket
"file-media-object")
(def valid-buckets
#{"file-media-object"
"team-font-variant"


@@ -25,7 +25,7 @@
[app.common.time :as ct]
[app.config :as cf]
[app.db :as db]
[app.storage :as sto]
[app.storage :as-alias sto]
[app.storage.impl :as impl]
[integrant.core :as ig]))
@@ -130,7 +130,7 @@
[{:keys [metadata]}]
(or (some-> metadata :bucket)
(some-> metadata :reference d/name)
sto/default-bucket))
"file-media-object"))
(defn- process-objects!
[conn has-refs? bucket objects]


@@ -45,8 +45,7 @@
:deleted-at (ct/format-inst deleted-at))
(db/update! conn :file
{:deleted-at deleted-at
:is-shared false}
{:deleted-at deleted-at}
{:id id}
{::db/return-keys false})
@@ -54,7 +53,7 @@
(not *team-deletion*))
;; NOTE: we don't prevent file deletion on absorb operation failure
(try
(db/tx-run! cfg files/absorb-library id)
(db/tx-run! cfg files/absorb-library! id)
(catch Throwable cause
(l/warn :hint "error on absorbing library"
:file-id id


@@ -7,18 +7,10 @@
(ns app.util.template
(:require
[app.common.exceptions :as ex]
[cuerdas.core :as str]
[selmer.filters :as sf]
[selmer.parser :as sp]))
;; (sp/cache-off!)
(sf/add-filter! :abbreviate
(fn [s n]
(let [n (parse-long n)]
(str/abbreviate s n))))
(defn render
[path context]
(try


@@ -137,34 +137,33 @@ RETURNING task.id, task.queue")
::wait)))
(run-batch []
(try
(let [rconn (rds/connect cfg)]
(try
(-> cfg
(assoc ::rds/conn rconn)
(db/tx-run! run-batch'))
(finally
(.close ^AutoCloseable rconn))))
(let [rconn (rds/connect cfg)]
(try
(-> cfg
(assoc ::rds/conn rconn)
(db/tx-run! run-batch'))
(catch InterruptedException cause
(throw cause))
(catch InterruptedException cause
(throw cause))
(catch Exception cause
(cond
(rds/exception? cause)
(do
(l/wrn :hint "redis exception (will retry in an instant)" :cause cause)
(px/sleep timeout))
(catch Exception cause
(cond
(rds/exception? cause)
(do
(l/wrn :hint "redis exception (will retry in an instant)" :cause cause)
(px/sleep timeout))
(db/sql-exception? cause)
(do
(l/wrn :hint "database exception (will retry in an instant)" :cause cause)
(px/sleep timeout))
(db/sql-exception? cause)
(do
(l/wrn :hint "database exception (will retry in an instant)" :cause cause)
(px/sleep timeout))
:else
(do
(l/err :hint "unhandled exception (will retry in an instant)" :cause cause)
(px/sleep timeout))))
:else
(do
(l/err :hint "unhandled exception (will retry in an instant)" :cause cause)
(px/sleep timeout))))))
(finally
(.close ^AutoCloseable rconn)))))
(dispatcher []
(l/inf :hint "started")
@@ -177,7 +176,7 @@ RETURNING task.id, task.queue")
(catch InterruptedException _
(l/trc :hint "interrupted"))
(catch Throwable cause
(l/err :hint "unexpected exception" :cause cause))
(l/err :hint " unexpected exception" :cause cause))
(finally
(l/inf :hint "terminated"))))]


@@ -595,8 +595,8 @@
(px/exec! :virtual #(rcp/write-body-to-stream body nil output))
(into []
(map (fn [{:keys [event data]}]
(d/vec2 (keyword event)
(tr/decode-str data))))
[(keyword event)
(tr/decode-str data)]))
(parse-sse (slurp' input)))
(finally
(.close input)))))


@@ -1921,11 +1921,7 @@
;; (th/print-result! out)
(t/is (nil? (:error out)))
(let [result (:result out)]
(t/is (fn? result))
(let [[ev1 ev2 :as events] (th/consume-sse result)]
(t/is (= 2 (count events)))
(t/is (= (:ids data) (val ev2)))))
(t/is (= (:ids data) result)))
(let [row (th/db-exec-one! ["select * from file where id = ?" file-id])]
(t/is (= (:deleted-at row) now)))))))

backend/yarn.lock (new file, 1145 lines)

File diff suppressed because it is too large

common/.gitignore (vendored, new file, 7 lines)

@@ -0,0 +1,7 @@
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions


@@ -29,7 +29,8 @@
java-http-clj/java-http-clj {:mvn/version "0.4.3"}
integrant/integrant {:mvn/version "1.0.0"}
funcool/cuerdas {:mvn/version "2026.415"}
funcool/tubax {:mvn/version "2021.05.20-0"}
funcool/cuerdas {:mvn/version "2025.06.16-414"}
funcool/promesa
{:git/sha "46048fc0d4bf5466a2a4121f5d52aefa6337f2e8"
:git/url "https://github.com/funcool/promesa"}
@@ -59,7 +60,7 @@
thheller/shadow-cljs {:mvn/version "3.2.0"}
com.clojure-goes-fast/clj-async-profiler {:mvn/version "RELEASE"}
com.bhauman/rebel-readline {:mvn/version "RELEASE"}
criterium/criterium {:mvn/version "0.4.6"}
criterium/criterium {:mvn/version "RELEASE"}
mockery/mockery {:mvn/version "RELEASE"}}
:extra-paths ["test" "dev"]}


@@ -4,7 +4,7 @@
"license": "MPL-2.0",
"author": "Kaleidos INC",
"private": true,
"packageManager": "pnpm@10.26.2+sha512.0e308ff2005fc7410366f154f625f6631ab2b16b1d2e70238444dd6ae9d630a8482d92a451144debc492416896ed16f7b114a86ec68b8404b2443869e68ffda6",
"packageManager": "yarn@4.9.2+sha512.1fc009bc09d13cfd0e19efa44cbfc2b9cf6ca61482725eb35bbc5e257e093ebf4130db6dfe15d604ff4b79efd8e1e8e99b25fa7d0a6197c9f9826358d4d65c3c",
"type": "module",
"repository": {
"type": "git",
@@ -23,9 +23,9 @@
"fmt:clj:check": "cljfmt check --parallel=false src/ test/",
"fmt:clj": "cljfmt fix --parallel=true src/ test/",
"lint:clj": "clj-kondo --parallel=true --lint src/",
"lint": "pnpm run lint:clj",
"lint": "yarn run lint:clj",
"watch:test": "concurrently \"clojure -M:dev:shadow-cljs watch test\" \"nodemon -C -d 2 -w target/tests/ --exec 'node target/tests/test.js'\"",
"build:test": "clojure -M:dev:shadow-cljs compile test",
"test": "pnpm run build:test && node target/tests/test.js"
"test": "yarn run build:test && node target/tests/test.js"
}
}

common/pnpm-lock.yaml (generated, 489 lines)

@@ -1,489 +0,0 @@
lockfileVersion: '9.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
importers:
.:
dependencies:
date-fns:
specifier: ^4.1.0
version: 4.1.0
devDependencies:
concurrently:
specifier: ^9.1.2
version: 9.2.1
nodemon:
specifier: ^3.1.10
version: 3.1.11
source-map-support:
specifier: ^0.5.21
version: 0.5.21
ws:
specifier: ^8.18.2
version: 8.18.3
packages:
ansi-regex@5.0.1:
resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==}
engines: {node: '>=8'}
ansi-styles@4.3.0:
resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==}
engines: {node: '>=8'}
anymatch@3.1.3:
resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==}
engines: {node: '>= 8'}
balanced-match@1.0.2:
resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==}
binary-extensions@2.3.0:
resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==}
engines: {node: '>=8'}
brace-expansion@1.1.12:
resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==}
braces@3.0.3:
resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==}
engines: {node: '>=8'}
buffer-from@1.1.2:
resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==}
chalk@4.1.2:
resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==}
engines: {node: '>=10'}
chokidar@3.6.0:
resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==}
engines: {node: '>= 8.10.0'}
cliui@8.0.1:
resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==}
engines: {node: '>=12'}
color-convert@2.0.1:
resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==}
engines: {node: '>=7.0.0'}
color-name@1.1.4:
resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==}
concat-map@0.0.1:
resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==}
concurrently@9.2.1:
resolution: {integrity: sha512-fsfrO0MxV64Znoy8/l1vVIjjHa29SZyyqPgQBwhiDcaW8wJc2W3XWVOGx4M3oJBnv/zdUZIIp1gDeS98GzP8Ng==}
engines: {node: '>=18'}
hasBin: true
date-fns@4.1.0:
resolution: {integrity: sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg==}
debug@4.4.3:
resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==}
engines: {node: '>=6.0'}
peerDependencies:
supports-color: '*'
peerDependenciesMeta:
supports-color:
optional: true
emoji-regex@8.0.0:
resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==}
escalade@3.2.0:
resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==}
engines: {node: '>=6'}
fill-range@7.1.1:
resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==}
engines: {node: '>=8'}
fsevents@2.3.3:
resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
os: [darwin]
get-caller-file@2.0.5:
resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==}
engines: {node: 6.* || 8.* || >= 10.*}
glob-parent@5.1.2:
resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==}
engines: {node: '>= 6'}
has-flag@3.0.0:
resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==}
engines: {node: '>=4'}
has-flag@4.0.0:
resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==}
engines: {node: '>=8'}
ignore-by-default@1.0.1:
resolution: {integrity: sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==}
is-binary-path@2.1.0:
resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==}
engines: {node: '>=8'}
is-extglob@2.1.1:
resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==}
engines: {node: '>=0.10.0'}
is-fullwidth-code-point@3.0.0:
resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==}
engines: {node: '>=8'}
is-glob@4.0.3:
resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
engines: {node: '>=0.10.0'}
is-number@7.0.0:
resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
engines: {node: '>=0.12.0'}
minimatch@3.1.2:
resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==}
ms@2.1.3:
resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
nodemon@3.1.11:
resolution: {integrity: sha512-is96t8F/1//UHAjNPHpbsNY46ELPpftGUoSVNXwUfMk/qdjSylYrWSu1XavVTBOn526kFiOR733ATgNBCQyH0g==}
engines: {node: '>=10'}
hasBin: true
normalize-path@3.0.0:
resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==}
engines: {node: '>=0.10.0'}
picomatch@2.3.1:
resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==}
engines: {node: '>=8.6'}
pstree.remy@1.1.8:
resolution: {integrity: sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==}
readdirp@3.6.0:
resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==}
engines: {node: '>=8.10.0'}
require-directory@2.1.1:
resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==}
engines: {node: '>=0.10.0'}
rxjs@7.8.2:
resolution: {integrity: sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==}
semver@7.7.3:
resolution: {integrity: sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==}
engines: {node: '>=10'}
hasBin: true
shell-quote@1.8.3:
resolution: {integrity: sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==}
engines: {node: '>= 0.4'}
simple-update-notifier@2.0.0:
resolution: {integrity: sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==}
engines: {node: '>=10'}
source-map-support@0.5.21:
resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==}
source-map@0.6.1:
resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==}
engines: {node: '>=0.10.0'}
string-width@4.2.3:
resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==}
engines: {node: '>=8'}
strip-ansi@6.0.1:
resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==}
engines: {node: '>=8'}
supports-color@5.5.0:
resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==}
engines: {node: '>=4'}
supports-color@7.2.0:
resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==}
engines: {node: '>=8'}
supports-color@8.1.1:
resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==}
engines: {node: '>=10'}
to-regex-range@5.0.1:
resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==}
engines: {node: '>=8.0'}
touch@3.1.1:
resolution: {integrity: sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA==}
hasBin: true
tree-kill@1.2.2:
resolution: {integrity: sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==}
hasBin: true
tslib@2.8.1:
resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==}
undefsafe@2.0.5:
resolution: {integrity: sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==}
wrap-ansi@7.0.0:
resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==}
engines: {node: '>=10'}
ws@8.18.3:
resolution: {integrity: sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==}
engines: {node: '>=10.0.0'}
peerDependencies:
bufferutil: ^4.0.1
utf-8-validate: '>=5.0.2'
peerDependenciesMeta:
bufferutil:
optional: true
utf-8-validate:
optional: true
y18n@5.0.8:
resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==}
engines: {node: '>=10'}
yargs-parser@21.1.1:
resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==}
engines: {node: '>=12'}
yargs@17.7.2:
resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==}
engines: {node: '>=12'}
snapshots:
ansi-regex@5.0.1: {}
ansi-styles@4.3.0:
dependencies:
color-convert: 2.0.1
anymatch@3.1.3:
dependencies:
normalize-path: 3.0.0
picomatch: 2.3.1
balanced-match@1.0.2: {}
binary-extensions@2.3.0: {}
brace-expansion@1.1.12:
dependencies:
balanced-match: 1.0.2
concat-map: 0.0.1
braces@3.0.3:
dependencies:
fill-range: 7.1.1
buffer-from@1.1.2: {}
chalk@4.1.2:
dependencies:
ansi-styles: 4.3.0
supports-color: 7.2.0
chokidar@3.6.0:
dependencies:
anymatch: 3.1.3
braces: 3.0.3
glob-parent: 5.1.2
is-binary-path: 2.1.0
is-glob: 4.0.3
normalize-path: 3.0.0
readdirp: 3.6.0
optionalDependencies:
fsevents: 2.3.3
cliui@8.0.1:
dependencies:
string-width: 4.2.3
strip-ansi: 6.0.1
wrap-ansi: 7.0.0
color-convert@2.0.1:
dependencies:
color-name: 1.1.4
color-name@1.1.4: {}
concat-map@0.0.1: {}
concurrently@9.2.1:
dependencies:
chalk: 4.1.2
rxjs: 7.8.2
shell-quote: 1.8.3
supports-color: 8.1.1
tree-kill: 1.2.2
yargs: 17.7.2
date-fns@4.1.0: {}
debug@4.4.3(supports-color@5.5.0):
dependencies:
ms: 2.1.3
optionalDependencies:
supports-color: 5.5.0
emoji-regex@8.0.0: {}
escalade@3.2.0: {}
fill-range@7.1.1:
dependencies:
to-regex-range: 5.0.1
fsevents@2.3.3:
optional: true
get-caller-file@2.0.5: {}
glob-parent@5.1.2:
dependencies:
is-glob: 4.0.3
has-flag@3.0.0: {}
has-flag@4.0.0: {}
ignore-by-default@1.0.1: {}
is-binary-path@2.1.0:
dependencies:
binary-extensions: 2.3.0
is-extglob@2.1.1: {}
is-fullwidth-code-point@3.0.0: {}
is-glob@4.0.3:
dependencies:
is-extglob: 2.1.1
is-number@7.0.0: {}
minimatch@3.1.2:
dependencies:
brace-expansion: 1.1.12
ms@2.1.3: {}
nodemon@3.1.11:
dependencies:
chokidar: 3.6.0
debug: 4.4.3(supports-color@5.5.0)
ignore-by-default: 1.0.1
minimatch: 3.1.2
pstree.remy: 1.1.8
semver: 7.7.3
simple-update-notifier: 2.0.0
supports-color: 5.5.0
touch: 3.1.1
undefsafe: 2.0.5
normalize-path@3.0.0: {}
picomatch@2.3.1: {}
pstree.remy@1.1.8: {}
readdirp@3.6.0:
dependencies:
picomatch: 2.3.1
require-directory@2.1.1: {}
rxjs@7.8.2:
dependencies:
tslib: 2.8.1
semver@7.7.3: {}
shell-quote@1.8.3: {}
simple-update-notifier@2.0.0:
dependencies:
semver: 7.7.3
source-map-support@0.5.21:
dependencies:
buffer-from: 1.1.2
source-map: 0.6.1
source-map@0.6.1: {}
string-width@4.2.3:
dependencies:
emoji-regex: 8.0.0
is-fullwidth-code-point: 3.0.0
strip-ansi: 6.0.1
strip-ansi@6.0.1:
dependencies:
ansi-regex: 5.0.1
supports-color@5.5.0:
dependencies:
has-flag: 3.0.0
supports-color@7.2.0:
dependencies:
has-flag: 4.0.0
supports-color@8.1.1:
dependencies:
has-flag: 4.0.0
to-regex-range@5.0.1:
dependencies:
is-number: 7.0.0
touch@3.1.1: {}
tree-kill@1.2.2: {}
tslib@2.8.1: {}
undefsafe@2.0.5: {}
wrap-ansi@7.0.0:
dependencies:
ansi-styles: 4.3.0
string-width: 4.2.3
strip-ansi: 6.0.1
ws@8.18.3: {}
y18n@5.0.8: {}
yargs-parser@21.1.1: {}
yargs@17.7.2:
dependencies:
cliui: 8.0.1
escalade: 3.2.0
get-caller-file: 2.0.5
require-directory: 2.1.1
string-width: 4.2.3
y18n: 5.0.8
yargs-parser: 21.1.1


@@ -3,5 +3,5 @@
set -ex
corepack enable;
corepack install;
pnpm install;
pnpm run test;
yarn install;
yarn run test;


@@ -1024,26 +1024,6 @@
:clj
(sort comp-fn items))))
(defn obfuscate-string
"Obfuscates potentially sensitive values.
- One-arg arity:
* For strings shorter than 10 characters, all characters are replaced by `*`.
* For longer strings, the first 5 characters are preserved and the rest obfuscated.
- Two-arg arity accepts a boolean `full?` that, when true, replaces the whole value
by `*`, preserving only the length."
([v]
(obfuscate-string v false))
([v full?]
(let [s (str v)
n (count s)]
(cond
(zero? n) s
full? (apply str (repeat n "*"))
(< n 10) (apply str (repeat n "*"))
:else (str (subs s 0 5)
(apply str (repeat (- n 5) "*")))))))
(defn reorder
"Reorder a vector by moving one of their items from some position to some space between positions.
It clamps the position numbers to a valid range."

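The obfuscate-string helper shown in the hunk above is self-contained; a runnable copy of the same logic (taken from the hunk, with the docstring condensed) makes its two arities easy to check at a REPL:

(defn obfuscate-string
  "Obfuscate potentially sensitive values. 1-arity: strings shorter than
  10 chars become all `*`; longer ones keep the first 5 chars. 2-arity:
  when full? is true the whole value is replaced, preserving only length."
  ([v] (obfuscate-string v false))
  ([v full?]
   (let [s (str v)
         n (count s)]
     (cond
       (zero? n) s
       full?     (apply str (repeat n "*"))
       (< n 10)  (apply str (repeat n "*"))
       :else     (str (subs s 0 5)
                      (apply str (repeat (- n 5) "*")))))))

;; (obfuscate-string "secret")            => "******"
;; (obfuscate-string "0123456789ab")      => "01234*******"
;; (obfuscate-string "0123456789ab" true) => "************"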

@@ -10,20 +10,16 @@
(:refer-clojure :exclude [instance?])
(:require
#?(:clj [clojure.stacktrace :as strace])
[app.common.data :refer [obfuscate-string]]
[app.common.pprint :as pp]
[app.common.schema :as sm]
[clojure.core :as c]
[clojure.spec.alpha :as s]
[cuerdas.core :as str])
[cuerdas.core :as str]
[expound.alpha :as expound])
#?(:clj
(:import
clojure.lang.IPersistentMap)))
(def ^:private sensitive-fields
"Keys whose values must be obfuscated in validation explains."
#{:password :old-password :token :invitation-token})
#?(:clj (set! *warn-on-reflection* true))
(def ^:dynamic *data-length* 8)
@@ -114,26 +110,15 @@
(contains? data :explain))
(explain (:explain data) opts)
(contains? data ::sm/explain)
(let [exp (::sm/explain data)
sanitize-map (fn sanitize-map [m]
(reduce-kv
(fn [acc k v]
(let [k* (if (string? k) (keyword k) k)]
(cond
(contains? sensitive-fields k*)
(assoc acc k (if (map? v)
(sanitize-map v)
(obfuscate-string v true)))
(and (contains? data ::s/problems)
(contains? data ::s/value)
(contains? data ::s/spec))
(binding [s/*explain-out* expound/printer]
(with-out-str
(s/explain-out (update data ::s/problems #(take (:length opts 10) %)))))
(map? v) (assoc acc k (sanitize-map v))
:else (assoc acc k v))))
{}
m))
sanitize-explain (fn [exp]
(cond-> exp
(:value exp) (update :value sanitize-map)))]
(sm/humanize-explain (sanitize-explain exp) opts))))
(contains? data ::sm/explain)
(sm/humanize-explain (::sm/explain data) opts)))
#?(:clj
(defn format-throwable


@@ -526,25 +526,20 @@
ids))
(defn clean-loops
"Clean a list of ids from circular references. Optimized fast-path for single selections."
"Clean a list of ids from circular references."
[objects ids]
(if (<= (count ids) 1)
;; For single selection, there can't be circularity; return as ordered-set.
(into (d/ordered-set) ids)
(let [ids-set (if (set? ids) ids (set ids))
parent-selected?
(fn [id]
;; Stop early as soon as we find any selected parent
(let [parents (get-parent-ids objects id)]
(some #(contains? ids-set %) parents)))
(let [parent-selected?
(fn [id]
(let [parents (get-parent-ids objects id)]
(some ids parents)))
add-element
(fn [result id]
(cond-> result
(not (parent-selected? id))
(conj id)))]
add-element
(fn [result id]
(cond-> result
(not (parent-selected? id))
(conj id)))]
(reduce add-element (d/ordered-set) ids))))
(reduce add-element (d/ordered-set) ids)))
(defn- indexed-shapes
"Retrieves a vector with the indexes for each element in the layer


@@ -82,113 +82,6 @@
(declare create-svg-children)
(declare parse-svg-element)
(defn- process-gradient-stops
"Processes gradient stops to extract stop-color and stop-opacity from style attributes
and convert them to direct attributes. This ensures stops with style='stop-color:#...;stop-opacity:1'
are properly converted to stop-color and stop-opacity attributes."
[stops]
(mapv (fn [stop]
(let [stop-attrs (:attrs stop)
stop-style (get stop-attrs :style)
;; Parse style if it's a string using csvg/parse-style utility
parsed-style (when (and (string? stop-style) (seq stop-style))
(csvg/parse-style stop-style))
;; Extract stop-color and stop-opacity from style
style-stop-color (when parsed-style (:stop-color parsed-style))
style-stop-opacity (when parsed-style (:stop-opacity parsed-style))
;; Merge: use direct attributes first, then style values as fallback
final-attrs (cond-> stop-attrs
(and style-stop-color (not (contains? stop-attrs :stop-color)))
(assoc :stop-color style-stop-color)
(and style-stop-opacity (not (contains? stop-attrs :stop-opacity)))
(assoc :stop-opacity style-stop-opacity)
;; Remove style attribute if we've extracted its values
(or style-stop-color style-stop-opacity)
(dissoc :style))]
(assoc stop :attrs final-attrs)))
stops))
(defn- resolve-gradient-href
"Resolves xlink:href references in gradients by merging the referenced gradient's
stops and attributes with the referencing gradient. This ensures gradients that
reference other gradients (like linearGradient3550 referencing linearGradient3536)
inherit the stops from the base gradient.
According to SVG spec, when a gradient has xlink:href:
- It inherits all attributes from the referenced gradient
- It inherits all stops from the referenced gradient
- The referencing gradient's attributes override the base ones
- If the referencing gradient has stops, they replace the base stops
Returns the defs map with all gradient href references resolved."
[defs]
(letfn [(resolve-gradient [gradient-id gradient-node defs visited]
(if (contains? visited gradient-id)
(do
#?(:cljs (js/console.warn "[resolve-gradient] Circular reference detected for" gradient-id)
:clj nil)
gradient-node) ;; Avoid circular references
(let [attrs (:attrs gradient-node)
href-id (or (:href attrs) (:xlink:href attrs))
href-id (when (and (string? href-id) (pos? (count href-id)))
(subs href-id 1)) ;; Remove leading #
base-gradient (when (and href-id (contains? defs href-id))
(get defs href-id))
resolved-base (when base-gradient (resolve-gradient href-id base-gradient defs (conj visited gradient-id)))]
(if resolved-base
;; Merge: base gradient attributes + referencing gradient attributes
;; Use referencing gradient's stops if present, otherwise use base stops
(let [base-attrs (:attrs resolved-base)
ref-attrs (:attrs gradient-node)
;; Start with base attributes (without id), then merge with ref attributes
;; This ensures ref attributes override base ones
base-attrs-clean (dissoc base-attrs :id)
ref-attrs-clean (dissoc ref-attrs :href :xlink:href :id)
;; Special handling for gradientTransform: if both have it, combine them
base-transform (get base-attrs :gradientTransform)
ref-transform (get ref-attrs :gradientTransform)
combined-transform (cond
(and base-transform ref-transform)
(str base-transform " " ref-transform) ;; Apply base first, then ref
:else (or ref-transform base-transform))
;; Merge attributes: base first, then ref (ref overrides)
merged-attrs (-> (d/deep-merge base-attrs-clean ref-attrs-clean)
(cond-> combined-transform
(assoc :gradientTransform combined-transform)))
;; If referencing gradient has content (stops), use it; otherwise use base content
final-content (if (seq (:content gradient-node))
(:content gradient-node)
(:content resolved-base))
;; Process stops to extract stop-color and stop-opacity from style attributes
processed-content (process-gradient-stops final-content)
result {:tag (:tag gradient-node)
:attrs (assoc merged-attrs :id gradient-id)
:content processed-content}]
result)
;; Process stops even for gradients without references to extract style attributes
(let [processed-content (process-gradient-stops (:content gradient-node))]
(assoc gradient-node :content processed-content))))))]
(let [gradient-tags #{:linearGradient :radialGradient}
result (reduce-kv
(fn [acc id node]
(if (contains? gradient-tags (:tag node))
(assoc acc id (resolve-gradient id node defs #{}))
(assoc acc id node)))
{}
defs)]
result)))
(defn create-svg-shapes
([svg-data pos objects frame-id parent-id selected center?]
(create-svg-shapes (uuid/next) svg-data pos objects frame-id parent-id selected center?))
@@ -219,9 +112,6 @@
(csvg/fix-percents)
(csvg/extract-defs))
;; Resolve gradient href references in all defs before processing shapes
def-nodes (resolve-gradient-href def-nodes)
;; In penpot groups have the size of their children. To
;; respect the imported svg size and empty space let's create
;; a transparent shape as background to respect the imported
@@ -252,23 +142,12 @@
(reduce (partial create-svg-children objects selected frame-id root-id svg-data)
[unames []]
(d/enumerate (->> (:content svg-data)
(mapv #(csvg/inherit-attributes root-attrs %)))))
(mapv #(csvg/inherit-attributes root-attrs %)))))]
;; Collect all defs from children and merge into root shape
all-defs-from-children (reduce (fn [acc child]
(if-let [child-defs (:svg-defs child)]
(merge acc child-defs)
acc))
{}
children)
;; Merge defs from svg-data and children into root shape
root-shape-with-defs (assoc root-shape :svg-defs (merge def-nodes all-defs-from-children))]
[root-shape-with-defs children])))
[root-shape children])))
(defn create-raw-svg
[name frame-id {:keys [x y width height offset-x offset-y defs] :as svg-data} {:keys [attrs] :as data}]
[name frame-id {:keys [x y width height offset-x offset-y]} {:keys [attrs] :as data}]
(let [props (csvg/attrs->props attrs)
vbox (grc/make-rect offset-x offset-y width height)]
(cts/setup-shape
@@ -281,11 +160,10 @@
:y y
:content data
:svg-attrs props
:svg-viewbox vbox
:svg-defs defs})))
:svg-viewbox vbox})))
(defn create-svg-root
[id frame-id parent-id {:keys [name x y width height offset-x offset-y attrs defs] :as svg-data}]
[id frame-id parent-id {:keys [name x y width height offset-x offset-y attrs]}]
(let [props (-> (dissoc attrs :viewBox :view-box :xmlns)
(d/without-keys csvg/inheritable-props)
(csvg/attrs->props))]
@@ -299,8 +177,7 @@
:height height
:x (+ x offset-x)
:y (+ y offset-y)
:svg-attrs props
:svg-defs defs})))
:svg-attrs props})))
(defn create-svg-children
[objects selected frame-id parent-id svg-data [unames children] [_index svg-element]]
@@ -321,7 +198,7 @@
(defn create-group
[name frame-id {:keys [x y width height offset-x offset-y defs] :as svg-data} {:keys [attrs]}]
[name frame-id {:keys [x y width height offset-x offset-y] :as svg-data} {:keys [attrs]}]
(let [transform (csvg/parse-transform (:transform attrs))
attrs (-> attrs
(d/without-keys csvg/inheritable-props)
@@ -337,8 +214,7 @@
:height height
:svg-transform transform
:svg-attrs attrs
:svg-viewbox vbox
:svg-defs defs})))
:svg-viewbox vbox})))
(defn create-path-shape [name frame-id svg-data {:keys [attrs] :as data}]
(when (and (contains? attrs :d) (seq (:d attrs)))
@@ -647,21 +523,6 @@
:else (dm/str tag))]
(dm/str "svg-" suffix)))
(defn- filter-valid-def-references
"Filters out false positive references that are not valid def IDs.
Filters out:
- Colors in style attributes (hex colors like #f9dd67)
- Style fragments that contain CSS keywords (like stop-opacity)
- References that don't exist in defs"
[ref-ids defs]
(let [is-style-fragment? (fn [ref-id]
(or (clr/hex-color-string? (str "#" ref-id))
(str/includes? ref-id ";") ;; Contains CSS separator
(str/includes? ref-id "stop-opacity") ;; CSS keyword
(str/includes? ref-id "stop-color")))] ;; CSS keyword
(->> ref-ids
(remove is-style-fragment?) ;; Filter style fragments and hex colors
(filter #(contains? defs %))))) ;; Only existing defs
(defn parse-svg-element
[frame-id svg-data {:keys [tag attrs hidden] :as element} unames]
@@ -673,11 +534,7 @@
(let [name (or (:id attrs) (tag->name tag))
att-refs (csvg/find-attr-references attrs)
defs (get svg-data :defs)
valid-refs (filter-valid-def-references att-refs defs)
all-refs (csvg/find-def-references defs valid-refs)
;; Filter the final result to ensure all references are valid defs
;; This prevents false positives from style attributes in gradient stops
references (filter-valid-def-references all-refs defs)
references (csvg/find-def-references defs att-refs)
href-id (or (:href attrs) (:xlink:href attrs) " ")
href-id (if (and (string? href-id)


@@ -169,7 +169,6 @@
:enable-component-thumbnails
:enable-render-wasm-dpr
:enable-token-color
:enable-token-shadow
:enable-inspect-styles
:enable-feature-fdata-objects-map])


@@ -75,25 +75,20 @@
#?(:cljs
(defn ->clj
[o & {:keys [key-fn val-fn recursive] :or {key-fn read-kebab-key val-fn identity recursive true}}]
[o & {:keys [key-fn val-fn] :or {key-fn read-kebab-key val-fn identity}}]
(let [f (fn this-fn [x]
(let [x (val-fn x)]
(cond
(array? x)
(persistent!
(.reduce ^js/Array x
#(conj! %1 (if recursive
(this-fn %2)
%2))
#(conj! %1 (this-fn %2))
(transient [])))
(identical? (type x) js/Object)
(persistent!
(.reduce ^js/Array (js-keys x)
#(assoc! %1 (key-fn %2)
(if recursive
(this-fn (unchecked-get x %2))
(unchecked-get x %2)))
#(assoc! %1 (key-fn %2) (this-fn (unchecked-get x %2)))
(transient {})))
:else

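A hypothetical usage sketch of the ->clj helper above (ClojureScript only). It assumes read-kebab-key kebab-cases string keys, which its name suggests but this diff does not show:

;; assumed output shape, based on the default key-fn name and the
;; reduce logic in the hunk above
(->clj #js {:someKey 1 :nested #js {:otherKey 2}})
;; => {:some-key 1, :nested {:other-key 2}}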

@@ -43,6 +43,8 @@
"
#?(:cljs (:require-macros [app.common.logging :as l]))
(:require
#?(:clj [clojure.edn :as edn]
:cljs [cljs.reader :as edn])
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.pprint :as pp]


@@ -21,6 +21,7 @@
[app.common.logic.shapes :as cls]
[app.common.logic.variant-properties :as clvp]
[app.common.path-names :as cpn]
[app.common.spec :as us]
[app.common.types.component :as ctk]
[app.common.types.components-list :as ctkl]
[app.common.types.container :as ctn]
@@ -38,7 +39,8 @@
[app.common.types.typography :as cty]
[app.common.types.variant :as ctv]
[app.common.uuid :as uuid]
[clojure.set :as set]))
[clojure.set :as set]
[clojure.spec.alpha :as s]))
;; Change this to :info :debug or :trace to debug this module, or :warn to reset to default
(log/set-level! :warn)
@@ -475,10 +477,10 @@
If an asset id is given, only shapes linked to this particular asset will
be synchronized."
[changes file-id asset-type asset-id library-id libraries current-file-id]
(assert (contains? #{:colors :components :typographies} asset-type))
(assert (or (nil? asset-id) (uuid? asset-id)))
(assert (uuid? file-id))
(assert (uuid? library-id))
(s/assert #{:colors :components :typographies} asset-type)
(s/assert (s/nilable ::us/uuid) asset-id)
(s/assert ::us/uuid file-id)
(s/assert ::us/uuid library-id)
(container-log :info asset-id
:msg "Sync file with library"
@@ -512,10 +514,10 @@
If an asset id is given, only shapes linked to this particular asset will
be synchronized."
[changes file-id asset-type asset-id library-id libraries current-file-id]
(assert (contains? #{:colors :components :typographies} asset-type))
(assert (or (nil? asset-id) (uuid? asset-id)))
(assert (uuid? file-id))
(assert (uuid? library-id))
(s/assert #{:colors :components :typographies} asset-type)
(s/assert (s/nilable ::us/uuid) asset-id)
(s/assert ::us/uuid file-id)
(s/assert ::us/uuid library-id)
(container-log :info asset-id
:msg "Sync local components with library"
@@ -2491,13 +2493,11 @@
(ctk/get-swap-slot))
(constantly false))
;; In the cases where the swapped shape was the first element of the masked group it would make the group to loose the
;; mask property as part of the sanitization check on generate-delete-shapes, passing "ignore-mask" to prevent this
[all-parents changes]
(-> changes
(cls/generate-delete-shapes
file page objects (d/ordered-set (:id shape))
{:allow-altering-copies true :ignore-children-fn ignore-swapped-fn :ignore-mask true :ignore-flows-for #{(:id shape)}}))
{:allow-altering-copies true :ignore-children-fn ignore-swapped-fn}))
[new-shape changes]
(-> changes
(generate-new-shape-for-swap shape file page libraries id-new-component index target-cell keep-props-values))]
@@ -2867,15 +2867,13 @@
ids-map (into {} (map #(vector % (uuid/next))) all-ids)
;; If there is an alt-duplication we change to root
;; For variants so the copy is made as a child of root
;; If there is an alt-duplication of a variant, change its parent to root
;; so the copy is made as a child of root
;; This is because inside a variant-container can't be a copy
;; For other shape this way the layout won't be changed when duplicated
;; and if you move outside the layout will not change
shapes (map (fn [shape]
(cond-> shape
alt-duplication?
(assoc :parent-id uuid/zero :frame-id uuid/zero)))
(if (and alt-duplication? (ctk/is-variant? shape))
(assoc shape :parent-id uuid/zero :frame-id nil)
shape))
shapes)


@@ -123,12 +123,8 @@
;; ignore-children-fn is used to ignore some descendants
;; on the deletion process. It should receive a shape and
;; return a boolean
ignore-children-fn
ignore-mask
ignore-flows-for]
:or {ignore-children-fn (constantly false)
ignore-mask false
ignore-flows-for #{}}}]
ignore-children-fn]
:or {ignore-children-fn (constantly false)}}]
(let [objects (pcb/get-objects changes)
data (pcb/get-library-data changes)
page-id (pcb/get-page-id changes)
@@ -166,20 +162,18 @@
lookup (d/getf objects)
groups-to-unmask
(when-not ignore-mask
(reduce (fn [group-ids id]
;; When the shape to delete is the mask of a masked group,
;; the mask condition must be removed, and it must be
;; converted to a normal group.
(let [obj (lookup id)
parent (lookup (:parent-id obj))]
(if (and (:masked-group parent)
(= id (first (:shapes parent))))
(conj group-ids (:id parent))
group-ids)))
#{}
ids-to-delete)
[])
(reduce (fn [group-ids id]
;; When the shape to delete is the mask of a masked group,
;; the mask condition must be removed, and it must be
;; converted to a normal group.
(let [obj (lookup id)
parent (lookup (:parent-id obj))]
(if (and (:masked-group parent)
(= id (first (:shapes parent))))
(conj group-ids (:id parent))
group-ids)))
#{}
ids-to-delete)
interacting-shapes
(filter (fn [shape]
@@ -196,8 +190,7 @@
(->> (:flows page)
(reduce
(fn [changes [id flow]]
(if (and (id-to-delete? (:starting-frame flow))
(not (contains? ignore-flows-for (:starting-frame flow))))
(if (id-to-delete? (:starting-frame flow))
(-> changes
(pcb/with-page page)
(pcb/set-flow id nil))


@@ -132,94 +132,3 @@ Some naming conventions:
(if-let [last-period (str/last-index-of s ".")]
[(subs s 0 (inc last-period)) (subs s (inc last-period))]
[s ""]))
;; Tree building functions --------------------------------------------------
"Build tree structure from flat list of paths"
"`build-tree-root` is the main function to build the tree."
"Receives a list of segments with 'name' properties representing paths,
and a separator string."
"E.g segments = [{... :name 'one/two/three'} {... :name 'one/two/four'} {... :name 'one/five'}]"
"Transforms into a tree structure like:
[{:name 'one'
:path 'one'
:depth 0
:leaf nil
:children-fn (fn [] [{:name 'two'
:path 'one.two'
:depth 1
:leaf nil
:children-fn (fn [] [{... :name 'three'} {... :name 'four'}])}
{:name 'five'
:path 'one.five'
:depth 1
:leaf {... :name 'five'}
...}])}]"
(defn- sort-by-children
"Sorts segments so that those with children come first."
[segments separator]
(sort-by (fn [segment]
(let [path (split-path (:name segment) :separator separator)
path-length (count path)]
(if (= path-length 1)
1
0)))
segments))
(defn- group-by-first-segment
"Groups segments by their first path segment and update segment name."
[segments separator]
(reduce (fn [acc segment]
(let [[first-segment & remaining-segments] (split-path (:name segment) :separator separator)
rest-path (when (seq remaining-segments) (join-path remaining-segments :separator separator :with-spaces? false))]
(update acc first-segment (fnil conj [])
(if rest-path
(assoc segment :name rest-path)
segment))))
{}
segments))
(defn- sort-and-group-segments
"Sorts elements and groups them by their first path segment."
[segments separator]
(let [sorted (sort-by-children segments separator)
grouped (group-by-first-segment sorted separator)]
grouped))
(defn- build-tree-node
"Builds a single tree node with lazy children."
[segment-name remaining-segments separator parent-path depth]
(let [current-path (if parent-path
(str parent-path "." segment-name)
segment-name)
is-leaf? (and (seq remaining-segments)
(every? (fn [segment]
(let [remaining-segment-name (first (split-path (:name segment) :separator separator))]
(= segment-name remaining-segment-name)))
remaining-segments))
leaf-segment (when is-leaf? (first remaining-segments))
node {:name segment-name
:path current-path
:depth depth
:leaf leaf-segment
:children-fn (when-not is-leaf?
(fn []
(let [grouped-elements (sort-and-group-segments remaining-segments separator)]
(mapv (fn [[child-segment-name remaining-child-segments]]
(build-tree-node child-segment-name remaining-child-segments separator current-path (inc depth)))
grouped-elements))))}]
node))
(defn build-tree-root
"Builds the root level of the tree."
[segments separator]
(let [grouped-elements (sort-and-group-segments segments separator)]
(mapv (fn [[segment-name remaining-segments]]
(build-tree-node segment-name remaining-segments separator nil 0))
grouped-elements)))


@@ -8,8 +8,6 @@
(:refer-clojure :exclude [deref merge parse-uuid parse-long parse-double parse-boolean type keys])
#?(:cljs (:require-macros [app.common.schema :refer [ignoring]]))
(:require
#?(:clj [malli.dev.pretty :as mdp])
#?(:clj [malli.dev.virhe :as v])
[app.common.data :as d]
[app.common.math :as mth]
[app.common.pprint :as pp]
@@ -21,6 +19,8 @@
[clojure.core :as c]
[cuerdas.core :as str]
[malli.core :as m]
[malli.dev.pretty :as mdp]
[malli.dev.virhe :as v]
[malli.error :as me]
[malli.generator :as mg]
[malli.registry :as mr]
@@ -245,30 +245,27 @@
:level (d/nilv level 8)
:length (d/nilv length 12)})))))
#?(:clj
(defmethod v/-format ::schemaless-explain
[_ explanation printer]
{:body [:group
(v/-block "Value" (v/-visit (me/error-value explanation printer) printer) printer) :break :break
(v/-block "Errors" (v/-visit (me/humanize (me/with-spell-checking explanation)) printer) printer)]}))
(defmethod v/-format ::schemaless-explain
[_ explanation printer]
{:body [:group
(v/-block "Value" (v/-visit (me/error-value explanation printer) printer) printer) :break :break
(v/-block "Errors" (v/-visit (me/humanize (me/with-spell-checking explanation)) printer) printer)]})
#?(:clj
(defmethod v/-format ::explain
[_ {:keys [schema] :as explanation} printer]
{:body [:group
(v/-block "Value" (v/-visit (me/error-value explanation printer) printer) printer) :break :break
(v/-block "Errors" (v/-visit (me/humanize (me/with-spell-checking explanation)) printer) printer) :break :break
(v/-block "Schema" (v/-visit schema printer) printer)]}))
(defmethod v/-format ::explain
[_ {:keys [schema] :as explanation} printer]
{:body [:group
(v/-block "Value" (v/-visit (me/error-value explanation printer) printer) printer) :break :break
(v/-block "Errors" (v/-visit (me/humanize (me/with-spell-checking explanation)) printer) printer) :break :break
(v/-block "Schema" (v/-visit schema printer) printer)]})
#?(:clj
(defn pretty-explain
"A helper that allows print a console-friendly output for the explain;
should not be used for other purposes"
[explain & {:keys [variant message]
:or {variant ::explain
message "Validation Error"}}]
(let [explain (fn [] (me/with-error-messages explain))]
((mdp/prettifier variant message explain default-options)))))
(defn pretty-explain
"A helper that allows print a console-friendly output for the
explain; should not be used for other purposes"
[explain & {:keys [variant message]
:or {variant ::explain
message "Validation Error"}}]
(let [explain (fn [] (me/with-error-messages explain))]
((mdp/prettifier variant message explain default-options))))
(defmacro ignoring
[expr]
@@ -315,13 +312,6 @@
::explain explain}))))
value))))
(defn coercer
[schema & {:as opts}]
(let [decode-fn (lazy-decoder schema json-transformer)
check-fn (check-fn schema opts)]
(fn [data]
(-> data decode-fn check-fn))))
(defn check
"A helper intended to be used on assertions for validate/check the
schema over provided data. Raises an assertion exception.
@@ -1016,9 +1006,6 @@
(def valid-safe-number?
(lazy-validator ::safe-number))
(def valid-safe-int?
(lazy-validator ::safe-int))
(def valid-text?
(validator ::text))


@@ -546,19 +546,9 @@
filter-values)))
(defn extract-ids [val]
;; Extract referenced ids from string values like "url(#myId)".
;; Non-string values (maps, numbers, nil, etc.) return an empty seq
;; to avoid re-seq type errors when attributes carry nested structures.
(cond
(string? val)
(when (some? val)
(->> (re-seq xml-id-regex val)
(mapv second))
(sequential? val)
(mapcat extract-ids val)
:else
[]))
(mapv second))))
(defn fix-dot-number
"Fixes decimal numbers starting in dot but without leading 0"


@@ -340,7 +340,7 @@
(dfn-diff t2 t1)))
#?(:cljs
(defn set-default-locale
(defn set-default-locale!
[locale]
(when-let [locale (unchecked-get locales locale)]
(dfn-set-default-options #js {:locale locale}))))


@@ -140,8 +140,7 @@
:layout-item-min-w
:layout-item-absolute
:layout-item-z-index
:layout-item-align-self
:interactions})
:layout-item-align-self})
(defn component-attr?
"Check if some attribute is one that is involved in component syncrhonization.


@@ -269,8 +269,8 @@
"Remove flex children properties except the fit-content for flex layouts. These are properties
that we don't have to propagate to copies but will be respected when swapping components"
[shape]
(let [layout-item-h-sizing (when (and (ctl/any-layout? shape) (ctl/auto-width? shape)) :auto)
layout-item-v-sizing (when (and (ctl/any-layout? shape) (ctl/auto-height? shape)) :auto)]
(let [layout-item-h-sizing (when (and (ctl/flex-layout? shape) (ctl/auto-width? shape)) :auto)
layout-item-v-sizing (when (and (ctl/flex-layout? shape) (ctl/auto-height? shape)) :auto)]
(-> shape
(d/without-keys ctk/swap-keep-attrs)
(cond-> (some? layout-item-h-sizing)


@@ -362,24 +362,24 @@
component (ctkl/get-component component-file (:component-id top-instance) true)
remote-shape (get-ref-shape component-file component shape)
component-container (get-component-container component-file component)
[remote-shape component-container component-file]
[remote-shape component-container]
(if (some? remote-shape)
[remote-shape component-container component-file]
[remote-shape component-container]
;; If not found, try the case of this being a fostered or swapped children
(let [head-instance (ctn/get-head-shape (:objects container) shape)
component-file (get-in libraries [(:component-file head-instance) :data])
head-component (ctkl/get-component component-file (:component-id head-instance) true)
remote-shape' (get-ref-shape component-file head-component shape)
component-container' (get-component-container component-file head-component)]
[remote-shape' component-container' component-file]))]
(let [head-instance (ctn/get-head-shape (:objects container) shape)
component-file (get-in libraries [(:component-file head-instance) :data])
head-component (ctkl/get-component component-file (:component-id head-instance) true)
remote-shape' (get-ref-shape component-file head-component shape)
component-container (get-component-container component-file component)]
[remote-shape' component-container]))]
(if (nil? remote-shape)
nil
(if (nil? (:shape-ref remote-shape))
(cond-> remote-shape
(and remote-shape with-context?)
(with-meta {:file {:id (:id component-file)
:data component-file}
(with-meta {:file {:id (:id file-data)
:data file-data}
:container component-container}))
(find-remote-shape component-container libraries remote-shape :with-context? with-context?)))))


@@ -112,10 +112,8 @@
(:c2y params) (update-in [index :params :c2y] + (:c2y params)))
content))]
(if (some? modifiers)
(impl/path-data
(reduce apply-to-index (vec content) modifiers))
content)))
(impl/path-data
(reduce apply-to-index (vec content) modifiers))))
(defn transform-content
"Applies a transformation matrix over content and returns a new
@@ -236,15 +234,16 @@
"Calculate the boolean content from shape and objects. Returns a
packed PathData instance"
[shape objects]
(let [content (calc-bool-content* shape objects)]
(let [content (if (fn? wasm:calc-bool-content)
(wasm:calc-bool-content (get shape :bool-type)
(get shape :shapes))
(calc-bool-content* shape objects))]
(impl/path-data content)))
(defn update-bool-shape
"Calculates the selrect+points for the boolean shape"
[shape objects]
(let [content (if (fn? wasm:calc-bool-content)
(wasm:calc-bool-content shape objects)
(calc-bool-content shape objects))
(let [content (calc-bool-content shape objects)
shape (assoc shape :content content)]
(update-geometry shape)))


@@ -1,29 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.common.types.project
(:require
[app.common.schema :as sm]
[app.common.time :as cm]))
(def schema:project
[:map {:title "Profile"}
[:id ::sm/uuid]
[:created-at {:optional true} ::cm/inst]
[:modified-at {:optional true} ::cm/inst]
[:name :string]
[:is-default {:optional true} ::sm/boolean]
[:is-pinned {:optional true} ::sm/boolean]
[:count {:optional true} ::sm/int]
[:total-count {:optional true} ::sm/int]
[:team-id ::sm/uuid]])
(def valid-project?
(sm/lazy-validator schema:project))
(def check-project
(sm/check-fn schema:project))


@@ -47,18 +47,6 @@
self-reference? (get token-references token-name)]
self-reference?))
(defn references-token?
"Recursively check if a value references the token name. Handles strings, maps, and sequences."
[value token-name]
(cond
(string? value)
(boolean (some #(= % token-name) (find-token-value-references value)))
(map? value)
(some true? (map #(references-token? % token-name) (vals value)))
(sequential? value)
(some true? (map #(references-token? % token-name) value))
:else false))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; SCHEMA
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -71,7 +59,6 @@
:dimensions "dimension"
:font-family "fontFamilies"
:font-size "fontSizes"
:font-weight "fontWeights"
:letter-spacing "letterSpacing"
:number "number"
:opacity "opacity"
@@ -83,6 +70,7 @@
:stroke-width "borderWidth"
:text-case "textCase"
:text-decoration "textDecoration"
:font-weight "fontWeights"
:typography "typography"})
(def dtcg-token-type->token-type
@@ -487,7 +475,6 @@
:vertical-margin #{:spacing :dimensions}
:sided-margins #{:spacing :dimensions}
:line-height #{:line-height :number}
:opacity #{:opacity}
:font-size #{:font-size}
:letter-spacing #{:letter-spacing}
:fill #{:color}
@@ -571,18 +558,3 @@
"Predicate if a shadow composite token is a reference value - a string pointing to another reference token."
[token-value]
(string? token-value))
(defn update-token-value-references
"Recursively update token references within a token value, supporting complex token values (maps, sequences, strings)."
[value old-name new-name]
(cond
(string? value)
(str/replace value
(re-pattern (str "\\{" (str/replace old-name "." "\\.") "\\}"))
(str "{" new-name "}"))
(map? value)
(d/update-vals value #(update-token-value-references % old-name new-name))
(sequential? value)
(mapv #(update-token-value-references % old-name new-name) value)
:else
value))

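The two token helpers in the hunk above work on {token.name} style references (the escaped brace pattern is visible in update-token-value-references). A hypothetical call pattern, assuming find-token-value-references extracts those names from strings:

;; string value referencing another token
(references-token? "{color.primary}" "color.primary")
;; => true (assumed)

;; composite (map) value: references are rewritten recursively
(update-token-value-references
 {:color "{color.primary}" :spread "0"}
 "color.primary"
 "color.brand")
;; => {:color "{color.brand}", :spread "0"}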

@@ -909,8 +909,7 @@ Will return a value that matches this schema:
`:all` All of the nested sets are active
`:partial` Mixed active state of nested sets")
(get-tokens-in-active-sets [_] "set of set names that are active in the the active themes")
(get-all-tokens [_] "all tokens in the lib, as a sequence")
(get-all-tokens-map [_] "all tokens in the lib, as a map name -> token")
(get-all-tokens [_] "all tokens in the lib")
(get-tokens [_ set-id] "return a map of tokens in the set, indexed by token-name"))
(declare parse-multi-set-dtcg-json)
@@ -1307,10 +1306,6 @@ Will return a value that matches this schema:
tokens))
(get-all-tokens [this]
(mapcat #(vals (get-tokens- %))
(get-sets this)))
(get-all-tokens-map [this]
(reduce
(fn [tokens' set]
(into tokens' (map (fn [x] [(:name x) x]) (vals (get-tokens- set)))))
@@ -1415,8 +1410,8 @@ Will return a value that matches this schema:
;; NOTE: we can't assign statically at eval time the value of a
;; function that is declared but not defined; so we need to pass
;; an anonymous function and delegate the resolution to runtime
{:encode/json #(some-> % export-dtcg-json)
:decode/json #(some-> % read-multi-set-dtcg)
{:encode/json #(export-dtcg-json %)
:decode/json #(read-multi-set-dtcg %)
;; FIXME: add better, more reallistic generator
:gen/gen (->> (sg/small-int)
(sg/fmap (fn [_]
@@ -1550,7 +1545,7 @@ Will return a value that matches this schema:
(and (not (contains? decoded-json "$metadata"))
(not (contains? decoded-json "$themes"))))
(defn convert-dtcg-font-family
(defn- convert-dtcg-font-family
"Convert font-family token value from DTCG format to internal format.
- If value is a string, split it into a collection of font families
- If value is already an array, keep it as is
@@ -1561,7 +1556,7 @@ Will return a value that matches this schema:
(sequential? value) value
:else value))
(defn convert-dtcg-typography-composite
(defn- convert-dtcg-typography-composite
"Convert typography token value keys from DTCG format to internal format."
[value]
(if (map? value)
@@ -1573,17 +1568,17 @@ Will return a value that matches this schema:
;; Reference value
value))
(defn convert-dtcg-shadow-composite
(defn- convert-dtcg-shadow-composite
"Convert shadow token value from DTCG format to internal format."
[value]
(let [process-shadow (fn [shadow]
(if (map? shadow)
(let [legacy-shadow-type (get "type" shadow)]
(-> shadow
(set/rename-keys {"x" :offset-x
"offsetX" :offset-x
"y" :offset-y
"offsetY" :offset-y
(set/rename-keys {"x" :offsetX
"offsetX" :offsetX
"y" :offsetY
"offsetY" :offsetY
"blur" :blur
"spread" :spread
"color" :color
@@ -1594,7 +1589,7 @@ Will return a value that matches this schema:
(= "false" %) false
(= legacy-shadow-type "innerShadow") true
:else false))
(select-keys [:offset-x :offset-y :blur :spread :color :inset])))
(select-keys [:offsetX :offsetY :blur :spread :color :inset])))
shadow))]
(cond
;; Reference value - keep as string
@@ -1865,8 +1860,8 @@ Will return a value that matches this schema:
(mapv (fn [shadow]
(if (map? shadow)
(-> shadow
(set/rename-keys {:offset-x "offsetX"
:offset-y "offsetY"
(set/rename-keys {:offsetX "offsetX"
:offsetY "offsetY"
:blur "blur"
:spread "spread"
:color "color"
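
Taken together, the hunks above align the shadow key mapping with the internal :offsetX/:offsetY names in both directions; a standalone sketch using clojure.set/rename-keys with the same maps (shadow values borrowed from the tests further below):

```clojure
(require '[clojure.set :as set])

;; Import: DTCG string keys (including legacy "x"/"y" aliases) -> internal keyword keys
(set/rename-keys {"offsetX" "0" "offsetY" "2px" "blur" "4px" "spread" "0" "color" "#000"}
                 {"x" :offsetX "offsetX" :offsetX
                  "y" :offsetY "offsetY" :offsetY
                  "blur" :blur "spread" :spread "color" :color})
;; => {:offsetX "0", :offsetY "2px", :blur "4px", :spread "0", :color "#000"}

;; Export: internal keyword keys -> DTCG string keys
(set/rename-keys {:offsetX "0" :offsetY "2px" :blur "4px" :spread "0" :color "#000"}
                 {:offsetX "offsetX" :offsetY "offsetY"
                  :blur "blur" :spread "spread" :color "color"})
;; => {"offsetX" "0", "offsetY" "2px", "blur" "4px", "spread" "0", "color" "#000"}
```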

View File

@@ -14,8 +14,7 @@
(defn parse
[data]
(cond
(or (str/starts-with? data "%")
(= data "develop"))
(str/starts-with? data "%")
{:full "develop"
:branch "develop"
:base "0.0.0"
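
In other words, after this change only version strings that start with `%` (an unsubstituted build placeholder) fall back to the develop version info; the literal string "develop" is no longer special-cased. An illustrative sketch (the placeholder string is hypothetical and only the keys visible in this hunk are shown):

```clojure
(parse "%version%")
;; => {:full "develop", :branch "develop", :base "0.0.0", ...}

(parse "develop")
;; no longer matches this branch after the change
```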

View File

@@ -1897,15 +1897,15 @@
(let [token (ctob/get-token-by-name lib "shadow-test" "test.shadow-single")]
(t/is (some? token))
(t/is (= :shadow (:type token)))
(t/is (= [{:offset-x "0", :offset-y "2px", :blur "4px", :spread "0", :color "#000", :inset false}]
(t/is (= [{:offsetX "0", :offsetY "2px", :blur "4px", :spread "0", :color "#000", :inset false}]
(:value token)))))
(t/testing "multiple shadow token"
(let [token (ctob/get-token-by-name lib "shadow-test" "test.shadow-multiple")]
(t/is (some? token))
(t/is (= :shadow (:type token)))
(t/is (= [{:offset-x "0", :offset-y "2px", :blur "4px", :spread "0", :color "#000", :inset true}
{:offset-x "0", :offset-y "8px", :blur "16px", :spread "0", :color "#000", :inset true}]
(t/is (= [{:offsetX "0", :offsetY "2px", :blur "4px", :spread "0", :color "#000", :inset true}
{:offsetX "0", :offsetY "8px", :blur "16px", :spread "0", :color "#000", :inset true}]
(:value token)))))
(t/testing "shadow token with reference"
@@ -1918,7 +1918,7 @@
(let [token (ctob/get-token-by-name lib "shadow-test" "test.shadow-with-type")]
(t/is (some? token))
(t/is (= :shadow (:type token)))
(t/is (= [{:offset-x "0", :offset-y "4px", :blur "8px", :spread "0", :color "rgba(0,0,0,0.2)", :inset false}]
(t/is (= [{:offsetX "0", :offsetY "4px", :blur "8px", :spread "0", :color "rgba(0,0,0,0.2)", :inset false}]
(:value token)))))
(t/testing "shadow token with description"
@@ -1937,14 +1937,14 @@
(ctob/make-token
{:name "shadow.single"
:type :shadow
:value [{:offset-x "0" :offset-y "2px" :blur "4px" :spread "0" :color "#0000001A"}]
:value [{:offsetX "0" :offsetY "2px" :blur "4px" :spread "0" :color "#0000001A"}]
:description "A single shadow"})
"shadow.multiple"
(ctob/make-token
{:name "shadow.multiple"
:type :shadow
:value [{:offset-x "0" :offset-y "2px" :blur "4px" :spread "0" :color "#0000001A"}
{:offset-x "0" :offset-y "8px" :blur "16px" :spread "0" :color "#0000001A"}]})
:value [{:offsetX "0" :offsetY "2px" :blur "4px" :spread "0" :color "#0000001A"}
{:offsetX "0" :offsetY "8px" :blur "16px" :spread "0" :color "#0000001A"}]})
"shadow.ref"
(ctob/make-token
{:name "shadow.ref"
@@ -1991,7 +1991,7 @@
(ctob/make-token
{:name "shadow.test"
:type :shadow
:value [{:offset-x "1" :offset-y "1" :blur "1" :spread "1" :color "red" :inset true}]
:value [{:offsetX "1" :offsetY "1" :blur "1" :spread "1" :color "red" :inset true}]
:description "Round trip test"})
"shadow.ref"
(ctob/make-token

1291
common/yarn.lock Normal file
View File

File diff suppressed because it is too large.

View File

@@ -25,6 +25,48 @@ RUN set -ex; \
binutils \
build-essential autoconf libtool pkg-config
################################################################################
## IMAGE MAGICK
################################################################################
FROM base AS build-imagemagick
ENV IMAGEMAGICK_VERSION=7.1.1-47 \
DEBIAN_FRONTEND=noninteractive
RUN set -ex; \
apt-get -qq update; \
apt-get -qq upgrade; \
apt-get -qqy --no-install-recommends install \
libltdl-dev \
libpng-dev \
libjpeg-dev \
libtiff-dev \
libwebp-dev \
libopenexr-dev \
libfftw3-dev \
libzip-dev \
liblcms2-dev \
liblzma-dev \
libzstd-dev \
libheif-dev \
librsvg2-dev \
; \
rm -rf /var/lib/apt/lists/*
RUN set -eux; \
curl -LfsSo /tmp/magick.tar.gz https://github.com/ImageMagick/ImageMagick/archive/refs/tags/${IMAGEMAGICK_VERSION}.tar.gz; \
mkdir -p /tmp/magick; \
cd /tmp/magick; \
tar -xf /tmp/magick.tar.gz --strip-components=1; \
./configure --prefix=/opt/imagick; \
make -j 2; \
make install; \
rm -rf /opt/imagick/lib/libMagick++*; \
rm -rf /opt/imagick/include; \
rm -rf /opt/imagick/share;
################################################################################
## NODE SETUP
################################################################################
@@ -375,7 +417,7 @@ ENV LANG='C.UTF-8' \
RUSTUP_HOME="/opt/rustup" \
PATH="/opt/jdk/bin:/opt/utils/bin:/opt/clojure/bin:/opt/node/bin:/opt/imagick/bin:/opt/cargo/bin:$PATH"
COPY --from=penpotapp/imagemagick:7.1.2-0 /opt/imagick /opt/imagick
COPY --from=build-imagemagick /opt/imagick /opt/imagick
COPY --from=setup-jvm /opt/jdk /opt/jdk
COPY --from=setup-jvm /opt/clojure /opt/clojure
COPY --from=setup-node /opt/node /opt/node

View File

@@ -41,10 +41,7 @@ services:
- 6062:6062
- 6063:6063
- 6064:6064
- 9000:9000
- 9001:9001
- 9090:9090
- 9091:9091
environment:
- EXTERNAL_UID=${CURRENT_USER_ID}
@@ -72,11 +69,6 @@ services:
- PENPOT_LDAP_ATTRS_FULLNAME=cn
- PENPOT_LDAP_ATTRS_PHOTO=jpegPhoto
networks:
default:
aliases:
- main
minio:
image: "minio/minio:RELEASE.2025-04-03T14-56-28Z"
command: minio server /mnt/data --console-address ":9001"
@@ -88,6 +80,10 @@ services:
- MINIO_ROOT_USER=minioadmin
- MINIO_ROOT_PASSWORD=minioadmin
ports:
- 9000:9000
- 9001:9001
networks:
default:
aliases:

View File

@@ -10,7 +10,3 @@ localhost:3449 {
http://localhost:3450 {
reverse_proxy localhost:4449
}
http://penpot-devenv-main:3450 {
reverse_proxy localhost:4449
}

View File

@@ -38,11 +38,11 @@ http {
gzip_vary on;
gzip_proxied any;
gzip_comp_level 6;
gzip_comp_level 3;
gzip_buffers 16 8k;
gzip_http_version 1.1;
gzip_types text/plain text/css text/javascript application/javascript application/json application/transit+json image/svg+xml application/wasm;
gzip_types text/plain text/css text/javascript application/javascript application/json application/transit+json image/svg+xml;
map $http_upgrade $connection_upgrade {
default upgrade;
@@ -145,8 +145,8 @@ http {
proxy_pass http://127.0.0.1:3000/;
}
location /wasm-playground {
alias /home/penpot/penpot/frontend/resources/public/wasm-playground/;
location /playground {
alias /home/penpot/penpot/experiments/;
add_header Cache-Control "no-cache, max-age=0";
autoindex on;
}
@@ -223,7 +223,7 @@ http {
add_header X-Cache-Status $upstream_cache_status;
}
location ~* \.(jpg|png|svg|ttf|woff|woff2|gif)$ {
location ~* \.(jpg|png|svg|ttf|woff|woff2)$ {
add_header Cache-Control "public, max-age=604800" always; # 7 days
}

View File

@@ -0,0 +1,33 @@
#!/usr/bin/env bash
sudo chown penpot:users /home/penpot
cd ~;
source ~/.bashrc
set -e;
echo "[start-tmux.sh] Installing node dependencies"
pushd ~/penpot/exporter/
yarn install
popd
tmux -2 new-session -d -s penpot
tmux rename-window -t penpot:0 'exporter'
tmux select-window -t penpot:0
tmux send-keys -t penpot 'cd penpot/exporter' enter C-l
tmux send-keys -t penpot 'rm -f target/app.js*' enter C-l
tmux send-keys -t penpot 'clojure -M:dev:shadow-cljs watch main' enter
tmux split-window -v
tmux send-keys -t penpot 'cd penpot/exporter' enter C-l
tmux send-keys -t penpot './scripts/wait-and-start.sh' enter
tmux new-window -t penpot:1 -n 'backend'
tmux select-window -t penpot:1
tmux send-keys -t penpot 'cd penpot/backend' enter C-l
tmux send-keys -t penpot './scripts/start-dev' enter
tmux -2 attach-session -t penpot

View File

@@ -8,10 +8,14 @@ source ~/.bashrc
echo "[start-tmux.sh] Installing node dependencies"
pushd ~/penpot/frontend/
./scripts/setup;
corepack install;
yarn install;
yarn playwright install chromium
popd
pushd ~/penpot/exporter/
./scripts/setup;
corepack install;
yarn install
yarn playwright install chromium
popd
tmux -2 new-session -d -s penpot
@@ -19,25 +23,30 @@ tmux -2 new-session -d -s penpot
tmux rename-window -t penpot:0 'frontend watch'
tmux select-window -t penpot:0
tmux send-keys -t penpot 'cd penpot/frontend' enter C-l
tmux send-keys -t penpot './scripts/watch app' enter
tmux send-keys -t penpot 'yarn run watch' enter
tmux new-window -t penpot:1 -n 'frontend storybook'
tmux new-window -t penpot:1 -n 'frontend shadow'
tmux select-window -t penpot:1
tmux send-keys -t penpot 'cd penpot/frontend' enter C-l
tmux send-keys -t penpot './scripts/watch storybook' enter
tmux send-keys -t penpot 'yarn run watch:app' enter
tmux new-window -t penpot:2 -n 'exporter'
tmux new-window -t penpot:2 -n 'frontend storybook'
tmux select-window -t penpot:2
tmux send-keys -t penpot 'cd penpot/frontend' enter C-l
tmux send-keys -t penpot 'yarn run watch:storybook' enter
tmux new-window -t penpot:3 -n 'exporter'
tmux select-window -t penpot:3
tmux send-keys -t penpot 'cd penpot/exporter' enter C-l
tmux send-keys -t penpot 'rm -f target/app.js*' enter C-l
tmux send-keys -t penpot './scripts/watch' enter
tmux send-keys -t penpot 'yarn run watch' enter
tmux split-window -v
tmux send-keys -t penpot 'cd penpot/exporter' enter C-l
tmux send-keys -t penpot './scripts/wait-and-start.sh' enter
tmux new-window -t penpot:3 -n 'backend'
tmux select-window -t penpot:3
tmux new-window -t penpot:4 -n 'backend'
tmux select-window -t penpot:4
tmux send-keys -t penpot 'cd penpot/backend' enter C-l
tmux send-keys -t penpot './scripts/start-dev' enter

View File

@@ -112,6 +112,10 @@ COPY --from=penpotapp/imagemagick:7.1.2-0 /opt/imagick /opt/imagick
WORKDIR /opt/penpot/exporter
USER penpot:penpot
RUN ./setup
RUN set -ex; \
corepack install; \
yarn install; \
yarn run playwright install chromium; \
rm -rf /opt/penpot/.yarn
CMD ["node", "app.js"]

View File

@@ -7,10 +7,8 @@ RUN set -ex; \
useradd -U -M -u 1001 -s /bin/false -d /opt/penpot penpot; \
mkdir -p /opt/data/assets; \
chown -R penpot:penpot /opt/data; \
mkdir -p /etc/nginx/overrides/main.d/; \
mkdir -p /etc/nginx/overrides/http.d/; \
mkdir -p /etc/nginx/overrides/server.d/; \
mkdir -p /etc/nginx/overrides/assets.d/; \
mkdir -p /etc/nginx/overrides/location.d/;
ARG BUNDLE_PATH="./bundle-frontend/"

View File

@@ -130,6 +130,12 @@ services:
environment:
<< : [*penpot-flags, *penpot-public-uri, *penpot-http-body-size, *penpot-secret-key]
## The PREPL host. Mainly used for external programmatic access to the penpot backend
## (example: admin). By default it will listen on `localhost`, but if you are going to use
## the `admin`, you will need to uncomment this and set the host to `0.0.0.0`.
# PENPOT_PREPL_HOST: 0.0.0.0
## Database connection parameters. Don't touch them unless you are using custom
## postgresql connection parameters.
@@ -145,16 +151,16 @@ services:
## Default configuration for assets storage: a filesystem-based backend with all files
## stored in a docker volume.
PENPOT_OBJECTS_STORAGE_BACKEND: fs
PENPOT_OBJECTS_STORAGE_FS_DIRECTORY: /opt/data/assets
PENPOT_ASSETS_STORAGE_BACKEND: assets-fs
PENPOT_STORAGE_ASSETS_FS_DIRECTORY: /opt/data/assets
## It can also be configured to use an S3 compatible storage.
# AWS_ACCESS_KEY_ID: <KEY_ID>
# AWS_SECRET_ACCESS_KEY: <ACCESS_KEY>
# PENPOT_OBJECTS_STORAGE_BACKEND: s3
# PENPOT_OBJECTS_STORAGE_S3_ENDPOINT: <ENDPOINT>
# PENPOT_OBJECTS_STORAGE_S3_BUCKET: <BUCKET_NAME>
# PENPOT_ASSETS_STORAGE_BACKEND: assets-s3
# PENPOT_STORAGE_ASSETS_S3_ENDPOINT: <ENDPOINT>
# PENPOT_STORAGE_ASSETS_S3_BUCKET: <BUCKET_NAME>
## Telemetry. When enabled, a periodical process will send anonymous data about this
## instance. Telemetry data will enable us to learn how the application is used,

View File

@@ -42,11 +42,11 @@ http {
gzip_vary on;
gzip_proxied any;
gzip_static on;
gzip_comp_level 6;
gzip_comp_level 4;
gzip_buffers 16 8k;
gzip_http_version 1.1;
gzip_types text/plain text/css text/javascript application/javascript application/json application/transit+json image/svg+xml application/wasm;
gzip_types text/plain text/css text/javascript application/javascript application/json application/transit+json image/svg+xml;
proxy_buffer_size 16k;
proxy_busy_buffers_size 24k; # essentially, proxy_buffer_size + 2 small buffers of 4k
@@ -110,8 +110,6 @@ http {
recursive_error_pages on;
proxy_intercept_errors on;
error_page 301 302 307 = @handle_redirect;
include /etc/nginx/overrides/assets.d/*.conf;
}
location /internal/assets {
@@ -144,18 +142,25 @@ http {
location / {
include /etc/nginx/overrides/location.d/*.conf;
location ~* \.(js|css|jpg|png|svg|gif|ttf|woff|woff2|wasm|map)$ {
add_header Cache-Control "public, max-age=604800" always; # 7 days
location ~ ^/js/config.js$ {
add_header Cache-Control "no-store, no-cache, max-age=0" always;
}
location ~* \.(js|css|jpg|svg|png|mjs|map)$ {
add_header Cache-Control "max-age=604800" always; # 7 days
}
location ~ ^/(/|css|fonts|images|js|wasm|mjs|map) {
}
location ~ ^/[^/]+/(.*)$ {
return 301 "/404";
}
add_header X-Frame-Options SAMEORIGIN always;
add_header Last-Modified $date_gmt;
add_header Cache-Control "no-store, no-cache, max-age=0" always;
if_modified_since off;
try_files $uri /index.html$is_args$args /index.html =404;
}
}
}

View File

@@ -10,15 +10,16 @@ To view this site locally, first set up the environment:
# only if necessary
nvm install
nvm use
# only if necessary
corepack enable
pnpm install
yarn install
```
And launch a development server:
```sh
pnpm start
yarn start
```
You can then point a browser to [http://localhost:8080](http://localhost:8080).

[8 binary image files not shown (Before sizes: 14 KiB, 14 KiB, 161 KiB, 103 KiB, 67 KiB, 6.0 KiB, 5.8 KiB, 5.2 KiB)]

View File

@@ -39,5 +39,5 @@
"markdown-it-anchor": "^9.0.1",
"markdown-it-plantuml": "^1.4.1"
},
"packageManager": "pnpm@10.28.0+sha512.05df71d1421f21399e053fde567cea34d446fa02c76571441bfc1c7956e98e363088982d940465fd34480d4d90a0668bc12362f8aa88000a64e83d0b0e47be48"
"packageManager": "yarn@4.3.1"
}

2065
docs/pnpm-lock.yaml generated
View File

File diff suppressed because it is too large.

View File

@@ -1,10 +1,8 @@
#!/usr/bin/env bash
source ~/.bashrc
set -ex
corepack enable;
corepack install;
rm -rf ./_dist
pnpm install
pnpm run build
yarn
yarn run build

View File

@@ -114,7 +114,14 @@ configuration.
The callback has the following format:
```html
https://<your_domain>/api/auth/oidc/callback
https://<your_domain>/api/auth/oauth/<oauth_provider>/callback
```
You will need to change <your_domain> and <oauth_provider> according to your setup.
This is how it looks with the Gitlab provider:
```html
https://<your_domain>/api/auth/oauth/gitlab/callback
```
#### Google

View File

@@ -281,8 +281,8 @@ for how to define custom metadata and other ways of selecting tests.
it, but for now we use shadow-cljs with <code class="language-text">package.json</code> scripts:
```bash
pnpm run test
pnpm run test:watch
yarn run test
yarn run test:watch
```
#### Test output

Some files were not shown because too many files have changed in this diff.