Compare commits


57 Commits

Author SHA1 Message Date
Alejandro Alonso
2ded3211b5 🐛 Fix using cache on first zoom after pan 2025-12-30 09:49:12 +01:00
Alejandro Alonso
0bcec53ad3 🐛 Detecting situations where WebGL context is lost or no WebGL support 2025-12-30 09:47:48 +01:00
Alejandro Alonso
5e8d46e47b 🎉 Resize cache only when required 2025-12-30 09:47:48 +01:00
Alejandro Alonso
cc43f4c1af 🐛 Fix resize cache memory leak 2025-12-30 09:47:48 +01:00
alonso.torres
032c2c5edb 🐛 Fix problem when changing colors with multiple fonts 2025-12-30 09:47:48 +01:00
Alejandro Alonso
8df488d0b7 🐛 Fix object added in different page (#7988) 2025-12-30 09:47:48 +01:00
Alejandro Alonso
44df4aca48 🐛 Fix unmasking shapes (#7989) 2025-12-30 09:47:48 +01:00
Alonso Torres
c7b47f5d98 🐛 Fix font weight token (#7991) 2025-12-30 09:47:48 +01:00
Alejandro Alonso
1e2f87e8f7 🐛 Fix comment bubbles (#7990) 2025-12-30 09:47:48 +01:00
Belén Albeza
6586ab79e6 🐛 Fix text editor not getting focus back after font variant change 2025-12-30 09:47:48 +01:00
alonso.torres
5497fa2a66 🐛 Fix problems with alignments and margins 2025-12-30 09:47:48 +01:00
alonso.torres
afa43e35f4 🐛 Fix problem with flex fill size distribution 2025-12-30 09:47:48 +01:00
alonso.torres
33b8d4b04b 🐛 Fix problem with reflow layout 2025-12-30 09:47:48 +01:00
Alejandro Alonso
e9fd384d94 🐛 Fix too many active WEBGL contexts 2025-12-30 09:47:48 +01:00
alonso.torres
5561946a0a 🐛 Fix problem with border radius to path 2025-12-30 09:47:48 +01:00
Belén Albeza
153452a40c ♻️ Make SerializableResult depend on From traits 2025-12-30 09:47:48 +01:00
alonso.torres
66aea88457 Calculate position data in wasm 2025-12-30 09:47:47 +01:00
Elena Torro
3493429b4e 🐛 Set layout data from set-object 2025-12-30 09:47:47 +01:00
Alejandro Alonso
886614ecab 🐛 Fix svg extract ids 2025-12-30 09:47:47 +01:00
Belén Albeza
fd9b4431f3 🐛 Fix text selection not being restored if it was only 1 word 2025-12-30 09:47:47 +01:00
Belén Albeza
38ffc809f2 🔧 Add formatting rules to the TextEditor 2025-12-30 09:47:47 +01:00
Alejandro Alonso
9b9dd57050 🎉 Improve svg import 2025-12-30 09:47:47 +01:00
Aitor Moreno
f4f3d9d8c3 🐛 Fix font-variant-id mixed value 2025-12-30 09:47:47 +01:00
Elena Torro
6eeec08331 🔧 Log performance when building using profile-macros 2025-12-30 09:47:47 +01:00
Elena Torro
b5da73f957 🔧 Rebuild indices on zoom change, not pan 2025-12-30 09:47:47 +01:00
Elena Torro
3fc6a70da6 🔧 Skip slow operations on fast render 2025-12-30 09:47:47 +01:00
Elena Torro
f3a442cf24 🔧 Support variants interactivity on the new render's UI 2025-12-30 09:47:47 +01:00
Elena Torro
cae9afc5b7 🐛 Fix default case on vertical align 2025-12-30 09:47:47 +01:00
Elena Torro
084c5141a2 🔧 Fix line height calculation 2025-12-30 09:47:47 +01:00
alonso.torres
422be8f4f5 🐛 Fix problem when exporting texts 2025-12-30 09:47:47 +01:00
Belén Albeza
28682e4c5c 🐛 Fix internal error while importing a library 2025-12-30 09:47:47 +01:00
Elena Torro
ecc63e5cec 🔧 Update rendering settings to smooth render 2025-12-30 09:47:47 +01:00
Aitor Moreno
be7d91a5c4 🐛 Fix applyStylesTo entire selection 2025-12-30 09:47:47 +01:00
Elena Torro
b2736be858 🐛 Fix italic variant 2025-12-30 09:47:47 +01:00
Elena Torro
3f4b7c5fcc 🐛 Do not merge fill styles 2025-12-30 09:47:47 +01:00
Elena Torro
f23724ddfe 🐛 Fix selectAll on mixed span styles 2025-12-30 09:47:47 +01:00
Elena Torro
bfb6d63a6d 🐛 Fix merge fill styles when there are multiple fills 2025-12-30 09:47:47 +01:00
alonso.torres
0fadcde413 🐛 Fix race condition with fix fonts patch 2025-12-30 09:47:47 +01:00
alonso.torres
95bf311c24 🐛 Fix race condition with text and type 2025-12-30 09:47:47 +01:00
alonso.torres
cbba2c6de1 🐛 Fix problem with boolean shapes updates 2025-12-30 09:47:45 +01:00
Belén Albeza
d4e09280ca 🐛 Fix viewport not being fully drawn on first load until a mouse hover 2025-12-30 09:46:48 +01:00
alonso.torres
307920168a 🐛 Fix problem with reordering layers 2025-12-30 09:46:48 +01:00
alonso.torres
5cbed12763 🐛 Fix outline with single click text creation 2025-12-30 09:46:48 +01:00
Elena Torro
bcbbbd0d5d 🐛 Fix create empty text on click regression 2025-12-30 09:46:48 +01:00
Aitor Moreno
4e33112e2c 🐛 Fix letter spacing applied to paragraph 2025-12-30 09:46:48 +01:00
alonso.torres
0349cc1859 🐛 Fix visual feedback on padding/margin/gaps modified 2025-12-30 09:46:48 +01:00
Aitor Moreno
922587bc9e Add text editor v2 integration tests 2025-12-30 09:46:48 +01:00
Elena Torro
d545de17a6 🔧 Normalize font attributes to support old formats 2025-12-30 09:46:48 +01:00
Alejandro Alonso
d30579aedb 🐛 Fix nested shadows clipping 2025-12-30 09:46:48 +01:00
alonso.torres
5db9b173c4 🐛 Fix problem with auto-size and element margins 2025-12-30 09:46:48 +01:00
alonso.torres
851d7d414a 🐛 Fix problem with grid layout editor 2025-12-30 09:46:48 +01:00
alonso.torres
af5b672c3e 🐛 Fix problems with flex layout in new render 2025-12-30 09:46:48 +01:00
alonso.torres
ce8aeb7028 🐛 Fix crash when cleanup 2025-12-30 09:46:48 +01:00
alonso.torres
b023184394 🐛 Fix problem with change gap/margin/padding 2025-12-30 09:46:48 +01:00
Belén Albeza
6ddce5bcba 🐛 Fix mismatch between fonts for rendered and selected text when no fallback fonts apply 2025-12-30 09:46:48 +01:00
Belén Albeza
d9680ea159 Fix playwright tests 2025-12-30 09:46:48 +01:00
Belén Albeza
56e956644c 🔧 Update google fonts list 2025-12-30 09:46:47 +01:00
914 changed files with 59735 additions and 112232 deletions

.circleci/config.yml (Normal file, 305 changed lines)

@@ -0,0 +1,305 @@
version: 2.1
jobs:
lint:
docker:
- image: penpotapp/devenv:latest
working_directory: ~/repo
resource_class: medium+
steps:
- checkout
- run:
name: "fmt check"
working_directory: "."
command: |
yarn install
yarn run fmt:clj:check
- run:
name: "lint clj common"
working_directory: "."
command: |
yarn run lint:clj:common
- run:
name: "lint clj frontend"
working_directory: "."
command: |
yarn run lint:clj:frontend
- run:
name: "lint clj backend"
working_directory: "."
command: |
yarn run lint:clj:backend
- run:
name: "lint clj exporter"
working_directory: "."
command: |
yarn run lint:clj:exporter
- run:
name: "lint clj library"
working_directory: "."
command: |
yarn run lint:clj:library
test-common:
docker:
- image: penpotapp/devenv:latest
working_directory: ~/repo
resource_class: medium+
environment:
JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
NODE_OPTIONS: --max-old-space-size=4096
steps:
- checkout
# Download and cache dependencies
- restore_cache:
keys:
- v1-dependencies-{{ checksum "common/deps.edn"}}-{{ checksum "common/yarn.lock" }}
- run:
name: "JVM tests"
working_directory: "./common"
command: |
clojure -M:dev:test
- run:
name: "NODE tests"
working_directory: "./common"
command: |
yarn install
yarn run test
- save_cache:
paths:
- ~/.m2
- ~/.yarn
- ~/.gitlibs
- ~/.cache/ms-playwright
key: v1-dependencies-{{ checksum "common/deps.edn"}}-{{ checksum "common/yarn.lock" }}
test-frontend:
docker:
- image: penpotapp/devenv:latest
working_directory: ~/repo
resource_class: medium+
environment:
JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
NODE_OPTIONS: --max-old-space-size=4096
steps:
- checkout
# Download and cache dependencies
- restore_cache:
keys:
- v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}
- run:
name: "install dependencies"
working_directory: "./frontend"
# We install Playwright here because the dependent tasks
# use the same cache as this task, so we prepopulate it
command: |
yarn install
yarn run playwright install chromium --with-deps
- run:
name: "lint scss on frontend"
working_directory: "./frontend"
command: |
yarn run lint:scss
- run:
name: "unit tests"
working_directory: "./frontend"
command: |
yarn run test
- save_cache:
paths:
- ~/.m2
- ~/.yarn
- ~/.gitlibs
- ~/.cache/ms-playwright
key: v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}
test-library:
docker:
- image: penpotapp/devenv:latest
working_directory: ~/repo
resource_class: medium+
environment:
JAVA_OPTS: -Xmx6g
NODE_OPTIONS: --max-old-space-size=4096
steps:
- checkout
# Download and cache dependencies
- restore_cache:
keys:
- v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}
- run:
name: Install dependencies and build
working_directory: "./library"
command: |
yarn install
- run:
name: Build and Test
working_directory: "./library"
command: |
./scripts/build
yarn run test
test-components:
docker:
- image: penpotapp/devenv:latest
working_directory: ~/repo
resource_class: medium+
environment:
JAVA_OPTS: -Xmx6g -Xms2g
NODE_OPTIONS: --max-old-space-size=4096
steps:
- checkout
# Download and cache dependencies
- restore_cache:
keys:
- v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}
- run:
name: Install dependencies
working_directory: "./frontend"
command: |
yarn install
yarn run playwright install chromium
- run:
name: Build Storybook
working_directory: "./frontend"
command: yarn run build:storybook
- run:
name: Serve Storybook and run tests
working_directory: "./frontend"
command: |
npx concurrently -k -s first -n "SB,TEST" -c "magenta,blue" \
"npx http-server storybook-static --port 6006 --silent" \
"npx wait-on tcp:6006 && yarn test:storybook"
test-backend:
docker:
- image: penpotapp/devenv:latest
- image: cimg/postgres:14.5
environment:
POSTGRES_USER: penpot_test
POSTGRES_PASSWORD: penpot_test
POSTGRES_DB: penpot_test
- image: cimg/redis:7.0.5
working_directory: ~/repo
resource_class: medium+
environment:
JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
NODE_OPTIONS: --max-old-space-size=4096
steps:
- checkout
- restore_cache:
keys:
- v1-dependencies-{{ checksum "backend/deps.edn" }}
- run:
name: "tests"
working_directory: "./backend"
command: |
clojure -M:dev:test --reporter kaocha.report/documentation
environment:
PENPOT_TEST_DATABASE_URI: "postgresql://localhost/penpot_test"
PENPOT_TEST_DATABASE_USERNAME: penpot_test
PENPOT_TEST_DATABASE_PASSWORD: penpot_test
PENPOT_TEST_REDIS_URI: "redis://localhost/1"
- save_cache:
paths:
- ~/.m2
- ~/.gitlibs
key: v1-dependencies-{{ checksum "backend/deps.edn" }}
test-render-wasm:
docker:
- image: penpotapp/devenv:latest
working_directory: ~/repo
resource_class: medium+
environment:
steps:
- checkout
- run:
name: "fmt check"
working_directory: "./render-wasm"
command: |
cargo fmt --check
- run:
name: "lint"
working_directory: "./render-wasm"
command: |
./lint
- run:
name: "cargo tests"
working_directory: "./render-wasm"
command: |
./test
workflows:
penpot:
jobs:
- test-frontend:
requires:
- lint: success
- test-library:
requires:
- lint: success
- test-components:
requires:
- lint: success
- test-backend:
requires:
- lint: success
- test-common:
requires:
- lint: success
- lint
- test-render-wasm


@@ -1,38 +0,0 @@
---
name: New Render Bug Report
about: Create a report about the bugs you have found in the new render
title: ''
labels: new render
assignees: claragvinola
---
**Describe the bug**
A clear and concise description of what the bug is.
**Steps to Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots or screen recordings**
If applicable, add screenshots or screen recording to help illustrate your problem.
**Desktop (please complete the following information):**
- OS: [e.g. iOS]
- Browser [e.g. chrome, safari]
- Version [e.g. 22]
**Smartphone (please complete the following information):**
- Device: [e.g. iPhone6]
- OS: [e.g. iOS8.1]
- Browser [e.g. stock browser, safari]
- Version [e.g. 22]
**Additional context**
Add any other context about the problem here.


@@ -40,7 +40,7 @@ on:
jobs:
build-bundle:
name: Build and Upload Penpot Bundle
runs-on: penpot-runner-01
runs-on: ubuntu-24.04
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}


@@ -7,14 +7,9 @@ jobs:
build-and-push:
name: Build and push DevEnv Docker image
environment: release-admins
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
steps:
- name: Set common environment variables
run: |
# Each job execution will use its own docker configuration.
echo "DOCKER_CONFIG=${{ runner.temp }}/.docker-${{ github.run_id }}-${{ github.job }}" >> $GITHUB_ENV
- name: Checkout code
uses: actions/checkout@v4


@@ -19,14 +19,9 @@ on:
jobs:
build-and-push:
name: Build and Push Penpot Docker Images
runs-on: penpot-runner-02
runs-on: ubuntu-24.04-arm
steps:
- name: Set common environment variables
run: |
# Each job execution will use its own docker configuration.
echo "DOCKER_CONFIG=${{ runner.temp }}/.docker-${{ github.run_id }}-${{ github.job }}" >> $GITHUB_ENV
- name: Checkout code
uses: actions/checkout@v4
with:
@@ -71,15 +66,6 @@ jobs:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
# To avoid the “429 Too Many Requests” error when downloading
# images from DockerHub for unregistered users.
# https://docs.docker.com/docker-hub/usage/
- name: Login to DockerHub Registry
uses: docker/login-action@v3
with:
username: ${{ secrets.PUB_DOCKER_USERNAME }}
password: ${{ secrets.PUB_DOCKER_PASSWORD }}
- name: Extract metadata (tags, labels)
id: meta
uses: docker/metadata-action@v5


@@ -1,21 +0,0 @@
name: _NITRATE MODULE
on:
schedule:
- cron: '36 5-20 * * 1-5'
jobs:
build-bundle:
uses: ./.github/workflows/build-bundle.yml
secrets: inherit
with:
gh_ref: "nitrate-module"
build_wasm: "yes"
build_storybook: "yes"
build-docker:
needs: build-bundle
uses: ./.github/workflows/build-docker.yml
secrets: inherit
with:
gh_ref: "nitrate-module"


@@ -1,14 +0,0 @@
name: _STAGING RENDER
on:
schedule:
- cron: '36 5-20 * * 1-5'
jobs:
build-bundle:
uses: ./.github/workflows/build-bundle.yml
secrets: inherit
with:
gh_ref: "staging-render"
build_wasm: "yes"
build_storybook: "yes"


@@ -33,7 +33,7 @@ jobs:
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
MATTERMOST_CHANNEL: bot-alerts-cicd
TEXT: |
🐳 *[PENPOT] Docker image available: ${{ github.ref_name }}*
🐳 *[PENPOT] Docker image available.*
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
@infra


@@ -26,7 +26,7 @@ jobs:
- name: Check Commit Type
uses: gsactions/commit-message-checker@v2
with:
pattern: '^(((:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle|rewind|construction_worker):)\s[A-Z].*[^.])|(Merge|Revert|Reapply).+[^.])$'
pattern: '^(((:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle|rewind|construction_worker):)\s[A-Z].*[^.])|(Merge|Revert).+[^.])$'
flags: 'gm'
error: 'Commit should match CONTRIBUTING.md guideline'
checkAllCommitMessages: 'true' # optional: this checks all commits associated with a pull request


@@ -1,113 +0,0 @@
name: Plugins/api-doc deployer
on:
push:
branches:
- develop
- staging
- main
paths:
- "plugins/libs/plugin-types/index.d.ts"
- "plugins/libs/plugin-types/REAME.md"
- "plugins/tools/typedoc.css"
- "plugins/CHANGELOG.md"
- "plugins/wrangler-penpot-plugins-api-doc.toml"
workflow_dispatch:
inputs:
gh_ref:
description: 'Name of the branch'
type: choice
required: true
default: 'develop'
options:
- develop
- staging
- main
permissions:
contents: read
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- name: Extract some useful variables
id: vars
run: |
echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
ref: ${{ steps.vars.outputs.gh_ref }}
# START: Setup Node and PNPM enabling cache
- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version-file: .nvmrc
- name: Enable PNPM
working-directory: ./plugins
shell: bash
run: |
corepack enable;
corepack install;
- name: Get pnpm store path
id: pnpm-store
working-directory: ./plugins
shell: bash
run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT
- name: Cache pnpm store
uses: actions/cache@v4
with:
path: ${{ steps.pnpm-store.outputs.STORE_PATH }}
key: ${{ runner.os }}-pnpm-${{ hashFiles('plugins/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-
# END: Setup Node and PNPM enabling cache
- name: Install deps
working-directory: ./plugins
shell: bash
run: |
pnpm install --no-frozen-lockfile;
pnpm add -D -w wrangler@latest;
- name: Build docs
working-directory: plugins
shell: bash
run: pnpm run build:doc
- name: Select Worker name
run: |
REF="${{ steps.vars.outputs.gh_ref }}"
case "$REF" in
main) echo "WORKER_NAME=penpot-plugins-api-doc-pro" >> $GITHUB_ENV ;;
staging) echo "WORKER_NAME=penpot-plugins-api-doc-pre" >> $GITHUB_ENV ;;
develop) echo "WORKER_NAME=penpot-plugins-api-doc-hourly" >> $GITHUB_ENV ;;
*) echo "Unsupported branch ${REF}" && exit 1 ;;
esac
- name: Deploy to Cloudflare Workers
uses: cloudflare/wrangler-action@v3
with:
workingDirectory: plugins
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: deploy --config wrangler-penpot-plugins-api-doc.toml --name ${{ env.WORKER_NAME }}
- name: Notify Mattermost
if: failure()
uses: mattermost/action-mattermost-notify@master
with:
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
MATTERMOST_CHANNEL: bot-alerts-cicd
TEXT: |
❌ 🧩📚 *[PENPOT PLUGINS] Error deploying API documentation.*
📄 Triggered from ref: `${{ inputs.gh_ref }}`
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
@infra


@@ -1,127 +0,0 @@
name: Plugins/package deployer
on:
# Deploy package from manual action
workflow_dispatch:
inputs:
gh_ref:
description: 'Name of the branch'
type: choice
required: true
default: 'develop'
options:
- develop
- staging
- main
plugin_name:
description: 'Plugin name (like plugins/apps/<plugin_name>-plugin)'
type: string
required: true
workflow_call:
inputs:
gh_ref:
description: 'Name of the branch'
type: string
required: true
default: 'develop'
plugin_name:
description: 'Plugin name (from plugins/apps/<plugin_name>-plugin)'
type: string
required: true
permissions:
contents: read
jobs:
deploy:
runs-on: penpot-runner-01
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
ref: ${{ inputs.gh_ref }}
# START: Setup Node and PNPM enabling cache
- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version-file: .nvmrc
- name: Enable PNPM
working-directory: ./plugins
shell: bash
run: |
corepack enable;
corepack install;
- name: Get pnpm store path
id: pnpm-store
working-directory: ./plugins
shell: bash
run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT
- name: Cache pnpm store
uses: actions/cache@v4
with:
path: ${{ steps.pnpm-store.outputs.STORE_PATH }}
key: ${{ runner.os }}-pnpm-${{ hashFiles('plugins/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-
# END: Setup Node and PNPM enabling cache
- name: Install deps
working-directory: ./plugins
shell: bash
run: |
pnpm install --no-frozen-lockfile;
pnpm add -D -w wrangler@latest;
- name: "Build package for ${{ inputs.plugin_name }}-plugin"
working-directory: plugins
shell: bash
run: npx nx build ${{ inputs.plugin_name }}-plugin
- name: Select Worker name
run: |
REF="${{ inputs.gh_ref }}"
case "$REF" in
main)
echo "WORKER_NAME=${{ inputs.plugin_name }}-plugin-pro" >> $GITHUB_ENV
echo "WORKER_URI=${{ inputs.plugin_name }}.plugins.penpot.app" >> $GITHUB_ENV ;;
staging)
echo "WORKER_NAME=${{ inputs.plugin_name }}-plugin-pre" >> $GITHUB_ENV
echo "WORKER_URI=${{ inputs.plugin_name }}.plugins.penpot.dev" >> $GITHUB_ENV ;;
develop)
echo "WORKER_NAME=${{ inputs.plugin_name }}-plugin-hourly" >> $GITHUB_ENV
echo "WORKER_URI=${{ inputs.plugin_name }}.plugins.hourly.penpot.dev" >> $GITHUB_ENV ;;
*) echo "Unsupported branch ${REF}" && exit 1 ;;
esac
- name: Set the custom url
working-directory: plugins
shell: bash
run: |
sed -i "s/WORKER_URI/${{ env.WORKER_URI }}/g" apps/${{ inputs.plugin_name }}-plugin/wrangler.toml
- name: Deploy to Cloudflare Workers
uses: cloudflare/wrangler-action@v3
with:
workingDirectory: plugins
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: deploy --config apps/${{ inputs.plugin_name }}-plugin/wrangler.toml --name ${{ env.WORKER_NAME }}
- name: Notify Mattermost
if: failure()
uses: mattermost/action-mattermost-notify@master
with:
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
MATTERMOST_CHANNEL: bot-alerts-cicd
TEXT: |
❌ 🧩📦 *[PENPOT PLUGINS] Error deploying ${{ env.WORKER_NAME }}.*
📄 Triggered from ref: `${{ inputs.gh_ref }}`
Plugin name: `${{ inputs.plugin_name }}-plugin`
Cloudflare worker name: `${{ env.WORKER_NAME }}`
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
@infra


@@ -1,143 +0,0 @@
name: Plugins/packages deployer
on:
push:
branches:
- develop
- staging
- main
paths:
- 'plugins/apps/*-plugin/**'
- 'libs/plugins-styles/**'
workflow_dispatch:
inputs:
gh_ref:
description: 'Name of the branch'
type: choice
required: true
default: 'develop'
options:
- develop
- staging
- main
jobs:
detect-changes:
runs-on: ubuntu-latest
outputs:
colors_to_tokens: ${{ steps.filter.outputs.colors_to_tokens }}
create_palette: ${{ steps.filter.outputs.create_palette }}
lorem_ipsum: ${{ steps.filter.outputs.lorem_ipsum }}
rename_layers: ${{ steps.filter.outputs.rename_layers }}
contrast: ${{ steps.filter.outputs.contrast }}
icons: ${{ steps.filter.outputs.icons }}
poc_state: ${{ steps.filter.outputs.poc_state }}
table: ${{ steps.filter.outputs.table }}
# [For new plugins]
# Add more outputs here
steps:
- uses: actions/checkout@v4
- id: filter
uses: dorny/paths-filter@v3
with:
filters: |
colors_to_tokens:
- 'plugins/apps/colors-to-tokens-plugin/**'
- 'libs/plugins-styles/**'
contrast:
- 'plugins/apps/contrast-plugin/**'
- 'libs/plugins-styles/**'
create_palette:
- 'plugins/apps/create-palette-plugin/**'
- 'libs/plugins-styles/**'
icons:
- 'plugins/apps/icons-plugin/**'
- 'libs/plugins-styles/**'
lorem_ipsum:
- 'plugins/apps/lorem-ipsum-plugin/**'
- 'libs/plugins-styles/**'
rename_layers:
- 'plugins/apps/rename-layers-plugin/**'
- 'libs/plugins-styles/**'
table:
- 'plugins/apps/table-plugin/**'
- 'libs/plugins-styles/**'
# [For new plugins]
# Add more plugin filters here
# another_plugin:
# - 'plugins/apps/another-plugin/**'
# - 'libs/plugins-styles/**'
colors-to-tokens-plugin:
needs: detect-changes
if: needs.detect-changes.outputs.colors_to_tokens == 'true'
uses: ./.github/workflows/plugins-deploy-package.yml
secrets: inherit
with:
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
plugin_name: colors-to-tokens
contrast-plugin:
needs: detect-changes
if: needs.detect-changes.outputs.contrast == 'true'
uses: ./.github/workflows/plugins-deploy-package.yml
secrets: inherit
with:
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
plugin_name: contrast
create-palette-plugin:
needs: detect-changes
if: needs.detect-changes.outputs.create_palette == 'true'
uses: ./.github/workflows/plugins-deploy-package.yml
secrets: inherit
with:
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
plugin_name: create-palette
icons-plugin:
needs: detect-changes
if: needs.detect-changes.outputs.icons == 'true'
uses: ./.github/workflows/plugins-deploy-package.yml
secrets: inherit
with:
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
plugin_name: icons
lorem-ipsum-plugin:
needs: detect-changes
if: needs.detect-changes.outputs.lorem_ipsum == 'true'
uses: ./.github/workflows/plugins-deploy-package.yml
secrets: inherit
with:
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
plugin_name: lorem-ipsum
rename-layers-plugin:
needs: detect-changes
if: needs.detect-changes.outputs.rename_layers == 'true'
uses: ./.github/workflows/plugins-deploy-package.yml
secrets: inherit
with:
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
plugin_name: rename-layers
table-plugin:
needs: detect-changes
if: needs.detect-changes.outputs.table == 'true'
uses: ./.github/workflows/plugins-deploy-package.yml
secrets: inherit
with:
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
plugin_name: table
# [For new plugins]
# Add more jobs for other plugins below, following the same pattern
# another-plugin:
# needs: detect-changes
# if: needs.detect-changes.outputs.another_plugin == 'true'
# uses: ./.github/workflows/plugins-deploy-package.yml
# secrets: inherit
# with:
# gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
# plugin_name: another


@@ -21,7 +21,7 @@ concurrency:
jobs:
lint:
name: "Linter"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
@@ -34,7 +34,7 @@ jobs:
test-common:
name: "Common Tests"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
@@ -51,55 +51,9 @@ jobs:
run: |
./scripts/test
test-plugins:
name: Plugins Runtime Linter & Tests
runs-on: penpot-runner-02
container: penpotapp/devenv:latest
steps:
- uses: actions/checkout@v4
- name: Setup Node
id: setup-node
uses: actions/setup-node@v6
with:
node-version-file: .nvmrc
- name: Install deps
working-directory: ./plugins
shell: bash
run: |
corepack enable;
corepack install;
pnpm install;
- name: Run Lint
working-directory: ./plugins
run: pnpm run lint
- name: Run Format Check
working-directory: ./plugins
run: pnpm run format:check
- name: Run Test
working-directory: ./plugins
run: pnpm run test
- name: Build runtime
working-directory: ./plugins
run: pnpm run build
- name: Build plugins
working-directory: ./plugins
run: pnpm run build:plugins
- name: Build styles
working-directory: ./plugins
run: pnpm run build:styles-example
test-frontend:
name: "Frontend Tests"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
@@ -113,14 +67,12 @@ jobs:
- name: Component Tests
working-directory: ./frontend
env:
VITEST_BROWSER_TIMEOUT: 120000
run: |
./scripts/test-components
test-render-wasm:
name: "Render WASM Tests"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
@@ -144,7 +96,7 @@ jobs:
test-backend:
name: "Backend Tests"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
services:
@@ -183,7 +135,7 @@ jobs:
test-library:
name: "Library Tests"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
@@ -197,7 +149,7 @@ jobs:
build-integration:
name: "Build Integration Bundle"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
@@ -207,7 +159,17 @@ jobs:
- name: Build Bundle
working-directory: ./frontend
run: |
./scripts/build 0.0.0
corepack enable;
corepack install;
yarn install
yarn run build:app:assets
yarn run build:app
yarn run build:app:libs
- name: Build WASM
working-directory: "./render-wasm"
run: |
./build release
- name: Store Bundle Cache
uses: actions/cache@v4
@@ -215,10 +177,9 @@ jobs:
key: "integration-bundle-${{ github.sha }}"
path: frontend/resources/public
test-integration-1:
name: "Integration Tests 1/4"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
needs: build-integration
@@ -248,7 +209,7 @@ jobs:
test-integration-2:
name: "Integration Tests 2/4"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
needs: build-integration
@@ -278,7 +239,7 @@ jobs:
test-integration-3:
name: "Integration Tests 3/4"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
needs: build-integration
@@ -308,7 +269,7 @@ jobs:
test-integration-4:
name: "Integration Tests 4/4"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
needs: build-integration

.gitignore (vendored, 3 changed lines)

@@ -5,7 +5,6 @@
!.yarn/releases
!.yarn/sdks
!.yarn/versions
.pnpm-store
*-init.clj
*.css.json
*.jar
@@ -54,8 +53,6 @@
/exporter/target
/frontend/.storybook/preview-body.html
/frontend/.storybook/preview-head.html
/frontend/playwright-report/
/frontend/text-editor/src/wasm/
/frontend/dist/
/frontend/npm-debug.log
/frontend/out/

.gitpod.yml (Normal file, 105 changed lines)

@@ -0,0 +1,105 @@
image:
file: docker/gitpod/Dockerfile
ports:
# nginx
- port: 3449
onOpen: open-preview
# frontend nREPL
- port: 3447
onOpen: ignore
visibility: private
# frontend shadow server
- port: 3448
onOpen: ignore
visibility: private
# backend
- port: 6060
onOpen: ignore
# exporter shadow server
- port: 9630
onOpen: ignore
visibility: private
# exporter http server
- port: 6061
onOpen: ignore
# mailhog web interface
- port: 8025
onOpen: ignore
# mailhog postfix
- port: 1025
onOpen: ignore
# postgres
- port: 5432
onOpen: ignore
# redis
- port: 6379
onOpen: ignore
# openldap
- port: 389
onOpen: ignore
tasks:
# https://github.com/gitpod-io/gitpod/issues/666#issuecomment-534347856
- name: gulp
command: >
cd $GITPOD_REPO_ROOT/frontend/;
yarn && gp sync-done 'frontend-yarn';
npx gulp --theme=${PENPOT_THEME} watch
- name: frontend shadow watch
command: >
cd $GITPOD_REPO_ROOT/frontend/;
gp sync-await 'frontend-yarn';
npx shadow-cljs watch main
- init: gp await-port 5432 && psql -f $GITPOD_REPO_ROOT/docker/gitpod/files/postgresql_init.sql
name: backend
command: >
cd $GITPOD_REPO_ROOT/backend/;
./scripts/start-dev
- name: exporter shadow watch
command:
cd $GITPOD_REPO_ROOT/exporter/;
gp sync-await 'frontend-yarn';
yarn && npx shadow-cljs watch main
- name: exporter web server
command: >
cd $GITPOD_REPO_ROOT/exporter/;
./scripts/wait-and-start.sh
- name: signed terminal
before: >
[[ ! -z ${GNUGPG} ]] &&
cd ~ &&
rm -rf .gnupg &&
echo ${GNUGPG} | base64 -d | tar --no-same-owner -xzvf -
init: >
[[ ! -z ${GNUGPG_KEY} ]] &&
git config --global commit.gpgsign true &&
git config --global user.signingkey ${GNUGPG_KEY}
command: cd $GITPOD_REPO_ROOT
- name: redis
command: redis-server
- before: go get github.com/mailhog/MailHog
name: mailhog
command: MailHog
- name: Nginx
command: >
nginx &&
multitail /var/log/nginx/access.log -I /var/log/nginx/error.log

.nvmrc (2 changed lines)

@@ -1 +1 @@
v22.21.1
v22.19.0

.travis.yml (Normal file, 40 changed lines)

@@ -0,0 +1,40 @@
dist: xenial
language: generic
sudo: required
cache:
directories:
- $HOME/.m2
services:
- docker
branches:
only:
- master
- develop
install:
- curl -O https://download.clojure.org/install/linux-install-1.10.1.447.sh
- chmod +x linux-install-1.10.1.447.sh
- sudo ./linux-install-1.10.1.447.sh
before_script:
- env | sort
script:
- ./manage.sh build-devenv
- ./manage.sh run-frontend-tests
- ./manage.sh run-backend-tests
- ./manage.sh build-images
- ./manage.sh run
after_script:
- docker images
notifications:
email: false
env:
- NODE_VERSION=10.16.0

.yarnrc.yml (Normal file, 11 changed lines)

@@ -0,0 +1,11 @@
enableGlobalCache: true
enableImmutableCache: false
enableImmutableInstalls: false
enableTelemetry: false
httpTimeout: 600000
nodeLinker: node-modules


@@ -1,84 +1,5 @@
# CHANGELOG
## 2.14.0 (Unreleased)
### :boom: Breaking changes & Deprecations
### :rocket: Epics and highlights
### :heart: Community contributions (Thank you!)
### :sparkles: New features & Enhancements
- Remap references when renaming tokens [Taiga #10202](https://tree.taiga.io/project/penpot/us/10202)
- Tokens panel nested path view [Taiga #9966](https://tree.taiga.io/project/penpot/us/9966)
- Improve usability of lock and hide buttons in the layer panel. [Taiga #12916](https://tree.taiga.io/project/penpot/issue/12916)
- Optimize sidebar performance for deeply nested shapes [Taiga #13017](https://tree.taiga.io/project/penpot/task/13017)
- Remove tokens path node and bulk remove tokens [Taiga #13007](https://tree.taiga.io/project/penpot/us/13007)
- Replace themes management modal radio buttons for switches [Taiga #9215](https://tree.taiga.io/project/penpot/us/9215)
### :bug: Bugs fixed
- Remove whitespaces from asset export filename [Github #8133](https://github.com/penpot/penpot/pull/8133)
- Fix prototype connections lost when switching between variants [Taiga #12812](https://tree.taiga.io/project/penpot/issue/12812)
- Fix wrong image in the onboarding invitation block [Taiga #13040](https://tree.taiga.io/project/penpot/issue/13040)
- Fix wrong register image [Taiga #12955](https://tree.taiga.io/project/penpot/task/12955)
- Fix error message on components doesn't close automatically [Taiga #12012](https://tree.taiga.io/project/penpot/issue/12012)
- Fix incorrect handling of input values on layout gap and padding inputs [Github #8113](https://github.com/penpot/penpot/issues/8113)
- Fix incorrect default option on tokens import dialog [Github #8051](https://github.com/penpot/penpot/pull/8051)
- Fix unhandled exception tokens creation dialog [Github #8110](https://github.com/penpot/penpot/issues/8110)
- Fix displaying a hidden user avatar when there is only one more [Taiga #13058](https://tree.taiga.io/project/penpot/issue/13058)
- Fix unhandled exception on open-new-window helper [Github #7787](https://github.com/penpot/penpot/issues/7787)
- Fix exception on uploading large fonts [Github #8135](https://github.com/penpot/penpot/pull/8135)
## 2.13.0 (Unreleased)
### :boom: Breaking changes & Deprecations
### :rocket: Epics and highlights
### :heart: Community contributions (Thank you!)
- Fix mask issues with component swap (by @dfelinto) [Github #7675](https://github.com/penpot/penpot/issues/7675)
### :sparkles: New features & Enhancements
- Add new Box Shadow Tokens [Taiga #10201](https://tree.taiga.io/project/penpot/us/10201)
- Make i18n translation files load on-demand [Taiga #11474](https://tree.taiga.io/project/penpot/us/11474)
- Add deleted files to dashboard [Taiga #8149](https://tree.taiga.io/project/penpot/us/8149)
### :bug: Bugs fixed
- Fix problem when drag+duplicate a full grid [Taiga #12565](https://tree.taiga.io/project/penpot/issue/12565)
- Fix problem when pasting elements in reverse flex layout [Taiga #12460](https://tree.taiga.io/project/penpot/issue/12460)
- Fix wrong board size presets in Android [Taiga #12339](https://tree.taiga.io/project/penpot/issue/12339)
- Fix problem with grid layout components and auto sizing [Github #7797](https://github.com/penpot/penpot/issues/7797)
- Fix some alignments on inspect tab [Taiga #12915](https://tree.taiga.io/project/penpot/issue/12915)
- Fix problem with text editor maintaining previous styles [Taiga #12835](https://tree.taiga.io/project/penpot/issue/12835)
- Fix color assets from shared libraries not appearing as assets in Selected colors panel [Taiga #12957](https://tree.taiga.io/project/penpot/issue/12957)
- Fix CSS generated box-shadow property [Taiga #12997](https://tree.taiga.io/project/penpot/issue/12997)
- Fix inner shadow selector on shadow token [Taiga #12951](https://tree.taiga.io/project/penpot/issue/12951)
- Fix missing text color token from selected shapes in selected colors list [Taiga #12956](https://tree.taiga.io/project/penpot/issue/12956)
- Fix dropdown option width in Guides columns dropdown [Taiga #12959](https://tree.taiga.io/project/penpot/issue/12959)
- Fix typos on download modal [Taiga #12865](https://tree.taiga.io/project/penpot/issue/12865)
- Fix problem with text editor maintaining previous styles [Taiga #12835](https://tree.taiga.io/project/penpot/issue/12835)
- Fix unhandled exception tokens creation dialog [Github #8110](https://github.com/penpot/penpot/issues/8110)
- Fix allow negative spread values on shadow token creation [Taiga #13167](https://tree.taiga.io/project/penpot/issue/13167)
- Fix spanish translations on import export token modal [Taiga #13171](https://tree.taiga.io/project/penpot/issue/13171)
- Remove whitespaces from asset export filename [Github #8133](https://github.com/penpot/penpot/pull/8133)
- Fix exception on uploading large fonts [Github #8135](https://github.com/penpot/penpot/pull/8135)
- Fix unhandled exception on open-new-window helper [Github #7787](https://github.com/penpot/penpot/issues/7787)
- Fix incorrect handling of input values on layout gap and padding inputs [Github #8113](https://github.com/penpot/penpot/issues/8113)
## 2.12.1
### :bug: Bugs fixed
- Fix setting a portion of text as bold or underline messes things up [Github #7980](https://github.com/penpot/penpot/issues/7980)
- Fix problem with style in fonts input [Taiga #12935](https://tree.taiga.io/project/penpot/issue/12935)
- Fix problem with path editor and right click [Github #7917](https://github.com/penpot/penpot/issues/7917)
## 2.12.0
### :boom: Breaking changes & Deprecations
@@ -90,6 +11,7 @@ The backend RPC API URLS are changed from `/api/rpc/command/<name>` to
compatibility; however, if you are a user of this API, it is strongly
recommended that you adapt your code to use the new PATH.
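For API consumers the migration is usually just a path change. A minimal sketch with `curl`; the new path segment is truncated in this excerpt, so the `commands` segment, the host name, and the cookie name below are assumptions rather than values taken from this diff:

```bash
# Old layout, still served for backward compatibility (per the note above)
curl -X POST "https://penpot.example.com/api/rpc/command/get-profile" \
  -H "Content-Type: application/json" \
  -H "Cookie: auth-token=$PENPOT_TOKEN" \
  -d '{}'

# New layout; "commands" is an assumption, since the new path is cut off in this excerpt
curl -X POST "https://penpot.example.com/api/rpc/commands/get-profile" \
  -H "Content-Type: application/json" \
  -H "Cookie: auth-token=$PENPOT_TOKEN" \
  -d '{}'
```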
#### Updated SSO Callback URL
The OAuth / Single Sign-On (SSO) callback endpoint has changed to
@@ -122,6 +44,7 @@ This update standardizes all authentication flows under the single URL
and makes it more modular, allowing the SSO auth provider to be configured dynamically.
#### Changes on default docker compose
We have updated the `docker/images/docker-compose.yaml` with a small
@@ -185,6 +108,7 @@ example. It's still usable as before, we just removed the example.
- Deprecated configuration variables with the prefix `PENPOT_ASSETS_*`; they will be
removed in future versions:
- The `PENPOT_ASSETS_STORAGE_BACKEND` becomes `PENPOT_OBJECTS_STORAGE_BACKEND` and its
values pass from (`assets-fs` or `assets-s3`) to (`fs` or `s3`)
- The `PENPOT_STORAGE_ASSETS_FS_DIRECTORY` becomes `PENPOT_OBJECTS_STORAGE_FS_DIRECTORY`, as sketched below
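A minimal before/after sketch of that rename, assuming the filesystem backend; the directory value is illustrative only and not taken from this diff:

```bash
# Deprecated variables (still read until they are removed)
export PENPOT_ASSETS_STORAGE_BACKEND=assets-fs
export PENPOT_STORAGE_ASSETS_FS_DIRECTORY=/opt/penpot/assets   # illustrative path

# Replacement variables and values
export PENPOT_OBJECTS_STORAGE_BACKEND=fs
export PENPOT_OBJECTS_STORAGE_FS_DIRECTORY=/opt/penpot/assets  # same directory, new name
```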


@@ -120,12 +120,17 @@ them on your system, you can run them with:
```bash
# Check formatting
./scripts/fmt
yarn fmt:clj:check
# Lint
./scripts/lint
# Check and fix formatting
yarn fmt:clj
# Run the linter
yarn lint:clj
```
There are more choices in `package.json`.
Ideally, you should run these commands as git pre-commit hooks. A convenient way
of defining them is to use [Husky](https://typicode.github.io/husky/#/).
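A minimal sketch of such a hook, assuming Husky is already set up in the repository and reusing the yarn scripts shown above (adjust the working directory to wherever the relevant `package.json` lives in your checkout):

```bash
#!/usr/bin/env sh
# .husky/pre-commit -- illustrative only; pick the checks that matter for your changes
yarn fmt:clj:check
yarn lint:clj
```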

backend/.gitignore (vendored Normal file, 7 changed lines)

@@ -0,0 +1,7 @@
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions


@@ -97,8 +97,8 @@
:jmx-remote
{:jvm-opts ["-Dcom.sun.management.jmxremote"
"-Dcom.sun.management.jmxremote.port=9000"
"-Dcom.sun.management.jmxremote.rmi.port=9000"
"-Dcom.sun.management.jmxremote.port=9090"
"-Dcom.sun.management.jmxremote.rmi.port=9090"
"-Dcom.sun.management.jmxremote.local.only=false"
"-Dcom.sun.management.jmxremote.authenticate=false"
"-Dcom.sun.management.jmxremote.ssl=false"


@@ -4,7 +4,7 @@
"license": "MPL-2.0",
"author": "Kaleidos INC",
"private": true,
"packageManager": "pnpm@10.26.2+sha512.0e308ff2005fc7410366f154f625f6631ab2b16b1d2e70238444dd6ae9d630a8482d92a451144debc492416896ed16f7b114a86ec68b8404b2443869e68ffda6",
"packageManager": "yarn@4.9.2+sha512.1fc009bc09d13cfd0e19efa44cbfc2b9cf6ca61482725eb35bbc5e257e093ebf4130db6dfe15d604ff4b79efd8e1e8e99b25fa7d0a6197c9f9826358d4d65c3c",
"repository": {
"type": "git",
"url": "https://github.com/penpot/penpot"

backend/pnpm-lock.yaml (generated, 306 changed lines)

@@ -1,306 +0,0 @@
lockfileVersion: '9.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
importers:
.:
dependencies:
luxon:
specifier: ^3.4.4
version: 3.7.2
sax:
specifier: ^1.4.1
version: 1.4.3
devDependencies:
nodemon:
specifier: ^3.1.2
version: 3.1.11
source-map-support:
specifier: ^0.5.21
version: 0.5.21
ws:
specifier: ^8.17.0
version: 8.18.3
packages:
anymatch@3.1.3:
resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==}
engines: {node: '>= 8'}
balanced-match@1.0.2:
resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==}
binary-extensions@2.3.0:
resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==}
engines: {node: '>=8'}
brace-expansion@1.1.12:
resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==}
braces@3.0.3:
resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==}
engines: {node: '>=8'}
buffer-from@1.1.2:
resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==}
chokidar@3.6.0:
resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==}
engines: {node: '>= 8.10.0'}
concat-map@0.0.1:
resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==}
debug@4.4.3:
resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==}
engines: {node: '>=6.0'}
peerDependencies:
supports-color: '*'
peerDependenciesMeta:
supports-color:
optional: true
fill-range@7.1.1:
resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==}
engines: {node: '>=8'}
fsevents@2.3.3:
resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
os: [darwin]
glob-parent@5.1.2:
resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==}
engines: {node: '>= 6'}
has-flag@3.0.0:
resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==}
engines: {node: '>=4'}
ignore-by-default@1.0.1:
resolution: {integrity: sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==}
is-binary-path@2.1.0:
resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==}
engines: {node: '>=8'}
is-extglob@2.1.1:
resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==}
engines: {node: '>=0.10.0'}
is-glob@4.0.3:
resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
engines: {node: '>=0.10.0'}
is-number@7.0.0:
resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
engines: {node: '>=0.12.0'}
luxon@3.7.2:
resolution: {integrity: sha512-vtEhXh/gNjI9Yg1u4jX/0YVPMvxzHuGgCm6tC5kZyb08yjGWGnqAjGJvcXbqQR2P3MyMEFnRbpcdFS6PBcLqew==}
engines: {node: '>=12'}
minimatch@3.1.2:
resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==}
ms@2.1.3:
resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
nodemon@3.1.11:
resolution: {integrity: sha512-is96t8F/1//UHAjNPHpbsNY46ELPpftGUoSVNXwUfMk/qdjSylYrWSu1XavVTBOn526kFiOR733ATgNBCQyH0g==}
engines: {node: '>=10'}
hasBin: true
normalize-path@3.0.0:
resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==}
engines: {node: '>=0.10.0'}
picomatch@2.3.1:
resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==}
engines: {node: '>=8.6'}
pstree.remy@1.1.8:
resolution: {integrity: sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==}
readdirp@3.6.0:
resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==}
engines: {node: '>=8.10.0'}
sax@1.4.3:
resolution: {integrity: sha512-yqYn1JhPczigF94DMS+shiDMjDowYO6y9+wB/4WgO0Y19jWYk0lQ4tuG5KI7kj4FTp1wxPj5IFfcrz/s1c3jjQ==}
semver@7.7.3:
resolution: {integrity: sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==}
engines: {node: '>=10'}
hasBin: true
simple-update-notifier@2.0.0:
resolution: {integrity: sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==}
engines: {node: '>=10'}
source-map-support@0.5.21:
resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==}
source-map@0.6.1:
resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==}
engines: {node: '>=0.10.0'}
supports-color@5.5.0:
resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==}
engines: {node: '>=4'}
to-regex-range@5.0.1:
resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==}
engines: {node: '>=8.0'}
touch@3.1.1:
resolution: {integrity: sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA==}
hasBin: true
undefsafe@2.0.5:
resolution: {integrity: sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==}
ws@8.18.3:
resolution: {integrity: sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==}
engines: {node: '>=10.0.0'}
peerDependencies:
bufferutil: ^4.0.1
utf-8-validate: '>=5.0.2'
peerDependenciesMeta:
bufferutil:
optional: true
utf-8-validate:
optional: true
snapshots:
anymatch@3.1.3:
dependencies:
normalize-path: 3.0.0
picomatch: 2.3.1
balanced-match@1.0.2: {}
binary-extensions@2.3.0: {}
brace-expansion@1.1.12:
dependencies:
balanced-match: 1.0.2
concat-map: 0.0.1
braces@3.0.3:
dependencies:
fill-range: 7.1.1
buffer-from@1.1.2: {}
chokidar@3.6.0:
dependencies:
anymatch: 3.1.3
braces: 3.0.3
glob-parent: 5.1.2
is-binary-path: 2.1.0
is-glob: 4.0.3
normalize-path: 3.0.0
readdirp: 3.6.0
optionalDependencies:
fsevents: 2.3.3
concat-map@0.0.1: {}
debug@4.4.3(supports-color@5.5.0):
dependencies:
ms: 2.1.3
optionalDependencies:
supports-color: 5.5.0
fill-range@7.1.1:
dependencies:
to-regex-range: 5.0.1
fsevents@2.3.3:
optional: true
glob-parent@5.1.2:
dependencies:
is-glob: 4.0.3
has-flag@3.0.0: {}
ignore-by-default@1.0.1: {}
is-binary-path@2.1.0:
dependencies:
binary-extensions: 2.3.0
is-extglob@2.1.1: {}
is-glob@4.0.3:
dependencies:
is-extglob: 2.1.1
is-number@7.0.0: {}
luxon@3.7.2: {}
minimatch@3.1.2:
dependencies:
brace-expansion: 1.1.12
ms@2.1.3: {}
nodemon@3.1.11:
dependencies:
chokidar: 3.6.0
debug: 4.4.3(supports-color@5.5.0)
ignore-by-default: 1.0.1
minimatch: 3.1.2
pstree.remy: 1.1.8
semver: 7.7.3
simple-update-notifier: 2.0.0
supports-color: 5.5.0
touch: 3.1.1
undefsafe: 2.0.5
normalize-path@3.0.0: {}
picomatch@2.3.1: {}
pstree.remy@1.1.8: {}
readdirp@3.6.0:
dependencies:
picomatch: 2.3.1
sax@1.4.3: {}
semver@7.7.3: {}
simple-update-notifier@2.0.0:
dependencies:
semver: 7.7.3
source-map-support@0.5.21:
dependencies:
buffer-from: 1.1.2
source-map: 0.6.1
source-map@0.6.1: {}
supports-color@5.5.0:
dependencies:
has-flag: 3.0.0
to-regex-range@5.0.1:
dependencies:
is-number: 7.0.0
touch@3.1.1: {}
undefsafe@2.0.5: {}
ws@8.18.3: {}


@@ -12,22 +12,43 @@ Debug Main Page
</nav>
<main class="dashboard">
<section class="widget">
<fieldset>
<legend>Error reports</legend>
<desc><a href="/dbg/error">CLICK HERE TO SEE THE ERROR REPORTS</a> </desc>
</fieldset>
<fieldset>
<legend>CURRENT PROFILE</legend>
<desc>
<p>
Name: <b>{{profile.fullname}}</b> <br />
Email: <b>{{profile.email}}</b>
</p>
</desc>
<legend>Profile Management</legend>
<form method="post" action="/dbg/actions/resend-email-verification">
<div class="row">
<input type="email" name="email" placeholder="example@example.com" value="" />
</div>
<div class="row">
<label for="force-verify">Are you sure?</label>
<input id="force-verify" type="checkbox" name="force" />
<br />
<small>
This is just a security double check to prevent unintentional submits.
</small>
</div>
<div class="row">
<input type="submit" name="resend" value="Resend Verification" />
<input type="submit" name="verify" value="Verify" />
</div>
<div class="row">
<input type="submit" class="danger" name="block" value="Block" />
<input type="submit" class="danger" name="unblock" value="Unblock" />
</div>
</form>
</fieldset>
<fieldset>
<legend>VIRTUAL CLOCK</legend>
<desc>
<p><b>IMPORTANT:</b> The virtual clock is profile based and only affects the currently logged-in profile.</p>
<p>
CURRENT CLOCK: <b>{{current-clock}}</b>
<br />
@@ -60,93 +81,8 @@ Debug Main Page
</form>
</fieldset>
<fieldset>
<legend>ERROR REPORTS</legend>
<desc><a href="/dbg/error">CLICK HERE TO SEE THE ERROR REPORTS</a> </desc>
</fieldset>
</section>
<section class="widget">
<fieldset>
<legend>Profile Management</legend>
<form method="post" action="/dbg/actions/resend-email-verification">
<div class="row">
<input type="email" name="email" placeholder="example@example.com" value="" />
</div>
<div class="row">
<label for="force-verify">Are you sure?</label>
<input id="force-verify" type="checkbox" name="force" />
<br />
<small>
This is just a security double check to prevent unintentional submits.
</small>
</div>
<div class="row">
<input type="submit" name="resend" value="Resend Verification" />
<input type="submit" name="verify" value="Verify" />
</div>
<div class="row">
<input type="submit" class="danger" name="block" value="Block" />
<input type="submit" class="danger" name="unblock" value="Unblock" />
</div>
</form>
</fieldset>
<fieldset>
<legend>Feature Flags for Team</legend>
<desc>Add a feature flag to a team</desc>
<form method="post" action="/dbg/actions/handle-team-features">
<div class="row">
<input type="text" style="width:300px" name="team-id" placeholder="team-id" />
</div>
<div class="row">
<select type="text" style="width:100px" name="feature">
{% for feature in supported-features %}
<option value="{{feature}}">{{feature}}</option>
{% endfor %}
</select>
</div>
<div class="row">
<select style="width:100px" name="action">
<option value="">Action...</option>
<option value="show">Show</option>
<option value="enable">Enable</option>
<option value="disable">Disable</option>
</select>
</div>
<div class="row">
<label for="check-feature">Skip feature check</label>
<input id="check-feature" type="checkbox" name="skip-check" />
<br />
<small>
Do not check if the feature is supported
</small>
</div>
<div class="row">
<label for="force-version">Are you sure?</label>
<input id="force-version" type="checkbox" name="force" />
<br />
<small>
This is just a security double check to prevent unintentional submits.
</small>
</div>
<div class="row">
<input type="submit" value="Submit" />
</div>
</form>
</fieldset>
</section>
<section class="widget">
<fieldset>
@@ -237,5 +173,55 @@ Debug Main Page
</form>
</fieldset>
</section>
<section class="widget">
<fieldset>
<legend>Feature Flags for Team</legend>
<desc>Add a feature flag to a team</desc>
<form method="post" action="/dbg/actions/handle-team-features">
<div class="row">
<input type="text" style="width:300px" name="team-id" placeholder="team-id" />
</div>
<div class="row">
<select type="text" style="width:100px" name="feature">
{% for feature in supported-features %}
<option value="{{feature}}">{{feature}}</option>
{% endfor %}
</select>
</div>
<div class="row">
<select style="width:100px" name="action">
<option value="">Action...</option>
<option value="show">Show</option>
<option value="enable">Enable</option>
<option value="disable">Disable</option>
</select>
</div>
<div class="row">
<label for="check-feature">Skip feature check</label>
<input id="check-feature" type="checkbox" name="skip-check" />
<br />
<small>
Do not check if the feature is supported
</small>
</div>
<div class="row">
<label for="force-version">Are you sure?</label>
<input id="force-version" type="checkbox" name="force" />
<br />
<small>
This is just a security double check to prevent unintentional submits.
</small>
</div>
<div class="row">
<input type="submit" value="Submit" />
</div>
</form>
</fieldset>
</section>
</main>
{% endblock %}


@@ -1,12 +1,7 @@
#!/usr/bin/env bash
export PENPOT_NITRATE_SHARED_KEY=super-secret-nitrate-api-key
export PENPOT_EXPORTER_SHARED_KEY=super-secret-exporter-api-key
export PENPOT_SECRET_KEY=super-secret-devenv-key
# DEPRECATED: only used for subscriptions
export PENPOT_MANAGEMENT_API_KEY=super-secret-management-api-key
export PENPOT_SECRET_KEY=super-secret-devenv-key
export PENPOT_HOST=devenv
export PENPOT_PUBLIC_URI=https://localhost:3449


@@ -36,6 +36,17 @@
[integrant.core :as ig]
[yetti.response :as-alias yres]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; HELPERS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn obfuscate-string
[s]
(if (< (count s) 10)
(apply str (take (count s) (repeat "*")))
(str (subs s 0 5)
(apply str (take (- (count s) 5) (repeat "*"))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; OIDC PROVIDER (GENERIC)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -166,7 +177,7 @@
(l/inf :hint "provider initialized"
:provider (:id provider)
:client-id (:client-id provider)
:client-secret (d/obfuscate-string (:client-secret provider)))
:client-secret (obfuscate-string (:client-secret provider)))
provider)
(catch Throwable cause
@@ -211,7 +222,7 @@
(l/inf :hint "provider initialized"
:provider (:id provider)
:client-id (:client-id provider)
:client-secret (d/obfuscate-string (:client-secret provider)))
:client-secret (obfuscate-string (:client-secret provider)))
provider)
(catch Throwable cause
@@ -288,7 +299,7 @@
(l/inf :hint "provider initialized"
:provider (:id provider)
:client-id (:client-id provider)
:client-secret (d/obfuscate-string (:client-secret provider)))
:client-secret (obfuscate-string (:client-secret provider)))
provider)
(catch Throwable cause
@@ -330,7 +341,7 @@
:provider "gitlab"
:base-uri (:base-uri provider)
:client-id (:client-id provider)
:client-secret (d/obfuscate-string (:client-secret provider)))
:client-secret (obfuscate-string (:client-secret provider)))
provider)
(catch Throwable cause
(ex/raise :type ::internal
@@ -350,7 +361,7 @@
(l/inf :hint "provider initialized"
:provider (:id provider)
:client-id (:client-id provider)
:client-secret (d/obfuscate-string (:client-secret provider)))
:client-secret (obfuscate-string (:client-secret provider)))
provider)
(catch Throwable cause
@@ -448,7 +459,7 @@
(l/trc :hint "fetch access token"
:provider (:id provider)
:client-id (:client-id provider)
:client-secret (d/obfuscate-string (:client-secret provider))
:client-secret (obfuscate-string (:client-secret provider))
:grant-type (:grant_type params)
:redirect-uri (:redirect_uri params))
@@ -501,7 +512,7 @@
[cfg provider tdata]
(l/trc :hint "fetch user info"
:uri (:user-uri provider)
:token (d/obfuscate-string (:token/access tdata)))
:token (obfuscate-string (:token/access tdata)))
(let [params {:uri (:user-uri provider)
:headers {"Authorization" (str (:token/type tdata) " " (:token/access tdata))}


@@ -331,81 +331,6 @@
(set/difference cfeat/backend-only-features))
#{}))))
(defn check-file-exists
[cfg id & {:keys [include-deleted?]
:or {include-deleted? false}
:as options}]
(db/get-with-sql cfg [sql:get-minimal-file id]
{:db/remove-deleted (not include-deleted?)}))
(def ^:private sql:file-permissions
"select fpr.is_owner,
fpr.is_admin,
fpr.can_edit
from file_profile_rel as fpr
inner join file as f on (f.id = fpr.file_id)
where fpr.file_id = ?
and fpr.profile_id = ?
union all
select tpr.is_owner,
tpr.is_admin,
tpr.can_edit
from team_profile_rel as tpr
inner join project as p on (p.team_id = tpr.team_id)
inner join file as f on (p.id = f.project_id)
where f.id = ?
and tpr.profile_id = ?
union all
select ppr.is_owner,
ppr.is_admin,
ppr.can_edit
from project_profile_rel as ppr
inner join file as f on (f.project_id = ppr.project_id)
where f.id = ?
and ppr.profile_id = ?")
(defn- get-file-permissions*
[conn profile-id file-id]
(when (and profile-id file-id)
(db/exec! conn [sql:file-permissions
file-id profile-id
file-id profile-id
file-id profile-id])))
(defn get-file-permissions
([conn profile-id file-id]
(let [rows (get-file-permissions* conn profile-id file-id)
is-owner (boolean (some :is-owner rows))
is-admin (boolean (some :is-admin rows))
can-edit (boolean (some :can-edit rows))]
(when (seq rows)
{:type :membership
:is-owner is-owner
:is-admin (or is-owner is-admin)
:can-edit (or is-owner is-admin can-edit)
:can-read true
:is-logged (some? profile-id)})))
([conn profile-id file-id share-id]
(let [perms (get-file-permissions conn profile-id file-id)
ldata (some-> (db/get* conn :share-link {:id share-id :file-id file-id})
(dissoc :flags)
(update :pages db/decode-pgarray #{}))]
;; NOTE: in the future, when share-link becomes more powerful and
;; allows us to specify which parts of the app are available, we
;; will probably need to tweak this function in order to expose
;; these flags to the frontend.
(cond
(some? perms) perms
(some? ldata) {:type :share-link
:can-read true
:pages (:pages ldata)
:is-logged (some? profile-id)
:who-comment (:who-comment ldata)
:who-inspect (:who-inspect ldata)}))))
(defn get-project
[cfg project-id]
(db/get cfg :project {:id project-id}))

View File

@@ -102,8 +102,6 @@
[:http-server-io-threads {:optional true} ::sm/int]
[:http-server-max-worker-threads {:optional true} ::sm/int]
[:exporter-shared-key {:optional true} :string]
[:nitrate-shared-key {:optional true} :string]
[:management-api-key {:optional true} :string]
[:telemetry-uri {:optional true} :string]

View File

@@ -124,6 +124,8 @@
(throw (IllegalArgumentException. "invalid email body provided")))
(doseq [[name content] attachments]
(prn "attachment" name)
(let [attachment-part (MimeBodyPart.)]
(.setFileName attachment-part ^String name)
(.setContent attachment-part ^String content (str "text/plain; charset=" charset))

View File

@@ -30,7 +30,7 @@
(defn- get-file-media-object
[pool id]
(db/get pool :file-media-object {:id id} {::db/remove-deleted false}))
(db/get pool :file-media-object {:id id}))
(defn- serve-object-from-s3
[{:keys [::sto/storage] :as cfg} obj]

View File

@@ -49,16 +49,13 @@
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn index-handler
[cfg request]
(let [profile-id (::session/profile-id request)
offset (clock/get-offset profile-id)
profile (profile/get-profile cfg profile-id)]
[_cfg _request]
(let [{:keys [clock offset]} @clock/current]
{::yres/status 200
::yres/headers {"content-type" "text/html"}
::yres/body (-> (io/resource "app/templates/debug.tmpl")
(tmpl/render {:version (:full cf/version)
:profile profile
:current-clock ct/*clock*
:current-clock (str clock)
:current-offset (if offset
(ct/format-duration offset)
"NO OFFSET")
@@ -450,16 +447,15 @@
(defn- set-virtual-clock
[_ {:keys [params] :as request}]
(let [offset (some-> params :offset str/trim not-empty ct/duration)
profile-id (::session/profile-id request)
reset? (contains? params :reset)]
(let [offset (some-> params :offset str/trim not-empty ct/duration)
reset? (contains? params :reset)]
(if (= "production" (cf/get :tenant))
{::yres/status 501
::yres/body "OPERATION NOT ALLOWED"}
(do
(if (or reset? (zero? (inst-ms offset)))
(clock/assign-offset profile-id nil)
(clock/assign-offset profile-id offset))
(clock/set-offset! nil)
(clock/set-offset! offset))
{::yres/status 302
::yres/headers {"location" "/dbg"}}))))
@@ -499,7 +495,7 @@
(defn authorized?
[pool {:keys [::session/profile-id]}]
(or (and (= "devenv" (cf/get :host)) profile-id)
(or (= "devenv" (cf/get :host))
(let [profile (ex/ignoring (profile/get-profile pool profile-id))
admins (or (cf/get :admins) #{})]
(contains? admins (:email profile)))))

View File

@@ -13,13 +13,13 @@
[app.common.time :as ct]
[app.config :as cf]
[app.db :as db]
[app.http.middleware :as mw]
[app.main :as-alias main]
[app.rpc.commands.profile :as cmd.profile]
[app.setup :as-alias setup]
[app.tokens :as tokens]
[app.worker :as-alias wrk]
[integrant.core :as ig]
[yetti.request :as yreq]
[yetti.response :as-alias yres]))
;; ---- ROUTES
@@ -49,40 +49,28 @@
(fn [cfg request]
(db/tx-run! cfg handler request)))))})
(def ^:private shared-key-auth
{:name ::shared-key-auth
:compile
(fn [_ _]
(fn [handler key]
(if key
(fn [request]
(if-let [key' (yreq/get-header request "x-shared-key")]
(if (= key key')
(handler request)
{::yres/status 403})
{::yres/status 403}))
(fn [_ _]
{::yres/status 403}))))})
(defmethod ig/init-key ::routes
[_ cfg]
[_ {:keys [::setup/props] :as cfg}]
["" {:middleware [[shared-key-auth (cf/get :management-api-key)]
[default-system cfg]
[transaction]]}
["/authenticate"
{:handler authenticate
:allowed-methods #{:post}}]
(let [management-key (or (cf/get :management-api-key)
(get props :management-key))]
["/get-customer"
{:handler get-customer
:transaction true
:allowed-methods #{:post}}]
["" {:middleware [[mw/shared-key-auth management-key]
[default-system cfg]
[transaction]]}
["/authenticate"
{:handler authenticate
:allowed-methods #{:post}}]
["/update-customer"
{:handler update-customer
:allowed-methods #{:post}
:transaction true}]])
["/get-customer"
{:handler get-customer
:transaction true
:allowed-methods #{:post}}]
["/update-customer"
{:handler update-customer
:allowed-methods #{:post}
:transaction true}]]))
;; ---- HELPERS

View File

@@ -16,6 +16,7 @@
[app.http.errors :as errors]
[app.tokens :as tokens]
[app.util.pointer-map :as pmap]
[buddy.core.codecs :as bc]
[cuerdas.core :as str]
[yetti.adapter :as yt]
[yetti.middleware :as ymw]
@@ -300,20 +301,16 @@
:compile (constantly wrap-auth)})
(defn- wrap-shared-key-auth
[handler keys]
(if (seq keys)
(fn [request]
(if-let [[key-id key] (some-> (yreq/get-header request "x-shared-key")
(str/split #"\s+" 2))]
(let [key-id (str/lower key-id)]
(if (and (string? key)
(contains? keys key-id)
(= key (get keys key-id)))
(-> request
(assoc ::http/auth-key-id key-id)
(handler))
{::yres/status 403}))
{::yres/status 403}))
[handler shared-key]
(if shared-key
(let [shared-key (if (string? shared-key)
shared-key
(bc/bytes->b64-str shared-key true))]
(fn [request]
(let [key (yreq/get-header request "x-shared-key")]
(if (= key shared-key)
(handler request)
{::yres/status 403}))))
(fn [_ _]
{::yres/status 403})))
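
The new middleware above reduces shared-key auth to a single comparison against the `x-shared-key` header. A minimal, self-contained sketch of that decision logic (plain maps instead of yetti requests; the names are hypothetical):

(defn wrap-shared-key-auth-sketch
  [handler shared-key]
  (fn [request]
    ;; only let the request through when the configured key matches exactly
    (if (and shared-key
             (= shared-key (get-in request [:headers "x-shared-key"])))
      (handler request)
      {:status 403})))

(def handler (wrap-shared-key-auth-sketch (constantly {:status 200}) "secret-key"))

(handler {:headers {"x-shared-key" "secret-key"}}) ;; => {:status 200}
(handler {:headers {}})                            ;; => {:status 403}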

View File

@@ -20,7 +20,6 @@
[app.http.session.tasks :as-alias tasks]
[app.main :as-alias main]
[app.setup :as-alias setup]
[app.setup.clock :as clock]
[app.tokens :as tokens]
[integrant.core :as ig]
[yetti.request :as yreq]
@@ -230,22 +229,18 @@
(let [{:keys [type token claims metadata]} (get request ::http/auth-data)]
(cond
(= type :cookie)
(let [session
(case (:ver metadata)
;; BACKWARD COMPATIBILITY WITH OLD TOKENS
0 (read-session manager token)
1 (some->> (:sid claims) (read-session manager))
nil)
(let [session (case (:ver metadata)
;; BACKWARD COMPATIBILITY WITH OLD TOKENS
0 (read-session manager token)
1 (some->> (:sid claims) (read-session manager))
nil)
request
(cond-> request
(some? session)
(-> (assoc ::profile-id (:profile-id session))
(assoc ::session session)))
request (cond-> request
(some? session)
(-> (assoc ::profile-id (:profile-id session))
(assoc ::session session)))
response
(binding [ct/*clock* (clock/get-clock (:profile-id session))]
(handler request))]
response (handler request)]
(if (and session (renew-session? session))
(let [session (->> session

View File

@@ -9,7 +9,6 @@
(:require
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.json :as json]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
@@ -140,14 +139,10 @@
client-version (get-client-version request)
client-user-agent (get-client-user-agent request)
session-id (get-external-session-id request)
key-id (::http/auth-key-id request)
token-id (::actoken/id request)
token-type (::actoken/type request)]
token-id (::actoken/id request)]
(d/without-nils
{:external-session-id session-id
:initiator (or key-id "app")
:access-token-id (some-> token-id str)
:access-token-type (some-> token-type str)
:client-event-origin client-event-origin
:client-user-agent client-user-agent
:client-version client-version
@@ -228,7 +223,7 @@
(some? tnow)
(assoc :tracked-at tnow))))
(defn- append-audit-entry
(defn- append-audit-entry!
[cfg params]
(let [params (-> params
(update :props db/tjson)
@@ -241,16 +236,6 @@
(let [params (event->params event)
tnow (ct/now)]
(when (contains? cf/flags :audit-log-logger)
(l/log! ::l/logger "app.audit"
::l/level :info
:profile-id (str (::profile-id event))
:ip-addr (str (::ip-addr event))
:type (::type event)
:name (::name event)
:props (json/encode (::props event) :key-fn json/write-camel-key)
:context (json/encode (::context event) :key-fn json/write-camel-key)))
(when (contains? cf/flags :audit-log)
;; NOTE: this operation may cause primary key conflicts on inserts
;; because of the timestamp precision (two concurrent requests), in
@@ -258,7 +243,7 @@
(let [params (-> params
(assoc :created-at tnow)
(update :tracked-at #(or % tnow)))]
(append-audit-entry cfg params)))
(append-audit-entry! cfg params)))
(when (and (or (contains? cf/flags :telemetry)
(cf/get :telemetry-enabled))
@@ -273,7 +258,7 @@
(update :tracked-at #(or % tnow))
(assoc :props {})
(assoc :context {}))]
(append-audit-entry cfg params)))
(append-audit-entry! cfg params)))
(when (and (contains? cf/flags :webhooks)
(::webhooks/event? event))
@@ -327,4 +312,4 @@
params (-> (event->params event)
(assoc :created-at tnow)
(update :tracked-at #(or % tnow)))]
(append-audit-entry cfg params)))))))
(append-audit-entry! cfg params)))))))

View File

@@ -275,7 +275,8 @@
::email/whitelist (ig/ref ::email/whitelist)}
::mgmt/routes
{::db/pool (ig/ref ::db/pool)}
{::db/pool (ig/ref ::db/pool)
::setup/props (ig/ref ::setup/props)}
:app.http/router
{::session/manager (ig/ref ::session/manager)
@@ -351,13 +352,13 @@
::setup/props (ig/ref ::setup/props)}
::rpc/routes
{::rpc/methods (ig/ref :app.rpc/methods)
{::rpc/methods (ig/ref :app.rpc/methods)
::rpc/management-methods (ig/ref :app.rpc/management-methods)
;; FIXME: revisit if db/pool is necessary here
::db/pool (ig/ref ::db/pool)
::session/manager (ig/ref ::session/manager)
::setup/shared-keys (ig/ref ::setup/shared-keys)}
::setup/props (ig/ref ::setup/props)}
::wrk/registry
{::mtx/metrics (ig/ref ::mtx/metrics)
@@ -445,11 +446,6 @@
;; module requires the migrations to run before initialize.
::migrations (ig/ref :app.migrations/migrations)}
::setup/shared-keys
{::setup/props (ig/ref ::setup/props)
:nitrate (cf/get :nitrate-shared-key)
:exporter (cf/get :exporter-shared-key)}
::setup/clock
{}

View File

@@ -14,7 +14,6 @@
[app.common.spec :as us]
[app.common.time :as ct]
[app.common.uri :as u]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.http :as-alias http]
@@ -92,12 +91,8 @@
(fn [{:keys [params path-params method] :as request}]
(let [handler-name (:type path-params)
etag (yreq/get-header request "if-none-match")
key-id (get request ::http/auth-key-id)
profile-id (or (::session/profile-id request)
(::actoken/profile-id request)
(if key-id uuid/zero nil))
(::actoken/profile-id request))
ip-addr (inet/parse-request request)
data (-> params
@@ -345,20 +340,23 @@
(defmethod ig/assert-key ::routes
[_ params]
(assert (map? (::setup/shared-keys params)))
(assert (db/pool? (::db/pool params)) "expect valid database pool")
(assert (some? (::setup/props params)))
(assert (session/manager? (::session/manager params)) "expect valid session manager")
(assert (valid-methods? (::methods params)) "expect valid methods map")
(assert (valid-methods? (::management-methods params)) "expect valid methods map"))
(defmethod ig/init-key ::routes
[_ {:keys [::methods ::management-methods ::setup/shared-keys] :as cfg}]
[_ {:keys [::methods ::management-methods ::setup/props] :as cfg}]
(let [public-uri (cf/get :public-uri)
management-key (or (cf/get :management-api-key)
(get props :management-key))]
(let [public-uri (cf/get :public-uri)]
["/api"
["/management"
["/methods/:type"
{:middleware [[mw/shared-key-auth shared-keys]
{:middleware [[mw/shared-key-auth management-key]
[session/authz cfg]]
:handler (make-rpc-handler management-methods)}]

View File

@@ -79,14 +79,85 @@
;; --- FILE PERMISSIONS
(def ^:private sql:file-permissions
"select fpr.is_owner,
fpr.is_admin,
fpr.can_edit
from file_profile_rel as fpr
inner join file as f on (f.id = fpr.file_id)
where fpr.file_id = ?
and fpr.profile_id = ?
and f.deleted_at is null
union all
select tpr.is_owner,
tpr.is_admin,
tpr.can_edit
from team_profile_rel as tpr
inner join project as p on (p.team_id = tpr.team_id)
inner join file as f on (p.id = f.project_id)
where f.id = ?
and tpr.profile_id = ?
and f.deleted_at is null
union all
select ppr.is_owner,
ppr.is_admin,
ppr.can_edit
from project_profile_rel as ppr
inner join file as f on (f.project_id = ppr.project_id)
where f.id = ?
and ppr.profile_id = ?
and f.deleted_at is null")
(defn get-file-permissions
[conn profile-id file-id]
(when (and profile-id file-id)
(db/exec! conn [sql:file-permissions
file-id profile-id
file-id profile-id
file-id profile-id])))
(defn get-permissions
([conn profile-id file-id]
(let [rows (get-file-permissions conn profile-id file-id)
is-owner (boolean (some :is-owner rows))
is-admin (boolean (some :is-admin rows))
can-edit (boolean (some :can-edit rows))]
(when (seq rows)
{:type :membership
:is-owner is-owner
:is-admin (or is-owner is-admin)
:can-edit (or is-owner is-admin can-edit)
:can-read true
:is-logged (some? profile-id)})))
([conn profile-id file-id share-id]
(let [perms (get-permissions conn profile-id file-id)
ldata (some-> (db/get* conn :share-link {:id share-id :file-id file-id})
(dissoc :flags)
(update :pages db/decode-pgarray #{}))]
;; NOTE: in the future, when share-link becomes more powerful and
;; allows us to specify which parts of the app are available, we
;; will probably need to tweak this function in order to expose
;; these flags to the frontend.
(cond
(some? perms) perms
(some? ldata) {:type :share-link
:can-read true
:pages (:pages ldata)
:is-logged (some? profile-id)
:who-comment (:who-comment ldata)
:who-inspect (:who-inspect ldata)}))))
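
For orientation, these are the two result shapes get-permissions can produce according to the code above; the concrete values below are hypothetical:

;; Membership path (at least one permissions row matched):
{:type :membership
 :is-owner true
 :is-admin true        ;; owners are implicitly admins
 :can-edit true        ;; owners/admins can always edit
 :can-read true
 :is-logged true}

;; Share-link path (no membership, but a valid share-link id was given):
{:type :share-link
 :can-read true
 :pages #{}            ;; decoded pg array of allowed page ids
 :is-logged false
 :who-comment "team"   ;; hypothetical value taken from the share-link row
 :who-inspect "all"}   ;; hypothetical value taken from the share-link row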
(def has-edit-permissions?
(perms/make-edition-predicate-fn bfc/get-file-permissions))
(perms/make-edition-predicate-fn get-permissions))
(def has-read-permissions?
(perms/make-read-predicate-fn bfc/get-file-permissions))
(perms/make-read-predicate-fn get-permissions))
(def has-comment-permissions?
(perms/make-comment-predicate-fn bfc/get-file-permissions))
(perms/make-comment-predicate-fn get-permissions))
(def check-edition-permissions!
(perms/make-check-fn has-edit-permissions?))
@@ -99,7 +170,7 @@
(defn check-comment-permissions!
[conn profile-id file-id share-id]
(let [perms (bfc/get-file-permissions conn profile-id file-id share-id)
(let [perms (get-permissions conn profile-id file-id share-id)
can-read (has-read-permissions? perms)
can-comment (has-comment-permissions? perms)]
(when-not (or can-read can-comment)
@@ -151,7 +222,7 @@
(defn- get-minimal-file-with-perms
[cfg {:keys [:id ::rpc/profile-id]}]
(let [mfile (get-minimal-file cfg id)
perms (bfc/get-file-permissions cfg profile-id id)]
perms (get-permissions cfg profile-id id)]
(assoc mfile :permissions perms)))
(defn get-file-etag
@@ -177,7 +248,7 @@
;; will already be prefetched and we just reuse them instead
;; of making additional database queries.
(let [perms (or (:permissions (::cond/object params))
(bfc/get-file-permissions conn profile-id id))]
(get-permissions conn profile-id id))]
(check-read-permissions! perms)
(let [team (teams/get-team conn
@@ -240,7 +311,7 @@
::sm/result schema:file-fragment}
[cfg {:keys [::rpc/profile-id file-id fragment-id share-id]}]
(db/run! cfg (fn [cfg]
(let [perms (bfc/get-file-permissions cfg profile-id file-id share-id)]
(let [perms (get-permissions cfg profile-id file-id share-id)]
(check-read-permissions! perms)
(-> (get-file-fragment cfg file-id fragment-id)
(rph/with-http-cache long-cache-duration))))))
@@ -385,7 +456,8 @@
:code :params-validation
:hint "page-id is required when object-id is provided"))
(let [perms (bfc/get-file-permissions conn profile-id file-id share-id)
(let [perms (get-permissions conn profile-id file-id share-id)
file (bfc/get-file cfg file-id :read-only? true)
proj (db/get conn :project {:id (:project-id file)})
@@ -616,10 +688,11 @@
"Get libraries used by the specified file."
{::doc/added "1.17"
::sm/params schema:get-file-libraries}
[cfg {:keys [::rpc/profile-id file-id]}]
(bfc/check-file-exists cfg file-id)
(check-read-permissions! cfg profile-id file-id)
(bfc/get-file-libraries cfg file-id))
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id]}]
(dm/with-open [conn (db/open pool)]
(check-read-permissions! conn profile-id file-id)
(bfc/get-file-libraries conn file-id)))
;; --- COMMAND QUERY: Files that use this File library
@@ -704,6 +777,7 @@
f.created_at,
f.modified_at,
f.name,
f.is_shared,
f.deleted_at AS will_be_deleted_at,
ft.media_id AS thumbnail_id,
row_number() OVER w AS row_num,
@@ -711,7 +785,8 @@
FROM file AS f
INNER JOIN project AS p ON (p.id = f.project_id)
LEFT JOIN file_thumbnail AS ft on (ft.file_id = f.id
AND ft.revn = f.revn)
AND ft.revn = f.revn
AND ft.deleted_at is null)
WHERE p.team_id = ?
AND (p.deleted_at > ?::timestamptz OR
f.deleted_at > ?::timestamptz)
@@ -813,7 +888,7 @@
AND (f.deleted_at IS NULL OR f.deleted_at > now())
ORDER BY f.created_at ASC;")
(defn- absorb-library-by-file
(defn- absorb-library-by-file!
[cfg ldata file-id]
(assert (db/connection-map? cfg)
@@ -837,7 +912,7 @@
:modified-at (ct/now)
:has-media-trimmed false}))))
(defn- absorb-library*
(defn- absorb-library
"Find all files using a shared library, and absorb all library assets
into the file local libraries"
[cfg {:keys [id data] :as library}]
@@ -852,10 +927,10 @@
:library-id (str id)
:files (str/join "," (map str ids)))
(run! (partial absorb-library-by-file cfg data) ids)
(run! (partial absorb-library-by-file! cfg data) ids)
library))
(defn absorb-library
(defn absorb-library!
[{:keys [::db/conn] :as cfg} id]
(let [file (-> (bfc/get-file cfg id
:realize? true
@@ -872,7 +947,7 @@
(-> (cfeat/get-team-enabled-features cf/flags team)
(cfeat/check-file-features! (:features file)))
(absorb-library* cfg file)))
(absorb-library cfg file)))
(defn- set-file-shared
[{:keys [::db/conn] :as cfg} {:keys [profile-id id] :as params}]
@@ -885,14 +960,14 @@
;; file, we need to perform a more complex operation,
;; so in this case we retrieve the complete file and
;; perform all required validations.
(let [file (-> (absorb-library cfg id)
(let [file (-> (absorb-library! cfg id)
(assoc :is-shared false))]
(db/delete! conn :file-library-rel {:library-file-id id})
(db/update! conn :file
{:is-shared false
:modified-at (ct/now)}
{:id id})
file)
(select-keys file [:id :name :is-shared]))
(and (false? (:is-shared file))
(true? (:is-shared params)))
@@ -939,11 +1014,6 @@
{:id file-id}
{::db/return-keys [:id :name :is-shared :deleted-at
:project-id :created-at :modified-at]})]
;; Remove all possible relations for that file
(db/delete! conn :file-library-rel
{:library-file-id file-id})
(wrk/submit! {::db/conn conn
::wrk/task :delete-object
::wrk/params {:object :file
@@ -1094,53 +1164,47 @@
;; --- MUTATION COMMAND: delete-files-immediatelly
(def ^:private sql:get-delete-team-files-candidates
"SELECT f.id
FROM file AS f
JOIN project AS p ON (p.id = f.project_id)
JOIN team AS t ON (t.id = p.team_id)
WHERE t.deleted_at IS NULL
AND t.id = ?
AND f.id = ANY(?::uuid[])")
(def ^:private sql:delete-team-files
"UPDATE file AS uf SET deleted_at = ?::timestamptz
FROM (
SELECT f.id
FROM file AS f
JOIN project AS p ON (p.id = f.project_id)
JOIN team AS t ON (t.id = p.team_id)
WHERE t.deleted_at IS NULL
AND t.id = ?
AND f.id = ANY(?::uuid[])
) AS subquery
WHERE uf.id = subquery.id
RETURNING uf.id, uf.deleted_at;")
(def ^:private schema:permanently-delete-team-files
[:map {:title "permanently-delete-team-files"}
[:team-id ::sm/uuid]
[:ids [::sm/set ::sm/uuid]]])
(defn- permanently-delete-team-files
[{:keys [::db/conn]} {:keys [::rpc/request-at team-id ids]}]
(let [ids (into #{}
d/xf:map-id
(db/exec! conn [sql:get-delete-team-files-candidates team-id
(db/create-array conn "uuid" ids)]))]
(reduce (fn [acc id]
(events/tap :progress {:file-id id :index (inc (count acc)) :total (count ids)})
(db/update! conn :file
{:deleted-at request-at}
{:id id}
{::db/return-keys false})
(wrk/submit! {::db/conn conn
::wrk/task :delete-object
::wrk/params {:object :file
:deleted-at request-at
:id id}})
(conj acc id))
#{}
ids)))
(sv/defmethod ::permanently-delete-team-files
"Mark the specified files to be deleted immediatelly on the
specified team. The team-id on params will be used to filter and
check writable permissons on team."
{::doc/added "2.13"
::sm/params schema:permanently-delete-team-files}
{::doc/added "2.12"
::sm/params schema:permanently-delete-team-files
::db/transaction true}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id] :as params}]
(teams/check-edition-permissions! pool profile-id team-id)
(sse/response #(db/tx-run! cfg permanently-delete-team-files params)))
[{:keys [::db/conn]} {:keys [::rpc/profile-id ::rpc/request-at team-id ids]}]
(teams/check-edition-permissions! conn profile-id team-id)
(reduce (fn [acc {:keys [id deleted-at]}]
(wrk/submit! {::db/conn conn
::wrk/task :delete-object
::wrk/params {:object :file
:deleted-at deleted-at
:id id}})
(conj acc id))
#{}
(db/plan conn [sql:delete-team-files request-at team-id
(db/create-array conn "uuid" ids)])))
;; --- MUTATION COMMAND: restore-files-immediatelly
@@ -1204,7 +1268,7 @@
{:keys [files projects]}
(reduce (fn [result {:keys [id project-id]}]
(let [index (-> result :files count)]
(events/tap :progress {:file-id id :index (inc index) :total total-files})
(events/tap :progress {:file-id id :index index :total total-files})
(restore-file conn id)
(-> result
@@ -1227,7 +1291,7 @@
(sv/defmethod ::restore-deleted-team-files
"Removes the deletion mark from the specified files (and respective
projects) on the specified team."
{::doc/added "2.13"
{::doc/added "2.12"
::sse/stream? true
::sm/params schema:restore-deleted-team-files}
[cfg params]

View File

@@ -199,13 +199,15 @@
[cfg {:keys [::rpc/profile-id file-id strip-frames-with-thumbnails] :as params}]
(db/run! cfg (fn [{:keys [::db/conn] :as cfg}]
(files/check-read-permissions! conn profile-id file-id)
(let [team (teams/get-team conn
:profile-id profile-id
:file-id file-id)
file (bfc/get-file cfg file-id
:include-deleted? true
:realize? true
:read-only? true)
strip-frames-with-thumbnails
(or (nil? strip-frames-with-thumbnails) ;; if not present, default to true
(true? strip-frames-with-thumbnails))]
@@ -331,16 +333,12 @@
;; --- MUTATION COMMAND: create-file-thumbnail
(defn- create-file-thumbnail
[{:keys [::db/conn ::sto/storage] :as cfg} {:keys [file-id revn props media] :as params}]
(defn- create-file-thumbnail!
[{:keys [::db/conn ::sto/storage]} {:keys [file-id revn props media] :as params}]
(media/validate-media-type! media)
(media/validate-media-size! media)
(let [file (bfc/get-file cfg file-id
:include-deleted? true
:load-data? false)
props (db/tjson (or props {}))
(let [props (db/tjson (or props {}))
path (:path media)
mtype (:mtype media)
hash (sto/calculate-hash path)
@@ -369,7 +367,7 @@
(db/update! conn :file-thumbnail
{:media-id (:id media)
:deleted-at (:deleted-at file)
:deleted-at nil
:updated-at tnow
:props props}
{:file-id file-id
@@ -380,7 +378,6 @@
:revn revn
:created-at tnow
:updated-at tnow
:deleted-at (:deleted-at file)
:props props
:media-id (:id media)}))
@@ -405,8 +402,6 @@
::rtry/when rtry/conflict-exception?
::sm/params schema:create-file-thumbnail}
;; FIXME: do not run the thumbnail upload inside a transaction
[cfg {:keys [::rpc/profile-id file-id] :as params}]
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
;; TODO For now we check read permissions instead of write,
@@ -414,6 +409,6 @@
;; review this approach in the future.
(files/check-read-permissions! conn profile-id file-id)
(when-not (db/read-only? conn)
(let [media (create-file-thumbnail cfg params)]
(let [media (create-file-thumbnail! cfg params)]
{:uri (files/resolve-public-uri (:id media))
:id (:id media)})))))

View File

@@ -6,7 +6,6 @@
(ns app.rpc.commands.fonts
(:require
[app.binfile.common :as bfc]
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.schema :as sm]
@@ -27,17 +26,7 @@
[app.rpc.helpers :as rph]
[app.rpc.quotes :as quotes]
[app.storage :as sto]
[app.storage.tmp :as tmp]
[app.util.services :as sv]
[datoteka.io :as io])
(:import
java.io.InputStream
java.io.OutputStream
java.io.SequenceInputStream
java.util.Collections))
(set! *warn-on-reflection* true)
[app.util.services :as sv]))
(def valid-weight #{100 200 300 400 500 600 700 800 900 950})
(def valid-style #{"normal" "italic"})
@@ -77,7 +66,7 @@
(uuid? file-id)
(let [file (db/get-by-id conn :file file-id {:columns [:id :project-id]})
project (db/get-by-id conn :project (:project-id file) {:columns [:id :team-id]})
perms (bfc/get-file-permissions conn profile-id file-id share-id)]
perms (files/get-permissions conn profile-id file-id share-id)]
(files/check-read-permissions! perms)
(db/query conn :team-font-variant
{:team-id (:team-id project)
@@ -115,7 +104,7 @@
(defn create-font-variant
[{:keys [::sto/storage ::db/conn]} {:keys [data] :as params}]
(letfn [(generate-missing [data]
(letfn [(generate-missing! [data]
(let [data (media/run {:cmd :generate-fonts :input data})]
(when (and (not (contains? data "font/otf"))
(not (contains? data "font/ttf"))
@@ -126,26 +115,8 @@
:hint "invalid font upload, unable to generate missing font assets"))
data))
(process-chunks [chunks]
(let [tmp (tmp/tempfile :prefix "penpot.tempfont." :suffix "")
streams (map io/input-stream chunks)
streams (Collections/enumeration streams)]
(with-open [^OutputStream output (io/output-stream tmp)
^InputStream input (SequenceInputStream. streams)]
(io/copy input output))
tmp))
(join-chunks [data]
(reduce-kv (fn [data mtype content]
(if (vector? content)
(assoc data mtype (process-chunks content))
data))
data
data))
(prepare-font [data mtype]
(when-let [resource (get data mtype)]
(let [hash (sto/calculate-hash resource)
content (-> (sto/content resource)
(sto/wrap-with-hash hash))]
@@ -184,8 +155,7 @@
:otf-file-id (:id otf)
:ttf-file-id (:id ttf)}))]
(let [data (join-chunks data)
data (generate-missing data)
(let [data (generate-missing! data)
assets (persist-fonts-files! data)
result (insert-font-variant! assets)]
(vary-meta result assoc ::audit/replace-props (update params :data (comp vec keys))))))

View File

@@ -19,7 +19,7 @@
inner join team_profile_rel as tpr on (tpr.team_id = p.team_id)
where tpr.profile_id = ?
and p.team_id = ?
and (p.deleted_at is null)
and (p.deleted_at is null or p.deleted_at > now())
and (tpr.is_admin = true or
tpr.is_owner = true or
tpr.can_edit = true)
@@ -29,7 +29,7 @@
inner join project_profile_rel as ppr on (ppr.project_id = p.id)
where ppr.profile_id = ?
and p.team_id = ?
and (p.deleted_at is null)
and (p.deleted_at is null or p.deleted_at > now())
and (ppr.is_admin = true or
ppr.is_owner = true or
ppr.can_edit = true)
@@ -47,7 +47,7 @@
left join file_thumbnail as ft on (ft.file_id = f.id and ft.revn = f.revn)
inner join projects as pr on (f.project_id = pr.id)
where f.name ilike ('%' || ? || '%')
and (f.deleted_at is null)
and (f.deleted_at is null or f.deleted_at > now())
order by f.created_at asc")
(defn search-files

View File

@@ -13,6 +13,7 @@
[app.config :as cf]
[app.db :as db]
[app.rpc :as-alias rpc]
[app.rpc.commands.files :as files]
[app.rpc.commands.teams :as teams]
[app.rpc.cond :as-alias cond]
[app.rpc.doc :as-alias doc]
@@ -120,7 +121,7 @@
[system {:keys [::rpc/profile-id file-id share-id] :as params}]
(db/run! system
(fn [{:keys [::db/conn] :as system}]
(let [perms (bfc/get-file-permissions conn profile-id file-id share-id)
(let [perms (files/get-permissions conn profile-id file-id share-id)
params (-> params
(assoc ::perms perms)
(assoc :profile-id profile-id))]

View File

@@ -17,7 +17,6 @@
[app.setup.templates]
[buddy.core.codecs :as bc]
[buddy.core.nonce :as bn]
[cuerdas.core :as str]
[integrant.core :as ig]))
(defn- generate-random-key
@@ -89,38 +88,7 @@
(-> (get-all-props conn)
(assoc :secret-key secret)
(assoc :tokens-key (keys/derive secret :salt "tokens"))
(assoc :management-key (keys/derive secret :salt "management"))
(update :instance-id handle-instance-id conn (db/read-only? pool)))))))
(sm/register! ::props [:map-of :keyword ::sm/any])
(defmethod ig/init-key ::shared-keys
[_ {:keys [::props] :as cfg}]
(let [secret (get props :secret-key)]
(d/without-nils
{"exporter"
(let [key (or (get cfg :exporter)
(-> (keys/derive secret :salt "exporter")
(bc/bytes->b64-str true)))]
(if (or (str/empty? key)
(str/blank? key))
(do
(l/wrn :hint "exporter key is disabled because empty string found")
nil)
(do
(l/inf :hint "exporter key initialized" :key (d/obfuscate-string key))
key)))
"nitrate"
(let [key (or (get cfg :nitrate)
(-> (keys/derive secret :salt "nitrate")
(bc/bytes->b64-str true)))]
(if (or (str/empty? key)
(str/blank? key))
(do
(l/wrn :hint "nitrate key is disabled because empty string found")
nil)
(do
(l/inf :hint "nitrate key initialized" :key (d/obfuscate-string key))
key)))})))

View File

@@ -9,35 +9,48 @@
modification of time offset (useful for testing and time adjustments)."
(:require
[app.common.logging :as l]
[app.common.time :as ct]))
[app.common.time :as ct]
[app.setup :as-alias setup]
[integrant.core :as ig])
(:import
java.time.Clock
java.time.Duration
java.time.Instant
java.time.ZoneId))
(defonce state
(atom {}))
(defonce current
(atom {:clock (Clock/systemDefaultZone)
:offset nil}))
(defn assign-offset
"Assign virtual clock offset to a specific user. Is the responsability
of RPC module to properly bind the correct clock to the user
request."
[profile-id duration]
(swap! state (fn [state]
(if (nil? duration)
(dissoc state profile-id)
(assoc state profile-id duration)))))
(defmethod ig/init-key ::setup/clock
[_ _]
(add-watch current ::common
(fn [_ _ _ {:keys [clock offset]}]
(let [clock (if (ct/duration? offset)
(Clock/offset ^Clock clock
^Duration offset)
clock)]
(l/wrn :hint "altering clock" :clock (str clock))
(alter-var-root #'ct/*clock* (constantly clock))))))
(defn get-offset
[profile-id]
(get @state profile-id))
(defn get-clock
[profile-id]
(if-let [offset (get-offset profile-id)]
(ct/offset-clock offset)
(ct/get-system-clock)))
(defmethod ig/halt-key! ::setup/clock
[_ _]
(remove-watch current ::common))
(defn set-global-clock
(defn fixed
"Get fixed clock, mainly used in tests"
[instant]
(Clock/fixed ^Instant (ct/inst instant)
^ZoneId (ZoneId/of "Z")))
(defn set-offset!
[duration]
(swap! current assoc :offset (some-> duration ct/duration)))
(defn set-clock!
([]
(set-global-clock (ct/get-system-clock)))
(swap! current assoc :clock (Clock/systemDefaultZone)))
([clock]
(assert (ct/clock? clock) "expected valid clock instance")
(l/wrn :hint "altering clock" :clock (str clock))
(alter-var-root #'ct/*clock* (constantly clock))))
(when (instance? Clock clock)
(swap! current assoc :clock clock))))
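
A hedged usage sketch for the reworked global clock; it assumes ct/duration and ct/inst accept the map/string forms used elsewhere in these diffs, and that the ::setup/clock watch above is installed:

(require '[app.common.time :as ct]
         '[app.setup.clock :as clock])

;; Shift every ct/now call one hour into the future, then undo it.
(clock/set-offset! (ct/duration {:hours 1}))
(clock/set-offset! nil)

;; Freeze time at a fixed instant (mainly for tests), then restore the system clock.
(clock/set-clock! (clock/fixed (ct/inst "2025-10-31T00:00:00Z")))
(clock/set-clock!)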

View File

@@ -45,8 +45,7 @@
:deleted-at (ct/format-inst deleted-at))
(db/update! conn :file
{:deleted-at deleted-at
:is-shared false}
{:deleted-at deleted-at}
{:id id}
{::db/return-keys false})
@@ -54,7 +53,7 @@
(not *team-deletion*))
;; NOTE: we don't prevent file deletion on absorb operation failure
(try
(db/tx-run! cfg files/absorb-library id)
(db/tx-run! cfg files/absorb-library! id)
(catch Throwable cause
(l/warn :hint "error on absorbing library"
:file-id id

View File

@@ -595,8 +595,8 @@
(px/exec! :virtual #(rcp/write-body-to-stream body nil output))
(into []
(map (fn [{:keys [event data]}]
(d/vec2 (keyword event)
(tr/decode-str data))))
[(keyword event)
(tr/decode-str data)]))
(parse-sse (slurp' input)))
(finally
(.close input)))))

View File

@@ -86,7 +86,7 @@
(t/deftest shared-key-auth
(let [handler (#'app.http.middleware/wrap-shared-key-auth
(fn [req] {::yres/status 200})
{"test1" "secret-key"})]
"secret-key")]
(let [response (handler (->DummyRequest {} {}))]
(t/is (= 403 (::yres/status response))))
@@ -95,9 +95,6 @@
(t/is (= 403 (::yres/status response))))
(let [response (handler (->DummyRequest {"x-shared-key" "secret-key"} {}))]
(t/is (= 403 (::yres/status response))))
(let [response (handler (->DummyRequest {"x-shared-key" "test1 secret-key"} {}))]
(t/is (= 200 (::yres/status response))))))
(t/deftest access-token-authz

View File

@@ -17,6 +17,7 @@
[app.db.sql :as sql]
[app.http :as http]
[app.rpc :as-alias rpc]
[app.setup.clock :as clock]
[app.storage :as sto]
[backend-tests.helpers :as th]
[clojure.test :as t]
@@ -133,7 +134,7 @@
;; this will run pending task triggered by deleting user snapshot
(th/run-pending-tasks!)
(binding [ct/*clock* (ct/fixed-clock (ct/in-future {:days 8}))]
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [res (th/run-task! :objects-gc {})]
;; delete 2 snapshots and 2 file data entries
(t/is (= 4 (:processed res)))))))))

View File

@@ -19,6 +19,7 @@
[app.http :as http]
[app.rpc :as-alias rpc]
[app.rpc.commands.files :as files]
[app.setup.clock :as clock]
[app.storage :as sto]
[backend-tests.helpers :as th]
[clojure.test :as t]
@@ -921,7 +922,7 @@
(t/is (= 0 (:processed result))))
;; run permanent deletion
(binding [ct/*clock* (ct/fixed-clock (ct/in-future {:days 8}))]
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [result (th/run-task! :objects-gc {})]
(t/is (= 3 (:processed result)))))
@@ -1874,7 +1875,7 @@
file-id (uuid/next)
now (ct/inst "2025-10-31T00:00:00Z")]
(binding [ct/*clock* (ct/fixed-clock now)]
(binding [ct/*clock* (clock/fixed now)]
(let [data {::th/type :create-file
::rpc/profile-id (:id prof)
:project-id proj-id
@@ -1920,11 +1921,7 @@
;; (th/print-result! out)
(t/is (nil? (:error out)))
(let [result (:result out)]
(t/is (fn? result))
(let [[ev1 ev2 :as events] (th/consume-sse result)]
(t/is (= 2 (count events)))
(t/is (= (:ids data) (val ev2)))))
(t/is (= (:ids data) result)))
(let [row (th/db-exec-one! ["select * from file where id = ?" file-id])]
(t/is (= (:deleted-at row) now)))))))
@@ -1936,7 +1933,7 @@
file-id (uuid/next)
now (ct/inst "2025-10-31T00:00:00Z")]
(binding [ct/*clock* (ct/fixed-clock now)]
(binding [ct/*clock* (clock/fixed now)]
(let [data {::th/type :create-file
::rpc/profile-id (:id prof)
:project-id proj-id
@@ -1999,7 +1996,7 @@
team-id (:default-team-id profile)
now (ct/inst "2025-10-31T00:00:00Z")]
(binding [ct/*clock* (ct/fixed-clock now)]
(binding [ct/*clock* (clock/fixed now)]
(let [project (th/create-project* 1 {:profile-id (:id profile)
:team-id team-id})
file (th/create-file* 1 {:profile-id (:id profile)

View File

@@ -85,7 +85,7 @@
(t/is (map? (:result out))))
;; run the task again
(let [res (binding [ct/*clock* (ct/fixed-clock (ct/in-future {:minutes 31}))]
(let [res (binding [ct/*clock* (clock/fixed (ct/in-future {:minutes 31}))]
(th/run-task! "storage-gc-touched" {}))]
(t/is (= 2 (:freeze res))))
@@ -136,7 +136,7 @@
(t/is (some? (sto/get-object storage (:media-id row2))))
;; run the task again
(let [res (binding [ct/*clock* (ct/fixed-clock (ct/in-future {:minutes 31}))]
(let [res (binding [ct/*clock* (clock/fixed (ct/in-future {:minutes 31}))]
(th/run-task! :storage-gc-touched {}))]
(t/is (= 1 (:delete res)))
(t/is (= 0 (:freeze res))))
@@ -147,7 +147,7 @@
;; Run the storage gc deleted task, it should permanently delete
;; all storage objects related to the deleted thumbnails
(binding [ct/*clock* (ct/fixed-clock (ct/in-future {:days 8}))]
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [res (th/run-task! :storage-gc-deleted {})]
(t/is (= 1 (:deleted res)))))
@@ -247,7 +247,7 @@
;; Run the storage gc deleted task, it should permanently delete
;; all storage objects related to the deleted thumbnails
(binding [ct/*clock* (ct/fixed-clock (ct/in-future {:days 8}))]
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [result (th/run-task! :storage-gc-deleted {})]
(t/is (= 1 (:deleted result)))))

View File

@@ -12,6 +12,7 @@
[app.db :as db]
[app.http :as http]
[app.rpc :as-alias rpc]
[app.setup.clock :as clock]
[app.storage :as sto]
[backend-tests.helpers :as th]
[clojure.test :as t]
@@ -146,7 +147,7 @@
(t/is (= 0 (:freeze res)))
(t/is (= 0 (:delete res))))
(binding [ct/*clock* (ct/fixed-clock (ct/in-future {:days 8}))]
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [res (th/run-task! :objects-gc {})]
(t/is (= 2 (:processed res))))
@@ -207,7 +208,7 @@
(t/is (= 0 (:freeze res)))
(t/is (= 0 (:delete res))))
(binding [ct/*clock* (ct/fixed-clock (ct/in-future {:days 8}))]
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [res (th/run-task! :objects-gc {})]
(t/is (= 1 (:processed res))))
@@ -267,7 +268,7 @@
(t/is (= 0 (:freeze res)))
(t/is (= 0 (:delete res))))
(binding [ct/*clock* (ct/fixed-clock (ct/in-future {:days 8}))]
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [res (th/run-task! :objects-gc {})]
(t/is (= 1 (:processed res))))

View File

@@ -12,6 +12,7 @@
[app.db :as db]
[app.http :as http]
[app.rpc :as-alias rpc]
[app.setup.clock :as clock]
[backend-tests.helpers :as th]
[clojure.test :as t]))
@@ -227,7 +228,7 @@
(t/is (= 0 (count result)))))
;; run permanent deletion
(binding [ct/*clock* (ct/fixed-clock (ct/in-future {:days 8}))]
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [result (th/run-task! :objects-gc {})]
(t/is (= 1 (:processed result)))))

View File

@@ -13,6 +13,7 @@
[app.db :as db]
[app.http :as http]
[app.rpc :as-alias rpc]
[app.setup.clock :as clock]
[app.storage :as sto]
[app.tokens :as tokens]
[backend-tests.helpers :as th]
@@ -525,7 +526,7 @@
(t/is (= :not-found (:type edata)))))
;; run permanent deletion
(binding [ct/*clock* (ct/fixed-clock (ct/in-future {:days 8}))]
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [result (th/run-task! :objects-gc {})]
(t/is (= 2 (:processed result)))))
@@ -582,7 +583,7 @@
(t/is (= 1 (count rows)))
(t/is (ct/inst? (:deleted-at (first rows)))))
(binding [ct/*clock* (ct/fixed-clock (ct/in-future {:days 8}))]
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [result (th/run-task! :objects-gc {})]
(t/is (= 7 (:processed result)))))))

View File

@@ -11,6 +11,7 @@
[app.common.uuid :as uuid]
[app.db :as db]
[app.rpc :as-alias rpc]
[app.setup.clock :as clock]
[app.storage :as sto]
[backend-tests.helpers :as th]
[clojure.test :as t]
@@ -98,14 +99,14 @@
::sto/expired-at (ct/in-future {:hours 1})
:content-type "text/plain"})]
(binding [ct/*clock* (ct/fixed-clock (ct/in-future {:minutes 0}))]
(binding [ct/*clock* (clock/fixed (ct/in-future {:minutes 0}))]
(let [res (th/run-task! :storage-gc-deleted {})]
(t/is (= 1 (:deleted res)))))
(let [res (th/db-exec-one! ["select count(*) from storage_object;"])]
(t/is (= 2 (:count res))))
(binding [ct/*clock* (ct/fixed-clock (ct/in-future {:minutes 61}))]
(binding [ct/*clock* (clock/fixed (ct/in-future {:minutes 61}))]
(let [res (th/run-task! :storage-gc-deleted {})]
(t/is (= 1 (:deleted res)))))
@@ -330,22 +331,22 @@
:content-type "text/plain"})]
(binding [ct/*clock* (ct/fixed-clock now)]
(binding [ct/*clock* (clock/fixed now)]
(let [res (th/run-task! :storage-gc-touched {})]
(t/is (= 0 (:freeze res)))
(t/is (= 0 (:delete res)))))
(binding [ct/*clock* (ct/fixed-clock (ct/plus now {:minutes 1}))]
(binding [ct/*clock* (clock/fixed (ct/plus now {:minutes 1}))]
(let [res (th/run-task! :storage-gc-touched {})]
(t/is (= 0 (:freeze res)))
(t/is (= 1 (:delete res)))))
(binding [ct/*clock* (ct/fixed-clock (ct/plus now {:hours 1}))]
(binding [ct/*clock* (clock/fixed (ct/plus now {:hours 1}))]
(let [res (th/run-task! :storage-gc-deleted {})]
(t/is (= 0 (:deleted res)))))
(binding [ct/*clock* (ct/fixed-clock (ct/plus now {:hours 2}))]
(binding [ct/*clock* (clock/fixed (ct/plus now {:hours 2}))]
(let [res (th/run-task! :storage-gc-deleted {})]
(t/is (= 0 (:deleted res)))))))

1145
backend/yarn.lock Normal file
View File

File diff suppressed because it is too large

7
common/.gitignore vendored Normal file
View File

@@ -0,0 +1,7 @@
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions

View File

@@ -29,6 +29,7 @@
java-http-clj/java-http-clj {:mvn/version "0.4.3"}
integrant/integrant {:mvn/version "1.0.0"}
funcool/tubax {:mvn/version "2021.05.20-0"}
funcool/cuerdas {:mvn/version "2026.415"}
funcool/promesa
{:git/sha "46048fc0d4bf5466a2a4121f5d52aefa6337f2e8"
@@ -59,7 +60,7 @@
thheller/shadow-cljs {:mvn/version "3.2.0"}
com.clojure-goes-fast/clj-async-profiler {:mvn/version "RELEASE"}
com.bhauman/rebel-readline {:mvn/version "RELEASE"}
criterium/criterium {:mvn/version "0.4.6"}
criterium/criterium {:mvn/version "RELEASE"}
mockery/mockery {:mvn/version "RELEASE"}}
:extra-paths ["test" "dev"]}

View File

@@ -4,7 +4,7 @@
"license": "MPL-2.0",
"author": "Kaleidos INC",
"private": true,
"packageManager": "pnpm@10.26.2+sha512.0e308ff2005fc7410366f154f625f6631ab2b16b1d2e70238444dd6ae9d630a8482d92a451144debc492416896ed16f7b114a86ec68b8404b2443869e68ffda6",
"packageManager": "yarn@4.9.2+sha512.1fc009bc09d13cfd0e19efa44cbfc2b9cf6ca61482725eb35bbc5e257e093ebf4130db6dfe15d604ff4b79efd8e1e8e99b25fa7d0a6197c9f9826358d4d65c3c",
"type": "module",
"repository": {
"type": "git",
@@ -23,9 +23,9 @@
"fmt:clj:check": "cljfmt check --parallel=false src/ test/",
"fmt:clj": "cljfmt fix --parallel=true src/ test/",
"lint:clj": "clj-kondo --parallel=true --lint src/",
"lint": "pnpm run lint:clj",
"lint": "yarn run lint:clj",
"watch:test": "concurrently \"clojure -M:dev:shadow-cljs watch test\" \"nodemon -C -d 2 -w target/tests/ --exec 'node target/tests/test.js'\"",
"build:test": "clojure -M:dev:shadow-cljs compile test",
"test": "pnpm run build:test && node target/tests/test.js"
"test": "yarn run build:test && node target/tests/test.js"
}
}

489
common/pnpm-lock.yaml generated
View File

@@ -1,489 +0,0 @@
lockfileVersion: '9.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
importers:
.:
dependencies:
date-fns:
specifier: ^4.1.0
version: 4.1.0
devDependencies:
concurrently:
specifier: ^9.1.2
version: 9.2.1
nodemon:
specifier: ^3.1.10
version: 3.1.11
source-map-support:
specifier: ^0.5.21
version: 0.5.21
ws:
specifier: ^8.18.2
version: 8.18.3
packages:
ansi-regex@5.0.1:
resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==}
engines: {node: '>=8'}
ansi-styles@4.3.0:
resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==}
engines: {node: '>=8'}
anymatch@3.1.3:
resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==}
engines: {node: '>= 8'}
balanced-match@1.0.2:
resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==}
binary-extensions@2.3.0:
resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==}
engines: {node: '>=8'}
brace-expansion@1.1.12:
resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==}
braces@3.0.3:
resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==}
engines: {node: '>=8'}
buffer-from@1.1.2:
resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==}
chalk@4.1.2:
resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==}
engines: {node: '>=10'}
chokidar@3.6.0:
resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==}
engines: {node: '>= 8.10.0'}
cliui@8.0.1:
resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==}
engines: {node: '>=12'}
color-convert@2.0.1:
resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==}
engines: {node: '>=7.0.0'}
color-name@1.1.4:
resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==}
concat-map@0.0.1:
resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==}
concurrently@9.2.1:
resolution: {integrity: sha512-fsfrO0MxV64Znoy8/l1vVIjjHa29SZyyqPgQBwhiDcaW8wJc2W3XWVOGx4M3oJBnv/zdUZIIp1gDeS98GzP8Ng==}
engines: {node: '>=18'}
hasBin: true
date-fns@4.1.0:
resolution: {integrity: sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg==}
debug@4.4.3:
resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==}
engines: {node: '>=6.0'}
peerDependencies:
supports-color: '*'
peerDependenciesMeta:
supports-color:
optional: true
emoji-regex@8.0.0:
resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==}
escalade@3.2.0:
resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==}
engines: {node: '>=6'}
fill-range@7.1.1:
resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==}
engines: {node: '>=8'}
fsevents@2.3.3:
resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
os: [darwin]
get-caller-file@2.0.5:
resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==}
engines: {node: 6.* || 8.* || >= 10.*}
glob-parent@5.1.2:
resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==}
engines: {node: '>= 6'}
has-flag@3.0.0:
resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==}
engines: {node: '>=4'}
has-flag@4.0.0:
resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==}
engines: {node: '>=8'}
ignore-by-default@1.0.1:
resolution: {integrity: sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==}
is-binary-path@2.1.0:
resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==}
engines: {node: '>=8'}
is-extglob@2.1.1:
resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==}
engines: {node: '>=0.10.0'}
is-fullwidth-code-point@3.0.0:
resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==}
engines: {node: '>=8'}
is-glob@4.0.3:
resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
engines: {node: '>=0.10.0'}
is-number@7.0.0:
resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
engines: {node: '>=0.12.0'}
minimatch@3.1.2:
resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==}
ms@2.1.3:
resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
nodemon@3.1.11:
resolution: {integrity: sha512-is96t8F/1//UHAjNPHpbsNY46ELPpftGUoSVNXwUfMk/qdjSylYrWSu1XavVTBOn526kFiOR733ATgNBCQyH0g==}
engines: {node: '>=10'}
hasBin: true
normalize-path@3.0.0:
resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==}
engines: {node: '>=0.10.0'}
picomatch@2.3.1:
resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==}
engines: {node: '>=8.6'}
pstree.remy@1.1.8:
resolution: {integrity: sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==}
readdirp@3.6.0:
resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==}
engines: {node: '>=8.10.0'}
require-directory@2.1.1:
resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==}
engines: {node: '>=0.10.0'}
rxjs@7.8.2:
resolution: {integrity: sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==}
semver@7.7.3:
resolution: {integrity: sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==}
engines: {node: '>=10'}
hasBin: true
shell-quote@1.8.3:
resolution: {integrity: sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==}
engines: {node: '>= 0.4'}
simple-update-notifier@2.0.0:
resolution: {integrity: sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==}
engines: {node: '>=10'}
source-map-support@0.5.21:
resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==}
source-map@0.6.1:
resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==}
engines: {node: '>=0.10.0'}
string-width@4.2.3:
resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==}
engines: {node: '>=8'}
strip-ansi@6.0.1:
resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==}
engines: {node: '>=8'}
supports-color@5.5.0:
resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==}
engines: {node: '>=4'}
supports-color@7.2.0:
resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==}
engines: {node: '>=8'}
supports-color@8.1.1:
resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==}
engines: {node: '>=10'}
to-regex-range@5.0.1:
resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==}
engines: {node: '>=8.0'}
touch@3.1.1:
resolution: {integrity: sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA==}
hasBin: true
tree-kill@1.2.2:
resolution: {integrity: sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==}
hasBin: true
tslib@2.8.1:
resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==}
undefsafe@2.0.5:
resolution: {integrity: sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==}
wrap-ansi@7.0.0:
resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==}
engines: {node: '>=10'}
ws@8.18.3:
resolution: {integrity: sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==}
engines: {node: '>=10.0.0'}
peerDependencies:
bufferutil: ^4.0.1
utf-8-validate: '>=5.0.2'
peerDependenciesMeta:
bufferutil:
optional: true
utf-8-validate:
optional: true
y18n@5.0.8:
resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==}
engines: {node: '>=10'}
yargs-parser@21.1.1:
resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==}
engines: {node: '>=12'}
yargs@17.7.2:
resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==}
engines: {node: '>=12'}
snapshots:
ansi-regex@5.0.1: {}
ansi-styles@4.3.0:
dependencies:
color-convert: 2.0.1
anymatch@3.1.3:
dependencies:
normalize-path: 3.0.0
picomatch: 2.3.1
balanced-match@1.0.2: {}
binary-extensions@2.3.0: {}
brace-expansion@1.1.12:
dependencies:
balanced-match: 1.0.2
concat-map: 0.0.1
braces@3.0.3:
dependencies:
fill-range: 7.1.1
buffer-from@1.1.2: {}
chalk@4.1.2:
dependencies:
ansi-styles: 4.3.0
supports-color: 7.2.0
chokidar@3.6.0:
dependencies:
anymatch: 3.1.3
braces: 3.0.3
glob-parent: 5.1.2
is-binary-path: 2.1.0
is-glob: 4.0.3
normalize-path: 3.0.0
readdirp: 3.6.0
optionalDependencies:
fsevents: 2.3.3
cliui@8.0.1:
dependencies:
string-width: 4.2.3
strip-ansi: 6.0.1
wrap-ansi: 7.0.0
color-convert@2.0.1:
dependencies:
color-name: 1.1.4
color-name@1.1.4: {}
concat-map@0.0.1: {}
concurrently@9.2.1:
dependencies:
chalk: 4.1.2
rxjs: 7.8.2
shell-quote: 1.8.3
supports-color: 8.1.1
tree-kill: 1.2.2
yargs: 17.7.2
date-fns@4.1.0: {}
debug@4.4.3(supports-color@5.5.0):
dependencies:
ms: 2.1.3
optionalDependencies:
supports-color: 5.5.0
emoji-regex@8.0.0: {}
escalade@3.2.0: {}
fill-range@7.1.1:
dependencies:
to-regex-range: 5.0.1
fsevents@2.3.3:
optional: true
get-caller-file@2.0.5: {}
glob-parent@5.1.2:
dependencies:
is-glob: 4.0.3
has-flag@3.0.0: {}
has-flag@4.0.0: {}
ignore-by-default@1.0.1: {}
is-binary-path@2.1.0:
dependencies:
binary-extensions: 2.3.0
is-extglob@2.1.1: {}
is-fullwidth-code-point@3.0.0: {}
is-glob@4.0.3:
dependencies:
is-extglob: 2.1.1
is-number@7.0.0: {}
minimatch@3.1.2:
dependencies:
brace-expansion: 1.1.12
ms@2.1.3: {}
nodemon@3.1.11:
dependencies:
chokidar: 3.6.0
debug: 4.4.3(supports-color@5.5.0)
ignore-by-default: 1.0.1
minimatch: 3.1.2
pstree.remy: 1.1.8
semver: 7.7.3
simple-update-notifier: 2.0.0
supports-color: 5.5.0
touch: 3.1.1
undefsafe: 2.0.5
normalize-path@3.0.0: {}
picomatch@2.3.1: {}
pstree.remy@1.1.8: {}
readdirp@3.6.0:
dependencies:
picomatch: 2.3.1
require-directory@2.1.1: {}
rxjs@7.8.2:
dependencies:
tslib: 2.8.1
semver@7.7.3: {}
shell-quote@1.8.3: {}
simple-update-notifier@2.0.0:
dependencies:
semver: 7.7.3
source-map-support@0.5.21:
dependencies:
buffer-from: 1.1.2
source-map: 0.6.1
source-map@0.6.1: {}
string-width@4.2.3:
dependencies:
emoji-regex: 8.0.0
is-fullwidth-code-point: 3.0.0
strip-ansi: 6.0.1
strip-ansi@6.0.1:
dependencies:
ansi-regex: 5.0.1
supports-color@5.5.0:
dependencies:
has-flag: 3.0.0
supports-color@7.2.0:
dependencies:
has-flag: 4.0.0
supports-color@8.1.1:
dependencies:
has-flag: 4.0.0
to-regex-range@5.0.1:
dependencies:
is-number: 7.0.0
touch@3.1.1: {}
tree-kill@1.2.2: {}
tslib@2.8.1: {}
undefsafe@2.0.5: {}
wrap-ansi@7.0.0:
dependencies:
ansi-styles: 4.3.0
string-width: 4.2.3
strip-ansi: 6.0.1
ws@8.18.3: {}
y18n@5.0.8: {}
yargs-parser@21.1.1: {}
yargs@17.7.2:
dependencies:
cliui: 8.0.1
escalade: 3.2.0
get-caller-file: 2.0.5
require-directory: 2.1.1
string-width: 4.2.3
y18n: 5.0.8
yargs-parser: 21.1.1

View File

@@ -3,5 +3,5 @@
set -ex
corepack enable;
corepack install;
pnpm install;
pnpm run test;
yarn install;
yarn run test;

View File

@@ -1024,26 +1024,6 @@
:clj
(sort comp-fn items))))
(defn obfuscate-string
"Obfuscates potentially sensitive values.
- One-arg arity:
* For strings shorter than 10 characters, all characters are replaced by `*`.
* For longer strings, the first 5 characters are preserved and the rest obfuscated.
- Two-arg arity accepts a boolean `full?` that, when true, replaces the whole value
by `*`, preserving only the length."
([v]
(obfuscate-string v false))
([v full?]
(let [s (str v)
n (count s)]
(cond
(zero? n) s
full? (apply str (repeat n "*"))
(< n 10) (apply str (repeat n "*"))
:else (str (subs s 0 5)
(apply str (repeat (- n 5) "*")))))))
(defn reorder
"Reorder a vector by moving one of their items from some position to some space between positions.
It clamps the position numbers to a valid range."
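
For reference, the behavior documented in the removed obfuscate-string docstring, sketched as REPL calls; the sample values are hypothetical and only the arities described above are assumed:

(obfuscate-string "secret")                ;; => "******"            (< 10 chars: fully masked)
(obfuscate-string "my-client-secret")      ;; => "my-cl***********"  (first 5 chars preserved)
(obfuscate-string "my-client-secret" true) ;; => "****************"  (full? masks everything, keeping only the length)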

View File

@@ -10,20 +10,16 @@
(:refer-clojure :exclude [instance?])
(:require
#?(:clj [clojure.stacktrace :as strace])
[app.common.data :refer [obfuscate-string]]
[app.common.pprint :as pp]
[app.common.schema :as sm]
[clojure.core :as c]
[clojure.spec.alpha :as s]
[cuerdas.core :as str])
[cuerdas.core :as str]
[expound.alpha :as expound])
#?(:clj
(:import
clojure.lang.IPersistentMap)))
(def ^:private sensitive-fields
"Keys whose values must be obfuscated in validation explains."
#{:password :old-password :token :invitation-token})
#?(:clj (set! *warn-on-reflection* true))
(def ^:dynamic *data-length* 8)
@@ -114,26 +110,15 @@
(contains? data :explain))
(explain (:explain data) opts)
(contains? data ::sm/explain)
(let [exp (::sm/explain data)
sanitize-map (fn sanitize-map [m]
(reduce-kv
(fn [acc k v]
(let [k* (if (string? k) (keyword k) k)]
(cond
(contains? sensitive-fields k*)
(assoc acc k (if (map? v)
(sanitize-map v)
(obfuscate-string v true)))
(and (contains? data ::s/problems)
(contains? data ::s/value)
(contains? data ::s/spec))
(binding [s/*explain-out* expound/printer]
(with-out-str
(s/explain-out (update data ::s/problems #(take (:length opts 10) %)))))
(map? v) (assoc acc k (sanitize-map v))
:else (assoc acc k v))))
{}
m))
sanitize-explain (fn [exp]
(cond-> exp
(:value exp) (update :value sanitize-map)))]
(sm/humanize-explain (sanitize-explain exp) opts))))
(contains? data ::sm/explain)
(sm/humanize-explain (::sm/explain data) opts)))
#?(:clj
(defn format-throwable
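
For reference, a minimal self-contained sketch of the sensitive-field sanitization removed above (obfuscate* is a stand-in for the removed obfuscate-string helper; key names are taken from the removed code):

(def ^:private sensitive-fields
  #{:password :old-password :token :invitation-token})

(defn- obfuscate* [v]
  ;; replace the whole value with '*', preserving only its length
  (apply str (repeat (count (str v)) "*")))

(defn sanitize-map [m]
  (reduce-kv (fn [acc k v]
               (cond
                 (contains? sensitive-fields (if (string? k) (keyword k) k))
                 (assoc acc k (if (map? v) (sanitize-map v) (obfuscate* v)))

                 (map? v) (assoc acc k (sanitize-map v))
                 :else    (assoc acc k v)))
             {}
             m))

(sanitize-map {:email "a@b.c" :password "hunter2"})
;; => {:email "a@b.c", :password "*******"}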

View File

@@ -526,25 +526,20 @@
ids))
(defn clean-loops
"Clean a list of ids from circular references. Optimized fast-path for single selections."
"Clean a list of ids from circular references."
[objects ids]
(if (<= (count ids) 1)
;; For single selection, there can't be circularity; return as ordered-set.
(into (d/ordered-set) ids)
(let [ids-set (if (set? ids) ids (set ids))
parent-selected?
(fn [id]
;; Stop early as soon as we find any selected parent
(let [parents (get-parent-ids objects id)]
(some #(contains? ids-set %) parents)))
(let [parent-selected?
(fn [id]
(let [parents (get-parent-ids objects id)]
(some ids parents)))
add-element
(fn [result id]
(cond-> result
(not (parent-selected? id))
(conj id)))]
add-element
(fn [result id]
(cond-> result
(not (parent-selected? id))
(conj id)))]
(reduce add-element (d/ordered-set) ids))))
(reduce add-element (d/ordered-set) ids)))
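A minimal usage sketch for clean-loops; it assumes objects is the usual id->shape map and that get-parent-ids (not shown in this hunk) walks the :parent-id links:
;; Sketch only; assumes [app.common.uuid :as uuid] is available
(comment
  (let [frame-id (uuid/next)
        rect-id  (uuid/next)
        objects  {frame-id {:id frame-id :parent-id uuid/zero :frame-id uuid/zero}
                  rect-id  {:id rect-id  :parent-id frame-id  :frame-id frame-id}}]
    ;; rect-id's parent is also selected, so only the parent survives
    (clean-loops objects #{frame-id rect-id}))
  ;; => ordered set containing only frame-id

  ;; single-element selections take the fast path and come back untouched
  (clean-loops {} #{(uuid/next)}))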
(defn- indexed-shapes
"Retrieves a vector with the indexes for each element in the layer

View File

@@ -62,7 +62,6 @@
#{:audit-log
:audit-log-archive
:audit-log-gc
:audit-log-logger
:auto-file-snapshot
;; enables the `/api/doc` endpoint that lists all the rpc methods available.
:backend-api-doc
@@ -135,8 +134,6 @@
:subscriptions
:subscriptions-old
:inspect-styles
;; Enable performance logs in devconsole (disabled by default)
:perf-logs
;; Security layer middleware that filters requests by fetch
;; metadata headers
@@ -172,7 +169,6 @@
:enable-component-thumbnails
:enable-render-wasm-dpr
:enable-token-color
:enable-token-shadow
:enable-inspect-styles
:enable-feature-fdata-objects-map])

View File

@@ -75,25 +75,20 @@
#?(:cljs
(defn ->clj
[o & {:keys [key-fn val-fn recursive] :or {key-fn read-kebab-key val-fn identity recursive true}}]
[o & {:keys [key-fn val-fn] :or {key-fn read-kebab-key val-fn identity}}]
(let [f (fn this-fn [x]
(let [x (val-fn x)]
(cond
(array? x)
(persistent!
(.reduce ^js/Array x
#(conj! %1 (if recursive
(this-fn %2)
%2))
#(conj! %1 (this-fn %2))
(transient [])))
(identical? (type x) js/Object)
(persistent!
(.reduce ^js/Array (js-keys x)
#(assoc! %1 (key-fn %2)
(if recursive
(this-fn (unchecked-get x %2))
(unchecked-get x %2)))
#(assoc! %1 (key-fn %2) (this-fn (unchecked-get x %2)))
(transient {})))
:else

View File

@@ -43,6 +43,8 @@
"
#?(:cljs (:require-macros [app.common.logging :as l]))
(:require
#?(:clj [clojure.edn :as edn]
:cljs [cljs.reader :as edn])
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.pprint :as pp]

View File

@@ -21,6 +21,7 @@
[app.common.logic.shapes :as cls]
[app.common.logic.variant-properties :as clvp]
[app.common.path-names :as cpn]
[app.common.spec :as us]
[app.common.types.component :as ctk]
[app.common.types.components-list :as ctkl]
[app.common.types.container :as ctn]
@@ -38,7 +39,8 @@
[app.common.types.typography :as cty]
[app.common.types.variant :as ctv]
[app.common.uuid :as uuid]
[clojure.set :as set]))
[clojure.set :as set]
[clojure.spec.alpha :as s]))
;; Change this to :info :debug or :trace to debug this module, or :warn to reset to default
(log/set-level! :warn)
@@ -475,10 +477,10 @@
If an asset id is given, only shapes linked to this particular asset will
be synchronized."
[changes file-id asset-type asset-id library-id libraries current-file-id]
(assert (contains? #{:colors :components :typographies} asset-type))
(assert (or (nil? asset-id) (uuid? asset-id)))
(assert (uuid? file-id))
(assert (uuid? library-id))
(s/assert #{:colors :components :typographies} asset-type)
(s/assert (s/nilable ::us/uuid) asset-id)
(s/assert ::us/uuid file-id)
(s/assert ::us/uuid library-id)
(container-log :info asset-id
:msg "Sync file with library"
@@ -512,10 +514,10 @@
If an asset id is given, only shapes linked to this particular asset will
be synchronized."
[changes file-id asset-type asset-id library-id libraries current-file-id]
(assert (contains? #{:colors :components :typographies} asset-type))
(assert (or (nil? asset-id) (uuid? asset-id)))
(assert (uuid? file-id))
(assert (uuid? library-id))
(s/assert #{:colors :components :typographies} asset-type)
(s/assert (s/nilable ::us/uuid) asset-id)
(s/assert ::us/uuid file-id)
(s/assert ::us/uuid library-id)
(container-log :info asset-id
:msg "Sync local components with library"
@@ -2491,13 +2493,11 @@
(ctk/get-swap-slot))
(constantly false))
;; In cases where the swapped shape was the first element of a masked group, it would make the group lose the
;; mask property as part of the sanitization check in generate-delete-shapes; passing "ignore-mask" prevents this
[all-parents changes]
(-> changes
(cls/generate-delete-shapes
file page objects (d/ordered-set (:id shape))
{:allow-altering-copies true :ignore-children-fn ignore-swapped-fn :ignore-mask true :ignore-flows-for #{(:id shape)}}))
{:allow-altering-copies true :ignore-children-fn ignore-swapped-fn}))
[new-shape changes]
(-> changes
(generate-new-shape-for-swap shape file page libraries id-new-component index target-cell keep-props-values))]
@@ -2867,15 +2867,13 @@
ids-map (into {} (map #(vector % (uuid/next))) all-ids)
;; If there is an alt-duplication we change to root
;; For variants so the copy is made as a child of root
;; If there is an alt-duplication of a variant, change its parent to root
;; so the copy is made as a child of root
;; This is because a variant-container can't contain a copy
;; For other shapes, this way the layout won't be changed when duplicated,
;; and if you move it outside, the layout will not change
shapes (map (fn [shape]
(cond-> shape
alt-duplication?
(assoc :parent-id uuid/zero :frame-id uuid/zero)))
(if (and alt-duplication? (ctk/is-variant? shape))
(assoc shape :parent-id uuid/zero :frame-id nil)
shape))
shapes)

View File

@@ -123,12 +123,8 @@
;; ignore-children-fn is used to ignore some descendants
;; during the deletion process. It should receive a shape and
;; return a boolean
ignore-children-fn
ignore-mask
ignore-flows-for]
:or {ignore-children-fn (constantly false)
ignore-mask false
ignore-flows-for #{}}}]
ignore-children-fn]
:or {ignore-children-fn (constantly false)}}]
(let [objects (pcb/get-objects changes)
data (pcb/get-library-data changes)
page-id (pcb/get-page-id changes)
@@ -166,20 +162,18 @@
lookup (d/getf objects)
groups-to-unmask
(when-not ignore-mask
(reduce (fn [group-ids id]
;; When the shape to delete is the mask of a masked group,
;; the mask condition must be removed, and it must be
;; converted to a normal group.
(let [obj (lookup id)
parent (lookup (:parent-id obj))]
(if (and (:masked-group parent)
(= id (first (:shapes parent))))
(conj group-ids (:id parent))
group-ids)))
#{}
ids-to-delete)
[])
(reduce (fn [group-ids id]
;; When the shape to delete is the mask of a masked group,
;; the mask condition must be removed, and it must be
;; converted to a normal group.
(let [obj (lookup id)
parent (lookup (:parent-id obj))]
(if (and (:masked-group parent)
(= id (first (:shapes parent))))
(conj group-ids (:id parent))
group-ids)))
#{}
ids-to-delete)
interacting-shapes
(filter (fn [shape]
@@ -196,8 +190,7 @@
(->> (:flows page)
(reduce
(fn [changes [id flow]]
(if (and (id-to-delete? (:starting-frame flow))
(not (contains? ignore-flows-for (:starting-frame flow))))
(if (id-to-delete? (:starting-frame flow))
(-> changes
(pcb/with-page page)
(pcb/set-flow id nil))

View File

@@ -132,94 +132,3 @@ Some naming conventions:
(if-let [last-period (str/last-index-of s ".")]
[(subs s 0 (inc last-period)) (subs s (inc last-period))]
[s ""]))
;; Tree building functions --------------------------------------------------
"Build tree structure from flat list of paths"
"`build-tree-root` is the main function to build the tree."
"Receives a list of segments with 'name' properties representing paths,
and a separator string."
"E.g segments = [{... :name 'one/two/three'} {... :name 'one/two/four'} {... :name 'one/five'}]"
"Transforms into a tree structure like:
[{:name 'one'
:path 'one'
:depth 0
:leaf nil
:children-fn (fn [] [{:name 'two'
:path 'one.two'
:depth 1
:leaf nil
:children-fn (fn [] [{... :name 'three'} {... :name 'four'}])}
{:name 'five'
:path 'one.five'
:depth 1
:leaf {... :name 'five'}
...}])}]"
(defn- sort-by-children
"Sorts segments so that those with children come first."
[segments separator]
(sort-by (fn [segment]
(let [path (split-path (:name segment) :separator separator)
path-length (count path)]
(if (= path-length 1)
1
0)))
segments))
(defn- group-by-first-segment
"Groups segments by their first path segment and update segment name."
[segments separator]
(reduce (fn [acc segment]
(let [[first-segment & remaining-segments] (split-path (:name segment) :separator separator)
rest-path (when (seq remaining-segments) (join-path remaining-segments :separator separator :with-spaces? false))]
(update acc first-segment (fnil conj [])
(if rest-path
(assoc segment :name rest-path)
segment))))
{}
segments))
(defn- sort-and-group-segments
"Sorts elements and groups them by their first path segment."
[segments separator]
(let [sorted (sort-by-children segments separator)
grouped (group-by-first-segment sorted separator)]
grouped))
(defn- build-tree-node
"Builds a single tree node with lazy children."
[segment-name remaining-segments separator parent-path depth]
(let [current-path (if parent-path
(str parent-path "." segment-name)
segment-name)
is-leaf? (and (seq remaining-segments)
(every? (fn [segment]
(let [remaining-segment-name (first (split-path (:name segment) :separator separator))]
(= segment-name remaining-segment-name)))
remaining-segments))
leaf-segment (when is-leaf? (first remaining-segments))
node {:name segment-name
:path current-path
:depth depth
:leaf leaf-segment
:children-fn (when-not is-leaf?
(fn []
(let [grouped-elements (sort-and-group-segments remaining-segments separator)]
(mapv (fn [[child-segment-name remaining-child-segments]]
(build-tree-node child-segment-name remaining-child-segments separator current-path (inc depth)))
grouped-elements))))}]
node))
(defn build-tree-root
"Builds the root level of the tree."
[segments separator]
(let [grouped-elements (sort-and-group-segments segments separator)]
(mapv (fn [[segment-name remaining-segments]]
(build-tree-node segment-name remaining-segments separator nil 0))
grouped-elements)))
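A usage sketch of the tree helpers removed in this hunk, matching the example from the docstring above; note that :children-fn is a thunk, so nested levels are only realized when it is invoked:
(comment
  (let [segments [{:name "one/two/three"} {:name "one/two/four"} {:name "one/five"}]
        roots    (build-tree-root segments "/")]
    (map :name roots)                          ;; => ("one")
    (:path (first roots))                      ;; => "one"
    (map :name ((:children-fn (first roots)))) ;; => ("two" "five")
    ))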

View File

@@ -8,8 +8,6 @@
(:refer-clojure :exclude [deref merge parse-uuid parse-long parse-double parse-boolean type keys])
#?(:cljs (:require-macros [app.common.schema :refer [ignoring]]))
(:require
#?(:clj [malli.dev.pretty :as mdp])
#?(:clj [malli.dev.virhe :as v])
[app.common.data :as d]
[app.common.math :as mth]
[app.common.pprint :as pp]
@@ -21,6 +19,8 @@
[clojure.core :as c]
[cuerdas.core :as str]
[malli.core :as m]
[malli.dev.pretty :as mdp]
[malli.dev.virhe :as v]
[malli.error :as me]
[malli.generator :as mg]
[malli.registry :as mr]
@@ -245,30 +245,27 @@
:level (d/nilv level 8)
:length (d/nilv length 12)})))))
#?(:clj
(defmethod v/-format ::schemaless-explain
[_ explanation printer]
{:body [:group
(v/-block "Value" (v/-visit (me/error-value explanation printer) printer) printer) :break :break
(v/-block "Errors" (v/-visit (me/humanize (me/with-spell-checking explanation)) printer) printer)]}))
(defmethod v/-format ::schemaless-explain
[_ explanation printer]
{:body [:group
(v/-block "Value" (v/-visit (me/error-value explanation printer) printer) printer) :break :break
(v/-block "Errors" (v/-visit (me/humanize (me/with-spell-checking explanation)) printer) printer)]})
#?(:clj
(defmethod v/-format ::explain
[_ {:keys [schema] :as explanation} printer]
{:body [:group
(v/-block "Value" (v/-visit (me/error-value explanation printer) printer) printer) :break :break
(v/-block "Errors" (v/-visit (me/humanize (me/with-spell-checking explanation)) printer) printer) :break :break
(v/-block "Schema" (v/-visit schema printer) printer)]}))
(defmethod v/-format ::explain
[_ {:keys [schema] :as explanation} printer]
{:body [:group
(v/-block "Value" (v/-visit (me/error-value explanation printer) printer) printer) :break :break
(v/-block "Errors" (v/-visit (me/humanize (me/with-spell-checking explanation)) printer) printer) :break :break
(v/-block "Schema" (v/-visit schema printer) printer)]})
#?(:clj
(defn pretty-explain
"A helper that allows print a console-friendly output for the explain;
should not be used for other purposes"
[explain & {:keys [variant message]
:or {variant ::explain
message "Validation Error"}}]
(let [explain (fn [] (me/with-error-messages explain))]
((mdp/prettifier variant message explain default-options)))))
(defn pretty-explain
"A helper that allows print a console-friendly output for the
explain; should not be used for other purposes"
[explain & {:keys [variant message]
:or {variant ::explain
message "Validation Error"}}]
(let [explain (fn [] (me/with-error-messages explain))]
((mdp/prettifier variant message explain default-options))))
(defmacro ignoring
[expr]
@@ -315,13 +312,6 @@
::explain explain}))))
value))))
(defn coercer
[schema & {:as opts}]
(let [decode-fn (lazy-decoder schema json-transformer)
check-fn (check-fn schema opts)]
(fn [data]
(-> data decode-fn check-fn))))
(defn check
"A helper intended to be used on assertions for validate/check the
schema over provided data. Raises an assertion exception.
@@ -1016,9 +1006,6 @@
(def valid-safe-number?
(lazy-validator ::safe-number))
(def valid-safe-int?
(lazy-validator ::safe-int))
(def valid-text?
(validator ::text))

View File

@@ -64,33 +64,8 @@
java.time.temporal.TemporalAmount
java.time.temporal.TemporalUnit)))
(declare inst)
#?(:clj (def ^:dynamic *clock* (Clock/systemDefaultZone)))
#?(:clj
(defn clock?
[o]
(instance? Clock o)))
#?(:clj
(defn get-system-clock
[]
(Clock/systemDefaultZone)))
#?(:clj
(defn offset-clock
[offset]
(Clock/offset ^Clock (Clock/systemDefaultZone) ^Duration offset)))
#?(:clj
(defn fixed-clock
[instant]
(Clock/fixed ^Instant (inst instant)
^ZoneId (ZoneId/of "Z"))))
(defn now
[]
#?(:clj (Instant/now *clock*)
@@ -365,7 +340,7 @@
(dfn-diff t2 t1)))
#?(:cljs
(defn set-default-locale
(defn set-default-locale!
[locale]
(when-let [locale (unchecked-get locales locale)]
(dfn-set-default-options #js {:locale locale}))))

View File

@@ -140,8 +140,7 @@
:layout-item-min-w
:layout-item-absolute
:layout-item-z-index
:layout-item-align-self
:interactions})
:layout-item-align-self})
(defn component-attr?
"Check if some attribute is one that is involved in component syncrhonization.

View File

@@ -269,8 +269,8 @@
"Remove flex children properties except the fit-content for flex layouts. These are properties
that we don't have to propagate to copies but will be respected when swapping components"
[shape]
(let [layout-item-h-sizing (when (and (ctl/any-layout? shape) (ctl/auto-width? shape)) :auto)
layout-item-v-sizing (when (and (ctl/any-layout? shape) (ctl/auto-height? shape)) :auto)]
(let [layout-item-h-sizing (when (and (ctl/flex-layout? shape) (ctl/auto-width? shape)) :auto)
layout-item-v-sizing (when (and (ctl/flex-layout? shape) (ctl/auto-height? shape)) :auto)]
(-> shape
(d/without-keys ctk/swap-keep-attrs)
(cond-> (some? layout-item-h-sizing)

View File

@@ -112,10 +112,8 @@
(:c2y params) (update-in [index :params :c2y] + (:c2y params)))
content))]
(if (some? modifiers)
(impl/path-data
(reduce apply-to-index (vec content) modifiers))
content)))
(impl/path-data
(reduce apply-to-index (vec content) modifiers))))
(defn transform-content
"Applies a transformation matrix over content and returns a new

View File

@@ -1,29 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.common.types.project
(:require
[app.common.schema :as sm]
[app.common.time :as cm]))
(def schema:project
[:map {:title "Profile"}
[:id ::sm/uuid]
[:created-at {:optional true} ::cm/inst]
[:modified-at {:optional true} ::cm/inst]
[:name :string]
[:is-default {:optional true} ::sm/boolean]
[:is-pinned {:optional true} ::sm/boolean]
[:count {:optional true} ::sm/int]
[:total-count {:optional true} ::sm/int]
[:team-id ::sm/uuid]])
(def valid-project?
(sm/lazy-validator schema:project))
(def check-project
(sm/check-fn schema:project))
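A quick validation sketch for the removed schema:project; it assumes [app.common.uuid :as uuid] is required alongside this namespace. Only :id, :name and :team-id are required keys, the rest are optional:
(comment
  (valid-project? {:id (uuid/next) :name "My project" :team-id (uuid/next)}) ;; => true
  (valid-project? {:name "missing ids"}))                                    ;; => false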

View File

@@ -47,18 +47,6 @@
self-reference? (get token-references token-name)]
self-reference?))
(defn references-token?
"Recursively check if a value references the token name. Handles strings, maps, and sequences."
[value token-name]
(cond
(string? value)
(boolean (some #(= % token-name) (find-token-value-references value)))
(map? value)
(some true? (map #(references-token? % token-name) (vals value)))
(sequential? value)
(some true? (map #(references-token? % token-name) value))
:else false))
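A sketch of the removed references-token? predicate; it assumes find-token-value-references extracts {token.name}-style references from a string, which is the convention the rename helper further below relies on:
(comment
  (references-token? "{color.primary}" "color.primary")          ;; => true
  (references-token? {:color "{color.primary}"} "color.primary") ;; => true (map values are walked)
  (references-token? ["4px" "{spacing.sm}"] "spacing.sm")        ;; => true (sequences too)
  (references-token? "a plain value" "color.primary"))           ;; => false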
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; SCHEMA
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -71,7 +59,6 @@
:dimensions "dimension"
:font-family "fontFamilies"
:font-size "fontSizes"
:font-weight "fontWeights"
:letter-spacing "letterSpacing"
:number "number"
:opacity "opacity"
@@ -83,6 +70,7 @@
:stroke-width "borderWidth"
:text-case "textCase"
:text-decoration "textDecoration"
:font-weight "fontWeights"
:typography "typography"})
(def dtcg-token-type->token-type
@@ -474,8 +462,8 @@
:height #{:sizing :dimensions}
:max-width #{:sizing :dimensions}
:max-height #{:sizing :dimensions}
:x #{:dimensions}
:y #{:dimensions}
:x #{:spacing :dimensions}
:y #{:spacing :dimensions}
:rotation #{:number :rotation}
:border-radius #{:border-radius :dimensions}
:row-gap #{:spacing :dimensions}
@@ -487,8 +475,6 @@
:vertical-margin #{:spacing :dimensions}
:sided-margins #{:spacing :dimensions}
:line-height #{:line-height :number}
:opacity #{:opacity}
:stroke-width #{:stroke-width :dimensions}
:font-size #{:font-size}
:letter-spacing #{:letter-spacing}
:fill #{:color}
@@ -572,18 +558,3 @@
"Predicate if a shadow composite token is a reference value - a string pointing to another reference token."
[token-value]
(string? token-value))
(defn update-token-value-references
"Recursively update token references within a token value, supporting complex token values (maps, sequences, strings)."
[value old-name new-name]
(cond
(string? value)
(str/replace value
(re-pattern (str "\\{" (str/replace old-name "." "\\.") "\\}"))
(str "{" new-name "}"))
(map? value)
(d/update-vals value #(update-token-value-references % old-name new-name))
(sequential? value)
(mapv #(update-token-value-references % old-name new-name) value)
:else
value))
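And a sketch of the removed update-token-value-references, which rewrites {old-name} references to {new-name} anywhere inside a token value:
(comment
  (update-token-value-references "{color.primary}" "color.primary" "color.brand")
  ;; => "{color.brand}"
  (update-token-value-references {:color "{color.primary}" :blur "4px"} "color.primary" "color.brand")
  ;; => {:color "{color.brand}", :blur "4px"}
  (update-token-value-references ["{spacing.sm}" "8px"] "spacing.sm" "spacing.xs")
  ;; => ["{spacing.xs}" "8px"]
  )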

View File

@@ -909,8 +909,7 @@ Will return a value that matches this schema:
`:all` All of the nested sets are active
`:partial` Mixed active state of nested sets")
(get-tokens-in-active-sets [_] "set of set names that are active in the active themes")
(get-all-tokens [_] "all tokens in the lib, as a sequence")
(get-all-tokens-map [_] "all tokens in the lib, as a map name -> token")
(get-all-tokens [_] "all tokens in the lib")
(get-tokens [_ set-id] "return a map of tokens in the set, indexed by token-name"))
(declare parse-multi-set-dtcg-json)
@@ -1307,10 +1306,6 @@ Will return a value that matches this schema:
tokens))
(get-all-tokens [this]
(mapcat #(vals (get-tokens- %))
(get-sets this)))
(get-all-tokens-map [this]
(reduce
(fn [tokens' set]
(into tokens' (map (fn [x] [(:name x) x]) (vals (get-tokens- set)))))
@@ -1550,7 +1545,7 @@ Will return a value that matches this schema:
(and (not (contains? decoded-json "$metadata"))
(not (contains? decoded-json "$themes"))))
(defn convert-dtcg-font-family
(defn- convert-dtcg-font-family
"Convert font-family token value from DTCG format to internal format.
- If value is a string, split it into a collection of font families
- If value is already an array, keep it as is
@@ -1561,7 +1556,7 @@ Will return a value that matches this schema:
(sequential? value) value
:else value))
(defn convert-dtcg-typography-composite
(defn- convert-dtcg-typography-composite
"Convert typography token value keys from DTCG format to internal format."
[value]
(if (map? value)
@@ -1573,17 +1568,17 @@ Will return a value that matches this schema:
;; Reference value
value))
(defn convert-dtcg-shadow-composite
(defn- convert-dtcg-shadow-composite
"Convert shadow token value from DTCG format to internal format."
[value]
(let [process-shadow (fn [shadow]
(if (map? shadow)
(let [legacy-shadow-type (get "type" shadow)]
(-> shadow
(set/rename-keys {"x" :offset-x
"offsetX" :offset-x
"y" :offset-y
"offsetY" :offset-y
(set/rename-keys {"x" :offsetX
"offsetX" :offsetX
"y" :offsetY
"offsetY" :offsetY
"blur" :blur
"spread" :spread
"color" :color
@@ -1594,7 +1589,7 @@ Will return a value that matches this schema:
(= "false" %) false
(= legacy-shadow-type "innerShadow") true
:else false))
(select-keys [:offset-x :offset-y :blur :spread :color :inset])))
(select-keys [:offsetX :offsetY :blur :spread :color :inset])))
shadow))]
(cond
;; Reference value - keep as string
@@ -1865,8 +1860,8 @@ Will return a value that matches this schema:
(mapv (fn [shadow]
(if (map? shadow)
(-> shadow
(set/rename-keys {:offset-x "offsetX"
:offset-y "offsetY"
(set/rename-keys {:offsetX "offsetX"
:offsetY "offsetY"
:blur "blur"
:spread "spread"
:color "color"

View File

@@ -14,8 +14,7 @@
(defn parse
[data]
(cond
(or (str/starts-with? data "%")
(= data "develop"))
(str/starts-with? data "%")
{:full "develop"
:branch "develop"
:base "0.0.0"

View File

@@ -1897,15 +1897,15 @@
(let [token (ctob/get-token-by-name lib "shadow-test" "test.shadow-single")]
(t/is (some? token))
(t/is (= :shadow (:type token)))
(t/is (= [{:offset-x "0", :offset-y "2px", :blur "4px", :spread "0", :color "#000", :inset false}]
(t/is (= [{:offsetX "0", :offsetY "2px", :blur "4px", :spread "0", :color "#000", :inset false}]
(:value token)))))
(t/testing "multiple shadow token"
(let [token (ctob/get-token-by-name lib "shadow-test" "test.shadow-multiple")]
(t/is (some? token))
(t/is (= :shadow (:type token)))
(t/is (= [{:offset-x "0", :offset-y "2px", :blur "4px", :spread "0", :color "#000", :inset true}
{:offset-x "0", :offset-y "8px", :blur "16px", :spread "0", :color "#000", :inset true}]
(t/is (= [{:offsetX "0", :offsetY "2px", :blur "4px", :spread "0", :color "#000", :inset true}
{:offsetX "0", :offsetY "8px", :blur "16px", :spread "0", :color "#000", :inset true}]
(:value token)))))
(t/testing "shadow token with reference"
@@ -1918,7 +1918,7 @@
(let [token (ctob/get-token-by-name lib "shadow-test" "test.shadow-with-type")]
(t/is (some? token))
(t/is (= :shadow (:type token)))
(t/is (= [{:offset-x "0", :offset-y "4px", :blur "8px", :spread "0", :color "rgba(0,0,0,0.2)", :inset false}]
(t/is (= [{:offsetX "0", :offsetY "4px", :blur "8px", :spread "0", :color "rgba(0,0,0,0.2)", :inset false}]
(:value token)))))
(t/testing "shadow token with description"
@@ -1937,14 +1937,14 @@
(ctob/make-token
{:name "shadow.single"
:type :shadow
:value [{:offset-x "0" :offset-y "2px" :blur "4px" :spread "0" :color "#0000001A"}]
:value [{:offsetX "0" :offsetY "2px" :blur "4px" :spread "0" :color "#0000001A"}]
:description "A single shadow"})
"shadow.multiple"
(ctob/make-token
{:name "shadow.multiple"
:type :shadow
:value [{:offset-x "0" :offset-y "2px" :blur "4px" :spread "0" :color "#0000001A"}
{:offset-x "0" :offset-y "8px" :blur "16px" :spread "0" :color "#0000001A"}]})
:value [{:offsetX "0" :offsetY "2px" :blur "4px" :spread "0" :color "#0000001A"}
{:offsetX "0" :offsetY "8px" :blur "16px" :spread "0" :color "#0000001A"}]})
"shadow.ref"
(ctob/make-token
{:name "shadow.ref"
@@ -1991,7 +1991,7 @@
(ctob/make-token
{:name "shadow.test"
:type :shadow
:value [{:offset-x "1" :offset-y "1" :blur "1" :spread "1" :color "red" :inset true}]
:value [{:offsetX "1" :offsetY "1" :blur "1" :spread "1" :color "red" :inset true}]
:description "Round trip test"})
"shadow.ref"
(ctob/make-token

common/yarn.lock (new file, 1291 lines)
View File

File diff suppressed because it is too large

View File

@@ -25,6 +25,48 @@ RUN set -ex; \
binutils \
build-essential autoconf libtool pkg-config
################################################################################
## IMAGE MAGICK
################################################################################
FROM base AS build-imagemagick
ENV IMAGEMAGICK_VERSION=7.1.1-47 \
DEBIAN_FRONTEND=noninteractive
RUN set -ex; \
apt-get -qq update; \
apt-get -qq upgrade; \
apt-get -qqy --no-install-recommends install \
libltdl-dev \
libpng-dev \
libjpeg-dev \
libtiff-dev \
libwebp-dev \
libopenexr-dev \
libfftw3-dev \
libzip-dev \
liblcms2-dev \
liblzma-dev \
libzstd-dev \
libheif-dev \
librsvg2-dev \
; \
rm -rf /var/lib/apt/lists/*
RUN set -eux; \
curl -LfsSo /tmp/magick.tar.gz https://github.com/ImageMagick/ImageMagick/archive/refs/tags/${IMAGEMAGICK_VERSION}.tar.gz; \
mkdir -p /tmp/magick; \
cd /tmp/magick; \
tar -xf /tmp/magick.tar.gz --strip-components=1; \
./configure --prefix=/opt/imagick; \
make -j 2; \
make install; \
rm -rf /opt/imagick/lib/libMagick++*; \
rm -rf /opt/imagick/include; \
rm -rf /opt/imagick/share;
################################################################################
## NODE SETUP
################################################################################
@@ -375,7 +417,7 @@ ENV LANG='C.UTF-8' \
RUSTUP_HOME="/opt/rustup" \
PATH="/opt/jdk/bin:/opt/utils/bin:/opt/clojure/bin:/opt/node/bin:/opt/imagick/bin:/opt/cargo/bin:$PATH"
COPY --from=penpotapp/imagemagick:7.1.2-0 /opt/imagick /opt/imagick
COPY --from=build-imagemagick /opt/imagick /opt/imagick
COPY --from=setup-jvm /opt/jdk /opt/jdk
COPY --from=setup-jvm /opt/clojure /opt/clojure
COPY --from=setup-node /opt/node /opt/node

View File

@@ -41,10 +41,7 @@ services:
- 6062:6062
- 6063:6063
- 6064:6064
- 9000:9000
- 9001:9001
- 9090:9090
- 9091:9091
environment:
- EXTERNAL_UID=${CURRENT_USER_ID}
@@ -72,11 +69,6 @@ services:
- PENPOT_LDAP_ATTRS_FULLNAME=cn
- PENPOT_LDAP_ATTRS_PHOTO=jpegPhoto
networks:
default:
aliases:
- main
minio:
image: "minio/minio:RELEASE.2025-04-03T14-56-28Z"
command: minio server /mnt/data --console-address ":9001"
@@ -88,6 +80,10 @@ services:
- MINIO_ROOT_USER=minioadmin
- MINIO_ROOT_PASSWORD=minioadmin
ports:
- 9000:9000
- 9001:9001
networks:
default:
aliases:

View File

@@ -10,7 +10,3 @@ localhost:3449 {
http://localhost:3450 {
reverse_proxy localhost:4449
}
http://penpot-devenv-main:3450 {
reverse_proxy localhost:4449
}

View File

@@ -38,11 +38,11 @@ http {
gzip_vary on;
gzip_proxied any;
gzip_comp_level 6;
gzip_comp_level 3;
gzip_buffers 16 8k;
gzip_http_version 1.1;
gzip_types text/plain text/css text/javascript application/javascript application/json application/transit+json image/svg+xml application/wasm;
gzip_types text/plain text/css text/javascript application/javascript application/json application/transit+json image/svg+xml;
map $http_upgrade $connection_upgrade {
default upgrade;
@@ -145,8 +145,8 @@ http {
proxy_pass http://127.0.0.1:3000/;
}
location /wasm-playground {
alias /home/penpot/penpot/frontend/resources/public/wasm-playground/;
location /playground {
alias /home/penpot/penpot/experiments/;
add_header Cache-Control "no-cache, max-age=0";
autoindex on;
}
@@ -223,7 +223,7 @@ http {
add_header X-Cache-Status $upstream_cache_status;
}
location ~* \.(jpg|png|svg|ttf|woff|woff2|gif)$ {
location ~* \.(jpg|png|svg|ttf|woff|woff2)$ {
add_header Cache-Control "public, max-age=604800" always; # 7 days
}

View File

@@ -0,0 +1,33 @@
#!/usr/bin/env bash
sudo chown penpot:users /home/penpot
cd ~;
source ~/.bashrc
set -e;
echo "[start-tmux.sh] Installing node dependencies"
pushd ~/penpot/exporter/
yarn install
popd
tmux -2 new-session -d -s penpot
tmux rename-window -t penpot:0 'exporter'
tmux select-window -t penpot:0
tmux send-keys -t penpot 'cd penpot/exporter' enter C-l
tmux send-keys -t penpot 'rm -f target/app.js*' enter C-l
tmux send-keys -t penpot 'clojure -M:dev:shadow-cljs watch main' enter
tmux split-window -v
tmux send-keys -t penpot 'cd penpot/exporter' enter C-l
tmux send-keys -t penpot './scripts/wait-and-start.sh' enter
tmux new-window -t penpot:1 -n 'backend'
tmux select-window -t penpot:1
tmux send-keys -t penpot 'cd penpot/backend' enter C-l
tmux send-keys -t penpot './scripts/start-dev' enter
tmux -2 attach-session -t penpot

View File

@@ -8,10 +8,14 @@ source ~/.bashrc
echo "[start-tmux.sh] Installing node dependencies"
pushd ~/penpot/frontend/
./scripts/setup;
corepack install;
yarn install;
yarn playwright install chromium
popd
pushd ~/penpot/exporter/
./scripts/setup;
corepack install;
yarn install
yarn playwright install chromium
popd
tmux -2 new-session -d -s penpot
@@ -19,25 +23,30 @@ tmux -2 new-session -d -s penpot
tmux rename-window -t penpot:0 'frontend watch'
tmux select-window -t penpot:0
tmux send-keys -t penpot 'cd penpot/frontend' enter C-l
tmux send-keys -t penpot './scripts/watch app' enter
tmux send-keys -t penpot 'yarn run watch' enter
tmux new-window -t penpot:1 -n 'frontend storybook'
tmux new-window -t penpot:1 -n 'frontend shadow'
tmux select-window -t penpot:1
tmux send-keys -t penpot 'cd penpot/frontend' enter C-l
tmux send-keys -t penpot './scripts/watch storybook' enter
tmux send-keys -t penpot 'yarn run watch:app' enter
tmux new-window -t penpot:2 -n 'exporter'
tmux new-window -t penpot:2 -n 'frontend storybook'
tmux select-window -t penpot:2
tmux send-keys -t penpot 'cd penpot/frontend' enter C-l
tmux send-keys -t penpot 'yarn run watch:storybook' enter
tmux new-window -t penpot:3 -n 'exporter'
tmux select-window -t penpot:3
tmux send-keys -t penpot 'cd penpot/exporter' enter C-l
tmux send-keys -t penpot 'rm -f target/app.js*' enter C-l
tmux send-keys -t penpot './scripts/watch' enter
tmux send-keys -t penpot 'yarn run watch' enter
tmux split-window -v
tmux send-keys -t penpot 'cd penpot/exporter' enter C-l
tmux send-keys -t penpot './scripts/wait-and-start.sh' enter
tmux new-window -t penpot:3 -n 'backend'
tmux select-window -t penpot:3
tmux new-window -t penpot:4 -n 'backend'
tmux select-window -t penpot:4
tmux send-keys -t penpot 'cd penpot/backend' enter C-l
tmux send-keys -t penpot './scripts/start-dev' enter

View File

@@ -112,6 +112,10 @@ COPY --from=penpotapp/imagemagick:7.1.2-0 /opt/imagick /opt/imagick
WORKDIR /opt/penpot/exporter
USER penpot:penpot
RUN ./setup
RUN set -ex; \
corepack install; \
yarn install; \
yarn run playwright install chromium; \
rm -rf /opt/penpot/.yarn
CMD ["node", "app.js"]

View File

@@ -7,10 +7,8 @@ RUN set -ex; \
useradd -U -M -u 1001 -s /bin/false -d /opt/penpot penpot; \
mkdir -p /opt/data/assets; \
chown -R penpot:penpot /opt/data; \
mkdir -p /etc/nginx/overrides/main.d/; \
mkdir -p /etc/nginx/overrides/http.d/; \
mkdir -p /etc/nginx/overrides/server.d/; \
mkdir -p /etc/nginx/overrides/assets.d/; \
mkdir -p /etc/nginx/overrides/location.d/;
ARG BUNDLE_PATH="./bundle-frontend/"

View File

@@ -130,6 +130,12 @@ services:
environment:
<< : [*penpot-flags, *penpot-public-uri, *penpot-http-body-size, *penpot-secret-key]
## The PREPL host. Mainly used for external programmatic access to the penpot backend
## (example: admin). By default it will listen on `localhost` but if you are going to use
## the `admin`, you will need to uncomment this and set the host to `0.0.0.0`.
# PENPOT_PREPL_HOST: 0.0.0.0
## Database connection parameters. Don't touch them unless you are using custom
## postgresql connection parameters.
@@ -145,16 +151,16 @@ services:
## Default configuration for assets storage: using a filesystem-based backend with all files
## stored in a docker volume.
PENPOT_OBJECTS_STORAGE_BACKEND: fs
PENPOT_OBJECTS_STORAGE_FS_DIRECTORY: /opt/data/assets
PENPOT_ASSETS_STORAGE_BACKEND: assets-fs
PENPOT_STORAGE_ASSETS_FS_DIRECTORY: /opt/data/assets
## It can also be configured to use an S3-compatible storage.
# AWS_ACCESS_KEY_ID: <KEY_ID>
# AWS_SECRET_ACCESS_KEY: <ACCESS_KEY>
# PENPOT_OBJECTS_STORAGE_BACKEND: s3
# PENPOT_OBJECTS_STORAGE_S3_ENDPOINT: <ENDPOINT>
# PENPOT_OBJECTS_STORAGE_S3_BUCKET: <BUCKET_NAME>
# PENPOT_ASSETS_STORAGE_BACKEND: assets-s3
# PENPOT_STORAGE_ASSETS_S3_ENDPOINT: <ENDPOINT>
# PENPOT_STORAGE_ASSETS_S3_BUCKET: <BUCKET_NAME>
## Telemetry. When enabled, a periodic process will send anonymous data about this
## instance. Telemetry data will enable us to learn how the application is used,

View File

@@ -42,11 +42,11 @@ http {
gzip_vary on;
gzip_proxied any;
gzip_static on;
gzip_comp_level 6;
gzip_comp_level 4;
gzip_buffers 16 8k;
gzip_http_version 1.1;
gzip_types text/plain text/css text/javascript application/javascript application/json application/transit+json image/svg+xml application/wasm;
gzip_types text/plain text/css text/javascript application/javascript application/json application/transit+json image/svg+xml;
proxy_buffer_size 16k;
proxy_busy_buffers_size 24k; # essentially, proxy_buffer_size + 2 small buffers of 4k
@@ -110,8 +110,6 @@ http {
recursive_error_pages on;
proxy_intercept_errors on;
error_page 301 302 307 = @handle_redirect;
include /etc/nginx/overrides/assets.d/*.conf;
}
location /internal/assets {
@@ -144,18 +142,25 @@ http {
location / {
include /etc/nginx/overrides/location.d/*.conf;
location ~* \.(js|css|jpg|png|svg|gif|ttf|woff|woff2|wasm|map)$ {
add_header Cache-Control "public, max-age=604800" always; # 7 days
location ~ ^/js/config.js$ {
add_header Cache-Control "no-store, no-cache, max-age=0" always;
}
location ~* \.(js|css|jpg|svg|png|mjs|map)$ {
add_header Cache-Control "max-age=604800" always; # 7 days
}
location ~ ^/(/|css|fonts|images|js|wasm|mjs|map) {
}
location ~ ^/[^/]+/(.*)$ {
return 301 " /404";
}
add_header X-Frame-Options SAMEORIGIN always;
add_header Last-Modified $date_gmt;
add_header Cache-Control "no-store, no-cache, max-age=0" always;
if_modified_since off;
try_files $uri /index.html$is_args$args /index.html =404;
}
}
}

View File

@@ -10,15 +10,16 @@ To view this site locally, first set up the environment:
# only if necessary
nvm install
nvm use
# only if necessary
corepack enable
pnpm install
yarn install
```
And launch a development server:
```sh
pnpm start
yarn start
```
You can then point a browser to [http://localhost:8080](http://localhost:8080).

View File

Six binary image files changed (previews not shown; previous sizes: 14 KiB, 14 KiB, 161 KiB, 103 KiB, 67 KiB, 6.0 KiB).

Some files were not shown because too many files have changed in this diff.