Compare commits


1 Commit

Author: Alejandro Alonso · SHA1: 222481fa0d · Message: 🎉 Basic graph wasm support · Date: 2025-12-22 06:51:56 +01:00
3,448 changed files with 1,313,224 additions and 106,101 deletions


@@ -1,38 +0,0 @@
---
name: New Render Bug Report
about: Create a report about the bugs you have found in the new render
title: ''
labels: new render
assignees: claragvinola
---
**Describe the bug**
A clear and concise description of what the bug is.
**Steps to Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots or screen recordings**
If applicable, add screenshots or screen recording to help illustrate your problem.
**Desktop (please complete the following information):**
- OS: [e.g. iOS]
- Browser [e.g. chrome, safari]
- Version [e.g. 22]
**Smartphone (please complete the following information):**
- Device: [e.g. iPhone6]
- OS: [e.g. iOS8.1]
- Browser [e.g. stock browser, safari]
- Version [e.g. 22]
**Additional context**
Add any other context about the problem here.


@@ -40,7 +40,7 @@ on:
jobs:
build-bundle:
name: Build and Upload Penpot Bundle
runs-on: penpot-runner-01
runs-on: ubuntu-24.04
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}


@@ -7,14 +7,9 @@ jobs:
build-and-push:
name: Build and push DevEnv Docker image
environment: release-admins
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
steps:
- name: Set common environment variables
run: |
# Each job execution will use its own docker configuration.
echo "DOCKER_CONFIG=${{ runner.temp }}/.docker-${{ github.run_id }}-${{ github.job }}" >> $GITHUB_ENV
- name: Checkout code
uses: actions/checkout@v4


@@ -19,14 +19,9 @@ on:
jobs:
build-and-push:
name: Build and Push Penpot Docker Images
runs-on: penpot-runner-02
runs-on: ubuntu-24.04-arm
steps:
- name: Set common environment variables
run: |
# Each job execution will use its own docker configuration.
echo "DOCKER_CONFIG=${{ runner.temp }}/.docker-${{ github.run_id }}-${{ github.job }}" >> $GITHUB_ENV
- name: Checkout code
uses: actions/checkout@v4
with:
@@ -71,15 +66,6 @@ jobs:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
# To avoid the “429 Too Many Requests” error when downloading
# images from DockerHub for unregistered users.
# https://docs.docker.com/docker-hub/usage/
- name: Login to DockerHub Registry
uses: docker/login-action@v3
with:
username: ${{ secrets.PUB_DOCKER_USERNAME }}
password: ${{ secrets.PUB_DOCKER_PASSWORD }}
- name: Extract metadata (tags, labels)
id: meta
uses: docker/metadata-action@v5
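
The comment in the removed step above explains its motivation: anonymous pulls from Docker Hub are rate-limited and can fail with "429 Too Many Requests". As a hedged illustration of that general technique (not part of this change; `DOCKER_USER` and `DOCKER_TOKEN` are placeholder credentials, not this repository's secrets), authenticating before pulling raises the limit:

```bash
# Authenticated pulls get a higher Docker Hub rate limit than anonymous
# ones, avoiding intermittent 429 errors.
echo "$DOCKER_TOKEN" | docker login --username "$DOCKER_USER" --password-stdin
docker pull penpotapp/devenv:latest
```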


@@ -1,14 +0,0 @@
name: _STAGING RENDER
on:
schedule:
- cron: '36 5-20 * * 1-5'
jobs:
build-bundle:
uses: ./.github/workflows/build-bundle.yml
secrets: inherit
with:
gh_ref: "staging-render"
build_wasm: "yes"
build_storybook: "yes"


@@ -33,7 +33,7 @@ jobs:
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
MATTERMOST_CHANNEL: bot-alerts-cicd
TEXT: |
🐳 *[PENPOT] Docker image available: ${{ github.ref_name }}*
🐳 *[PENPOT] Docker image available.*
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
@infra


@@ -26,7 +26,7 @@ jobs:
- name: Check Commit Type
uses: gsactions/commit-message-checker@v2
with:
pattern: '^(((:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle|rewind|construction_worker):)\s[A-Z].*[^.])|(Merge|Revert|Reapply).+[^.])$'
pattern: '^(((:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle|rewind|construction_worker):)\s[A-Z].*[^.])|(Merge|Revert).+[^.])$'
flags: 'gm'
error: 'Commit should match CONTRIBUTING.md guideline'
checkAllCommitMessages: 'true' # optional: this checks all commits associated with a pull request
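
The tightened pattern above accepts either an allowed gitmoji code followed by a space and a capitalized subject with no trailing period, or a Merge/Revert commit (Reapply is no longer allowed). Two hypothetical messages that would pass the check, shown only for illustration:

```bash
git commit -m ":bug: Fix crash when opening the dashboard"
git commit -m "Revert \"Some previous change\""
```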


@@ -1,113 +0,0 @@
name: Plugins/api-doc deployer
on:
push:
branches:
- develop
- staging
- main
paths:
- "plugins/libs/plugin-types/index.d.ts"
- "plugins/libs/plugin-types/REAME.md"
- "plugins/tools/typedoc.css"
- "plugins/CHANGELOG.md"
- "plugins/wrangler-penpot-plugins-api-doc.toml"
workflow_dispatch:
inputs:
gh_ref:
description: 'Name of the branch'
type: choice
required: true
default: 'develop'
options:
- develop
- staging
- main
permissions:
contents: read
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- name: Extract some useful variables
id: vars
run: |
echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
ref: ${{ steps.vars.outputs.gh_ref }}
# START: Setup Node and PNPM enabling cache
- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version-file: .nvmrc
- name: Enable PNPM
working-directory: ./plugins
shell: bash
run: |
corepack enable;
corepack install;
- name: Get pnpm store path
id: pnpm-store
working-directory: ./plugins
shell: bash
run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT
- name: Cache pnpm store
uses: actions/cache@v4
with:
path: ${{ steps.pnpm-store.outputs.STORE_PATH }}
key: ${{ runner.os }}-pnpm-${{ hashFiles('plugins/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-
# END: Setup Node and PNPM enabling cache
- name: Install deps
working-directory: ./plugins
shell: bash
run: |
pnpm install --no-frozen-lockfile;
pnpm add -D -w wrangler@latest;
- name: Build docs
working-directory: plugins
shell: bash
run: pnpm run build:doc
- name: Select Worker name
run: |
REF="${{ steps.vars.outputs.gh_ref }}"
case "$REF" in
main) echo "WORKER_NAME=penpot-plugins-api-doc-pro" >> $GITHUB_ENV ;;
staging) echo "WORKER_NAME=penpot-plugins-api-doc-pre" >> $GITHUB_ENV ;;
develop) echo "WORKER_NAME=penpot-plugins-api-doc-hourly" >> $GITHUB_ENV ;;
*) echo "Unsupported branch ${REF}" && exit 1 ;;
esac
- name: Deploy to Cloudflare Workers
uses: cloudflare/wrangler-action@v3
with:
workingDirectory: plugins
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: deploy --config wrangler-penpot-plugins-api-doc.toml --name ${{ env.WORKER_NAME }}
- name: Notify Mattermost
if: failure()
uses: mattermost/action-mattermost-notify@master
with:
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
MATTERMOST_CHANNEL: bot-alerts-cicd
TEXT: |
❌ 🧩📚 *[PENPOT PLUGINS] Error deploying API documentation.*
📄 Triggered from ref: `${{ inputs.gh_ref }}`
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
@infra


@@ -1,127 +0,0 @@
name: Plugins/package deployer
on:
# Deploy package from manual action
workflow_dispatch:
inputs:
gh_ref:
description: 'Name of the branch'
type: choice
required: true
default: 'develop'
options:
- develop
- staging
- main
plugin_name:
description: 'Pluging name (like plugins/apps/<plugin_name>-plugin)'
type: string
required: true
workflow_call:
inputs:
gh_ref:
description: 'Name of the branch'
type: string
required: true
default: 'develop'
plugin_name:
description: 'Publig name (from plugins/apps/<plugin_name>-plugin)'
type: string
required: true
permissions:
contents: read
jobs:
deploy:
runs-on: penpot-runner-01
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
ref: ${{ inputs.gh_ref }}
# START: Setup Node and PNPM enabling cache
- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version-file: .nvmrc
- name: Enable PNPM
working-directory: ./plugins
shell: bash
run: |
corepack enable;
corepack install;
- name: Get pnpm store path
id: pnpm-store
working-directory: ./plugins
shell: bash
run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT
- name: Cache pnpm store
uses: actions/cache@v4
with:
path: ${{ steps.pnpm-store.outputs.STORE_PATH }}
key: ${{ runner.os }}-pnpm-${{ hashFiles('plugins/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-
# END: Setup Node and PNPM enabling cache
- name: Install deps
working-directory: ./plugins
shell: bash
run: |
pnpm install --no-frozen-lockfile;
pnpm add -D -w wrangler@latest;
- name: "Build package for ${{ inputs.plugin_name }}-plugin"
working-directory: plugins
shell: bash
run: npx nx build ${{ inputs.plugin_name }}-plugin
- name: Select Worker name
run: |
REF="${{ inputs.gh_ref }}"
case "$REF" in
main)
echo "WORKER_NAME=${{ inputs.plugin_name }}-plugin-pro" >> $GITHUB_ENV
echo "WORKER_URI=${{ inputs.plugin_name }}.plugins.penpot.app" >> $GITHUB_ENV ;;
staging)
echo "WORKER_NAME=${{ inputs.plugin_name }}-plugin-pre" >> $GITHUB_ENV
echo "WORKER_URI=${{ inputs.plugin_name }}.plugins.penpot.dev" >> $GITHUB_ENV ;;
develop)
echo "WORKER_NAME=${{ inputs.plugin_name }}-plugin-hourly" >> $GITHUB_ENV
echo "WORKER_URI=${{ inputs.plugin_name }}.plugins.hourly.penpot.dev" >> $GITHUB_ENV ;;
*) echo "Unsupported branch ${REF}" && exit 1 ;;
esac
- name: Set the custom url
working-directory: plugins
shell: bash
run: |
sed -i "s/WORKER_URI/${{ env.WORKER_URI }}/g" apps/${{ inputs.plugin_name }}-plugin/wrangler.toml
- name: Deploy to Cloudflare Workers
uses: cloudflare/wrangler-action@v3
with:
workingDirectory: plugins
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: deploy --config apps/${{ inputs.plugin_name }}-plugin/wrangler.toml --name ${{ env.WORKER_NAME }}
- name: Notify Mattermost
if: failure()
uses: mattermost/action-mattermost-notify@master
with:
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
MATTERMOST_CHANNEL: bot-alerts-cicd
TEXT: |
❌ 🧩📦 *[PENPOT PLUGINS] Error deploying ${{ env.WORKER_NAME }}.*
📄 Triggered from ref: `${{ inputs.gh_ref }}`
Plugin name: `${{ inputs.plugin_name }}-plugin`
Cloudflare worker name: `${{ env.WORKER_NAME }}`
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
@infra


@@ -1,143 +0,0 @@
name: Plugins/packages deployer
on:
push:
branches:
- develop
- staging
- main
paths:
- 'plugins/apps/*-plugin/**'
- 'libs/plugins-styles/**'
workflow_dispatch:
inputs:
gh_ref:
description: 'Name of the branch'
type: choice
required: true
default: 'develop'
options:
- develop
- staging
- main
jobs:
detect-changes:
runs-on: ubuntu-latest
outputs:
colors_to_tokens: ${{ steps.filter.outputs.colors_to_tokens }}
create_palette: ${{ steps.filter.outputs.create_palette }}
lorem_ipsum: ${{ steps.filter.outputs.lorem_ipsum }}
rename_layers: ${{ steps.filter.outputs.rename_layers }}
contrast: ${{ steps.filter.outputs.contrast }}
icons: ${{ steps.filter.outputs.icons }}
poc_state: ${{ steps.filter.outputs.poc_state }}
table: ${{ steps.filter.outputs.table }}
# [For new plugins]
# Add more outputs here
steps:
- uses: actions/checkout@v4
- id: filter
uses: dorny/paths-filter@v3
with:
filters: |
colors_to_tokens:
- 'plugins/apps/colors-to-tokens-plugin/**'
- 'libs/plugins-styles/**'
contrast:
- 'plugins/apps/contrast-plugin/**'
- 'libs/plugins-styles/**'
create_palette:
- 'plugins/apps/create-palette-plugin/**'
- 'libs/plugins-styles/**'
icons:
- 'plugins/apps/icons-plugin/**'
- 'libs/plugins-styles/**'
lorem_ipsum:
- 'plugins/apps/lorem-ipsum-plugin/**'
- 'libs/plugins-styles/**'
rename_layers:
- 'plugins/apps/rename-layers-plugin/**'
- 'libs/plugins-styles/**'
table:
- 'plugins/apps/table-plugin/**'
- 'libs/plugins-styles/**'
# [For new plugins]
# Add more plugin filters here
# another_plugin:
# - 'plugins/apps/another-plugin/**'
# - 'libs/plugins-styles/**'
colors-to-tokens-plugin:
needs: detect-changes
if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.colors_to_tokens == 'true'
uses: ./.github/workflows/plugins-deploy-package.yml
secrets: inherit
with:
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
plugin_name: colors-to-tokens
contrast-plugin:
needs: detect-changes
if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.contrast == 'true'
uses: ./.github/workflows/plugins-deploy-package.yml
secrets: inherit
with:
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
plugin_name: contrast
create-palette-plugin:
needs: detect-changes
if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.create_palette == 'true'
uses: ./.github/workflows/plugins-deploy-package.yml
secrets: inherit
with:
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
plugin_name: create-palette
icons-plugin:
needs: detect-changes
if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.icons == 'true'
uses: ./.github/workflows/plugins-deploy-package.yml
secrets: inherit
with:
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
plugin_name: icons
lorem-ipsum-plugin:
needs: detect-changes
if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.lorem_ipsum == 'true'
uses: ./.github/workflows/plugins-deploy-package.yml
secrets: inherit
with:
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
plugin_name: lorem-ipsum
rename-layers-plugin:
needs: detect-changes
if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.rename_layers == 'true'
uses: ./.github/workflows/plugins-deploy-package.yml
secrets: inherit
with:
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
plugin_name: rename-layers
table-plugin:
needs: detect-changes
if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.table == 'true'
uses: ./.github/workflows/plugins-deploy-package.yml
secrets: inherit
with:
gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
plugin_name: table
# [For new plugins]
# Add more jobs for other plugins below, following the same pattern
# another-plugin:
# needs: detect-changes
# if: github.event_name == 'workflow_dispatch' || needs.detect-changes.outputs.another_plugin == 'true'
# uses: ./.github/workflows/plugins-deploy-package.yml
# secrets: inherit
# with:
# gh_ref: "${{ inputs.gh_ref || github.ref_name }}"
# plugin_name: another


@@ -21,7 +21,7 @@ concurrency:
jobs:
lint:
name: "Linter"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
@@ -34,7 +34,7 @@ jobs:
test-common:
name: "Common Tests"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
@@ -51,55 +51,9 @@ jobs:
run: |
./scripts/test
test-plugins:
name: Plugins Runtime Linter & Tests
runs-on: penpot-runner-02
container: penpotapp/devenv:latest
steps:
- uses: actions/checkout@v4
- name: Setup Node
id: setup-node
uses: actions/setup-node@v6
with:
node-version-file: .nvmrc
- name: Install deps
working-directory: ./plugins
shell: bash
run: |
corepack enable;
corepack install;
pnpm install;
- name: Run Lint
working-directory: ./plugins
run: pnpm run lint
- name: Run Format Check
working-directory: ./plugins
run: pnpm run format:check
- name: Run Test
working-directory: ./plugins
run: pnpm run test
- name: Build runtime
working-directory: ./plugins
run: pnpm run build
- name: Build plugins
working-directory: ./plugins
run: pnpm run build:plugins
- name: Build styles
working-directory: ./plugins
run: pnpm run build:styles-example
test-frontend:
name: "Frontend Tests"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
@@ -113,14 +67,12 @@ jobs:
- name: Component Tests
working-directory: ./frontend
env:
VITEST_BROWSER_TIMEOUT: 120000
run: |
./scripts/test-components
test-render-wasm:
name: "Render WASM Tests"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
@@ -144,7 +96,7 @@ jobs:
test-backend:
name: "Backend Tests"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
services:
@@ -183,7 +135,7 @@ jobs:
test-library:
name: "Library Tests"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
@@ -197,7 +149,7 @@ jobs:
build-integration:
name: "Build Integration Bundle"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
@@ -218,7 +170,7 @@ jobs:
test-integration-1:
name: "Integration Tests 1/4"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
needs: build-integration
@@ -248,7 +200,7 @@ jobs:
test-integration-2:
name: "Integration Tests 2/4"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
needs: build-integration
@@ -278,7 +230,7 @@ jobs:
test-integration-3:
name: "Integration Tests 3/4"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
needs: build-integration
@@ -308,7 +260,7 @@ jobs:
test-integration-4:
name: "Integration Tests 4/4"
runs-on: penpot-runner-02
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
needs: build-integration

.gitignore (vendored, 3 changed lines)

@@ -5,7 +5,6 @@
!.yarn/releases
!.yarn/sdks
!.yarn/versions
.pnpm-store
*-init.clj
*.css.json
*.jar
@@ -54,8 +53,6 @@
/exporter/target
/frontend/.storybook/preview-body.html
/frontend/.storybook/preview-head.html
/frontend/playwright-report/
/frontend/text-editor/src/wasm/
/frontend/dist/
/frontend/npm-debug.log
/frontend/out/

.gitpod.yml (new file, 105 lines added)

@@ -0,0 +1,105 @@
image:
file: docker/gitpod/Dockerfile
ports:
# nginx
- port: 3449
onOpen: open-preview
# frontend nREPL
- port: 3447
onOpen: ignore
visibility: private
# frontend shadow server
- port: 3448
onOpen: ignore
visibility: private
# backend
- port: 6060
onOpen: ignore
# exporter shadow server
- port: 9630
onOpen: ignore
visibility: private
# exporter http server
- port: 6061
onOpen: ignore
# mailhog web interface
- port: 8025
onOpen: ignore
# mailhog postfix
- port: 1025
onOpen: ignore
# postgres
- port: 5432
onOpen: ignore
# redis
- port: 6379
onOpen: ignore
# openldap
- port: 389
onOpen: ignore
tasks:
# https://github.com/gitpod-io/gitpod/issues/666#issuecomment-534347856
- name: gulp
command: >
cd $GITPOD_REPO_ROOT/frontend/;
yarn && gp sync-done 'frontend-yarn';
npx gulp --theme=${PENPOT_THEME} watch
- name: frontend shadow watch
command: >
cd $GITPOD_REPO_ROOT/frontend/;
gp sync-await 'frontend-yarn';
npx shadow-cljs watch main
- init: gp await-port 5432 && psql -f $GITPOD_REPO_ROOT/docker/gitpod/files/postgresql_init.sql
name: backend
command: >
cd $GITPOD_REPO_ROOT/backend/;
./scripts/start-dev
- name: exporter shadow watch
command:
cd $GITPOD_REPO_ROOT/exporter/;
gp sync-await 'frontend-yarn';
yarn && npx shadow-cljs watch main
- name: exporter web server
command: >
cd $GITPOD_REPO_ROOT/exporter/;
./scripts/wait-and-start.sh
- name: signed terminal
before: >
[[ ! -z ${GNUGPG} ]] &&
cd ~ &&
rm -rf .gnupg &&
echo ${GNUGPG} | base64 -d | tar --no-same-owner -xzvf -
init: >
[[ ! -z ${GNUGPG_KEY} ]] &&
git config --global commit.gpgsign true &&
git config --global user.signingkey ${GNUGPG_KEY}
command: cd $GITPOD_REPO_ROOT
- name: redis
command: redis-server
- before: go get github.com/mailhog/MailHog
name: mailhog
command: MailHog
- name: Nginx
command: >
nginx &&
multitail /var/log/nginx/access.log -I /var/log/nginx/error.log

.nvmrc (2 changed lines)

@@ -1 +1 @@
v22.21.1
v22.19.0

.travis.yml (new file, 40 lines added)

@@ -0,0 +1,40 @@
dist: xenial
language: generic
sudo: required
cache:
directories:
- $HOME/.m2
services:
- docker
branches:
only:
- master
- develop
install:
- curl -O https://download.clojure.org/install/linux-install-1.10.1.447.sh
- chmod +x linux-install-1.10.1.447.sh
- sudo ./linux-install-1.10.1.447.sh
before_script:
- env | sort
script:
- ./manage.sh build-devenv
- ./manage.sh run-frontend-tests
- ./manage.sh run-backend-tests
- ./manage.sh build-images
- ./manage.sh run
after_script:
- docker images
notifications:
email: false
env:
- NODE_VERSION=10.16.0

.yarnrc.yml (new file, 11 lines added)

@@ -0,0 +1,11 @@
enableGlobalCache: true
enableImmutableCache: false
enableImmutableInstalls: false
enableTelemetry: false
httpTimeout: 600000
nodeLinker: node-modules


@@ -1,36 +1,5 @@
# CHANGELOG
## 2.14.0 (Unreleased)
### :boom: Breaking changes & Deprecations
### :rocket: Epics and highlights
### :heart: Community contributions (Thank you!)
### :sparkles: New features & Enhancements
- Remap references when renaming tokens [Taiga #10202](https://tree.taiga.io/project/penpot/us/10202)
- Tokens panel nested path view [Taiga #9966](https://tree.taiga.io/project/penpot/us/9966)
- Improve usability of lock and hide buttons in the layer panel. [Taiga #12916](https://tree.taiga.io/project/penpot/issue/12916)
- Optimize sidebar performance for deeply nested shapes [Taiga #13017](https://tree.taiga.io/project/penpot/task/13017)
- Remove tokens path node and bulk remove tokens [Taiga #13007](https://tree.taiga.io/project/penpot/us/13007)
- Replace themes management modal radio buttons for switches [Taiga #9215](https://tree.taiga.io/project/penpot/us/9215)
### :bug: Bugs fixed
- Remove whitespaces from asset export filename [Github #8133](https://github.com/penpot/penpot/pull/8133)
- Fix prototype connections lost when switching between variants [Taiga #12812](https://tree.taiga.io/project/penpot/issue/12812)
- Fix wrong image in the onboarding invitation block [Taiga #13040](https://tree.taiga.io/project/penpot/issue/13040)
- Fix wrong register image [Taiga #12955](https://tree.taiga.io/project/penpot/task/12955)
- Fix error message on components doesn't close automatically [Taiga #12012](https://tree.taiga.io/project/penpot/issue/12012)
- Fix incorrect handling of input values on layout gap and padding inputs [Github #8113](https://github.com/penpot/penpot/issues/8113)
- Fix incorrect default option on tokens import dialog [Github #8051](https://github.com/penpot/penpot/pull/8051)
- Fix unhandled exception tokens creation dialog [Github #8110](https://github.com/penpot/penpot/issues/8110)
- Fix displaying a hidden user avatar when there is only one more [Taiga #13058](https://tree.taiga.io/project/penpot/issue/13058)
- Fix unhandled exception on open-new-window helper [Github #7787](https://github.com/penpot/penpot/issues/7787)
- Fix exception on uploading large fonts [Github #8135](https://github.com/penpot/penpot/pull/8135)
## 2.13.0 (Unreleased)
### :boom: Breaking changes & Deprecations
@@ -43,43 +12,16 @@
### :sparkles: New features & Enhancements
- Add new Box Shadow Tokens [Taiga #10201](https://tree.taiga.io/project/penpot/us/10201)
- Make i18n translation files load on-demand [Taiga #11474](https://tree.taiga.io/project/penpot/us/11474)
- Add deleted files to dashboard [Taiga #8149](https://tree.taiga.io/project/penpot/us/8149)
### :bug: Bugs fixed
- Fix problem when drag+duplicate a full grid [Taiga #12565](https://tree.taiga.io/project/penpot/issue/12565)
- Fix problem when pasting elements in reverse flex layout [Taiga #12460](https://tree.taiga.io/project/penpot/issue/12460)
- Fix wrong board size presets in Android [Taiga #12339](https://tree.taiga.io/project/penpot/issue/12339)
- Fix problem with grid layout components and auto sizing [Github #7797](https://github.com/penpot/penpot/issues/7797)
- Fix some alignments on inspect tab [Taiga #12915](https://tree.taiga.io/project/penpot/issue/12915)
- Fix problem with text editor maintaining previous styles [Taiga #12835](https://tree.taiga.io/project/penpot/issue/12835)
- Fix color assets from shared libraries not appearing as assets in Selected colors panel [Taiga #12957](https://tree.taiga.io/project/penpot/issue/12957)
- Fix CSS generated box-shadow property [Taiga #12997](https://tree.taiga.io/project/penpot/issue/12997)
- Fix inner shadow selector on shadow token [Taiga #12951](https://tree.taiga.io/project/penpot/issue/12951)
- Fix missing text color token from selected shapes in selected colors list [Taiga #12956](https://tree.taiga.io/project/penpot/issue/12956)
- Fix dropdown option width in Guides columns dropdown [Taiga #12959](https://tree.taiga.io/project/penpot/issue/12959)
- Fix typos on download modal [Taiga #12865](https://tree.taiga.io/project/penpot/issue/12865)
- Fix problem with text editor maintaining previous styles [Taiga #12835](https://tree.taiga.io/project/penpot/issue/12835)
- Fix unhandled exception tokens creation dialog [Github #8110](https://github.com/penpot/penpot/issues/8110)
- Fix allow negative spread values on shadow token creation [Taiga #13167](https://tree.taiga.io/project/penpot/issue/13167)
- Fix spanish translations on import export token modal [Taiga #13171](https://tree.taiga.io/project/penpot/issue/13171)
- Remove whitespaces from asset export filename [Github #8133](https://github.com/penpot/penpot/pull/8133)
- Fix exception on uploading large fonts [Github #8135](https://github.com/penpot/penpot/pull/8135)
- Fix unhandled exception on open-new-window helper [Github #7787](https://github.com/penpot/penpot/issues/7787)
- Fix incorrect handling of input values on layout gap and padding inputs [Github #8113](https://github.com/penpot/penpot/issues/8113)
## 2.12.1
### :bug: Bugs fixed
- Fix setting a portion of text as bold or underline messes things up [Github #7980](https://github.com/penpot/penpot/issues/7980)
- Fix problem with style in fonts input [Taiga #12935](https://tree.taiga.io/project/penpot/issue/12935)
- Fix problem with path editor and right click [Github #7917](https://github.com/penpot/penpot/issues/7917)
## 2.12.0
## 2.12.0 (Unreleased)
### :boom: Breaking changes & Deprecations
@@ -90,6 +32,7 @@ The backend RPC API URLS are changed from `/api/rpc/command/<name>` to
compatibility; however, if you are a user of this API, it is strongly
recommended that you adapt your code to use the new PATH.
#### Updated SSO Callback URL
The OAuth / Single Sign-On (SSO) callback endpoint has changed to
@@ -122,6 +65,7 @@ This update standardizes all authentication flows under the single URL
and makes it more modular, enabling the ability to configure the SSO auth
provider dynamically.
#### Changes on default docker compose
We have updated the `docker/images/docker-compose.yaml` with a small
@@ -139,7 +83,6 @@ example. It's still usable as before, we just removed the example.
- Ensure consistent snap behavior across all zoom levels [Github #7774](https://github.com/penpot/penpot/pull/7774) by [@Tokytome](https://github.com/Tokytome)
- Fix crash in token grid view due to tooltip validation (by @dfelinto) [Github #7887](https://github.com/penpot/penpot/pull/7887)
- Enable Hindi translations on the application
### :sparkles: New features & Enhancements
@@ -173,7 +116,6 @@ example. It's still usable as before, we just removed the example.
- Fix switch variants with paths [Taiga #12841](https://tree.taiga.io/project/penpot/issue/12841)
- Fix referencing typography tokens on font-family tokens [Taiga #12492](https://tree.taiga.io/project/penpot/issue/12492)
- Fix horizontal scroll on layer panel [Taiga #12843](https://tree.taiga.io/project/penpot/issue/12843)
- Fix unicode handling on email template abbreviation filter [Github #7966](https://github.com/penpot/penpot/pull/7966)
## 2.11.1
@@ -185,6 +127,7 @@ example. It's still usable as before, we just removed the example.
- Deprecated configuration variables with the prefix `PENPOT_ASSETS_*`, and will be
removed in future versions:
- The `PENPOT_ASSETS_STORAGE_BACKEND` becomes `PENPOT_OBJECTS_STORAGE_BACKEND` and its
values pass from (`assets-fs` or `assets-s3`) to (`fs` or `s3`)
- The `PENPOT_STORAGE_ASSETS_FS_DIRECTORY` becomes `PENPOT_OBJECTS_STORAGE_FS_DIRECTORY`
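
As an illustration of the renames listed above (the directory path is only an example and not taken from this change), a deployment's environment might be migrated like this:

```bash
# Before (deprecated names)
export PENPOT_ASSETS_STORAGE_BACKEND=assets-s3
export PENPOT_STORAGE_ASSETS_FS_DIRECTORY=/opt/penpot/assets

# After (new names; the backend values drop the "assets-" prefix)
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
export PENPOT_OBJECTS_STORAGE_FS_DIRECTORY=/opt/penpot/assets
```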


@@ -120,12 +120,17 @@ them on your system, you can run them with:
```bash
# Check formatting
./scripts/fmt
yarn fmt:clj:check
# Lint
./scripts/lint
# Check and fix formatting
yarn fmt:clj
# Run the linter
yarn lint:clj
```
There are more choices in `package.json`.
Ideally, you should run these commands as git pre-commit hooks. A convenient way
of defining them is to use [Husky](https://typicode.github.io/husky/#/).
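
As a sketch of that suggestion (the hook below assumes a standard Husky setup and is not part of this change), a pre-commit hook could simply chain the commands above:

```bash
#!/usr/bin/env sh
# Hypothetical .husky/pre-commit: abort the commit if formatting or
# linting fails, reusing the package.json scripts shown above.
yarn fmt:clj:check && yarn lint:clj
```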

backend/.gitignore (new vendored file, 7 lines added)

@@ -0,0 +1,7 @@
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions


@@ -97,8 +97,8 @@
:jmx-remote
{:jvm-opts ["-Dcom.sun.management.jmxremote"
"-Dcom.sun.management.jmxremote.port=9000"
"-Dcom.sun.management.jmxremote.rmi.port=9000"
"-Dcom.sun.management.jmxremote.port=9090"
"-Dcom.sun.management.jmxremote.rmi.port=9090"
"-Dcom.sun.management.jmxremote.local.only=false"
"-Dcom.sun.management.jmxremote.authenticate=false"
"-Dcom.sun.management.jmxremote.ssl=false"


@@ -4,7 +4,7 @@
"license": "MPL-2.0",
"author": "Kaleidos INC",
"private": true,
"packageManager": "pnpm@10.26.2+sha512.0e308ff2005fc7410366f154f625f6631ab2b16b1d2e70238444dd6ae9d630a8482d92a451144debc492416896ed16f7b114a86ec68b8404b2443869e68ffda6",
"packageManager": "yarn@4.9.2+sha512.1fc009bc09d13cfd0e19efa44cbfc2b9cf6ca61482725eb35bbc5e257e093ebf4130db6dfe15d604ff4b79efd8e1e8e99b25fa7d0a6197c9f9826358d4d65c3c",
"repository": {
"type": "git",
"url": "https://github.com/penpot/penpot"

backend/pnpm-lock.yaml (generated, 306 lines removed)

@@ -1,306 +0,0 @@
lockfileVersion: '9.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
importers:
.:
dependencies:
luxon:
specifier: ^3.4.4
version: 3.7.2
sax:
specifier: ^1.4.1
version: 1.4.3
devDependencies:
nodemon:
specifier: ^3.1.2
version: 3.1.11
source-map-support:
specifier: ^0.5.21
version: 0.5.21
ws:
specifier: ^8.17.0
version: 8.18.3
packages:
anymatch@3.1.3:
resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==}
engines: {node: '>= 8'}
balanced-match@1.0.2:
resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==}
binary-extensions@2.3.0:
resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==}
engines: {node: '>=8'}
brace-expansion@1.1.12:
resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==}
braces@3.0.3:
resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==}
engines: {node: '>=8'}
buffer-from@1.1.2:
resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==}
chokidar@3.6.0:
resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==}
engines: {node: '>= 8.10.0'}
concat-map@0.0.1:
resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==}
debug@4.4.3:
resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==}
engines: {node: '>=6.0'}
peerDependencies:
supports-color: '*'
peerDependenciesMeta:
supports-color:
optional: true
fill-range@7.1.1:
resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==}
engines: {node: '>=8'}
fsevents@2.3.3:
resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
os: [darwin]
glob-parent@5.1.2:
resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==}
engines: {node: '>= 6'}
has-flag@3.0.0:
resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==}
engines: {node: '>=4'}
ignore-by-default@1.0.1:
resolution: {integrity: sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==}
is-binary-path@2.1.0:
resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==}
engines: {node: '>=8'}
is-extglob@2.1.1:
resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==}
engines: {node: '>=0.10.0'}
is-glob@4.0.3:
resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
engines: {node: '>=0.10.0'}
is-number@7.0.0:
resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
engines: {node: '>=0.12.0'}
luxon@3.7.2:
resolution: {integrity: sha512-vtEhXh/gNjI9Yg1u4jX/0YVPMvxzHuGgCm6tC5kZyb08yjGWGnqAjGJvcXbqQR2P3MyMEFnRbpcdFS6PBcLqew==}
engines: {node: '>=12'}
minimatch@3.1.2:
resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==}
ms@2.1.3:
resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
nodemon@3.1.11:
resolution: {integrity: sha512-is96t8F/1//UHAjNPHpbsNY46ELPpftGUoSVNXwUfMk/qdjSylYrWSu1XavVTBOn526kFiOR733ATgNBCQyH0g==}
engines: {node: '>=10'}
hasBin: true
normalize-path@3.0.0:
resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==}
engines: {node: '>=0.10.0'}
picomatch@2.3.1:
resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==}
engines: {node: '>=8.6'}
pstree.remy@1.1.8:
resolution: {integrity: sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==}
readdirp@3.6.0:
resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==}
engines: {node: '>=8.10.0'}
sax@1.4.3:
resolution: {integrity: sha512-yqYn1JhPczigF94DMS+shiDMjDowYO6y9+wB/4WgO0Y19jWYk0lQ4tuG5KI7kj4FTp1wxPj5IFfcrz/s1c3jjQ==}
semver@7.7.3:
resolution: {integrity: sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==}
engines: {node: '>=10'}
hasBin: true
simple-update-notifier@2.0.0:
resolution: {integrity: sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==}
engines: {node: '>=10'}
source-map-support@0.5.21:
resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==}
source-map@0.6.1:
resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==}
engines: {node: '>=0.10.0'}
supports-color@5.5.0:
resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==}
engines: {node: '>=4'}
to-regex-range@5.0.1:
resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==}
engines: {node: '>=8.0'}
touch@3.1.1:
resolution: {integrity: sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA==}
hasBin: true
undefsafe@2.0.5:
resolution: {integrity: sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==}
ws@8.18.3:
resolution: {integrity: sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==}
engines: {node: '>=10.0.0'}
peerDependencies:
bufferutil: ^4.0.1
utf-8-validate: '>=5.0.2'
peerDependenciesMeta:
bufferutil:
optional: true
utf-8-validate:
optional: true
snapshots:
anymatch@3.1.3:
dependencies:
normalize-path: 3.0.0
picomatch: 2.3.1
balanced-match@1.0.2: {}
binary-extensions@2.3.0: {}
brace-expansion@1.1.12:
dependencies:
balanced-match: 1.0.2
concat-map: 0.0.1
braces@3.0.3:
dependencies:
fill-range: 7.1.1
buffer-from@1.1.2: {}
chokidar@3.6.0:
dependencies:
anymatch: 3.1.3
braces: 3.0.3
glob-parent: 5.1.2
is-binary-path: 2.1.0
is-glob: 4.0.3
normalize-path: 3.0.0
readdirp: 3.6.0
optionalDependencies:
fsevents: 2.3.3
concat-map@0.0.1: {}
debug@4.4.3(supports-color@5.5.0):
dependencies:
ms: 2.1.3
optionalDependencies:
supports-color: 5.5.0
fill-range@7.1.1:
dependencies:
to-regex-range: 5.0.1
fsevents@2.3.3:
optional: true
glob-parent@5.1.2:
dependencies:
is-glob: 4.0.3
has-flag@3.0.0: {}
ignore-by-default@1.0.1: {}
is-binary-path@2.1.0:
dependencies:
binary-extensions: 2.3.0
is-extglob@2.1.1: {}
is-glob@4.0.3:
dependencies:
is-extglob: 2.1.1
is-number@7.0.0: {}
luxon@3.7.2: {}
minimatch@3.1.2:
dependencies:
brace-expansion: 1.1.12
ms@2.1.3: {}
nodemon@3.1.11:
dependencies:
chokidar: 3.6.0
debug: 4.4.3(supports-color@5.5.0)
ignore-by-default: 1.0.1
minimatch: 3.1.2
pstree.remy: 1.1.8
semver: 7.7.3
simple-update-notifier: 2.0.0
supports-color: 5.5.0
touch: 3.1.1
undefsafe: 2.0.5
normalize-path@3.0.0: {}
picomatch@2.3.1: {}
pstree.remy@1.1.8: {}
readdirp@3.6.0:
dependencies:
picomatch: 2.3.1
sax@1.4.3: {}
semver@7.7.3: {}
simple-update-notifier@2.0.0:
dependencies:
semver: 7.7.3
source-map-support@0.5.21:
dependencies:
buffer-from: 1.1.2
source-map: 0.6.1
source-map@0.6.1: {}
supports-color@5.5.0:
dependencies:
has-flag: 3.0.0
to-regex-range@5.0.1:
dependencies:
is-number: 7.0.0
touch@3.1.1: {}
undefsafe@2.0.5: {}
ws@8.18.3: {}


@@ -240,4 +240,4 @@
</div>
</body>
</html>
</html>


@@ -12,22 +12,43 @@ Debug Main Page
</nav>
<main class="dashboard">
<section class="widget">
<fieldset>
<legend>Error reports</legend>
<desc><a href="/dbg/error">CLICK HERE TO SEE THE ERROR REPORTS</a> </desc>
</fieldset>
<fieldset>
<legend>CURRENT PROFILE</legend>
<desc>
<p>
Name: <b>{{profile.fullname}}</b> <br />
Email: <b>{{profile.email}}</b>
</p>
</desc>
<legend>Profile Management</legend>
<form method="post" action="/dbg/actions/resend-email-verification">
<div class="row">
<input type="email" name="email" placeholder="example@example.com" value="" />
</div>
<div class="row">
<label for="force-verify">Are you sure?</label>
<input id="force-verify" type="checkbox" name="force" />
<br />
<small>
This is a just a security double check for prevent non intentional submits.
</small>
</div>
<div class="row">
<input type="submit" name="resend" value="Resend Verification" />
<input type="submit" name="verify" value="Verify" />
</div>
<div class="row">
<input type="submit" class="danger" name="block" value="Block" />
<input type="submit" class="danger" name="unblock" value="Unblock" />
</div>
</form>
</fieldset>
<fieldset>
<legend>VIRTUAL CLOCK</legend>
<desc>
<p><b>IMPORTANT:</b> The virtual clock is profile based and only affects the currently logged-in profile.</p>
<p>
CURRENT CLOCK: <b>{{current-clock}}</b>
<br />
@@ -60,93 +81,8 @@ Debug Main Page
</form>
</fieldset>
<fieldset>
<legend>ERROR REPORTS</legend>
<desc><a href="/dbg/error">CLICK HERE TO SEE THE ERROR REPORTS</a> </desc>
</fieldset>
</section>
<section class="widget">
<fieldset>
<legend>Profile Management</legend>
<form method="post" action="/dbg/actions/resend-email-verification">
<div class="row">
<input type="email" name="email" placeholder="example@example.com" value="" />
</div>
<div class="row">
<label for="force-verify">Are you sure?</label>
<input id="force-verify" type="checkbox" name="force" />
<br />
<small>
This is a just a security double check for prevent non intentional submits.
</small>
</div>
<div class="row">
<input type="submit" name="resend" value="Resend Verification" />
<input type="submit" name="verify" value="Verify" />
</div>
<div class="row">
<input type="submit" class="danger" name="block" value="Block" />
<input type="submit" class="danger" name="unblock" value="Unblock" />
</div>
</form>
</fieldset>
<fieldset>
<legend>Feature Flags for Team</legend>
<desc>Add a feature flag to a team</desc>
<form method="post" action="/dbg/actions/handle-team-features">
<div class="row">
<input type="text" style="width:300px" name="team-id" placeholder="team-id" />
</div>
<div class="row">
<select type="text" style="width:100px" name="feature">
{% for feature in supported-features %}
<option value="{{feature}}">{{feature}}</option>
{% endfor %}
</select>
</div>
<div class="row">
<select style="width:100px" name="action">
<option value="">Action...</option>
<option value="show">Show</option>
<option value="enable">Enable</option>
<option value="disable">Disable</option>
</select>
</div>
<div class="row">
<label for="check-feature">Skip feature check</label>
<input id="check-feature" type="checkbox" name="skip-check" />
<br />
<small>
Do not check if the feature is supported
</small>
</div>
<div class="row">
<label for="force-version">Are you sure?</label>
<input id="force-version" type="checkbox" name="force" />
<br />
<small>
This is a just a security double check for prevent non intentional submits.
</small>
</div>
<div class="row">
<input type="submit" value="Submit" />
</div>
</form>
</fieldset>
</section>
<section class="widget">
<fieldset>
@@ -237,5 +173,55 @@ Debug Main Page
</form>
</fieldset>
</section>
<section class="widget">
<fieldset>
<legend>Feature Flags for Team</legend>
<desc>Add a feature flag to a team</desc>
<form method="post" action="/dbg/actions/handle-team-features">
<div class="row">
<input type="text" style="width:300px" name="team-id" placeholder="team-id" />
</div>
<div class="row">
<select type="text" style="width:100px" name="feature">
{% for feature in supported-features %}
<option value="{{feature}}">{{feature}}</option>
{% endfor %}
</select>
</div>
<div class="row">
<select style="width:100px" name="action">
<option value="">Action...</option>
<option value="show">Show</option>
<option value="enable">Enable</option>
<option value="disable">Disable</option>
</select>
</div>
<div class="row">
<label for="check-feature">Skip feature check</label>
<input id="check-feature" type="checkbox" name="skip-check" />
<br />
<small>
Do not check if the feature is supported
</small>
</div>
<div class="row">
<label for="force-version">Are you sure?</label>
<input id="force-version" type="checkbox" name="force" />
<br />
<small>
This is a just a security double check for prevent non intentional submits.
</small>
</div>
<div class="row">
<input type="submit" value="Submit" />
</div>
</form>
</fieldset>
</section>
</main>
{% endblock %}


@@ -36,6 +36,17 @@
[integrant.core :as ig]
[yetti.response :as-alias yres]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; HELPERS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn obfuscate-string
[s]
(if (< (count s) 10)
(apply str (take (count s) (repeat "*")))
(str (subs s 0 5)
(apply str (take (- (count s) 5) (repeat "*"))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; OIDC PROVIDER (GENERIC)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -166,7 +177,7 @@
(l/inf :hint "provider initialized"
:provider (:id provider)
:client-id (:client-id provider)
:client-secret (d/obfuscate-string (:client-secret provider)))
:client-secret (obfuscate-string (:client-secret provider)))
provider)
(catch Throwable cause
@@ -211,7 +222,7 @@
(l/inf :hint "provider initialized"
:provider (:id provider)
:client-id (:client-id provider)
:client-secret (d/obfuscate-string (:client-secret provider)))
:client-secret (obfuscate-string (:client-secret provider)))
provider)
(catch Throwable cause
@@ -288,7 +299,7 @@
(l/inf :hint "provider initialized"
:provider (:id provider)
:client-id (:client-id provider)
:client-secret (d/obfuscate-string (:client-secret provider)))
:client-secret (obfuscate-string (:client-secret provider)))
provider)
(catch Throwable cause
@@ -330,7 +341,7 @@
:provider "gitlab"
:base-uri (:base-uri provider)
:client-id (:client-id provider)
:client-secret (d/obfuscate-string (:client-secret provider)))
:client-secret (obfuscate-string (:client-secret provider)))
provider)
(catch Throwable cause
(ex/raise :type ::internal
@@ -350,7 +361,7 @@
(l/inf :hint "provider initialized"
:provider (:id provider)
:client-id (:client-id provider)
:client-secret (d/obfuscate-string (:client-secret provider)))
:client-secret (obfuscate-string (:client-secret provider)))
provider)
(catch Throwable cause
@@ -448,7 +459,7 @@
(l/trc :hint "fetch access token"
:provider (:id provider)
:client-id (:client-id provider)
:client-secret (d/obfuscate-string (:client-secret provider))
:client-secret (obfuscate-string (:client-secret provider))
:grant-type (:grant_type params)
:redirect-uri (:redirect_uri params))
@@ -501,7 +512,7 @@
[cfg provider tdata]
(l/trc :hint "fetch user info"
:uri (:user-uri provider)
:token (d/obfuscate-string (:token/access tdata)))
:token (obfuscate-string (:token/access tdata)))
(let [params {:uri (:user-uri provider)
:headers {"Authorization" (str (:token/type tdata) " " (:token/access tdata))}


@@ -331,81 +331,6 @@
(set/difference cfeat/backend-only-features))
#{}))))
(defn check-file-exists
[cfg id & {:keys [include-deleted?]
:or {include-deleted? false}
:as options}]
(db/get-with-sql cfg [sql:get-minimal-file id]
{:db/remove-deleted (not include-deleted?)}))
(def ^:private sql:file-permissions
"select fpr.is_owner,
fpr.is_admin,
fpr.can_edit
from file_profile_rel as fpr
inner join file as f on (f.id = fpr.file_id)
where fpr.file_id = ?
and fpr.profile_id = ?
union all
select tpr.is_owner,
tpr.is_admin,
tpr.can_edit
from team_profile_rel as tpr
inner join project as p on (p.team_id = tpr.team_id)
inner join file as f on (p.id = f.project_id)
where f.id = ?
and tpr.profile_id = ?
union all
select ppr.is_owner,
ppr.is_admin,
ppr.can_edit
from project_profile_rel as ppr
inner join file as f on (f.project_id = ppr.project_id)
where f.id = ?
and ppr.profile_id = ?")
(defn- get-file-permissions*
[conn profile-id file-id]
(when (and profile-id file-id)
(db/exec! conn [sql:file-permissions
file-id profile-id
file-id profile-id
file-id profile-id])))
(defn get-file-permissions
([conn profile-id file-id]
(let [rows (get-file-permissions* conn profile-id file-id)
is-owner (boolean (some :is-owner rows))
is-admin (boolean (some :is-admin rows))
can-edit (boolean (some :can-edit rows))]
(when (seq rows)
{:type :membership
:is-owner is-owner
:is-admin (or is-owner is-admin)
:can-edit (or is-owner is-admin can-edit)
:can-read true
:is-logged (some? profile-id)})))
([conn profile-id file-id share-id]
(let [perms (get-file-permissions conn profile-id file-id)
ldata (some-> (db/get* conn :share-link {:id share-id :file-id file-id})
(dissoc :flags)
(update :pages db/decode-pgarray #{}))]
;; NOTE: in a future when share-link becomes more powerful and
;; will allow us specify which parts of the app is available, we
;; will probably need to tweak this function in order to expose
;; this flags to the frontend.
(cond
(some? perms) perms
(some? ldata) {:type :share-link
:can-read true
:pages (:pages ldata)
:is-logged (some? profile-id)
:who-comment (:who-comment ldata)
:who-inspect (:who-inspect ldata)}))))
(defn get-project
[cfg project-id]
(db/get cfg :project {:id project-id}))


@@ -821,10 +821,9 @@
entries (keep (match-storage-entry-fn) entries)]
(doseq [{:keys [id entry]} entries]
(let [object (-> (read-entry input entry)
(decode-storage-object)
(update :bucket d/nilv sto/default-bucket)
(validate-storage-object))
(let [object (->> (read-entry input entry)
(decode-storage-object)
(validate-storage-object))
ext (cmedia/mtype->extension (:content-type object))
path (str "objects/" id ext)


@@ -124,6 +124,8 @@
(throw (IllegalArgumentException. "invalid email body provided")))
(doseq [[name content] attachments]
(prn "attachment" name)
(let [attachment-part (MimeBodyPart.)]
(.setFileName attachment-part ^String name)
(.setContent attachment-part ^String content (str "text/plain; charset=" charset))


@@ -30,7 +30,7 @@
(defn- get-file-media-object
[pool id]
(db/get pool :file-media-object {:id id} {::db/remove-deleted false}))
(db/get pool :file-media-object {:id id}))
(defn- serve-object-from-s3
[{:keys [::sto/storage] :as cfg} obj]


@@ -49,16 +49,13 @@
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn index-handler
[cfg request]
(let [profile-id (::session/profile-id request)
offset (clock/get-offset profile-id)
profile (profile/get-profile cfg profile-id)]
[_cfg _request]
(let [{:keys [clock offset]} @clock/current]
{::yres/status 200
::yres/headers {"content-type" "text/html"}
::yres/body (-> (io/resource "app/templates/debug.tmpl")
(tmpl/render {:version (:full cf/version)
:profile profile
:current-clock ct/*clock*
:current-clock (str clock)
:current-offset (if offset
(ct/format-duration offset)
"NO OFFSET")
@@ -450,16 +447,15 @@
(defn- set-virtual-clock
[_ {:keys [params] :as request}]
(let [offset (some-> params :offset str/trim not-empty ct/duration)
profile-id (::session/profile-id request)
reset? (contains? params :reset)]
(let [offset (some-> params :offset str/trim not-empty ct/duration)
reset? (contains? params :reset)]
(if (= "production" (cf/get :tenant))
{::yres/status 501
::yres/body "OPERATION NOT ALLOWED"}
(do
(if (or reset? (zero? (inst-ms offset)))
(clock/assign-offset profile-id nil)
(clock/assign-offset profile-id offset))
(clock/set-offset! nil)
(clock/set-offset! offset))
{::yres/status 302
::yres/headers {"location" "/dbg"}}))))
@@ -499,7 +495,7 @@
(defn authorized?
[pool {:keys [::session/profile-id]}]
(or (and (= "devenv" (cf/get :host)) profile-id)
(or (= "devenv" (cf/get :host))
(let [profile (ex/ignoring (profile/get-profile pool profile-id))
admins (or (cf/get :admins) #{})]
(contains? admins (:email profile)))))


@@ -309,7 +309,7 @@
(fn [request]
(let [key (yreq/get-header request "x-shared-key")]
(if (= key shared-key)
(handler (assoc request ::http/auth-with-shared-key true))
(handler request)
{::yres/status 403}))))
(fn [_ _]
{::yres/status 403})))


@@ -20,7 +20,6 @@
[app.http.session.tasks :as-alias tasks]
[app.main :as-alias main]
[app.setup :as-alias setup]
[app.setup.clock :as clock]
[app.tokens :as tokens]
[integrant.core :as ig]
[yetti.request :as yreq]
@@ -230,22 +229,18 @@
(let [{:keys [type token claims metadata]} (get request ::http/auth-data)]
(cond
(= type :cookie)
(let [session
(case (:ver metadata)
;; BACKWARD COMPATIBILITY WITH OLD TOKENS
0 (read-session manager token)
1 (some->> (:sid claims) (read-session manager))
nil)
(let [session (case (:ver metadata)
;; BACKWARD COMPATIBILITY WITH OLD TOKENS
0 (read-session manager token)
1 (some->> (:sid claims) (read-session manager))
nil)
request
(cond-> request
(some? session)
(-> (assoc ::profile-id (:profile-id session))
(assoc ::session session)))
request (cond-> request
(some? session)
(-> (assoc ::profile-id (:profile-id session))
(assoc ::session session)))
response
(binding [ct/*clock* (clock/get-clock (:profile-id session))]
(handler request))]
response (handler request)]
(if (and session (renew-session? session))
(let [session (->> session


@@ -9,7 +9,6 @@
(:require
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.json :as json]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
@@ -224,7 +223,7 @@
(some? tnow)
(assoc :tracked-at tnow))))
(defn- append-audit-entry
(defn- append-audit-entry!
[cfg params]
(let [params (-> params
(update :props db/tjson)
@@ -237,16 +236,6 @@
(let [params (event->params event)
tnow (ct/now)]
(when (contains? cf/flags :audit-log-logger)
(l/log! ::l/logger "app.audit"
::l/level :info
:profile-id (str (::profile-id event))
:ip-addr (str (::ip-addr event))
:type (::type event)
:name (::name event)
:props (json/encode (::props event) :key-fn json/write-camel-key)
:context (json/encode (::context event) :key-fn json/write-camel-key)))
(when (contains? cf/flags :audit-log)
;; NOTE: this operation may cause primary key conflicts on inserts
;; because of the timestamp precission (two concurrent requests), in
@@ -254,7 +243,7 @@
(let [params (-> params
(assoc :created-at tnow)
(update :tracked-at #(or % tnow)))]
(append-audit-entry cfg params)))
(append-audit-entry! cfg params)))
(when (and (or (contains? cf/flags :telemetry)
(cf/get :telemetry-enabled))
@@ -269,7 +258,7 @@
(update :tracked-at #(or % tnow))
(assoc :props {})
(assoc :context {}))]
(append-audit-entry cfg params)))
(append-audit-entry! cfg params)))
(when (and (contains? cf/flags :webhooks)
(::webhooks/event? event))
@@ -323,4 +312,4 @@
params (-> (event->params event)
(assoc :created-at tnow)
(update :tracked-at #(or % tnow)))]
(append-audit-entry cfg params)))))))
(append-audit-entry! cfg params)))))))


@@ -14,7 +14,6 @@
[app.common.spec :as us]
[app.common.time :as ct]
[app.common.uri :as u]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.http :as-alias http]
@@ -93,11 +92,7 @@
(let [handler-name (:type path-params)
etag (yreq/get-header request "if-none-match")
profile-id (or (::session/profile-id request)
(::actoken/profile-id request)
(if (::http/auth-with-shared-key request)
uuid/zero
nil))
(::actoken/profile-id request))
ip-addr (inet/parse-request request)
data (-> params


@@ -307,8 +307,7 @@
:content-type (:mtype input)})]
(:id sobject))
(catch Throwable cause
(l/wrn :hint "unable to import profile picture"
:uri uri
(l/err :hint "unable to import profile picture"
:cause cause)
nil)))


@@ -79,14 +79,85 @@
;; --- FILE PERMISSIONS
(def ^:private sql:file-permissions
"select fpr.is_owner,
fpr.is_admin,
fpr.can_edit
from file_profile_rel as fpr
inner join file as f on (f.id = fpr.file_id)
where fpr.file_id = ?
and fpr.profile_id = ?
and f.deleted_at is null
union all
select tpr.is_owner,
tpr.is_admin,
tpr.can_edit
from team_profile_rel as tpr
inner join project as p on (p.team_id = tpr.team_id)
inner join file as f on (p.id = f.project_id)
where f.id = ?
and tpr.profile_id = ?
and f.deleted_at is null
union all
select ppr.is_owner,
ppr.is_admin,
ppr.can_edit
from project_profile_rel as ppr
inner join file as f on (f.project_id = ppr.project_id)
where f.id = ?
and ppr.profile_id = ?
and f.deleted_at is null")
(defn get-file-permissions
[conn profile-id file-id]
(when (and profile-id file-id)
(db/exec! conn [sql:file-permissions
file-id profile-id
file-id profile-id
file-id profile-id])))
(defn get-permissions
([conn profile-id file-id]
(let [rows (get-file-permissions conn profile-id file-id)
is-owner (boolean (some :is-owner rows))
is-admin (boolean (some :is-admin rows))
can-edit (boolean (some :can-edit rows))]
(when (seq rows)
{:type :membership
:is-owner is-owner
:is-admin (or is-owner is-admin)
:can-edit (or is-owner is-admin can-edit)
:can-read true
:is-logged (some? profile-id)})))
([conn profile-id file-id share-id]
(let [perms (get-permissions conn profile-id file-id)
ldata (some-> (db/get* conn :share-link {:id share-id :file-id file-id})
(dissoc :flags)
(update :pages db/decode-pgarray #{}))]
;; NOTE: in a future when share-link becomes more powerful and
;; will allow us specify which parts of the app is available, we
;; will probably need to tweak this function in order to expose
;; this flags to the frontend.
(cond
(some? perms) perms
(some? ldata) {:type :share-link
:can-read true
:pages (:pages ldata)
:is-logged (some? profile-id)
:who-comment (:who-comment ldata)
:who-inspect (:who-inspect ldata)}))))
(def has-edit-permissions?
(perms/make-edition-predicate-fn bfc/get-file-permissions))
(perms/make-edition-predicate-fn get-permissions))
(def has-read-permissions?
(perms/make-read-predicate-fn bfc/get-file-permissions))
(perms/make-read-predicate-fn get-permissions))
(def has-comment-permissions?
(perms/make-comment-predicate-fn bfc/get-file-permissions))
(perms/make-comment-predicate-fn get-permissions))
(def check-edition-permissions!
(perms/make-check-fn has-edit-permissions?))
@@ -99,7 +170,7 @@
(defn check-comment-permissions!
[conn profile-id file-id share-id]
(let [perms (bfc/get-file-permissions conn profile-id file-id share-id)
(let [perms (get-permissions conn profile-id file-id share-id)
can-read (has-read-permissions? perms)
can-comment (has-comment-permissions? perms)]
(when-not (or can-read can-comment)
@@ -151,7 +222,7 @@
(defn- get-minimal-file-with-perms
[cfg {:keys [:id ::rpc/profile-id]}]
(let [mfile (get-minimal-file cfg id)
perms (bfc/get-file-permissions cfg profile-id id)]
perms (get-permissions cfg profile-id id)]
(assoc mfile :permissions perms)))
(defn get-file-etag
@@ -177,7 +248,7 @@
;; will be already prefetched and we just reuse them instead
;; of making an additional database queries.
(let [perms (or (:permissions (::cond/object params))
(bfc/get-file-permissions conn profile-id id))]
(get-permissions conn profile-id id))]
(check-read-permissions! perms)
(let [team (teams/get-team conn
@@ -240,7 +311,7 @@
::sm/result schema:file-fragment}
[cfg {:keys [::rpc/profile-id file-id fragment-id share-id]}]
(db/run! cfg (fn [cfg]
(let [perms (bfc/get-file-permissions cfg profile-id file-id share-id)]
(let [perms (get-permissions cfg profile-id file-id share-id)]
(check-read-permissions! perms)
(-> (get-file-fragment cfg file-id fragment-id)
(rph/with-http-cache long-cache-duration))))))
@@ -385,7 +456,8 @@
:code :params-validation
:hint "page-id is required when object-id is provided"))
(let [perms (bfc/get-file-permissions conn profile-id file-id share-id)
(let [perms (get-permissions conn profile-id file-id share-id)
file (bfc/get-file cfg file-id :read-only? true)
proj (db/get conn :project {:id (:project-id file)})
@@ -616,10 +688,11 @@
"Get libraries used by the specified file."
{::doc/added "1.17"
::sm/params schema:get-file-libraries}
[cfg {:keys [::rpc/profile-id file-id]}]
(bfc/check-file-exists cfg file-id)
(check-read-permissions! cfg profile-id file-id)
(bfc/get-file-libraries cfg file-id))
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id]}]
(dm/with-open [conn (db/open pool)]
(check-read-permissions! conn profile-id file-id)
(bfc/get-file-libraries conn file-id)))
;; --- COMMAND QUERY: Files that use this File library
@@ -704,6 +777,7 @@
f.created_at,
f.modified_at,
f.name,
f.is_shared,
f.deleted_at AS will_be_deleted_at,
ft.media_id AS thumbnail_id,
row_number() OVER w AS row_num,
@@ -711,7 +785,8 @@
FROM file AS f
INNER JOIN project AS p ON (p.id = f.project_id)
LEFT JOIN file_thumbnail AS ft on (ft.file_id = f.id
AND ft.revn = f.revn)
AND ft.revn = f.revn
AND ft.deleted_at is null)
WHERE p.team_id = ?
AND (p.deleted_at > ?::timestamptz OR
f.deleted_at > ?::timestamptz)
@@ -813,7 +888,7 @@
AND (f.deleted_at IS NULL OR f.deleted_at > now())
ORDER BY f.created_at ASC;")
(defn- absorb-library-by-file
(defn- absorb-library-by-file!
[cfg ldata file-id]
(assert (db/connection-map? cfg)
@@ -837,7 +912,7 @@
:modified-at (ct/now)
:has-media-trimmed false}))))
(defn- absorb-library*
(defn- absorb-library
"Find all files using a shared library, and absorb all library assets
into the file local libraries"
[cfg {:keys [id data] :as library}]
@@ -852,10 +927,10 @@
:library-id (str id)
:files (str/join "," (map str ids)))
(run! (partial absorb-library-by-file cfg data) ids)
(run! (partial absorb-library-by-file! cfg data) ids)
library))
(defn absorb-library
(defn absorb-library!
[{:keys [::db/conn] :as cfg} id]
(let [file (-> (bfc/get-file cfg id
:realize? true
@@ -872,7 +947,7 @@
(-> (cfeat/get-team-enabled-features cf/flags team)
(cfeat/check-file-features! (:features file)))
(absorb-library* cfg file)))
(absorb-library cfg file)))
(defn- set-file-shared
[{:keys [::db/conn] :as cfg} {:keys [profile-id id] :as params}]
@@ -885,14 +960,14 @@
;; file, we need to perform more complex operation,
;; so in this case we retrieve the complete file and
;; perform all required validations.
(let [file (-> (absorb-library cfg id)
(let [file (-> (absorb-library! cfg id)
(assoc :is-shared false))]
(db/delete! conn :file-library-rel {:library-file-id id})
(db/update! conn :file
{:is-shared false
:modified-at (ct/now)}
{:id id})
file)
(select-keys file [:id :name :is-shared]))
(and (false? (:is-shared file))
(true? (:is-shared params)))
@@ -939,11 +1014,6 @@
{:id file-id}
{::db/return-keys [:id :name :is-shared :deleted-at
:project-id :created-at :modified-at]})]
;; Remove all possible relations for that file
(db/delete! conn :file-library-rel
{:library-file-id file-id})
(wrk/submit! {::db/conn conn
::wrk/task :delete-object
::wrk/params {:object :file
@@ -1094,53 +1164,47 @@
;; --- MUTATION COMMAND: delete-files-immediately
(def ^:private sql:get-delete-team-files-candidates
"SELECT f.id
FROM file AS f
JOIN project AS p ON (p.id = f.project_id)
JOIN team AS t ON (t.id = p.team_id)
WHERE t.deleted_at IS NULL
AND t.id = ?
AND f.id = ANY(?::uuid[])")
(def ^:private sql:delete-team-files
"UPDATE file AS uf SET deleted_at = ?::timestamptz
FROM (
SELECT f.id
FROM file AS f
JOIN project AS p ON (p.id = f.project_id)
JOIN team AS t ON (t.id = p.team_id)
WHERE t.deleted_at IS NULL
AND t.id = ?
AND f.id = ANY(?::uuid[])
) AS subquery
WHERE uf.id = subquery.id
RETURNING uf.id, uf.deleted_at;")
(def ^:private schema:permanently-delete-team-files
[:map {:title "permanently-delete-team-files"}
[:team-id ::sm/uuid]
[:ids [::sm/set ::sm/uuid]]])
(defn- permanently-delete-team-files
[{:keys [::db/conn]} {:keys [::rpc/request-at team-id ids]}]
(let [ids (into #{}
d/xf:map-id
(db/exec! conn [sql:get-delete-team-files-candidates team-id
(db/create-array conn "uuid" ids)]))]
(reduce (fn [acc id]
(events/tap :progress {:file-id id :index (inc (count acc)) :total (count ids)})
(db/update! conn :file
{:deleted-at request-at}
{:id id}
{::db/return-keys false})
(wrk/submit! {::db/conn conn
::wrk/task :delete-object
::wrk/params {:object :file
:deleted-at request-at
:id id}})
(conj acc id))
#{}
ids)))
(sv/defmethod ::permanently-delete-team-files
"Mark the specified files to be deleted immediatelly on the
specified team. The team-id on params will be used to filter and
check writable permissons on team."
{::doc/added "2.13"
::sm/params schema:permanently-delete-team-files}
{::doc/added "2.12"
::sm/params schema:permanently-delete-team-files
::db/transaction true}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id] :as params}]
(teams/check-edition-permissions! pool profile-id team-id)
(sse/response #(db/tx-run! cfg permanently-delete-team-files params)))
[{:keys [::db/conn]} {:keys [::rpc/profile-id ::rpc/request-at team-id ids]}]
(teams/check-edition-permissions! conn profile-id team-id)
(reduce (fn [acc {:keys [id deleted-at]}]
(wrk/submit! {::db/conn conn
::wrk/task :delete-object
::wrk/params {:object :file
:deleted-at deleted-at
:id id}})
(conj acc id))
#{}
(db/plan conn [sql:delete-team-files request-at team-id
(db/create-array conn "uuid" ids)])))
;; --- MUTATION COMMAND: restore-files-immediately
@@ -1204,7 +1268,7 @@
{:keys [files projects]}
(reduce (fn [result {:keys [id project-id]}]
(let [index (-> result :files count)]
(events/tap :progress {:file-id id :index (inc index) :total total-files})
(events/tap :progress {:file-id id :index index :total total-files})
(restore-file conn id)
(-> result
@@ -1227,7 +1291,7 @@
(sv/defmethod ::restore-deleted-team-files
"Removes the deletion mark from the specified files (and respective
projects) on the specified team."
{::doc/added "2.13"
{::doc/added "2.12"
::sse/stream? true
::sm/params schema:restore-deleted-team-files}
[cfg params]

View File

@@ -199,13 +199,15 @@
[cfg {:keys [::rpc/profile-id file-id strip-frames-with-thumbnails] :as params}]
(db/run! cfg (fn [{:keys [::db/conn] :as cfg}]
(files/check-read-permissions! conn profile-id file-id)
(let [team (teams/get-team conn
:profile-id profile-id
:file-id file-id)
file (bfc/get-file cfg file-id
:include-deleted? true
:realize? true
:read-only? true)
strip-frames-with-thumbnails
(or (nil? strip-frames-with-thumbnails) ;; if not present, default to true
(true? strip-frames-with-thumbnails))]
@@ -331,16 +333,12 @@
;; --- MUTATION COMMAND: create-file-thumbnail
(defn- create-file-thumbnail
[{:keys [::db/conn ::sto/storage] :as cfg} {:keys [file-id revn props media] :as params}]
(defn- create-file-thumbnail!
[{:keys [::db/conn ::sto/storage]} {:keys [file-id revn props media] :as params}]
(media/validate-media-type! media)
(media/validate-media-size! media)
(let [file (bfc/get-file cfg file-id
:include-deleted? true
:load-data? false)
props (db/tjson (or props {}))
(let [props (db/tjson (or props {}))
path (:path media)
mtype (:mtype media)
hash (sto/calculate-hash path)
@@ -369,7 +367,7 @@
(db/update! conn :file-thumbnail
{:media-id (:id media)
:deleted-at (:deleted-at file)
:deleted-at nil
:updated-at tnow
:props props}
{:file-id file-id
@@ -380,7 +378,6 @@
:revn revn
:created-at tnow
:updated-at tnow
:deleted-at (:deleted-at file)
:props props
:media-id (:id media)}))
@@ -405,8 +402,6 @@
::rtry/when rtry/conflict-exception?
::sm/params schema:create-file-thumbnail}
;; FIXME: do not run the thumbnail upload inside a transaction
[cfg {:keys [::rpc/profile-id file-id] :as params}]
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
;; TODO For now we check read permissions instead of write,
@@ -414,6 +409,6 @@
;; review this approach in the future.
(files/check-read-permissions! conn profile-id file-id)
(when-not (db/read-only? conn)
(let [media (create-file-thumbnail cfg params)]
(let [media (create-file-thumbnail! cfg params)]
{:uri (files/resolve-public-uri (:id media))
:id (:id media)})))))

View File

@@ -6,7 +6,6 @@
(ns app.rpc.commands.fonts
(:require
[app.binfile.common :as bfc]
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.schema :as sm]
@@ -27,17 +26,7 @@
[app.rpc.helpers :as rph]
[app.rpc.quotes :as quotes]
[app.storage :as sto]
[app.storage.tmp :as tmp]
[app.util.services :as sv]
[datoteka.io :as io])
(:import
java.io.InputStream
java.io.OutputStream
java.io.SequenceInputStream
java.util.Collections))
(set! *warn-on-reflection* true)
[app.util.services :as sv]))
(def valid-weight #{100 200 300 400 500 600 700 800 900 950})
(def valid-style #{"normal" "italic"})
@@ -77,7 +66,7 @@
(uuid? file-id)
(let [file (db/get-by-id conn :file file-id {:columns [:id :project-id]})
project (db/get-by-id conn :project (:project-id file) {:columns [:id :team-id]})
perms (bfc/get-file-permissions conn profile-id file-id share-id)]
perms (files/get-permissions conn profile-id file-id share-id)]
(files/check-read-permissions! perms)
(db/query conn :team-font-variant
{:team-id (:team-id project)
@@ -115,7 +104,7 @@
(defn create-font-variant
[{:keys [::sto/storage ::db/conn]} {:keys [data] :as params}]
(letfn [(generate-missing [data]
(letfn [(generate-missing! [data]
(let [data (media/run {:cmd :generate-fonts :input data})]
(when (and (not (contains? data "font/otf"))
(not (contains? data "font/ttf"))
@@ -126,26 +115,8 @@
:hint "invalid font upload, unable to generate missing font assets"))
data))
(process-chunks [chunks]
(let [tmp (tmp/tempfile :prefix "penpot.tempfont." :suffix "")
streams (map io/input-stream chunks)
streams (Collections/enumeration streams)]
(with-open [^OutputStream output (io/output-stream tmp)
^InputStream input (SequenceInputStream. streams)]
(io/copy input output))
tmp))
(join-chunks [data]
(reduce-kv (fn [data mtype content]
(if (vector? content)
(assoc data mtype (process-chunks content))
data))
data
data))
(prepare-font [data mtype]
(when-let [resource (get data mtype)]
(let [hash (sto/calculate-hash resource)
content (-> (sto/content resource)
(sto/wrap-with-hash hash))]
@@ -184,8 +155,7 @@
:otf-file-id (:id otf)
:ttf-file-id (:id ttf)}))]
(let [data (join-chunks data)
data (generate-missing data)
(let [data (generate-missing! data)
assets (persist-fonts-files! data)
result (insert-font-variant! assets)]
(vary-meta result assoc ::audit/replace-props (update params :data (comp vec keys))))))

View File

@@ -19,7 +19,7 @@
inner join team_profile_rel as tpr on (tpr.team_id = p.team_id)
where tpr.profile_id = ?
and p.team_id = ?
and (p.deleted_at is null)
and (p.deleted_at is null or p.deleted_at > now())
and (tpr.is_admin = true or
tpr.is_owner = true or
tpr.can_edit = true)
@@ -29,7 +29,7 @@
inner join project_profile_rel as ppr on (ppr.project_id = p.id)
where ppr.profile_id = ?
and p.team_id = ?
and (p.deleted_at is null)
and (p.deleted_at is null or p.deleted_at > now())
and (ppr.is_admin = true or
ppr.is_owner = true or
ppr.can_edit = true)
@@ -47,7 +47,7 @@
left join file_thumbnail as ft on (ft.file_id = f.id and ft.revn = f.revn)
inner join projects as pr on (f.project_id = pr.id)
where f.name ilike ('%' || ? || '%')
and (f.deleted_at is null)
and (f.deleted_at is null or f.deleted_at > now())
order by f.created_at asc")
(defn search-files

View File

@@ -13,6 +13,7 @@
[app.config :as cf]
[app.db :as db]
[app.rpc :as-alias rpc]
[app.rpc.commands.files :as files]
[app.rpc.commands.teams :as teams]
[app.rpc.cond :as-alias cond]
[app.rpc.doc :as-alias doc]
@@ -120,7 +121,7 @@
[system {:keys [::rpc/profile-id file-id share-id] :as params}]
(db/run! system
(fn [{:keys [::db/conn] :as system}]
(let [perms (bfc/get-file-permissions conn profile-id file-id share-id)
(let [perms (files/get-permissions conn profile-id file-id share-id)
params (-> params
(assoc ::perms perms)
(assoc :profile-id profile-id))]

View File

@@ -104,29 +104,28 @@
(def ^:private schema:limit
[:and
[:map
[::name :keyword]
[::name :any]
[::strategy schema:strategy]
[::key :string]
[::opts :string]
[::capacity {:optional true} ::sm/int]
[::rate {:optional true} ::sm/int]
[::interval {:optional true} ::ct/duration]
[::params {:optional true} [::sm/vec :any]]
[::permits {:optional true} ::sm/int]
[::unit {:optional true} [:enum :days :hours :minutes :seconds :weeks]]]
[:fn (fn [attrs]
(let [contains-fn (partial contains? attrs)]
(or (every? contains-fn [::capacity ::rate ::interval])
(every? contains-fn [::permits ::unit]))))]])
[::opts :string]]
[:or
[:map
[::capacity ::sm/int]
[::rate ::sm/int]
[::internal ::ct/duration]
[::params [::sm/vec :any]]]
[:map
[::nreq ::sm/int]
[::unit [:enum :days :hours :minutes :seconds :weeks]]]]])
(def ^:private schema:limits
[:map-of :keyword [::sm/vec schema:limit]])
(def ^:private valid-limit-tuple?
(sm/validator schema:limit-tuple))
(sm/lazy-validator schema:limit-tuple))
(def ^:private valid-rlimit-instance?
(sm/validator ::rpc/rlimit))
(sm/lazy-validator ::rpc/rlimit))
(defmethod parse-limit :window
[[name strategy opts :as vlimit]]
@@ -135,16 +134,16 @@
(merge
{::name name
::strategy strategy}
(if-let [[_ permits unit] (re-find window-opts-re opts)]
(let [permits (parse-long permits)]
{::permits permits
(if-let [[_ nreq unit] (re-find window-opts-re opts)]
(let [nreq (parse-long nreq)]
{::nreq nreq
::unit (case unit
"d" :days
"h" :hours
"m" :minutes
"s" :seconds
"w" :weeks)
::key (str "penpot.rlimit." (cf/get :tenant) ".window." (d/name name))
::key (str "ratelimit.window." (d/name name))
::opts opts})
(ex/raise :type :validation
:code :invalid-window-limit-opts
@@ -165,15 +164,15 @@
::interval interval
::opts opts
::params [(->seconds interval) rate capacity]
::key (str "penpot.rlimit." (cf/get :tenant) ".bucket." (d/name name))})
::key (str "ratelimit.bucket." (d/name name))})
(ex/raise :type :validation
:code :invalid-bucket-limit-opts
:hint (str/ffmt "looks like '%' does not have a valid format" opts))))
(defmethod process-limit :bucket
[rconn profile-id now {:keys [::key ::params ::service ::capacity ::interval ::rate] :as limit}]
[rconn user-id now {:keys [::key ::params ::service ::capacity ::interval ::rate] :as limit}]
(let [script (-> bucket-rate-limit-script
(assoc ::rscript/keys [(str key "." service "." profile-id)])
(assoc ::rscript/keys [(str key "." service "." user-id)])
(assoc ::rscript/vals (conj params (->seconds now))))
result (rds/eval rconn script)
allowed? (boolean (nth result 0))
@@ -193,18 +192,18 @@
(assoc ::lresult/remaining remaining))))
(defmethod process-limit :window
[rconn profile-id now {:keys [::permits ::unit ::key ::service] :as limit}]
[rconn user-id now {:keys [::nreq ::unit ::key ::service] :as limit}]
(let [ts (ct/truncate now unit)
ttl (ct/diff now (ct/plus ts {unit 1}))
script (-> window-rate-limit-script
(assoc ::rscript/keys [(str key "." service "." profile-id "." (ct/format-inst ts))])
(assoc ::rscript/vals [permits (->seconds ttl)]))
(assoc ::rscript/keys [(str key "." service "." user-id "." (ct/format-inst ts))])
(assoc ::rscript/vals [nreq (->seconds ttl)]))
result (rds/eval rconn script)
allowed? (boolean (nth result 0))
remaining (nth result 1)]
(l/trace :hint "limit processed"
:service service
:name (name (::name limit))
:limit (name (::name limit))
:strategy (name (::strategy limit))
:opts (::opts limit)
:allowed allowed?
@@ -215,8 +214,8 @@
(assoc ::lresult/reset (ct/plus ts {unit 1})))))
(defn- process-limits
[rconn profile-id limits now]
(let [results (into [] (map (partial process-limit rconn profile-id now)) limits)
[rconn user-id limits now]
(let [results (into [] (map (partial process-limit rconn user-id now)) limits)
remaining (->> results
(d/index-by ::name ::lresult/remaining)
(uri/map->query-string))
@@ -228,7 +227,7 @@
(when rejected
(l/warn :hint "rejected rate limit"
:profile-id (str profile-id)
:user-id (str user-id)
:limit-service (-> rejected ::service name)
:limit-name (-> rejected ::name name)
:limit-strategy (-> rejected ::strategy name)))
@@ -372,9 +371,12 @@
(defn- on-refresh-error
[_ cause]
(when-not (instance? java.util.concurrent.RejectedExecutionException cause)
(l/warn :hint "unexpected exception on loading config"
:cause cause
::l/sync? true)))
(if-let [explain (-> cause ex-data ex/explain)]
(l/warn ::l/raw (str "unable to refresh config, invalid format:\n" explain)
::l/sync? true)
(l/warn :hint "unexpected exception on loading config"
:cause cause
::l/sync? true))))
(defn- get-config-path
[]

View File

@@ -25,9 +25,9 @@ local allowed = filled >= requested
local newTokens = filled
if allowed then
newTokens = filled - requested
redis.call("hset", tokensKey, "tokens", newTokens, "timestamp", timestamp)
end
redis.call("hset", tokensKey, "tokens", newTokens, "timestamp", timestamp)
redis.call("expire", tokensKey, ttl)
return { allowed, newTokens }
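As a rough sketch of the decision the script above performs, assuming filled is the token count available after refill and requested is the cost of the current call, the logic amounts to the following (illustrative Clojure, not the production code path):
;; illustrative sketch of the token-bucket decision mirrored from the Lua script
(defn- bucket-decision
  [filled requested]
  (let [allowed? (>= filled requested)]
    {:allowed allowed?
     ;; tokens are only consumed when the request is allowed
     :tokens (if allowed? (- filled requested) filled)}))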

View File

@@ -9,35 +9,48 @@
modification of time offset (useful for testing and time adjustments)."
(:require
[app.common.logging :as l]
[app.common.time :as ct]))
[app.common.time :as ct]
[app.setup :as-alias setup]
[integrant.core :as ig])
(:import
java.time.Clock
java.time.Duration
java.time.Instant
java.time.ZoneId))
(defonce state
(atom {}))
(defonce current
(atom {:clock (Clock/systemDefaultZone)
:offset nil}))
(defn assign-offset
"Assign virtual clock offset to a specific user. Is the responsability
of RPC module to properly bind the correct clock to the user
request."
[profile-id duration]
(swap! state (fn [state]
(if (nil? duration)
(dissoc state profile-id)
(assoc state profile-id duration)))))
(defmethod ig/init-key ::setup/clock
[_ _]
(add-watch current ::common
(fn [_ _ _ {:keys [clock offset]}]
(let [clock (if (ct/duration? offset)
(Clock/offset ^Clock clock
^Duration offset)
clock)]
(l/wrn :hint "altering clock" :clock (str clock))
(alter-var-root #'ct/*clock* (constantly clock))))))
(defn get-offset
[profile-id]
(get @state profile-id))
(defn get-clock
[profile-id]
(if-let [offset (get-offset profile-id)]
(ct/offset-clock offset)
(ct/get-system-clock)))
(defmethod ig/halt-key! ::setup/clock
[_ _]
(remove-watch current ::common))
(defn set-global-clock
(defn fixed
"Get fixed clock, mainly used in tests"
[instant]
(Clock/fixed ^Instant (ct/inst instant)
^ZoneId (ZoneId/of "Z")))
(defn set-offset!
[duration]
(swap! current assoc :offset (some-> duration ct/duration)))
(defn set-clock!
([]
(set-global-clock (ct/get-system-clock)))
(swap! current assoc :clock (Clock/systemDefaultZone)))
([clock]
(assert (ct/clock? clock) "expected valid clock instance")
(l/wrn :hint "altering clock" :clock (str clock))
(alter-var-root #'ct/*clock* (constantly clock))))
(when (instance? Clock clock)
(swap! current assoc :clock clock))))
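A minimal usage sketch based on the test changes elsewhere in this diff: binding the dynamic clock to a fixed instant lets a test pretend it runs in the future (the helper name differs between the two sides of the diff, clock/fixed vs ct/fixed-clock).
;; illustrative; th/run-task! is the backend test helper used in the test files below
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
  (th/run-task! :objects-gc {}))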

View File

@@ -35,9 +35,6 @@
:assets-s3 :s3
nil)))
(def default-bucket
"file-media-object")
(def valid-buckets
#{"file-media-object"
"team-font-variant"

View File

@@ -25,7 +25,7 @@
[app.common.time :as ct]
[app.config :as cf]
[app.db :as db]
[app.storage :as sto]
[app.storage :as-alias sto]
[app.storage.impl :as impl]
[integrant.core :as ig]))
@@ -130,7 +130,7 @@
[{:keys [metadata]}]
(or (some-> metadata :bucket)
(some-> metadata :reference d/name)
sto/default-bucket))
"file-media-object"))
(defn- process-objects!
[conn has-refs? bucket objects]

View File

@@ -45,8 +45,7 @@
:deleted-at (ct/format-inst deleted-at))
(db/update! conn :file
{:deleted-at deleted-at
:is-shared false}
{:deleted-at deleted-at}
{:id id}
{::db/return-keys false})
@@ -54,7 +53,7 @@
(not *team-deletion*))
;; NOTE: we don't prevent file deletion on absorb operation failure
(try
(db/tx-run! cfg files/absorb-library id)
(db/tx-run! cfg files/absorb-library! id)
(catch Throwable cause
(l/warn :hint "error on absorbing library"
:file-id id

View File

@@ -7,18 +7,10 @@
(ns app.util.template
(:require
[app.common.exceptions :as ex]
[cuerdas.core :as str]
[selmer.filters :as sf]
[selmer.parser :as sp]))
;; (sp/cache-off!)
(sf/add-filter! :abbreviate
(fn [s n]
(let [n (parse-long n)]
(str/abbreviate s n))))
(defn render
[path context]
(try

View File

@@ -137,34 +137,33 @@ RETURNING task.id, task.queue")
::wait)))
(run-batch []
(try
(let [rconn (rds/connect cfg)]
(try
(-> cfg
(assoc ::rds/conn rconn)
(db/tx-run! run-batch'))
(finally
(.close ^AutoCloseable rconn))))
(let [rconn (rds/connect cfg)]
(try
(-> cfg
(assoc ::rds/conn rconn)
(db/tx-run! run-batch'))
(catch InterruptedException cause
(throw cause))
(catch InterruptedException cause
(throw cause))
(catch Exception cause
(cond
(rds/exception? cause)
(do
(l/wrn :hint "redis exception (will retry in an instant)" :cause cause)
(px/sleep timeout))
(catch Exception cause
(cond
(rds/exception? cause)
(do
(l/wrn :hint "redis exception (will retry in an instant)" :cause cause)
(px/sleep timeout))
(db/sql-exception? cause)
(do
(l/wrn :hint "database exception (will retry in an instant)" :cause cause)
(px/sleep timeout))
(db/sql-exception? cause)
(do
(l/wrn :hint "database exception (will retry in an instant)" :cause cause)
(px/sleep timeout))
:else
(do
(l/err :hint "unhandled exception (will retry in an instant)" :cause cause)
(px/sleep timeout))))
:else
(do
(l/err :hint "unhandled exception (will retry in an instant)" :cause cause)
(px/sleep timeout))))))
(finally
(.close ^AutoCloseable rconn)))))
(dispatcher []
(l/inf :hint "started")
@@ -177,7 +176,7 @@ RETURNING task.id, task.queue")
(catch InterruptedException _
(l/trc :hint "interrupted"))
(catch Throwable cause
(l/err :hint "unexpected exception" :cause cause))
(l/err :hint " unexpected exception" :cause cause))
(finally
(l/inf :hint "terminated"))))]

View File

@@ -595,8 +595,8 @@
(px/exec! :virtual #(rcp/write-body-to-stream body nil output))
(into []
(map (fn [{:keys [event data]}]
(d/vec2 (keyword event)
(tr/decode-str data))))
[(keyword event)
(tr/decode-str data)]))
(parse-sse (slurp' input)))
(finally
(.close input)))))

View File

@@ -17,6 +17,7 @@
[app.db.sql :as sql]
[app.http :as http]
[app.rpc :as-alias rpc]
[app.setup.clock :as clock]
[app.storage :as sto]
[backend-tests.helpers :as th]
[clojure.test :as t]
@@ -133,7 +134,7 @@
;; this will run pending task triggered by deleting user snapshot
(th/run-pending-tasks!)
(binding [ct/*clock* (ct/fixed-clock (ct/in-future {:days 8}))]
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [res (th/run-task! :objects-gc {})]
;; delete 2 snapshots and 2 file data entries
(t/is (= 4 (:processed res)))))))))

View File

@@ -19,6 +19,7 @@
[app.http :as http]
[app.rpc :as-alias rpc]
[app.rpc.commands.files :as files]
[app.setup.clock :as clock]
[app.storage :as sto]
[backend-tests.helpers :as th]
[clojure.test :as t]
@@ -921,7 +922,7 @@
(t/is (= 0 (:processed result))))
;; run permanent deletion
(binding [ct/*clock* (ct/fixed-clock (ct/in-future {:days 8}))]
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [result (th/run-task! :objects-gc {})]
(t/is (= 3 (:processed result)))))
@@ -1874,7 +1875,7 @@
file-id (uuid/next)
now (ct/inst "2025-10-31T00:00:00Z")]
(binding [ct/*clock* (ct/fixed-clock now)]
(binding [ct/*clock* (clock/fixed now)]
(let [data {::th/type :create-file
::rpc/profile-id (:id prof)
:project-id proj-id
@@ -1920,11 +1921,7 @@
;; (th/print-result! out)
(t/is (nil? (:error out)))
(let [result (:result out)]
(t/is (fn? result))
(let [[ev1 ev2 :as events] (th/consume-sse result)]
(t/is (= 2 (count events)))
(t/is (= (:ids data) (val ev2)))))
(t/is (= (:ids data) result)))
(let [row (th/db-exec-one! ["select * from file where id = ?" file-id])]
(t/is (= (:deleted-at row) now)))))))
@@ -1936,7 +1933,7 @@
file-id (uuid/next)
now (ct/inst "2025-10-31T00:00:00Z")]
(binding [ct/*clock* (ct/fixed-clock now)]
(binding [ct/*clock* (clock/fixed now)]
(let [data {::th/type :create-file
::rpc/profile-id (:id prof)
:project-id proj-id
@@ -1999,7 +1996,7 @@
team-id (:default-team-id profile)
now (ct/inst "2025-10-31T00:00:00Z")]
(binding [ct/*clock* (ct/fixed-clock now)]
(binding [ct/*clock* (clock/fixed now)]
(let [project (th/create-project* 1 {:profile-id (:id profile)
:team-id team-id})
file (th/create-file* 1 {:profile-id (:id profile)

View File

@@ -85,7 +85,7 @@
(t/is (map? (:result out))))
;; run the task again
(let [res (binding [ct/*clock* (ct/fixed-clock (ct/in-future {:minutes 31}))]
(let [res (binding [ct/*clock* (clock/fixed (ct/in-future {:minutes 31}))]
(th/run-task! "storage-gc-touched" {}))]
(t/is (= 2 (:freeze res))))
@@ -136,7 +136,7 @@
(t/is (some? (sto/get-object storage (:media-id row2))))
;; run the task again
(let [res (binding [ct/*clock* (ct/fixed-clock (ct/in-future {:minutes 31}))]
(let [res (binding [ct/*clock* (clock/fixed (ct/in-future {:minutes 31}))]
(th/run-task! :storage-gc-touched {}))]
(t/is (= 1 (:delete res)))
(t/is (= 0 (:freeze res))))
@@ -147,7 +147,7 @@
;; Run the storage gc deleted task, it should permanently delete
;; all storage objects related to the deleted thumbnails
(binding [ct/*clock* (ct/fixed-clock (ct/in-future {:days 8}))]
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [res (th/run-task! :storage-gc-deleted {})]
(t/is (= 1 (:deleted res)))))
@@ -247,7 +247,7 @@
;; Run the storage gc deleted task, it should permanently delete
;; all storage objects related to the deleted thumbnails
(binding [ct/*clock* (ct/fixed-clock (ct/in-future {:days 8}))]
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [result (th/run-task! :storage-gc-deleted {})]
(t/is (= 1 (:deleted result)))))

View File

@@ -12,6 +12,7 @@
[app.db :as db]
[app.http :as http]
[app.rpc :as-alias rpc]
[app.setup.clock :as clock]
[app.storage :as sto]
[backend-tests.helpers :as th]
[clojure.test :as t]
@@ -146,7 +147,7 @@
(t/is (= 0 (:freeze res)))
(t/is (= 0 (:delete res))))
(binding [ct/*clock* (ct/fixed-clock (ct/in-future {:days 8}))]
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [res (th/run-task! :objects-gc {})]
(t/is (= 2 (:processed res))))
@@ -207,7 +208,7 @@
(t/is (= 0 (:freeze res)))
(t/is (= 0 (:delete res))))
(binding [ct/*clock* (ct/fixed-clock (ct/in-future {:days 8}))]
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [res (th/run-task! :objects-gc {})]
(t/is (= 1 (:processed res))))
@@ -267,7 +268,7 @@
(t/is (= 0 (:freeze res)))
(t/is (= 0 (:delete res))))
(binding [ct/*clock* (ct/fixed-clock (ct/in-future {:days 8}))]
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [res (th/run-task! :objects-gc {})]
(t/is (= 1 (:processed res))))

View File

@@ -12,6 +12,7 @@
[app.db :as db]
[app.http :as http]
[app.rpc :as-alias rpc]
[app.setup.clock :as clock]
[backend-tests.helpers :as th]
[clojure.test :as t]))
@@ -227,7 +228,7 @@
(t/is (= 0 (count result)))))
;; run permanent deletion
(binding [ct/*clock* (ct/fixed-clock (ct/in-future {:days 8}))]
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [result (th/run-task! :objects-gc {})]
(t/is (= 1 (:processed result)))))

View File

@@ -13,6 +13,7 @@
[app.db :as db]
[app.http :as http]
[app.rpc :as-alias rpc]
[app.setup.clock :as clock]
[app.storage :as sto]
[app.tokens :as tokens]
[backend-tests.helpers :as th]
@@ -525,7 +526,7 @@
(t/is (= :not-found (:type edata)))))
;; run permanent deletion
(binding [ct/*clock* (ct/fixed-clock (ct/in-future {:days 8}))]
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [result (th/run-task! :objects-gc {})]
(t/is (= 2 (:processed result)))))
@@ -582,7 +583,7 @@
(t/is (= 1 (count rows)))
(t/is (ct/inst? (:deleted-at (first rows)))))
(binding [ct/*clock* (ct/fixed-clock (ct/in-future {:days 8}))]
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [result (th/run-task! :objects-gc {})]
(t/is (= 7 (:processed result)))))))

View File

@@ -11,6 +11,7 @@
[app.common.uuid :as uuid]
[app.db :as db]
[app.rpc :as-alias rpc]
[app.setup.clock :as clock]
[app.storage :as sto]
[backend-tests.helpers :as th]
[clojure.test :as t]
@@ -98,14 +99,14 @@
::sto/expired-at (ct/in-future {:hours 1})
:content-type "text/plain"})]
(binding [ct/*clock* (ct/fixed-clock (ct/in-future {:minutes 0}))]
(binding [ct/*clock* (clock/fixed (ct/in-future {:minutes 0}))]
(let [res (th/run-task! :storage-gc-deleted {})]
(t/is (= 1 (:deleted res)))))
(let [res (th/db-exec-one! ["select count(*) from storage_object;"])]
(t/is (= 2 (:count res))))
(binding [ct/*clock* (ct/fixed-clock (ct/in-future {:minutes 61}))]
(binding [ct/*clock* (clock/fixed (ct/in-future {:minutes 61}))]
(let [res (th/run-task! :storage-gc-deleted {})]
(t/is (= 1 (:deleted res)))))
@@ -330,22 +331,22 @@
:content-type "text/plain"})]
(binding [ct/*clock* (ct/fixed-clock now)]
(binding [ct/*clock* (clock/fixed now)]
(let [res (th/run-task! :storage-gc-touched {})]
(t/is (= 0 (:freeze res)))
(t/is (= 0 (:delete res)))))
(binding [ct/*clock* (ct/fixed-clock (ct/plus now {:minutes 1}))]
(binding [ct/*clock* (clock/fixed (ct/plus now {:minutes 1}))]
(let [res (th/run-task! :storage-gc-touched {})]
(t/is (= 0 (:freeze res)))
(t/is (= 1 (:delete res)))))
(binding [ct/*clock* (ct/fixed-clock (ct/plus now {:hours 1}))]
(binding [ct/*clock* (clock/fixed (ct/plus now {:hours 1}))]
(let [res (th/run-task! :storage-gc-deleted {})]
(t/is (= 0 (:deleted res)))))
(binding [ct/*clock* (ct/fixed-clock (ct/plus now {:hours 2}))]
(binding [ct/*clock* (clock/fixed (ct/plus now {:hours 2}))]
(let [res (th/run-task! :storage-gc-deleted {})]
(t/is (= 0 (:deleted res)))))))

1145
backend/yarn.lock Normal file
View File

File diff suppressed because it is too large.

7
common/.gitignore vendored Normal file
View File

@@ -0,0 +1,7 @@
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions

View File

@@ -29,7 +29,8 @@
java-http-clj/java-http-clj {:mvn/version "0.4.3"}
integrant/integrant {:mvn/version "1.0.0"}
funcool/cuerdas {:mvn/version "2026.415"}
funcool/tubax {:mvn/version "2021.05.20-0"}
funcool/cuerdas {:mvn/version "2025.06.16-414"}
funcool/promesa
{:git/sha "46048fc0d4bf5466a2a4121f5d52aefa6337f2e8"
:git/url "https://github.com/funcool/promesa"}
@@ -59,7 +60,7 @@
thheller/shadow-cljs {:mvn/version "3.2.0"}
com.clojure-goes-fast/clj-async-profiler {:mvn/version "RELEASE"}
com.bhauman/rebel-readline {:mvn/version "RELEASE"}
criterium/criterium {:mvn/version "0.4.6"}
criterium/criterium {:mvn/version "RELEASE"}
mockery/mockery {:mvn/version "RELEASE"}}
:extra-paths ["test" "dev"]}

View File

@@ -4,7 +4,7 @@
"license": "MPL-2.0",
"author": "Kaleidos INC",
"private": true,
"packageManager": "pnpm@10.26.2+sha512.0e308ff2005fc7410366f154f625f6631ab2b16b1d2e70238444dd6ae9d630a8482d92a451144debc492416896ed16f7b114a86ec68b8404b2443869e68ffda6",
"packageManager": "yarn@4.9.2+sha512.1fc009bc09d13cfd0e19efa44cbfc2b9cf6ca61482725eb35bbc5e257e093ebf4130db6dfe15d604ff4b79efd8e1e8e99b25fa7d0a6197c9f9826358d4d65c3c",
"type": "module",
"repository": {
"type": "git",
@@ -23,9 +23,9 @@
"fmt:clj:check": "cljfmt check --parallel=false src/ test/",
"fmt:clj": "cljfmt fix --parallel=true src/ test/",
"lint:clj": "clj-kondo --parallel=true --lint src/",
"lint": "pnpm run lint:clj",
"lint": "yarn run lint:clj",
"watch:test": "concurrently \"clojure -M:dev:shadow-cljs watch test\" \"nodemon -C -d 2 -w target/tests/ --exec 'node target/tests/test.js'\"",
"build:test": "clojure -M:dev:shadow-cljs compile test",
"test": "pnpm run build:test && node target/tests/test.js"
"test": "yarn run build:test && node target/tests/test.js"
}
}

489
common/pnpm-lock.yaml generated
View File

@@ -1,489 +0,0 @@
lockfileVersion: '9.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
importers:
.:
dependencies:
date-fns:
specifier: ^4.1.0
version: 4.1.0
devDependencies:
concurrently:
specifier: ^9.1.2
version: 9.2.1
nodemon:
specifier: ^3.1.10
version: 3.1.11
source-map-support:
specifier: ^0.5.21
version: 0.5.21
ws:
specifier: ^8.18.2
version: 8.18.3
packages:
ansi-regex@5.0.1:
resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==}
engines: {node: '>=8'}
ansi-styles@4.3.0:
resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==}
engines: {node: '>=8'}
anymatch@3.1.3:
resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==}
engines: {node: '>= 8'}
balanced-match@1.0.2:
resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==}
binary-extensions@2.3.0:
resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==}
engines: {node: '>=8'}
brace-expansion@1.1.12:
resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==}
braces@3.0.3:
resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==}
engines: {node: '>=8'}
buffer-from@1.1.2:
resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==}
chalk@4.1.2:
resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==}
engines: {node: '>=10'}
chokidar@3.6.0:
resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==}
engines: {node: '>= 8.10.0'}
cliui@8.0.1:
resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==}
engines: {node: '>=12'}
color-convert@2.0.1:
resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==}
engines: {node: '>=7.0.0'}
color-name@1.1.4:
resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==}
concat-map@0.0.1:
resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==}
concurrently@9.2.1:
resolution: {integrity: sha512-fsfrO0MxV64Znoy8/l1vVIjjHa29SZyyqPgQBwhiDcaW8wJc2W3XWVOGx4M3oJBnv/zdUZIIp1gDeS98GzP8Ng==}
engines: {node: '>=18'}
hasBin: true
date-fns@4.1.0:
resolution: {integrity: sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg==}
debug@4.4.3:
resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==}
engines: {node: '>=6.0'}
peerDependencies:
supports-color: '*'
peerDependenciesMeta:
supports-color:
optional: true
emoji-regex@8.0.0:
resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==}
escalade@3.2.0:
resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==}
engines: {node: '>=6'}
fill-range@7.1.1:
resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==}
engines: {node: '>=8'}
fsevents@2.3.3:
resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
os: [darwin]
get-caller-file@2.0.5:
resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==}
engines: {node: 6.* || 8.* || >= 10.*}
glob-parent@5.1.2:
resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==}
engines: {node: '>= 6'}
has-flag@3.0.0:
resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==}
engines: {node: '>=4'}
has-flag@4.0.0:
resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==}
engines: {node: '>=8'}
ignore-by-default@1.0.1:
resolution: {integrity: sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==}
is-binary-path@2.1.0:
resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==}
engines: {node: '>=8'}
is-extglob@2.1.1:
resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==}
engines: {node: '>=0.10.0'}
is-fullwidth-code-point@3.0.0:
resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==}
engines: {node: '>=8'}
is-glob@4.0.3:
resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
engines: {node: '>=0.10.0'}
is-number@7.0.0:
resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
engines: {node: '>=0.12.0'}
minimatch@3.1.2:
resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==}
ms@2.1.3:
resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
nodemon@3.1.11:
resolution: {integrity: sha512-is96t8F/1//UHAjNPHpbsNY46ELPpftGUoSVNXwUfMk/qdjSylYrWSu1XavVTBOn526kFiOR733ATgNBCQyH0g==}
engines: {node: '>=10'}
hasBin: true
normalize-path@3.0.0:
resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==}
engines: {node: '>=0.10.0'}
picomatch@2.3.1:
resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==}
engines: {node: '>=8.6'}
pstree.remy@1.1.8:
resolution: {integrity: sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==}
readdirp@3.6.0:
resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==}
engines: {node: '>=8.10.0'}
require-directory@2.1.1:
resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==}
engines: {node: '>=0.10.0'}
rxjs@7.8.2:
resolution: {integrity: sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==}
semver@7.7.3:
resolution: {integrity: sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==}
engines: {node: '>=10'}
hasBin: true
shell-quote@1.8.3:
resolution: {integrity: sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==}
engines: {node: '>= 0.4'}
simple-update-notifier@2.0.0:
resolution: {integrity: sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==}
engines: {node: '>=10'}
source-map-support@0.5.21:
resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==}
source-map@0.6.1:
resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==}
engines: {node: '>=0.10.0'}
string-width@4.2.3:
resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==}
engines: {node: '>=8'}
strip-ansi@6.0.1:
resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==}
engines: {node: '>=8'}
supports-color@5.5.0:
resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==}
engines: {node: '>=4'}
supports-color@7.2.0:
resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==}
engines: {node: '>=8'}
supports-color@8.1.1:
resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==}
engines: {node: '>=10'}
to-regex-range@5.0.1:
resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==}
engines: {node: '>=8.0'}
touch@3.1.1:
resolution: {integrity: sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA==}
hasBin: true
tree-kill@1.2.2:
resolution: {integrity: sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==}
hasBin: true
tslib@2.8.1:
resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==}
undefsafe@2.0.5:
resolution: {integrity: sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==}
wrap-ansi@7.0.0:
resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==}
engines: {node: '>=10'}
ws@8.18.3:
resolution: {integrity: sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==}
engines: {node: '>=10.0.0'}
peerDependencies:
bufferutil: ^4.0.1
utf-8-validate: '>=5.0.2'
peerDependenciesMeta:
bufferutil:
optional: true
utf-8-validate:
optional: true
y18n@5.0.8:
resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==}
engines: {node: '>=10'}
yargs-parser@21.1.1:
resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==}
engines: {node: '>=12'}
yargs@17.7.2:
resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==}
engines: {node: '>=12'}
snapshots:
ansi-regex@5.0.1: {}
ansi-styles@4.3.0:
dependencies:
color-convert: 2.0.1
anymatch@3.1.3:
dependencies:
normalize-path: 3.0.0
picomatch: 2.3.1
balanced-match@1.0.2: {}
binary-extensions@2.3.0: {}
brace-expansion@1.1.12:
dependencies:
balanced-match: 1.0.2
concat-map: 0.0.1
braces@3.0.3:
dependencies:
fill-range: 7.1.1
buffer-from@1.1.2: {}
chalk@4.1.2:
dependencies:
ansi-styles: 4.3.0
supports-color: 7.2.0
chokidar@3.6.0:
dependencies:
anymatch: 3.1.3
braces: 3.0.3
glob-parent: 5.1.2
is-binary-path: 2.1.0
is-glob: 4.0.3
normalize-path: 3.0.0
readdirp: 3.6.0
optionalDependencies:
fsevents: 2.3.3
cliui@8.0.1:
dependencies:
string-width: 4.2.3
strip-ansi: 6.0.1
wrap-ansi: 7.0.0
color-convert@2.0.1:
dependencies:
color-name: 1.1.4
color-name@1.1.4: {}
concat-map@0.0.1: {}
concurrently@9.2.1:
dependencies:
chalk: 4.1.2
rxjs: 7.8.2
shell-quote: 1.8.3
supports-color: 8.1.1
tree-kill: 1.2.2
yargs: 17.7.2
date-fns@4.1.0: {}
debug@4.4.3(supports-color@5.5.0):
dependencies:
ms: 2.1.3
optionalDependencies:
supports-color: 5.5.0
emoji-regex@8.0.0: {}
escalade@3.2.0: {}
fill-range@7.1.1:
dependencies:
to-regex-range: 5.0.1
fsevents@2.3.3:
optional: true
get-caller-file@2.0.5: {}
glob-parent@5.1.2:
dependencies:
is-glob: 4.0.3
has-flag@3.0.0: {}
has-flag@4.0.0: {}
ignore-by-default@1.0.1: {}
is-binary-path@2.1.0:
dependencies:
binary-extensions: 2.3.0
is-extglob@2.1.1: {}
is-fullwidth-code-point@3.0.0: {}
is-glob@4.0.3:
dependencies:
is-extglob: 2.1.1
is-number@7.0.0: {}
minimatch@3.1.2:
dependencies:
brace-expansion: 1.1.12
ms@2.1.3: {}
nodemon@3.1.11:
dependencies:
chokidar: 3.6.0
debug: 4.4.3(supports-color@5.5.0)
ignore-by-default: 1.0.1
minimatch: 3.1.2
pstree.remy: 1.1.8
semver: 7.7.3
simple-update-notifier: 2.0.0
supports-color: 5.5.0
touch: 3.1.1
undefsafe: 2.0.5
normalize-path@3.0.0: {}
picomatch@2.3.1: {}
pstree.remy@1.1.8: {}
readdirp@3.6.0:
dependencies:
picomatch: 2.3.1
require-directory@2.1.1: {}
rxjs@7.8.2:
dependencies:
tslib: 2.8.1
semver@7.7.3: {}
shell-quote@1.8.3: {}
simple-update-notifier@2.0.0:
dependencies:
semver: 7.7.3
source-map-support@0.5.21:
dependencies:
buffer-from: 1.1.2
source-map: 0.6.1
source-map@0.6.1: {}
string-width@4.2.3:
dependencies:
emoji-regex: 8.0.0
is-fullwidth-code-point: 3.0.0
strip-ansi: 6.0.1
strip-ansi@6.0.1:
dependencies:
ansi-regex: 5.0.1
supports-color@5.5.0:
dependencies:
has-flag: 3.0.0
supports-color@7.2.0:
dependencies:
has-flag: 4.0.0
supports-color@8.1.1:
dependencies:
has-flag: 4.0.0
to-regex-range@5.0.1:
dependencies:
is-number: 7.0.0
touch@3.1.1: {}
tree-kill@1.2.2: {}
tslib@2.8.1: {}
undefsafe@2.0.5: {}
wrap-ansi@7.0.0:
dependencies:
ansi-styles: 4.3.0
string-width: 4.2.3
strip-ansi: 6.0.1
ws@8.18.3: {}
y18n@5.0.8: {}
yargs-parser@21.1.1: {}
yargs@17.7.2:
dependencies:
cliui: 8.0.1
escalade: 3.2.0
get-caller-file: 2.0.5
require-directory: 2.1.1
string-width: 4.2.3
y18n: 5.0.8
yargs-parser: 21.1.1

View File

@@ -3,5 +3,5 @@
set -ex
corepack enable;
corepack install;
pnpm install;
pnpm run test;
yarn install;
yarn run test;

View File

@@ -1024,26 +1024,6 @@
:clj
(sort comp-fn items))))
(defn obfuscate-string
"Obfuscates potentially sensitive values.
- One-arg arity:
* For strings shorter than 10 characters, all characters are replaced by `*`.
* For longer strings, the first 5 characters are preserved and the rest obfuscated.
- Two-arg arity accepts a boolean `full?` that, when true, replaces the whole value
by `*`, preserving only the length."
([v]
(obfuscate-string v false))
([v full?]
(let [s (str v)
n (count s)]
(cond
(zero? n) s
full? (apply str (repeat n "*"))
(< n 10) (apply str (repeat n "*"))
:else (str (subs s 0 5)
(apply str (repeat (- n 5) "*")))))))
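For context, the removed helper behaved as described in its docstring; a few illustrative calls:
(obfuscate-string "secret")               ;; => "******"            (values under 10 chars are fully masked)
(obfuscate-string "verysecretvalue")      ;; => "verys**********"   (first 5 chars preserved)
(obfuscate-string "verysecretvalue" true) ;; => "***************"   (full mask, length preserved)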
(defn reorder
"Reorder a vector by moving one of their items from some position to some space between positions.
It clamps the position numbers to a valid range."

View File

@@ -10,7 +10,6 @@
(:refer-clojure :exclude [instance?])
(:require
#?(:clj [clojure.stacktrace :as strace])
[app.common.data :refer [obfuscate-string]]
[app.common.pprint :as pp]
[app.common.schema :as sm]
[clojure.core :as c]
@@ -20,10 +19,6 @@
(:import
clojure.lang.IPersistentMap)))
(def ^:private sensitive-fields
"Keys whose values must be obfuscated in validation explains."
#{:password :old-password :token :invitation-token})
#?(:clj (set! *warn-on-reflection* true))
(def ^:dynamic *data-length* 8)
@@ -115,25 +110,7 @@
(explain (:explain data) opts)
(contains? data ::sm/explain)
(let [exp (::sm/explain data)
sanitize-map (fn sanitize-map [m]
(reduce-kv
(fn [acc k v]
(let [k* (if (string? k) (keyword k) k)]
(cond
(contains? sensitive-fields k*)
(assoc acc k (if (map? v)
(sanitize-map v)
(obfuscate-string v true)))
(map? v) (assoc acc k (sanitize-map v))
:else (assoc acc k v))))
{}
m))
sanitize-explain (fn [exp]
(cond-> exp
(:value exp) (update :value sanitize-map)))]
(sm/humanize-explain (sanitize-explain exp) opts))))
(sm/humanize-explain (::sm/explain data) opts)))
#?(:clj
(defn format-throwable

View File

@@ -56,6 +56,7 @@
"text-editor/v2-html-paste"
"text-editor/v2"
"render-wasm/v1"
"graph-wasm/v1"
"variants/v1"})
;; A set of features enabled by default
@@ -79,7 +80,8 @@
"text-editor/v2-html-paste"
"text-editor/v2"
"tokens/numeric-input"
"render-wasm/v1"})
"render-wasm/v1"
"graph-wasm/v1"})
;; Features that are mainly backend-only or have a proper
;; fallback when the frontend reports no support for them
@@ -128,6 +130,7 @@
:feature-text-editor-v2 "text-editor/v2"
:feature-text-editor-v2-html-paste "text-editor/v2-html-paste"
:feature-render-wasm "render-wasm/v1"
:feature-graph-wasm "graph-wasm/v1"
:feature-variants "variants/v1"
:feature-token-input "tokens/numeric-input"
nil))

View File

@@ -526,25 +526,20 @@
ids))
(defn clean-loops
"Clean a list of ids from circular references. Optimized fast-path for single selections."
"Clean a list of ids from circular references."
[objects ids]
(if (<= (count ids) 1)
;; For single selection, there can't be circularity; return as ordered-set.
(into (d/ordered-set) ids)
(let [ids-set (if (set? ids) ids (set ids))
parent-selected?
(fn [id]
;; Stop early as soon as we find any selected parent
(let [parents (get-parent-ids objects id)]
(some #(contains? ids-set %) parents)))
(let [parent-selected?
(fn [id]
(let [parents (get-parent-ids objects id)]
(some ids parents)))
add-element
(fn [result id]
(cond-> result
(not (parent-selected? id))
(conj id)))]
add-element
(fn [result id]
(cond-> result
(not (parent-selected? id))
(conj id)))]
(reduce add-element (d/ordered-set) ids))))
(reduce add-element (d/ordered-set) ids)))
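As a quick illustration with hypothetical ids, assuming objects records that rect-1 is a child of frame-1: selecting both keeps only the parent, while a single selection is returned untouched.
;; illustrative sketch; frame-1 and rect-1 are placeholder ids
(clean-loops objects #{frame-1 rect-1})  ;; => #{frame-1}
(clean-loops objects #{rect-1})          ;; => #{rect-1}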
(defn- indexed-shapes
"Retrieves a vector with the indexes for each element in the layer

View File

@@ -62,7 +62,6 @@
#{:audit-log
:audit-log-archive
:audit-log-gc
:audit-log-logger
:auto-file-snapshot
;; enables the `/api/doc` endpoint that lists all the rpc methods available.
:backend-api-doc
@@ -135,8 +134,6 @@
:subscriptions
:subscriptions-old
:inspect-styles
;; Enable performance logs in devconsole (disabled by default)
:perf-logs
;; Security layer middleware that filters request by fetch
;; metadata headers
@@ -172,7 +169,6 @@
:enable-component-thumbnails
:enable-render-wasm-dpr
:enable-token-color
:enable-token-shadow
:enable-inspect-styles
:enable-feature-fdata-objects-map])

View File

@@ -75,25 +75,20 @@
#?(:cljs
(defn ->clj
[o & {:keys [key-fn val-fn recursive] :or {key-fn read-kebab-key val-fn identity recursive true}}]
[o & {:keys [key-fn val-fn] :or {key-fn read-kebab-key val-fn identity}}]
(let [f (fn this-fn [x]
(let [x (val-fn x)]
(cond
(array? x)
(persistent!
(.reduce ^js/Array x
#(conj! %1 (if recursive
(this-fn %2)
%2))
#(conj! %1 (this-fn %2))
(transient [])))
(identical? (type x) js/Object)
(persistent!
(.reduce ^js/Array (js-keys x)
#(assoc! %1 (key-fn %2)
(if recursive
(this-fn (unchecked-get x %2))
(unchecked-get x %2)))
#(assoc! %1 (key-fn %2) (this-fn (unchecked-get x %2)))
(transient {})))
:else

View File

@@ -2497,7 +2497,7 @@
(-> changes
(cls/generate-delete-shapes
file page objects (d/ordered-set (:id shape))
{:allow-altering-copies true :ignore-children-fn ignore-swapped-fn :ignore-mask true :ignore-flows-for #{(:id shape)}}))
{:allow-altering-copies true :ignore-children-fn ignore-swapped-fn :ignore-mask true}))
[new-shape changes]
(-> changes
(generate-new-shape-for-swap shape file page libraries id-new-component index target-cell keep-props-values))]

View File

@@ -124,11 +124,9 @@
;; on the deletion process. It should receive a shape and
;; return a boolean
ignore-children-fn
ignore-mask
ignore-flows-for]
ignore-mask]
:or {ignore-children-fn (constantly false)
ignore-mask false
ignore-flows-for #{}}}]
ignore-mask false}}]
(let [objects (pcb/get-objects changes)
data (pcb/get-library-data changes)
page-id (pcb/get-page-id changes)
@@ -196,8 +194,7 @@
(->> (:flows page)
(reduce
(fn [changes [id flow]]
(if (and (id-to-delete? (:starting-frame flow))
(not (contains? ignore-flows-for (:starting-frame flow))))
(if (id-to-delete? (:starting-frame flow))
(-> changes
(pcb/with-page page)
(pcb/set-flow id nil))

View File

@@ -64,33 +64,8 @@
java.time.temporal.TemporalAmount
java.time.temporal.TemporalUnit)))
(declare inst)
#?(:clj (def ^:dynamic *clock* (Clock/systemDefaultZone)))
#?(:clj
(defn clock?
[o]
(instance? Clock o)))
#?(:clj
(defn get-system-clock
[]
(Clock/systemDefaultZone)))
#?(:clj
(defn offset-clock
[offset]
(Clock/offset ^Clock (Clock/systemDefaultZone) ^Duration offset)))
#?(:clj
(defn fixed-clock
[instant]
(Clock/fixed ^Instant (inst instant)
^ZoneId (ZoneId/of "Z"))))
(defn now
[]
#?(:clj (Instant/now *clock*)
@@ -365,7 +340,7 @@
(dfn-diff t2 t1)))
#?(:cljs
(defn set-default-locale
(defn set-default-locale!
[locale]
(when-let [locale (unchecked-get locales locale)]
(dfn-set-default-options #js {:locale locale}))))

View File

@@ -140,8 +140,7 @@
:layout-item-min-w
:layout-item-absolute
:layout-item-z-index
:layout-item-align-self
:interactions})
:layout-item-align-self})
(defn component-attr?
"Check if some attribute is one that is involved in component syncrhonization.

View File

@@ -269,8 +269,8 @@
"Remove flex children properties except the fit-content for flex layouts. These are properties
that we don't have to propagate to copies but will be respected when swapping components"
[shape]
(let [layout-item-h-sizing (when (and (ctl/any-layout? shape) (ctl/auto-width? shape)) :auto)
layout-item-v-sizing (when (and (ctl/any-layout? shape) (ctl/auto-height? shape)) :auto)]
(let [layout-item-h-sizing (when (and (ctl/flex-layout? shape) (ctl/auto-width? shape)) :auto)
layout-item-v-sizing (when (and (ctl/flex-layout? shape) (ctl/auto-height? shape)) :auto)]
(-> shape
(d/without-keys ctk/swap-keep-attrs)
(cond-> (some? layout-item-h-sizing)

View File

@@ -362,24 +362,24 @@
component (ctkl/get-component component-file (:component-id top-instance) true)
remote-shape (get-ref-shape component-file component shape)
component-container (get-component-container component-file component)
[remote-shape component-container component-file]
[remote-shape component-container]
(if (some? remote-shape)
[remote-shape component-container component-file]
[remote-shape component-container]
;; If not found, try the case of this being a fostered or swapped children
(let [head-instance (ctn/get-head-shape (:objects container) shape)
component-file (get-in libraries [(:component-file head-instance) :data])
head-component (ctkl/get-component component-file (:component-id head-instance) true)
remote-shape' (get-ref-shape component-file head-component shape)
component-container' (get-component-container component-file head-component)]
[remote-shape' component-container' component-file]))]
(let [head-instance (ctn/get-head-shape (:objects container) shape)
component-file (get-in libraries [(:component-file head-instance) :data])
head-component (ctkl/get-component component-file (:component-id head-instance) true)
remote-shape' (get-ref-shape component-file head-component shape)
component-container (get-component-container component-file component)]
[remote-shape' component-container]))]
(if (nil? remote-shape)
nil
(if (nil? (:shape-ref remote-shape))
(cond-> remote-shape
(and remote-shape with-context?)
(with-meta {:file {:id (:id component-file)
:data component-file}
(with-meta {:file {:id (:id file-data)
:data file-data}
:container component-container}))
(find-remote-shape component-container libraries remote-shape :with-context? with-context?)))))

View File

@@ -112,10 +112,8 @@
(:c2y params) (update-in [index :params :c2y] + (:c2y params)))
content))]
(if (some? modifiers)
(impl/path-data
(reduce apply-to-index (vec content) modifiers))
content)))
(impl/path-data
(reduce apply-to-index (vec content) modifiers))))
(defn transform-content
"Applies a transformation matrix over content and returns a new

View File

@@ -1,29 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.common.types.project
(:require
[app.common.schema :as sm]
[app.common.time :as cm]))
(def schema:project
[:map {:title "Profile"}
[:id ::sm/uuid]
[:created-at {:optional true} ::cm/inst]
[:modified-at {:optional true} ::cm/inst]
[:name :string]
[:is-default {:optional true} ::sm/boolean]
[:is-pinned {:optional true} ::sm/boolean]
[:count {:optional true} ::sm/int]
[:total-count {:optional true} ::sm/int]
[:team-id ::sm/uuid]])
(def valid-project?
(sm/lazy-validator schema:project))
(def check-project
(sm/check-fn schema:project))

View File

@@ -47,18 +47,6 @@
self-reference? (get token-references token-name)]
self-reference?))
(defn references-token?
"Recursively check if a value references the token name. Handles strings, maps, and sequences."
[value token-name]
(cond
(string? value)
(boolean (some #(= % token-name) (find-token-value-references value)))
(map? value)
(some true? (map #(references-token? % token-name) (vals value)))
(sequential? value)
(some true? (map #(references-token? % token-name) value))
:else false))
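A hypothetical usage sketch, assuming token references inside values are written as {token.name} (the convention the replacement helper later in this file relies on):
(references-token? "{color.primary}" "color.primary")        ;; => true
(references-token? {:offset-x "{shadow.blur}"} "shadow.blur") ;; => true
(references-token? "12px" "color.primary")                    ;; => false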
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; SCHEMA
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -71,7 +59,6 @@
:dimensions "dimension"
:font-family "fontFamilies"
:font-size "fontSizes"
:font-weight "fontWeights"
:letter-spacing "letterSpacing"
:number "number"
:opacity "opacity"
@@ -83,6 +70,7 @@
:stroke-width "borderWidth"
:text-case "textCase"
:text-decoration "textDecoration"
:font-weight "fontWeights"
:typography "typography"})
(def dtcg-token-type->token-type
@@ -474,8 +462,8 @@
:height #{:sizing :dimensions}
:max-width #{:sizing :dimensions}
:max-height #{:sizing :dimensions}
:x #{:dimensions}
:y #{:dimensions}
:x #{:spacing :dimensions}
:y #{:spacing :dimensions}
:rotation #{:number :rotation}
:border-radius #{:border-radius :dimensions}
:row-gap #{:spacing :dimensions}
@@ -487,8 +475,6 @@
:vertical-margin #{:spacing :dimensions}
:sided-margins #{:spacing :dimensions}
:line-height #{:line-height :number}
:opacity #{:opacity}
:stroke-width #{:stroke-width :dimensions}
:font-size #{:font-size}
:letter-spacing #{:letter-spacing}
:fill #{:color}
@@ -572,18 +558,3 @@
"Predicate if a shadow composite token is a reference value - a string pointing to another reference token."
[token-value]
(string? token-value))
(defn update-token-value-references
"Recursively update token references within a token value, supporting complex token values (maps, sequences, strings)."
[value old-name new-name]
(cond
(string? value)
(str/replace value
(re-pattern (str "\\{" (str/replace old-name "." "\\.") "\\}"))
(str "{" new-name "}"))
(map? value)
(d/update-vals value #(update-token-value-references % old-name new-name))
(sequential? value)
(mapv #(update-token-value-references % old-name new-name) value)
:else
value))
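Because the two helpers removed above appear here only through their docstrings, a minimal, self-contained sketch of the behavior those docstrings describe may help. `find-refs` is a simplified stand-in for `find-token-value-references`, the namespace and token names are hypothetical, and the string rewriting uses a plain literal replacement rather than the original regex escaping.

```clojure
(ns sketch.tokens
  (:require [clojure.string :as str]))

;; Simplified stand-in: pull "{...}" references out of a string value.
(defn- find-refs [s]
  (map second (re-seq #"\{([^}]+)\}" s)))

;; Recursively check whether a value references `token-name`,
;; handling strings, maps and sequences.
(defn references-token? [value token-name]
  (cond
    (string? value)     (boolean (some #(= % token-name) (find-refs value)))
    (map? value)        (boolean (some #(references-token? % token-name) (vals value)))
    (sequential? value) (boolean (some #(references-token? % token-name) value))
    :else false))

;; Recursively rewrite "{old-name}" references into "{new-name}".
(defn update-token-value-references [value old-name new-name]
  (cond
    (string? value)     (str/replace value (str "{" old-name "}") (str "{" new-name "}"))
    (map? value)        (into {} (map (fn [[k v]] [k (update-token-value-references v old-name new-name)])) value)
    (sequential? value) (mapv #(update-token-value-references % old-name new-name) value)
    :else value))

(references-token? {:fill "{color.primary}"} "color.primary")
;; => true
(update-token-value-references ["{color.primary}" "4px"] "color.primary" "color.brand")
;; => ["{color.brand}" "4px"]
```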


@@ -909,8 +909,7 @@ Will return a value that matches this schema:
`:all` All of the nested sets are active
`:partial` Mixed active state of nested sets")
(get-tokens-in-active-sets [_] "set of set names that are active in the active themes")
(get-all-tokens [_] "all tokens in the lib, as a sequence")
(get-all-tokens-map [_] "all tokens in the lib, as a map name -> token")
(get-all-tokens [_] "all tokens in the lib")
(get-tokens [_ set-id] "return a map of tokens in the set, indexed by token-name"))
(declare parse-multi-set-dtcg-json)
@@ -1307,10 +1306,6 @@ Will return a value that matches this schema:
tokens))
(get-all-tokens [this]
(mapcat #(vals (get-tokens- %))
(get-sets this)))
(get-all-tokens-map [this]
(reduce
(fn [tokens' set]
(into tokens' (map (fn [x] [(:name x) x]) (vals (get-tokens- set)))))
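The `get-all-tokens-map` body above is cut off by the hunk boundary; the name-to-token indexing it performs follows a common reduce/into pattern, sketched here with hypothetical data and an illustrative `index-tokens-by-name` name.

```clojure
;; `sets` stands in for the lib's token sets: a sequence of id -> token maps.
(defn index-tokens-by-name [sets]
  (reduce
   (fn [acc tokens-by-id]
     (into acc (map (fn [token] [(:name token) token])) (vals tokens-by-id)))
   {}
   sets))

(index-tokens-by-name
 [{"a" {:name "color.primary" :value "#111111"}}
  {"b" {:name "spacing.sm" :value "4"}}])
;; => {"color.primary" {:name "color.primary" :value "#111111"}
;;     "spacing.sm"    {:name "spacing.sm" :value "4"}}
```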
@@ -1415,8 +1410,8 @@ Will return a value that matches this schema:
;; NOTE: we can't assign statically at eval time the value of a
;; function that is declared but not defined; so we need to pass
;; an anonymous function and delegate the resolution to runtime
{:encode/json #(some-> % export-dtcg-json)
:decode/json #(some-> % read-multi-set-dtcg)
{:encode/json #(export-dtcg-json %)
:decode/json #(read-multi-set-dtcg %)
;; FIXME: add better, more realistic generator
:gen/gen (->> (sg/small-int)
(sg/fmap (fn [_]
@@ -1550,7 +1545,7 @@ Will return a value that matches this schema:
(and (not (contains? decoded-json "$metadata"))
(not (contains? decoded-json "$themes"))))
(defn convert-dtcg-font-family
(defn- convert-dtcg-font-family
"Convert font-family token value from DTCG format to internal format.
- If value is a string, split it into a collection of font families
- If value is already an array, keep it as is
@@ -1561,7 +1556,7 @@ Will return a value that matches this schema:
(sequential? value) value
:else value))
(defn convert-dtcg-typography-composite
(defn- convert-dtcg-typography-composite
"Convert typography token value keys from DTCG format to internal format."
[value]
(if (map? value)
@@ -1573,7 +1568,7 @@ Will return a value that matches this schema:
;; Reference value
value))
(defn convert-dtcg-shadow-composite
(defn- convert-dtcg-shadow-composite
"Convert shadow token value from DTCG format to internal format."
[value]
(let [process-shadow (fn [shadow]
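The `convert-dtcg-font-family` docstring earlier in this file's diff describes splitting a string value into a collection of font families while leaving arrays untouched; a minimal sketch of that conversion follows. The `convert-font-family` name and the comma separator are assumptions for illustration.

```clojure
(require '[clojure.string :as str])

(defn convert-font-family [value]
  (cond
    (string? value)     (mapv str/trim (str/split value #","))  ;; "Inter, Arial" -> ["Inter" "Arial"]
    (sequential? value) value                                   ;; already a collection: keep as is
    :else               value))

(convert-font-family "Inter, Roboto, sans-serif") ;; => ["Inter" "Roboto" "sans-serif"]
(convert-font-family ["Inter" "Roboto"])          ;; => ["Inter" "Roboto"]
```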

common/yarn.lock (new file, 1291 changed lines)
File diff suppressed because it is too large.

@@ -41,10 +41,7 @@ services:
- 6062:6062
- 6063:6063
- 6064:6064
- 9000:9000
- 9001:9001
- 9090:9090
- 9091:9091
environment:
- EXTERNAL_UID=${CURRENT_USER_ID}


@@ -10,7 +10,3 @@ localhost:3449 {
http://localhost:3450 {
reverse_proxy localhost:4449
}
http://penpot-devenv-main:3450 {
reverse_proxy localhost:4449
}


@@ -145,8 +145,8 @@ http {
proxy_pass http://127.0.0.1:3000/;
}
location /wasm-playground {
alias /home/penpot/penpot/frontend/resources/public/wasm-playground/;
location /playground {
alias /home/penpot/penpot/experiments/;
add_header Cache-Control "no-cache, max-age=0";
autoindex on;
}
@@ -223,7 +223,7 @@ http {
add_header X-Cache-Status $upstream_cache_status;
}
location ~* \.(jpg|png|svg|ttf|woff|woff2|gif)$ {
location ~* \.(jpg|png|svg|ttf|woff|woff2)$ {
add_header Cache-Control "public, max-age=604800" always; # 7 days
}


@@ -0,0 +1,33 @@
#!/usr/bin/env bash
sudo chown penpot:users /home/penpot
cd ~;
source ~/.bashrc
set -e;
echo "[start-tmux.sh] Installing node dependencies"
pushd ~/penpot/exporter/
yarn install
popd
tmux -2 new-session -d -s penpot
tmux rename-window -t penpot:0 'exporter'
tmux select-window -t penpot:0
tmux send-keys -t penpot 'cd penpot/exporter' enter C-l
tmux send-keys -t penpot 'rm -f target/app.js*' enter C-l
tmux send-keys -t penpot 'clojure -M:dev:shadow-cljs watch main' enter
tmux split-window -v
tmux send-keys -t penpot 'cd penpot/exporter' enter C-l
tmux send-keys -t penpot './scripts/wait-and-start.sh' enter
tmux new-window -t penpot:1 -n 'backend'
tmux select-window -t penpot:1
tmux send-keys -t penpot 'cd penpot/backend' enter C-l
tmux send-keys -t penpot './scripts/start-dev' enter
tmux -2 attach-session -t penpot


@@ -8,10 +8,14 @@ source ~/.bashrc
echo "[start-tmux.sh] Installing node dependencies"
pushd ~/penpot/frontend/
./scripts/setup;
corepack install;
yarn install;
yarn playwright install chromium
popd
pushd ~/penpot/exporter/
./scripts/setup;
corepack install;
yarn install
yarn playwright install chromium
popd
tmux -2 new-session -d -s penpot
@@ -19,25 +23,30 @@ tmux -2 new-session -d -s penpot
tmux rename-window -t penpot:0 'frontend watch'
tmux select-window -t penpot:0
tmux send-keys -t penpot 'cd penpot/frontend' enter C-l
tmux send-keys -t penpot './scripts/watch app' enter
tmux send-keys -t penpot 'yarn run watch' enter
tmux new-window -t penpot:1 -n 'frontend storybook'
tmux new-window -t penpot:1 -n 'frontend shadow'
tmux select-window -t penpot:1
tmux send-keys -t penpot 'cd penpot/frontend' enter C-l
tmux send-keys -t penpot './scripts/watch storybook' enter
tmux send-keys -t penpot 'yarn run watch:app' enter
tmux new-window -t penpot:2 -n 'exporter'
tmux new-window -t penpot:2 -n 'frontend storybook'
tmux select-window -t penpot:2
tmux send-keys -t penpot 'cd penpot/frontend' enter C-l
tmux send-keys -t penpot 'yarn run watch:storybook' enter
tmux new-window -t penpot:3 -n 'exporter'
tmux select-window -t penpot:3
tmux send-keys -t penpot 'cd penpot/exporter' enter C-l
tmux send-keys -t penpot 'rm -f target/app.js*' enter C-l
tmux send-keys -t penpot './scripts/watch' enter
tmux send-keys -t penpot 'yarn run watch' enter
tmux split-window -v
tmux send-keys -t penpot 'cd penpot/exporter' enter C-l
tmux send-keys -t penpot './scripts/wait-and-start.sh' enter
tmux new-window -t penpot:3 -n 'backend'
tmux select-window -t penpot:3
tmux new-window -t penpot:4 -n 'backend'
tmux select-window -t penpot:4
tmux send-keys -t penpot 'cd penpot/backend' enter C-l
tmux send-keys -t penpot './scripts/start-dev' enter


@@ -112,6 +112,10 @@ COPY --from=penpotapp/imagemagick:7.1.2-0 /opt/imagick /opt/imagick
WORKDIR /opt/penpot/exporter
USER penpot:penpot
RUN ./setup
RUN set -ex; \
corepack install; \
yarn install; \
yarn run playwright install chromium; \
rm -rf /opt/penpot/.yarn
CMD ["node", "app.js"]


@@ -130,6 +130,12 @@ services:
environment:
<< : [*penpot-flags, *penpot-public-uri, *penpot-http-body-size, *penpot-secret-key]
## The PREPL host. Mainly used for external programmatic access to penpot backend
## (example: admin). By default it will listen on `localhost` but if you are going to use
## the `admin`, you will need to uncomment this and set the host to `0.0.0.0`.
# PENPOT_PREPL_HOST: 0.0.0.0
## Database connection parameters. Don't touch them unless you are using custom
## postgresql connection parameters.
@@ -145,16 +151,16 @@ services:
## Default configuration for assets storage: using filesystem based with all files
## stored in a docker volume.
PENPOT_OBJECTS_STORAGE_BACKEND: fs
PENPOT_OBJECTS_STORAGE_FS_DIRECTORY: /opt/data/assets
PENPOT_ASSETS_STORAGE_BACKEND: assets-fs
PENPOT_STORAGE_ASSETS_FS_DIRECTORY: /opt/data/assets
## Can also be configured to use an S3 compatible storage.
# AWS_ACCESS_KEY_ID: <KEY_ID>
# AWS_SECRET_ACCESS_KEY: <ACCESS_KEY>
# PENPOT_OBJECTS_STORAGE_BACKEND: s3
# PENPOT_OBJECTS_STORAGE_S3_ENDPOINT: <ENDPOINT>
# PENPOT_OBJECTS_STORAGE_S3_BUCKET: <BUCKET_NAME>
# PENPOT_ASSETS_STORAGE_BACKEND: assets-s3
# PENPOT_STORAGE_ASSETS_S3_ENDPOINT: <ENDPOINT>
# PENPOT_STORAGE_ASSETS_S3_BUCKET: <BUCKET_NAME>
## Telemetry. When enabled, a periodic process will send anonymous data about this
## instance. Telemetry data will enable us to learn how the application is used,


@@ -144,7 +144,7 @@ http {
location / {
include /etc/nginx/overrides/location.d/*.conf;
location ~* \.(js|css|jpg|png|svg|gif|ttf|woff|woff2|wasm|map)$ {
location ~* \.(js|css|jpg|png|svg|ttf|woff|woff2|wasm)$ {
add_header Cache-Control "public, max-age=604800" always; # 7 days
}
@@ -152,10 +152,8 @@ http {
return 301 " /404";
}
add_header X-Frame-Options SAMEORIGIN always;
add_header Cache-Control "no-store, no-cache, max-age=0" always;
try_files $uri /index.html$is_args$args /index.html =404;
}
}
}


@@ -10,15 +10,16 @@ To view this site locally, first set up the environment:
# only if necessary
nvm install
nvm use
# only if necessary
corepack enable
pnpm install
yarn install
```
And launch a development server:
```sh
pnpm start
yarn start
```
You can then point a browser to [http://localhost:8080](http://localhost:8080).

8 binary image files removed (14 KiB, 14 KiB, 161 KiB, 103 KiB, 67 KiB, 6.0 KiB, 5.8 KiB, 5.2 KiB); contents not shown.

@@ -39,5 +39,5 @@
"markdown-it-anchor": "^9.0.1",
"markdown-it-plantuml": "^1.4.1"
},
"packageManager": "pnpm@10.28.0+sha512.05df71d1421f21399e053fde567cea34d446fa02c76571441bfc1c7956e98e363088982d940465fd34480d4d90a0668bc12362f8aa88000a64e83d0b0e47be48"
"packageManager": "yarn@4.3.1"
}

docs/pnpm-lock.yaml (generated, 2065 changed lines)
File diff suppressed because it is too large.

@@ -1,10 +1,8 @@
#!/usr/bin/env bash
source ~/.bashrc
set -ex
corepack enable;
corepack install;
rm -rf ./_dist
pnpm install
pnpm run build
yarn
yarn run build


@@ -114,7 +114,14 @@ configuration.
The callback has the following format:
```html
https://<your_domain>/api/auth/oidc/callback
https://<your_domain>/api/auth/oauth/<oauth_provider>/callback
```
You will need to change <your_domain> and <oauth_provider> according to your setup.
This is how it looks with the GitLab provider:
```html
https://<your_domain>/api/auth/oauth/gitlab/callback
```
#### Google

Some files were not shown because too many files have changed in this diff.