Compare commits

..

5 Commits

Author SHA1 Message Date
Eva Marco  155683dbe0  WIP  2025-11-06 09:43:03 +01:00
Eva Marco  3e4b0124c5  ♻️ Remove unneeded code  2025-11-06 09:43:03 +01:00
Eva Marco  f22633d18b  🚧 Add dropdown to component  2025-11-06 09:43:01 +01:00
Eva Marco  942149ae87  🚧 Wip  2025-11-06 09:40:38 +01:00
Eva Marco  ed379013ef  🎉 Basic structure  2025-11-06 09:40:38 +01:00
694 changed files with 50138 additions and 73677 deletions

.circleci/config.yml (new file)

@@ -0,0 +1,351 @@
version: 2.1
jobs:
lint:
docker:
- image: penpotapp/devenv:latest
working_directory: ~/repo
resource_class: medium+
steps:
- checkout
- run:
name: "fmt check"
working_directory: "."
command: |
yarn install
yarn run fmt:clj:check
- run:
name: "lint clj common"
working_directory: "."
command: |
yarn run lint:clj:common
- run:
name: "lint clj frontend"
working_directory: "."
command: |
yarn run lint:clj:frontend
- run:
name: "lint clj backend"
working_directory: "."
command: |
yarn run lint:clj:backend
- run:
name: "lint clj exporter"
working_directory: "."
command: |
yarn run lint:clj:exporter
- run:
name: "lint clj library"
working_directory: "."
command: |
yarn run lint:clj:library
test-common:
docker:
- image: penpotapp/devenv:latest
working_directory: ~/repo
resource_class: medium+
environment:
JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
NODE_OPTIONS: --max-old-space-size=4096
steps:
- checkout
# Download and cache dependencies
- restore_cache:
keys:
- v1-dependencies-{{ checksum "common/deps.edn"}}-{{ checksum "common/yarn.lock" }}
- run:
name: "JVM tests"
working_directory: "./common"
command: |
clojure -M:dev:test
- run:
name: "NODE tests"
working_directory: "./common"
command: |
yarn install
yarn run test
- save_cache:
paths:
- ~/.m2
- ~/.yarn
- ~/.gitlibs
- ~/.cache/ms-playwright
key: v1-dependencies-{{ checksum "common/deps.edn"}}-{{ checksum "common/yarn.lock" }}
test-frontend:
docker:
- image: penpotapp/devenv:latest
working_directory: ~/repo
resource_class: medium+
environment:
JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
NODE_OPTIONS: --max-old-space-size=4096
steps:
- checkout
# Download and cache dependencies
- restore_cache:
keys:
- v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}
- run:
name: "install dependencies"
working_directory: "./frontend"
# We install playwright here because the dependent tasks
# use the same cache as this task, so we prepopulate it
command: |
yarn install
yarn run playwright install chromium
- run:
name: "lint scss on frontend"
working_directory: "./frontend"
command: |
yarn run lint:scss
- run:
name: "unit tests"
working_directory: "./frontend"
command: |
yarn run test
- save_cache:
paths:
- ~/.m2
- ~/.yarn
- ~/.gitlibs
- ~/.cache/ms-playwright
key: v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}
test-library:
docker:
- image: penpotapp/devenv:latest
working_directory: ~/repo
resource_class: medium+
environment:
JAVA_OPTS: -Xmx6g
NODE_OPTIONS: --max-old-space-size=4096
steps:
- checkout
# Download and cache dependencies
- restore_cache:
keys:
- v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}
- run:
name: Install dependencies and build
working_directory: "./library"
command: |
yarn install
- run:
name: Build and Test
working_directory: "./library"
command: |
./scripts/build
yarn run test
test-components:
docker:
- image: penpotapp/devenv:latest
working_directory: ~/repo
resource_class: medium+
environment:
JAVA_OPTS: -Xmx6g -Xms2g
NODE_OPTIONS: --max-old-space-size=4096
steps:
- checkout
# Download and cache dependencies
- restore_cache:
keys:
- v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}
- run:
name: Install dependencies
working_directory: "./frontend"
command: |
yarn install
yarn run playwright install chromium
- run:
name: Build Storybook
working_directory: "./frontend"
command: yarn run build:storybook
- run:
name: Serve Storybook and run tests
working_directory: "./frontend"
command: |
npx concurrently -k -s first -n "SB,TEST" -c "magenta,blue" \
"npx http-server storybook-static --port 6006 --silent" \
"npx wait-on tcp:6006 && yarn test:storybook"
test-integration:
docker:
- image: penpotapp/devenv:latest
working_directory: ~/repo
resource_class: large
environment:
JAVA_OPTS: -Xmx6g -Xms2g
NODE_OPTIONS: --max-old-space-size=4096
steps:
- checkout
# Download and cache dependencies
- restore_cache:
keys:
- v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}
# Build frontend
- run:
name: "frontend build"
working_directory: "./frontend"
command: |
yarn install
yarn run build:app:assets
yarn run build:app
yarn run build:app:libs
# Build the wasm bundle
- run:
name: "wasm build"
working_directory: "./render-wasm"
command: |
EMSDK_QUIET=1 . /opt/emsdk/emsdk_env.sh
./build release
# Run integration tests
- run:
name: "integration tests"
working_directory: "./frontend"
command: |
yarn run playwright install chromium
yarn run test:e2e -x --workers=4
test-backend:
docker:
- image: penpotapp/devenv:latest
- image: cimg/postgres:14.5
environment:
POSTGRES_USER: penpot_test
POSTGRES_PASSWORD: penpot_test
POSTGRES_DB: penpot_test
- image: cimg/redis:7.0.5
working_directory: ~/repo
resource_class: medium+
environment:
JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
NODE_OPTIONS: --max-old-space-size=4096
steps:
- checkout
- restore_cache:
keys:
- v1-dependencies-{{ checksum "backend/deps.edn" }}
- run:
name: "tests"
working_directory: "./backend"
command: |
clojure -M:dev:test --reporter kaocha.report/documentation
environment:
PENPOT_TEST_DATABASE_URI: "postgresql://localhost/penpot_test"
PENPOT_TEST_DATABASE_USERNAME: penpot_test
PENPOT_TEST_DATABASE_PASSWORD: penpot_test
PENPOT_TEST_REDIS_URI: "redis://localhost/1"
- save_cache:
paths:
- ~/.m2
- ~/.gitlibs
key: v1-dependencies-{{ checksum "backend/deps.edn" }}
test-render-wasm:
docker:
- image: penpotapp/devenv:latest
working_directory: ~/repo
resource_class: medium+
environment:
steps:
- checkout
- run:
name: "fmt check"
working_directory: "./render-wasm"
command: |
cargo fmt --check
- run:
name: "lint"
working_directory: "./render-wasm"
command: |
./lint
- run:
name: "cargo tests"
working_directory: "./render-wasm"
command: |
./test
workflows:
penpot:
jobs:
- test-frontend:
requires:
- lint: success
- test-library:
requires:
- lint: success
- test-components:
requires:
- lint: success
- test-backend:
requires:
- lint: success
- test-common:
requires:
- lint: success
- lint
- test-integration
- test-render-wasm

View File

@@ -1,21 +0,0 @@
name: _NITRATE MODULE
on:
schedule:
- cron: '36 5-20 * * 1-5'
jobs:
build-bundle:
uses: ./.github/workflows/build-bundle.yml
secrets: inherit
with:
gh_ref: "nitrate-module"
build_wasm: "yes"
build_storybook: "yes"
build-docker:
needs: build-bundle
uses: ./.github/workflows/build-docker.yml
secrets: inherit
with:
gh_ref: "nitrate-module"

View File

@@ -11,7 +11,7 @@ jobs:
secrets: inherit
with:
gh_ref: ${{ github.ref_name }}
build_wasm: "yes"
build_wasm: "no"
build_storybook: "yes"
build-docker:
@@ -21,22 +21,6 @@ jobs:
with:
gh_ref: ${{ github.ref_name }}
notify:
name: Notifications
runs-on: ubuntu-24.04
needs: build-docker
steps:
- name: Notify Mattermost
uses: mattermost/action-mattermost-notify@master
with:
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
MATTERMOST_CHANNEL: bot-alerts-cicd
TEXT: |
🐳 *[PENPOT] Docker image available.*
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
@infra
publish-final-tag:
if: ${{ !contains(github.ref_name, '-RC') && !contains(github.ref_name, '-alpha') && !contains(github.ref_name, '-beta') && contains(github.ref_name, '.') }}
needs: build-docker

View File

@@ -37,43 +37,36 @@ jobs:
ref: ${{ steps.vars.outputs.gh_ref }}
# --- Publicly release the docker images ---
- name: Configure ECR credentials
uses: aws-actions/configure-aws-credentials@v4
- name: Login to private registry
uses: docker/login-action@v3
with:
aws-access-key-id: ${{ secrets.DOCKER_USERNAME }}
aws-secret-access-key: ${{ secrets.DOCKER_PASSWORD }}
aws-region: ${{ secrets.AWS_REGION }}
registry: ${{ secrets.DOCKER_REGISTRY }}
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Install Skopeo
run: |
sudo apt-get update -y
sudo apt-get install -y skopeo
- name: Login to DockerHub
uses: docker/login-action@v3
with:
username: ${{ secrets.PUB_DOCKER_USERNAME }}
password: ${{ secrets.PUB_DOCKER_PASSWORD }}
- name: Copy images from AWS ECR to Docker Hub
- name: Publish docker images to DockerHub
env:
AWS_REGION: ${{ secrets.AWS_REGION }}
DOCKER_REGISTRY: ${{ secrets.DOCKER_REGISTRY }}
PUB_DOCKER_USERNAME: ${{ secrets.PUB_DOCKER_USERNAME }}
PUB_DOCKER_PASSWORD: ${{ secrets.PUB_DOCKER_PASSWORD }}
TAG: ${{ steps.vars.outputs.gh_ref }}
REGISTRY: ${{ secrets.DOCKER_REGISTRY }}
HUB: ${{ secrets.PUB_DOCKER_HUB }}
run: |
aws ecr get-login-password --region $AWS_REGION | \
skopeo login --username AWS --password-stdin \
$DOCKER_REGISTRY
echo "$PUB_DOCKER_PASSWORD" | skopeo login --username "$PUB_DOCKER_USERNAME" --password-stdin docker.io
IMAGES=("frontend" "backend" "exporter" "storybook")
IMAGES=("frontend" "backend" "exporter")
EXTRA_TAGS=("main" "latest")
for image in "${IMAGES[@]}"; do
skopeo copy --all \
docker://$DOCKER_REGISTRY/$image:$TAG \
docker://docker.io/penpotapp/$image:$TAG
docker pull "$REGISTRY/$image:$TAG"
docker tag "$REGISTRY/$image:$TAG" "penpotapp/$image:$TAG"
docker push "penpotapp/$image:$TAG"
for alias in main latest; do
skopeo copy --all \
docker://$DOCKER_REGISTRY/$image:$TAG \
docker://docker.io/penpotapp/$image:$alias
for tag in "${EXTRA_TAGS[@]}"; do
docker tag "$REGISTRY/$image:$TAG" "penpotapp/$image:$tag"
docker push "penpotapp/$image:$tag"
done
done

View File

@@ -1,289 +0,0 @@
name: "CI"
defaults:
run:
shell: bash
on:
pull_request:
types:
- opened
- synchronize
push:
branches:
- develop
- staging
concurrency:
group: ${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
lint:
name: "Linter"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Check clojure code format
run: |
./scripts/lint
test-common:
name: "Common Tests"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Run tests on JVM
working-directory: ./common
run: |
clojure -M:dev:test
- name: Run tests on NODE
working-directory: ./common
run: |
./scripts/test
test-frontend:
name: "Frontend Tests"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Unit Tests
working-directory: ./frontend
run: |
./scripts/test
- name: Component Tests
working-directory: ./frontend
run: |
./scripts/test-components
test-render-wasm:
name: "Render WASM Tests"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Format
working-directory: ./render-wasm
run: |
cargo fmt --check
- name: Lint
working-directory: ./render-wasm
run: |
./lint
- name: Test
working-directory: ./render-wasm
run: |
./test
test-backend:
name: "Backend Tests"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
services:
postgres:
image: postgres:17
# Provide the password for postgres
env:
POSTGRES_USER: penpot_test
POSTGRES_PASSWORD: penpot_test
POSTGRES_DB: penpot_test
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
redis:
image: valkey/valkey:9
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Run tests
working-directory: ./backend
env:
PENPOT_TEST_DATABASE_URI: "postgresql://postgres/penpot_test"
PENPOT_TEST_DATABASE_USERNAME: penpot_test
PENPOT_TEST_DATABASE_PASSWORD: penpot_test
PENPOT_TEST_REDIS_URI: "redis://redis/1"
run: |
clojure -M:dev:test --reporter kaocha.report/documentation
test-library:
name: "Library Tests"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Run tests
working-directory: ./library
run: |
./scripts/test
build-integration:
name: "Build Integration Bundle"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Build Bundle
working-directory: ./frontend
run: |
./scripts/build 0.0.0
- name: Store Bundle Cache
uses: actions/cache@v4
with:
key: "integration-bundle-${{ github.sha }}"
path: frontend/resources/public
test-integration-1:
name: "Integration Tests 1/4"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
needs: build-integration
steps:
- name: Checkout Repository
uses: actions/checkout@v4
- name: Restore Cache
uses: actions/cache/restore@v4
with:
key: "integration-bundle-${{ github.sha }}"
path: frontend/resources/public
- name: Run Tests
working-directory: ./frontend
run: |
./scripts/test-e2e --shard="1/4";
- name: Upload test result
uses: actions/upload-artifact@v4
if: always()
with:
name: integration-tests-result-1
path: frontend/test-results/
overwrite: true
retention-days: 3
test-integration-2:
name: "Integration Tests 2/4"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
needs: build-integration
steps:
- name: Checkout Repository
uses: actions/checkout@v4
- name: Restore Cache
uses: actions/cache/restore@v4
with:
key: "integration-bundle-${{ github.sha }}"
path: frontend/resources/public
- name: Run Tests
working-directory: ./frontend
run: |
./scripts/test-e2e --shard="2/4";
- name: Upload test result
uses: actions/upload-artifact@v4
if: always()
with:
name: integration-tests-result-2
path: frontend/test-results/
overwrite: true
retention-days: 3
test-integration-3:
name: "Integration Tests 3/4"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
needs: build-integration
steps:
- name: Checkout Repository
uses: actions/checkout@v4
- name: Restore Cache
uses: actions/cache/restore@v4
with:
key: "integration-bundle-${{ github.sha }}"
path: frontend/resources/public
- name: Run Tests
working-directory: ./frontend
run: |
./scripts/test-e2e --shard="3/4";
- name: Upload test result
uses: actions/upload-artifact@v4
if: always()
with:
name: integration-tests-result-3
path: frontend/test-results/
overwrite: true
retention-days: 3
test-integration-4:
name: "Integration Tests 4/4"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
needs: build-integration
steps:
- name: Checkout Repository
uses: actions/checkout@v4
- name: Restore Cache
uses: actions/cache/restore@v4
with:
key: "integration-bundle-${{ github.sha }}"
path: frontend/resources/public
- name: Run Tests
working-directory: ./frontend
run: |
./scripts/test-e2e --shard="4/4";
- name: Upload test result
uses: actions/upload-artifact@v4
if: always()
with:
name: integration-tests-result-4
path: frontend/test-results/
overwrite: true
retention-days: 3

.gitignore

@@ -80,4 +80,3 @@ node_modules
/playwright/.cache/
/render-wasm/target/
/**/.yarn/*
/.pnpm-store

View File

@@ -1,102 +1,15 @@
# CHANGELOG
## 2.13.0 (Unreleased)
### :boom: Breaking changes & Deprecations
### :rocket: Epics and highlights
### :heart: Community contributions (Thank you!)
- Fix mask issues with component swap (by @dfelinto) [Github #7675](https://github.com/penpot/penpot/issues/7675)
### :sparkles: New features & Enhancements
- Add new Box Shadow Tokens [Taiga #10201](https://tree.taiga.io/project/penpot/us/10201)
- Make i18n translation files load on-demand [Taiga #11474](https://tree.taiga.io/project/penpot/us/11474)
- Add deleted files to dashboard [Taiga #8149](https://tree.taiga.io/project/penpot/us/8149)
### :bug: Bugs fixed
- Fix problem when drag+duplicate a full grid [Taiga #12565](https://tree.taiga.io/project/penpot/issue/12565)
- Fix problem when pasting elements in reverse flex layout [Taiga #12460](https://tree.taiga.io/project/penpot/issue/12460)
- Fix wrong board size presets in Android [Taiga #12339](https://tree.taiga.io/project/penpot/issue/12339)
## 2.12.0 (Unreleased)
### :boom: Breaking changes & Deprecations
#### Backend RPC API changes
The backend RPC API URLS are changed from `/api/rpc/command/<name>` to
`/api/main/methods/<name>`. The previous PATH is preserved for backward
compatibility; however, if you are a user of this API, it is strongly
recommended that you adapt your code to use the new PATH.
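For API clients, the change is only in the path prefix. A minimal sketch of the migration, using `get-profile` as an illustrative method name and a placeholder host (neither is prescribed by this changelog):

```
# Replace <your_domain> with your instance host; authentication is
# omitted here and works the same on both paths.
curl -s "https://<your_domain>/api/rpc/command/get-profile"    # old path, kept for compatibility
curl -s "https://<your_domain>/api/main/methods/get-profile"   # new path, recommended
```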
#### Updated SSO Callback URL
The OAuth / Single Sign-On (SSO) callback endpoint has changed to
align with the new OpenID Connect (OIDC) implementation.
Old callback URL:
```
https://<your_domain>/api/auth/oauth/<oauth_provider>/callback
```
New callback URL:
```
https://<your_domain>/api/auth/oidc/callback
```
**Action required:**
If you have SSO/Social-Auth configured on your on-premise instance,
the following actions are required before updating:
Update your OAuth or SSO provider configuration (e.g., Okta, Google,
Azure AD, etc.) to use the new callback URL. Failure to update may
result in authentication failures after upgrading.
**Reason for change:**
This update standardizes all authentication flows under a single URL
and makes the flow more modular, enabling SSO auth providers to be
configured dynamically.
#### Changes on default docker compose
We have updated the `docker/images/docker-compose.yaml` with a small
change related to the `PENPOT_SECRET_KEY`. Since this version, this
environment variable is also required by the exporter. So if you are using
penpot on-premise, you will need to apply the same change to your own
`docker-compose.yaml` file.
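If you apply this to an on-premise deployment, a quick sanity check after editing the compose file could look like the following sketch; the service name `penpot-exporter` is an assumption taken from the upstream example file and may differ in yours:

```
# Hypothetical check: confirm the exporter container actually receives
# the secret once PENPOT_SECRET_KEY is added to its environment section.
docker compose exec penpot-exporter env | grep PENPOT_SECRET_KEY
```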
We have removed the Minio server from the `docker/images/docker-compose.yml`
example. It's still usable as before, we just removed the example.
### :rocket: Epics and highlights
### :heart: Community contributions (Thank you!)
- Ensure consistent snap behavior across all zoom levels [Github #7774](https://github.com/penpot/penpot/pull/7774) by [@Tokytome](https://github.com/Tokytome)
- Fix crash in token grid view due to tooltip validation (by @dfelinto) [Github #7887](https://github.com/penpot/penpot/pull/7887)
- Enable Hindi translations on the application
### :sparkles: New features & Enhancements
- Add the ability to select boards to export as PDF [Taiga #12320](https://tree.taiga.io/project/penpot/issue/12320)
- Add toggle for switching boolean property values [Taiga #12341](https://tree.taiga.io/project/penpot/us/12341)
- Make the file export process more reliable [Taiga #12555](https://tree.taiga.io/project/penpot/us/12555)
- Add auth flow changes [Taiga #12333](https://tree.taiga.io/project/penpot/us/12333)
- Add new shape validation mechanism for shapes [Github #7696](https://github.com/penpot/penpot/pull/7696)
- Apply color tokens from sidebar [Taiga #11353](https://tree.taiga.io/project/penpot/us/11353)
- Display tokens in the inspect tab [Taiga #9313](https://tree.taiga.io/project/penpot/us/9313)
- Refactor clipboard behavior to assess some minor inconsistencies and make pasting binary data faster. [Taiga #12571](https://tree.taiga.io/project/penpot/task/12571)
- Select boards to export as PDF [Taiga #12320](https://tree.taiga.io/project/penpot/issue/12320)
### :bug: Bugs fixed
@@ -110,22 +23,8 @@ example. It's still usable as before, we just removed the example.
- Fix search shortcut [Taiga #10265](https://tree.taiga.io/project/penpot/issue/10265)
- Fix shortcut conflict in text editor (increase/decrease font size vs word selection)
- Fix problem with plugins generating code for pages different than current one [Taiga #12312](https://tree.taiga.io/project/penpot/issue/12312)
- Fix input confirmation behavior is not uniform [Taiga #12294](https://tree.taiga.io/project/penpot/issue/12294)
- Fix copy/pasting application/transit+json [Taiga #12721](https://tree.taiga.io/project/penpot/issue/12721)
- Fix problem with plugins content attribute [Plugins #209](https://github.com/penpot/penpot-plugins/issues/209)
- Fix U and E icon displayed in project list [Taiga #12806](https://tree.taiga.io/project/penpot/issue/12806)
- Fix unpublish library modal not scrolling a long file list [Taiga #12285](https://tree.taiga.io/project/penpot/issue/12285)
- Fix incorrect interaction between hover and scroll on assets sidebar [Taiga #12389](https://tree.taiga.io/project/penpot/issue/12389)
- Fix switch variants with paths [Taiga #12841](https://tree.taiga.io/project/penpot/issue/12841)
- Fix referencing typography tokens on font-family tokens [Taiga #12492](https://tree.taiga.io/project/penpot/issue/12492)
- Fix horizontal scroll on layer panel [Taiga #12843](https://tree.taiga.io/project/penpot/issue/12843)
- Fix unicode handling on email template abbreviation filter [Github #7966](https://github.com/penpot/penpot/pull/7966)
## 2.11.1
- Fix WEBP shape export on docker images [Taiga #3838](https://tree.taiga.io/project/penpot/issue/3838)
## 2.11.0
## 2.11.0 (Unreleased)
### :boom: Breaking changes & Deprecations
@@ -153,6 +52,10 @@ example. It's still usable as before, we just removed the example.
services which use netty internally (redis connection, S3 SDK client). This
configuration is not very commonly used, so we don't expect real impact on any user.
### :rocket: Epics and highlights
### :heart: Community contributions (Thank you!)
### :sparkles: New features & Enhancements
- New composite token: Typography [Taiga #10200](https://tree.taiga.io/project/penpot/us/10200)
@@ -197,9 +100,6 @@ example. It's still usable as before, we just removed the example.
- Fix options button does not work for comments created in the lower part of the screen [Taiga #12422](https://tree.taiga.io/project/penpot/issue/12422)
- Fix problem when checking usage with removed teams [Taiga #12442](https://tree.taiga.io/project/penpot/issue/12442)
- Fix focus mode persisting across page/file navigation [Taiga #12469](https://tree.taiga.io/project/penpot/issue/12469)
- Fix shadow color validation [Github #7705](https://github.com/penpot/penpot/pull/7705)
- Fix exception on selection blend-mode using keyboard [Github #7710](https://github.com/penpot/penpot/pull/7710)
- Fix crash when using decimal (floating-point) values for X/Y or width/height [Taiga #12543](https://tree.taiga.io/project/penpot/issue/12543)
## 2.10.1

View File

@@ -27,7 +27,6 @@
[app.common.transit :as t]
[app.common.types.file :as ctf]
[app.common.uuid :as uuid]
[app.common.uri :as u]
[app.config :as cf]
[app.db :as db]
[app.main :as main]

View File

@@ -1,8 +1,7 @@
From: {{profile.fullname}} <{{profile.email}}> / {{profile.id}}
Subject: {{feedback-subject}}
Type: {{feedback-type}}
{% if feedback-error-href %}
{%- if feedback-error-href %}
HREF: {{feedback-error-href}}
{% endif -%}

View File

@@ -240,4 +240,4 @@
</div>
</body>
</html>
</html>

View File

@@ -3,7 +3,7 @@
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Tokens%20starter%20kit.penpot"}
{:id "penpot-design-system"
:name "Penpot Design System | Pencil"
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Pencil-Penpot-Design-System.penpot"}
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/penpot-app.penpot"}
{:id "wireframing-kit"
:name "Wireframe library"
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Wireframing%20kit%20v1.1.penpot"}

View File

@@ -4,7 +4,7 @@
<meta charset="utf-8" />
<meta name="robots" content="noindex,nofollow">
<meta http-equiv="x-ua-compatible" content="ie=edge" />
<title>{{label|upper}} API Documentation</title>
<title>Builtin API Documentation - Penpot</title>
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
@@ -19,7 +19,7 @@
<body>
<main>
<header>
<h1>{{label|upper}}: API Documentation (v{{version}})</h1>
<h1>Penpot API Documentation (v{{version}})</h1>
<small class="menu">
[
<nav>
@@ -31,10 +31,9 @@
</header>
<section class="doc-content">
<h2>INTRODUCTION</h2>
<p>This documentation is intended to be a general overview of
the {{label}} API. If you prefer, you can
use <a href="{{openapi}}">Swagger/OpenAPI</a> as
alternative.</p>
<p>This documentation is intended to be a general overview of the penpot RPC API.
If you prefer, you can use <a href="/api/openapi.json">OpenAPI</a>
and/or <a href="/api/openapi">SwaggerUI</a> as alternatives.</p>
<h2>GENERAL NOTES</h2>
@@ -44,7 +43,7 @@
that starts with <b>get-</b> in the name, can use GET HTTP
method which in many cases benefits from the HTTP cache.</p>
{% block auth-section %}
<h3>Authentication</h3>
<p>The penpot backend currently offers two ways to authenticate the request:
<b>cookies</b> (the same mechanism that we use ourselves on accessing the API from the
@@ -57,10 +56,9 @@
<p>The access token can be obtained on the appropriate section on profile settings
and it should be provided using <b>`Authorization`</b> header with <b>`Token
&lt;token-string&gt;`</b> value.</p>
{% endblock %}
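As an illustration of the access-token flow described above (not part of this template): `get-profile` is just a sample method name, the host and token are placeholders, and the method path follows the new `/api/main/methods/<name>` scheme noted earlier in the changelog:

```
# Personal access tokens use the "Token" scheme on the Authorization
# header, as described in the section above.
curl -s \
  -H "Authorization: Token <token-string>" \
  "https://<your_domain>/api/main/methods/get-profile"
```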
<h3>Content Negotiation</h3>
<p>This API operates indistinctly with: <b>`application/json`</b>
<p>The penpot API by default operates indistinctly with: <b>`application/json`</b>
and <b>`application/transit+json`</b> content types. You should specify the
desired content-type on the <b>`Accept`</b> header, the transit encoding is used
by default.</p>
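And the same kind of request asking explicitly for plain JSON instead of the default transit encoding, again with placeholder values:

```
# Without an Accept header the response is application/transit+json;
# request application/json explicitly when that is easier to consume.
curl -s \
  -H "Authorization: Token <token-string>" \
  -H "Accept: application/json" \
  "https://<your_domain>/api/main/methods/get-profile"
```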
@@ -77,16 +75,13 @@
standard <a href="https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API">Fetch
API</a></p>
{% block limits-section %}
<h3>Limits</h3>
<p>The rate limit works on a per-user basis (this means that different API keys share
the same rate limit). For now the limits are not documented because we are
studying and analyzing the data. As a general rule, the API should not be abused; if
abusive use is detected, we will proceed to block the user's access to the
API.</p>
{% endblock %}
{% block webhooks-section %}
<h3>Webhooks</h3>
<p>All methods that emit webhook events are marked with flag <b>WEBHOOK</b>, the
data structure defined on each method represents the <i>payload</i> of the
@@ -102,11 +97,9 @@
"profileId": "db601c95-045f-808b-8002-361312e63531"
}
</pre>
{% endblock %}
</section>
<section class="rpc-doc-content">
<h2>METHODS REFERENCE:</h2>
<h2>RPC METHODS REFERENCE:</h2>
<ul class="rpc-items">
{% for item in methods %}
{% include "app/templates/api-doc-entry.tmpl" with item=item %}

View File

@@ -1 +0,0 @@
{% extends "app/templates/api-doc.tmpl" %}

View File

@@ -1,10 +0,0 @@
{% extends "app/templates/api-doc.tmpl" %}
{% block auth-section %}
{% endblock %}
{% block limits-section %}
{% endblock %}
{% block webhooks-section %}
{% endblock %}

View File

@@ -7,7 +7,7 @@
name="description"
content="SwaggerUI"
/>
<title>{{label|upper}} API</title>
<title>PENPOT Swagger UI</title>
<style>{{swagger-css|safe}}</style>
</head>
<body>
@@ -16,7 +16,7 @@
<script>
window.onload = () => {
window.ui = SwaggerUIBundle({
url: '{{uri}}',
url: '{{public-uri}}/api/openapi.json',
dom_id: '#swagger-ui',
presets: [
SwaggerUIBundle.presets.apis,

View File

@@ -25,7 +25,8 @@
<Logger name="app.storage.tmp" level="info" />
<Logger name="app.worker" level="trace" />
<Logger name="app.msgbus" level="info" />
<Logger name="app.http" level="info" />
<Logger name="app.http.websocket" level="info" />
<Logger name="app.http.sse" level="info" />
<Logger name="app.util.websocket" level="info" />
<Logger name="app.redis" level="info" />
<Logger name="app.rpc.rlimit" level="info" />

View File

@@ -25,7 +25,8 @@
<Logger name="app.storage.tmp" level="info" />
<Logger name="app.worker" level="trace" />
<Logger name="app.msgbus" level="info" />
<Logger name="app.http" level="info" />
<Logger name="app.http.websocket" level="info" />
<Logger name="app.http.sse" level="info" />
<Logger name="app.util.websocket" level="info" />
<Logger name="app.redis" level="info" />
<Logger name="app.rpc.rlimit" level="info" />

View File

@@ -1,18 +1,18 @@
#!/usr/bin/env bash
export PENPOT_MANAGEMENT_API_KEY=super-secret-management-api-key
export PENPOT_MANAGEMENT_API_SHARED_KEY=super-secret-management-api-key
export PENPOT_SECRET_KEY=super-secret-devenv-key
export PENPOT_HOST=devenv
export PENPOT_PUBLIC_URI=https://localhost:3449
export PENPOT_FLAGS="\
$PENPOT_FLAGS \
enable-login-with-ldap \
enable-login-with-password
disable-login-with-ldap \
disable-login-with-oidc \
disable-login-with-google \
disable-login-with-github \
disable-login-with-gitlab \
enable-login-with-oidc \
enable-login-with-google \
enable-login-with-github \
enable-login-with-gitlab \
enable-backend-worker \
enable-backend-asserts \
disable-feature-fdata-pointer-map \

View File

File diff suppressed because it is too large.

View File

@@ -331,81 +331,6 @@
(set/difference cfeat/backend-only-features))
#{}))))
(defn check-file-exists
[cfg id & {:keys [include-deleted?]
:or {include-deleted? false}
:as options}]
(db/get-with-sql cfg [sql:get-minimal-file id]
{:db/remove-deleted (not include-deleted?)}))
(def ^:private sql:file-permissions
"select fpr.is_owner,
fpr.is_admin,
fpr.can_edit
from file_profile_rel as fpr
inner join file as f on (f.id = fpr.file_id)
where fpr.file_id = ?
and fpr.profile_id = ?
union all
select tpr.is_owner,
tpr.is_admin,
tpr.can_edit
from team_profile_rel as tpr
inner join project as p on (p.team_id = tpr.team_id)
inner join file as f on (p.id = f.project_id)
where f.id = ?
and tpr.profile_id = ?
union all
select ppr.is_owner,
ppr.is_admin,
ppr.can_edit
from project_profile_rel as ppr
inner join file as f on (f.project_id = ppr.project_id)
where f.id = ?
and ppr.profile_id = ?")
(defn- get-file-permissions*
[conn profile-id file-id]
(when (and profile-id file-id)
(db/exec! conn [sql:file-permissions
file-id profile-id
file-id profile-id
file-id profile-id])))
(defn get-file-permissions
([conn profile-id file-id]
(let [rows (get-file-permissions* conn profile-id file-id)
is-owner (boolean (some :is-owner rows))
is-admin (boolean (some :is-admin rows))
can-edit (boolean (some :can-edit rows))]
(when (seq rows)
{:type :membership
:is-owner is-owner
:is-admin (or is-owner is-admin)
:can-edit (or is-owner is-admin can-edit)
:can-read true
:is-logged (some? profile-id)})))
([conn profile-id file-id share-id]
(let [perms (get-file-permissions conn profile-id file-id)
ldata (some-> (db/get* conn :share-link {:id share-id :file-id file-id})
(dissoc :flags)
(update :pages db/decode-pgarray #{}))]
;; NOTE: in a future when share-link becomes more powerful and
;; will allow us specify which parts of the app is available, we
;; will probably need to tweak this function in order to expose
;; this flags to the frontend.
(cond
(some? perms) perms
(some? ldata) {:type :share-link
:can-read true
:pages (:pages ldata)
:is-logged (some? profile-id)
:who-comment (:who-comment ldata)
:who-inspect (:who-inspect ldata)}))))
(defn get-project
[cfg project-id]
(db/get cfg :project {:id project-id}))

View File

@@ -255,8 +255,6 @@
(write-entry! output path params)
(events/tap :progress {:section :storage-object :id id})
(with-open [input (sto/get-object-data storage sobject)]
(.putNextEntry ^ZipOutputStream output (ZipEntry. (str "objects/" id ext)))
(io/copy input output :size (:size sobject))
@@ -281,8 +279,6 @@
thumbnails (bfc/get-file-object-thumbnails cfg file-id)]
(events/tap :progress {:section :file :id file-id})
(vswap! bfc/*state* update :files assoc file-id
{:id file-id
:name (:name file)
@@ -821,10 +817,9 @@
entries (keep (match-storage-entry-fn) entries)]
(doseq [{:keys [id entry]} entries]
(let [object (-> (read-entry input entry)
(decode-storage-object)
(update :bucket d/nilv sto/default-bucket)
(validate-storage-object))
(let [object (->> (read-entry input entry)
(decode-storage-object)
(validate-storage-object))
ext (cmedia/mtype->extension (:content-type object))
path (str "objects/" id ext)

View File

@@ -5,6 +5,7 @@
;; Copyright (c) KALEIDOS INC
(ns app.config
"A configuration management."
(:refer-clojure :exclude [get])
(:require
[app.common.data :as d]
@@ -46,7 +47,6 @@
:auto-file-snapshot-timeout "3h"
:public-uri "http://localhost:3449"
:host "localhost"
:tenant "default"
@@ -57,8 +57,6 @@
:objects-storage-backend "fs"
:objects-storage-fs-directory "assets"
:auth-token-cookie-name "auth-token"
:assets-path "/internal/assets/"
:smtp-default-reply-to "Penpot <no-reply@example.com>"
:smtp-default-from "Penpot <no-reply@example.com>"
@@ -92,7 +90,7 @@
[:secret-key {:optional true} :string]
[:tenant {:optional false} :string]
[:public-uri {:optional false} ::sm/uri]
[:public-uri {:optional false} :string]
[:host {:optional false} :string]
[:http-server-port {:optional true} ::sm/int]
@@ -102,7 +100,7 @@
[:http-server-io-threads {:optional true} ::sm/int]
[:http-server-max-worker-threads {:optional true} ::sm/int]
[:management-api-key {:optional true} :string]
[:management-api-shared-key {:optional true} :string]
[:telemetry-uri {:optional true} :string]
[:telemetry-with-taiga {:optional true} ::sm/boolean] ;; DELETE
@@ -167,7 +165,7 @@
[:google-client-id {:optional true} :string]
[:google-client-secret {:optional true} :string]
[:oidc-client-id {:optional true} :string]
[:oidc-user-info-source {:optional true} [:enum "auto" "userinfo" "token"]]
[:oidc-user-info-source {:optional true} :keyword]
[:oidc-client-secret {:optional true} :string]
[:oidc-base-uri {:optional true} :string]
[:oidc-token-uri {:optional true} :string]

View File

@@ -106,17 +106,17 @@
(let [content-part (MimeBodyPart.)
alternative-mpart (MimeMultipart. "alternative")]
(when-let [content (get body "text/plain")]
(let [text-part (MimeBodyPart.)]
(.setText text-part ^String content ^String charset)
(.addBodyPart alternative-mpart text-part)))
(when-let [content (get body "text/html")]
(let [html-part (MimeBodyPart.)]
(.setContent html-part ^String content
(str "text/html; charset=" charset))
(.addBodyPart alternative-mpart html-part)))
(when-let [content (get body "text/plain")]
(let [text-part (MimeBodyPart.)]
(.setText text-part ^String content ^String charset)
(.addBodyPart alternative-mpart text-part)))
(.setContent content-part alternative-mpart)
(.addBodyPart mixed-mpart content-part))

View File

@@ -25,6 +25,7 @@
[app.main :as-alias main]
[app.metrics :as mtx]
[app.rpc :as-alias rpc]
[app.rpc.doc :as-alias rpc.doc]
[app.setup :as-alias setup]
[integrant.core :as ig]
[reitit.core :as r]
@@ -148,6 +149,7 @@
[:map
[::ws/routes schema:routes]
[::rpc/routes schema:routes]
[::rpc.doc/routes schema:routes]
[::oidc/routes schema:routes]
[::assets/routes schema:routes]
[::debug/routes schema:routes]
@@ -169,9 +171,8 @@
[sec/sec-fetch-metadata]
[mw/params]
[mw/format-response]
[mw/auth {:bearer (partial session/decode-token cfg)
:cookie (partial session/decode-token cfg)
:token (partial actoken/decode-token cfg)}]
[session/soft-auth cfg]
[actoken/soft-auth cfg]
[mw/parse-request]
[mw/errors errors/handle]
[mw/restrict-methods]]}
@@ -187,5 +188,9 @@
(::mgmt/routes cfg)]
(::ws/routes cfg)
(::oidc/routes cfg)
(::rpc/routes cfg)]]))
["/api" {:middleware [[mw/cors]
[sec/client-header-check]]}
(::oidc/routes cfg)
(::rpc.doc/routes cfg)
(::rpc/routes cfg)]]]))

View File

@@ -9,19 +9,23 @@
[app.common.logging :as l]
[app.config :as cf]
[app.db :as db]
[app.http :as-alias http]
[app.main :as-alias main]
[app.setup :as-alias setup]
[app.tokens :as tokens]))
[app.tokens :as tokens]
[yetti.request :as yreq]))
(defn decode-token
(def header-re #"(?i)^Token\s+(.*)")
(defn get-token
[request]
(some->> (yreq/get-header request "authorization")
(re-matches header-re)
(second)))
(defn- decode-token
[cfg token]
(try
(tokens/verify cfg {:token token :iss "access-token"})
(catch Throwable cause
(l/trc :hint "exception on decoding token"
:token token
:cause cause))))
(when token
(tokens/verify cfg {:token token :iss "access-token"})))
(def sql:get-token-data
"SELECT perms, profile_id, expires_at
@@ -31,28 +35,47 @@
OR (expires_at > now()));")
(defn- get-token-data
[pool claims]
[pool token-id]
(when-not (db/read-only? pool)
(when-let [token-id (get claims :tid)]
(some-> (db/exec-one! pool [sql:get-token-data token-id])
(update :perms db/decode-pgarray #{})))))
(some-> (db/exec-one! pool [sql:get-token-data token-id])
(update :perms db/decode-pgarray #{}))))
(defn- wrap-soft-auth
"Soft Authentication, will be executed synchronously on the undertow
worker thread."
[handler cfg]
(letfn [(handle-request [request]
(try
(let [token (get-token request)
claims (decode-token cfg token)]
(cond-> request
(map? claims)
(assoc ::id (:tid claims))))
(catch Throwable cause
(l/trace :hint "exception on decoding malformed token" :cause cause)
request)))]
(fn [request]
(handler (handle-request request)))))
(defn- wrap-authz
"Authorization middleware, will be executed synchronously on vthread."
[handler {:keys [::db/pool]}]
(fn [request]
(let [{:keys [type claims]} (get request ::http/auth-data)]
(if (= :token type)
(let [{:keys [perms profile-id expires-at]} (some->> claims (get-token-data pool))]
;; FIXME: revisit this, this data looks unused
(handler (cond-> request
(some? perms)
(assoc ::perms perms)
(some? profile-id)
(assoc ::profile-id profile-id)
(some? expires-at)
(assoc ::expires-at expires-at))))
(let [{:keys [perms profile-id expires-at]} (some->> (::id request) (get-token-data pool))]
(handler (cond-> request
(some? perms)
(assoc ::perms perms)
(some? profile-id)
(assoc ::profile-id profile-id)
(some? expires-at)
(assoc ::expires-at expires-at))))))
(handler request)))))
(def soft-auth
{:name ::soft-auth
:compile (fn [& _]
(when (contains? cf/flags :access-tokens)
wrap-soft-auth))})
(def authz
{:name ::authz

View File

@@ -30,7 +30,7 @@
(defn- get-file-media-object
[pool id]
(db/get pool :file-media-object {:id id} {::db/remove-deleted false}))
(db/get pool :file-media-object {:id id}))
(defn- serve-object-from-s3
[{:keys [::sto/storage] :as cfg} obj]

View File

@@ -9,7 +9,8 @@
(:require
[app.common.schema :as sm]
[integrant.core :as ig]
[java-http-clj.core :as http])
[java-http-clj.core :as http]
[promesa.core :as p])
(:import
java.net.http.HttpClient))
@@ -28,9 +29,14 @@
(defn send!
([client req] (send! client req {}))
([client req {:keys [response-type] :or {response-type :string}}]
([client req {:keys [response-type sync?] :or {response-type :string sync? false}}]
(assert (client? client) "expected valid http client")
(http/send req {:client client :as response-type})))
(if sync?
(http/send req {:client client :as response-type})
(try
(http/send-async req {:client client :as response-type})
(catch Throwable cause
(p/rejected cause))))))
(defn- resolve-client
[params]
@@ -50,8 +56,8 @@
([cfg-or-client request]
(let [client (resolve-client cfg-or-client)
request (update request :uri str)]
(send! client request {})))
(send! client request {:sync? true})))
([cfg-or-client request options]
(let [client (resolve-client cfg-or-client)
request (update request :uri str)]
(send! client request options))))
(send! client request (merge {:sync? true} options)))))

View File

@@ -13,7 +13,6 @@
[app.config :as cf]
[app.http :as-alias http]
[app.http.access-token :as-alias actoken]
[app.http.auth :as-alias auth]
[app.http.session :as-alias session]
[app.util.inet :as inet]
[clojure.spec.alpha :as s]
@@ -23,15 +22,16 @@
(defn request->context
"Extracts error report relevant context data from request."
[request]
(let [{:keys [claims] :as auth} (get request ::http/auth-data)]
(let [claims (-> {}
(into (::session/token-claims request))
(into (::actoken/token-claims request)))]
(-> (cf/logging-context)
(assoc :request/path (:path request))
(assoc :request/method (:method request))
(assoc :request/params (:params request))
(assoc :request/user-agent (yreq/get-header request "user-agent"))
(assoc :request/ip-addr (inet/parse-request request))
(assoc :request/profile-id (get claims :uid))
(assoc :request/auth-data auth)
(assoc :request/profile-id (:uid claims))
(assoc :version/frontend (or (yreq/get-header request "x-frontend-version") "unknown")))))
(defmulti handle-error
@@ -60,6 +60,7 @@
::yres/body data}
(binding [l/*context* (request->context request)]
(l/wrn :hint "restriction error" :cause err)
{::yres/status 400
::yres/body data}))))

View File

@@ -13,7 +13,7 @@
[app.common.time :as ct]
[app.config :as cf]
[app.db :as db]
[app.http.middleware :as mw]
[app.http.access-token :refer [get-token]]
[app.main :as-alias main]
[app.rpc.commands.profile :as cmd.profile]
[app.setup :as-alias setup]
@@ -32,6 +32,20 @@
[_ params]
(assert (db/pool? (::db/pool params)) "expect valid database pool"))
(def ^:private auth
{:name ::auth
:compile
(fn [_ _]
(fn [handler shared-key]
(if shared-key
(fn [request]
(let [token (get-token request)]
(if (= token shared-key)
(handler request)
{::yres/status 403})))
(fn [_ _]
{::yres/status 403}))))})
(def ^:private default-system
{:name ::default-system
:compile
@@ -50,27 +64,23 @@
(db/tx-run! cfg handler request)))))})
(defmethod ig/init-key ::routes
[_ {:keys [::setup/props] :as cfg}]
[_ cfg]
["" {:middleware [[auth (cf/get :management-api-shared-key)]
[default-system cfg]
[transaction]]}
["/authenticate"
{:handler authenticate
:allowed-methods #{:post}}]
(let [management-key (or (cf/get :management-api-key)
(get props :management-key))]
["/get-customer"
{:handler get-customer
:transaction true
:allowed-methods #{:post}}]
["" {:middleware [[mw/shared-key-auth management-key]
[default-system cfg]
[transaction]]}
["/authenticate"
{:handler authenticate
:allowed-methods #{:post}}]
["/get-customer"
{:handler get-customer
:transaction true
:allowed-methods #{:post}}]
["/update-customer"
{:handler update-customer
:allowed-methods #{:post}
:transaction true}]]))
["/update-customer"
{:handler update-customer
:allowed-methods #{:post}
:transaction true}]])
;; ---- HELPERS

View File

@@ -12,11 +12,8 @@
[app.common.schema :as-alias sm]
[app.common.transit :as t]
[app.config :as cf]
[app.http :as-alias http]
[app.http.errors :as errors]
[app.tokens :as tokens]
[app.util.pointer-map :as pmap]
[buddy.core.codecs :as bc]
[cuerdas.core :as str]
[yetti.adapter :as yt]
[yetti.middleware :as ymw]
@@ -243,77 +240,3 @@
(if (contains? allowed method)
(handler request)
{::yres/status 405}))))))})
(defn- wrap-auth
[handler decoders]
(let [token-re
#"(?i)^(Token|Bearer)\s+(.*)"
get-token-from-authorization
(fn [request]
(when-let [[_ token-type token] (some->> (yreq/get-header request "authorization")
(re-matches token-re))]
(if (= "token" (str/lower token-type))
{:type :token
:token token}
{:type :bearer
:token token})))
get-token-from-cookie
(fn [request]
(let [cname (cf/get :auth-token-cookie-name)
token (some-> (yreq/get-cookie request cname) :value)]
(when-not (str/empty? token)
{:type :cookie
:token token})))
get-token
(some-fn get-token-from-cookie get-token-from-authorization)
process-request
(fn [request]
(if-let [{:keys [type token] :as auth} (get-token request)]
(let [decode-fn (get decoders type)]
(if (or (= type :cookie) (= type :bearer))
(let [metadata (tokens/decode-header token)]
;; NOTE: we only proceed to decode claims on new
;; cookie tokens. The old cookies dont need to be
;; decoded because they use the token string as ID
(if (and (= (:kid metadata) 1)
(= (:ver metadata) 1)
(some? decode-fn))
(assoc request ::http/auth-data (assoc auth
:claims (decode-fn token)
:metadata metadata))
(assoc request ::http/auth-data (assoc auth :metadata {:ver 0}))))
(if decode-fn
(assoc request ::http/auth-data (assoc auth :claims (decode-fn token)))
(assoc request ::http/auth-data auth))))
request))]
(fn [request]
(-> request process-request handler))))
(def auth
{:name ::auth
:compile (constantly wrap-auth)})
(defn- wrap-shared-key-auth
[handler shared-key]
(if shared-key
(let [shared-key (if (string? shared-key)
shared-key
(bc/bytes->b64-str shared-key true))]
(fn [request]
(let [key (yreq/get-header request "x-shared-key")]
(if (= key shared-key)
(handler (assoc request ::http/auth-with-shared-key true))
{::yres/status 403}))))
(fn [_ _]
{::yres/status 403})))
(def shared-key-auth
{:name ::shared-key-auth
:compile (constantly wrap-shared-key-auth)})

View File

@@ -11,24 +11,28 @@
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.db.sql :as sql]
[app.http :as-alias http]
[app.http.auth :as-alias http.auth]
[app.http.session.tasks :as-alias tasks]
[app.main :as-alias main]
[app.setup :as-alias setup]
[app.tokens :as tokens]
[cuerdas.core :as str]
[integrant.core :as ig]
[yetti.request :as yreq]
[yetti.response :as yres]))
[yetti.request :as yreq]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; DEFAULTS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; A default cookie name for storing the session.
(def default-auth-token-cookie-name "auth-token")
;; A cookie that we can use to check from other sites of the same
;; domain if a user is authenticated.
(def default-auth-data-cookie-name "auth-data")
;; Default value for cookie max-age
(def default-cookie-max-age (ct/duration {:days 7}))
@@ -40,10 +44,10 @@
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defprotocol ISessionManager
(read-session [_ id])
(create-session [_ params])
(update-session [_ session])
(delete-session [_ id]))
(read [_ key])
(write! [_ key data])
(update! [_ data])
(delete! [_ key]))
(defn manager?
[o]
@@ -58,82 +62,71 @@
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def ^:private schema:params
[:map {:title "SessionParams" :closed true}
[:map {:title "session-params"}
[:user-agent ::sm/text]
[:profile-id ::sm/uuid]
[:user-agent {:optional true} ::sm/text]
[:sso-provider-id {:optional true} ::sm/uuid]
[:sso-session-id {:optional true} :string]])
[:created-at ::ct/inst]])
(def ^:private valid-params?
(sm/validator schema:params))
(defn- prepare-session-params
[params key]
(assert (string? key) "expected key to be a string")
(assert (not (str/blank? key)) "expected key to be not empty")
(assert (valid-params? params) "expected valid params")
{:user-agent (:user-agent params)
:profile-id (:profile-id params)
:created-at (:created-at params)
:updated-at (:created-at params)
:id key})
(defn- database-manager
[pool]
(reify ISessionManager
(read-session [_ id]
(if (string? id)
;; Backward compatibility
(let [session (db/exec-one! pool (sql/select :http-session {:id id}))]
(-> session
(assoc :modified-at (:updated-at session))
(dissoc :updated-at)))
(db/exec-one! pool (sql/select :http-session-v2 {:id id}))))
(read [_ token]
(db/exec-one! pool (sql/select :http-session {:id token})))
(create-session [_ params]
(assert (valid-params? params) "expect valid session params")
(write! [_ key params]
(let [params (-> params
(assoc :created-at (ct/now))
(prepare-session-params key))]
(db/insert! pool :http-session params)
params))
(let [now (ct/now)
params (-> params
(assoc :id (uuid/next))
(assoc :created-at now)
(assoc :modified-at now))]
(db/insert! pool :http-session-v2 params
{::db/return-keys true})))
(update! [_ params]
(let [updated-at (ct/now)]
(db/update! pool :http-session
{:updated-at updated-at}
{:id (:id params)})
(assoc params :updated-at updated-at)))
(update-session [_ session]
(let [modified-at (ct/now)]
(if (string? (:id session))
(db/insert! pool :http-session-v2
(-> session
(assoc :id (uuid/next))
(assoc :created-at modified-at)
(assoc :modified-at modified-at)))
(db/update! pool :http-session-v2
{:modified-at modified-at}
{:id (:id session)}
{::db/return-keys true}))))
(delete-session [_ id]
(if (string? id)
(db/delete! pool :http-session {:id id} {::db/return-keys false})
(db/delete! pool :http-session-v2 {:id id} {::db/return-keys false}))
(delete! [_ token]
(db/delete! pool :http-session {:id token})
nil)))
(defn inmemory-manager
[]
(let [cache (atom {})]
(reify ISessionManager
(read-session [_ id]
(get @cache id))
(read [_ token]
(get @cache token))
(create-session [_ params]
(assert (valid-params? params) "expect valid session params")
(write! [_ key params]
(let [params (-> params
(assoc :created-at (ct/now))
(prepare-session-params key))]
(swap! cache assoc key params)
params))
(let [now (ct/now)
session (-> params
(assoc :id (uuid/next))
(assoc :created-at now)
(assoc :modified-at now))]
(swap! cache assoc (:id session) session)
session))
(update! [_ params]
(let [updated-at (ct/now)]
(swap! cache update (:id params) assoc :updated-at updated-at)
(assoc params :updated-at updated-at)))
(update-session [_ session]
(let [modified-at (ct/now)]
(swap! cache update (:id session) assoc :modified-at modified-at)
(assoc session :modified-at modified-at)))
(delete-session [_ id]
(swap! cache dissoc id)
(delete! [_ token]
(swap! cache dissoc token)
nil))))
(defmethod ig/assert-key ::manager
@@ -153,116 +146,103 @@
;; MANAGER IMPL
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(declare ^:private assign-session-cookie)
(declare ^:private clear-session-cookie)
(defn- assign-token
[cfg session]
(let [claims {:iss "authentication"
:aud "penpot"
:sid (:id session)
:iat (:modified-at session)
:uid (:profile-id session)
:sso-provider-id (:sso-provider-id session)
:sso-session-id (:sso-session-id session)}
header {:kid 1 :ver 1}
token (tokens/generate cfg claims header)]
(assoc session :token token)))
(declare ^:private assign-auth-token-cookie)
(declare ^:private clear-auth-token-cookie)
(declare ^:private gen-token)
(defn create-fn
[{:keys [::manager] :as cfg} {profile-id :id :as profile}
& {:keys [sso-provider-id sso-session-id]}]
[{:keys [::manager] :as cfg} profile-id]
(assert (manager? manager) "expected valid session manager")
(assert (uuid? profile-id) "expected valid uuid for profile-id")
(fn [request response]
(let [uagent (yreq/get-header request "user-agent")
session (->> {:user-agent uagent
:profile-id profile-id
:sso-provider-id sso-provider-id
:sso-session-id sso-session-id}
(d/without-nils)
(create-session manager)
(assign-token cfg))]
(l/trc :hint "create" :id (str (:id session)) :profile-id (str profile-id))
(assign-session-cookie response session))))
params {:profile-id profile-id
:user-agent uagent}
token (gen-token cfg params)
session (write! manager token params)]
(l/trc :hint "create" :profile-id (str profile-id))
(-> response
(assign-auth-token-cookie session)))))
(defn delete-fn
[{:keys [::manager]}]
(assert (manager? manager) "expected valid session manager")
(fn [request response]
(some->> (get request ::id) (delete-session manager))
(clear-session-cookie response)))
(let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
cookie (yreq/get-cookie request cname)]
(l/trc :hint "delete" :profile-id (:profile-id request))
(some->> (:value cookie) (delete! manager))
(-> response
(assoc :status 204)
(assoc :body nil)
(clear-auth-token-cookie)))))
(defn decode-token
(defn- gen-token
[cfg {:keys [profile-id created-at]}]
(tokens/generate cfg {:iss "authentication"
:iat created-at
:uid profile-id}))
(defn- decode-token
[cfg token]
(try
(tokens/verify cfg {:token token :iss "authentication"})
(catch Throwable cause
(l/trc :hint "exception on decoding token"
:token token
:cause cause))))
(when token
(tokens/verify cfg {:token token :iss "authentication"})))
(defn get-session
(defn- get-token
[request]
(get request ::session))
(let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
cookie (some-> (yreq/get-cookie request cname) :value)]
(when-not (str/empty? cookie)
cookie)))
(defn invalidate-others
[cfg session]
(let [sql "delete from http_session_v2 where profile_id = ? and id != ?"]
(-> (db/exec-one! cfg [sql (:profile-id session) (:id session)])
(db/get-update-count))))
(defn- get-session
[manager token]
(some->> token (read manager)))
(defn- renew-session?
[{:keys [id modified-at] :as session}]
(or (string? id)
(and (ct/inst? modified-at)
(let [elapsed (ct/diff modified-at (ct/now))]
(neg? (compare default-renewal-max-age elapsed))))))
[{:keys [updated-at] :as session}]
(and (ct/inst? updated-at)
(let [elapsed (ct/diff updated-at (ct/now))]
(neg? (compare default-renewal-max-age elapsed)))))
(defn- wrap-authz
(defn- wrap-soft-auth
[handler {:keys [::manager] :as cfg}]
(assert (manager? manager) "expected valid session manager")
(letfn [(handle-request [request]
(try
(let [token (get-token request)
claims (decode-token cfg token)]
(cond-> request
(map? claims)
(-> (assoc ::token-claims claims)
(assoc ::token token))))
(catch Throwable cause
(l/trc :hint "exception on decoding malformed token" :cause cause)
request)))]
(fn [request]
(handler (handle-request request)))))
(defn- wrap-authz
[handler {:keys [::manager]}]
(assert (manager? manager) "expected valid session manager")
(fn [request]
(let [{:keys [type token claims metadata]} (get request ::http/auth-data)]
(cond
(= type :cookie)
(let [session (case (:ver metadata)
;; BACKWARD COMPATIBILITY WITH OLD TOKENS
0 (read-session manager token)
1 (some->> (:sid claims) (read-session manager))
nil)
(let [session (get-session manager (::token request))
request (cond-> request
(some? session)
(assoc ::profile-id (:profile-id session)
::id (:id session)))
response (handler request)]
request (cond-> request
(some? session)
(-> (assoc ::profile-id (:profile-id session))
(assoc ::session session)))
(if (renew-session? session)
(let [session (update! manager session)]
(-> response
(assign-auth-token-cookie session)))
response))))
response (handler request)]
(if (and session (renew-session? session))
(let [session (->> session
(update-session manager)
(assign-token cfg))]
(assign-session-cookie response session))
response))
(= type :bearer)
(let [session (case (:ver metadata)
;; BACKWARD COMPATIBILITY WITH OLD TOKENS
0 (read-session manager token)
1 (some->> (:sid claims) (read-session manager))
nil)
request (cond-> request
(some? session)
(-> (assoc ::profile-id (:profile-id session))
(assoc ::session session)))]
(handler request))
:else
(handler request)))))
(def soft-auth
{:name ::soft-auth
:compile (constantly wrap-soft-auth)})
(def authz
{:name ::authz
@@ -270,16 +250,16 @@
;; --- IMPL
(defn- assign-session-cookie
[response {token :token modified-at :modified-at}]
(defn- assign-auth-token-cookie
[response {token :id updated-at :updated-at}]
(let [max-age (cf/get :auth-token-cookie-max-age default-cookie-max-age)
created-at modified-at
created-at updated-at
renewal (ct/plus created-at default-renewal-max-age)
expires (ct/plus created-at max-age)
secure? (contains? cf/flags :secure-session-cookies)
strict? (contains? cf/flags :strict-session-cookies)
cors? (contains? cf/flags :cors)
name (cf/get :auth-token-cookie-name)
name (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
comment (str "Renewal at: " (ct/format-inst renewal :rfc1123))
cookie {:path "/"
:http-only true
@@ -288,12 +268,12 @@
:comment comment
:same-site (if cors? :none (if strict? :strict :lax))
:secure secure?}]
(update response ::yres/cookies assoc name cookie)))
(update response :cookies assoc name cookie)))
(defn- clear-session-cookie
(defn- clear-auth-token-cookie
[response]
(let [cname (cf/get :auth-token-cookie-name)]
(update response ::yres/cookies assoc cname {:path "/" :value "" :max-age 0})))
(let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name)]
(update response :cookies assoc cname {:path "/" :value "" :max-age 0})))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; TASK: SESSION GC

View File

@@ -25,8 +25,7 @@
[app.util.inet :as inet]
[app.util.services :as-alias sv]
[app.worker :as wrk]
[cuerdas.core :as str]
[yetti.request :as yreq]))
[cuerdas.core :as str]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; HELPERS
@@ -79,32 +78,17 @@
(remove #(contains? reserved-props (key %))))
props))
(defn get-external-session-id
[request]
(when-let [session-id (yreq/get-header request "x-external-session-id")]
(when-not (or (> (count session-id) 256)
(= session-id "null")
(str/blank? session-id))
session-id)))
(defn- get-client-event-origin
[request]
(when-let [origin (yreq/get-header request "x-event-origin")]
(when-not (or (= origin "null")
(str/blank? origin))
(str/prune origin 200))))
(defn get-client-user-agent
[request]
(when-let [user-agent (yreq/get-header request "user-agent")]
(str/prune user-agent 500)))
(defn- get-client-version
[request]
(when-let [origin (yreq/get-header request "x-frontend-version")]
(when-not (or (= origin "null")
(str/blank? origin))
(str/prune origin 100))))
(defn event-from-rpc-params
"Create a base event skeleton with pre-filled some important
data that can be extracted from RPC params object"
[params]
(let [context {:external-session-id (::rpc/external-session-id params)
:external-event-origin (::rpc/external-event-origin params)
:triggered-by (::rpc/handler-name params)}]
{::type "action"
::profile-id (::rpc/profile-id params)
::ip-addr (::rpc/ip-addr params)
::context (d/without-nils context)}))
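A hypothetical example of the event skeleton produced by the new event-from-rpc-params; all values are invented for illustration, and the ::rpc/* keys are the ones populated by the RPC handler shown further below:
(comment
  ;; nil context entries are stripped by d/without-nils, so only the
  ;; keys actually present in the params end up under ::context.
  (event-from-rpc-params
   {::rpc/profile-id            (uuid/next)
    ::rpc/ip-addr               "127.0.0.1"
    ::rpc/external-session-id   "session-1234"
    ::rpc/external-event-origin "dashboard"
    ::rpc/handler-name          "get-file"})
  ;; => {::type "action"
  ;;     ::profile-id <uuid>
  ;;     ::ip-addr "127.0.0.1"
  ;;     ::context {:external-session-id "session-1234"
  ;;                :external-event-origin "dashboard"
  ;;                :triggered-by "get-file"}}
  )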
;; --- SPECS
@@ -133,33 +117,6 @@
(def ^:private check-event
(sm/check-fn schema:event))
(defn- prepare-context-from-request
[request]
(let [client-event-origin (get-client-event-origin request)
client-version (get-client-version request)
client-user-agent (get-client-user-agent request)
session-id (get-external-session-id request)
token-id (::actoken/id request)]
(d/without-nils
{:external-session-id session-id
:access-token-id (some-> token-id str)
:client-event-origin client-event-origin
:client-user-agent client-user-agent
:client-version client-version
:version (:full cf/version)})))
(defn event-from-rpc-params
"Create a base event skeleton with pre-filled some important
data that can be extracted from RPC params object"
[params]
(let [context (some-> params meta ::http/request prepare-context-from-request)
event {::type "action"
::profile-id (or (::rpc/profile-id params) uuid/zero)
::ip-addr (::rpc/ip-addr params)}]
(cond-> event
(some? context)
(assoc ::context context))))
(defn prepare-event
[cfg mdata params result]
(let [resultm (meta result)
@@ -169,15 +126,23 @@
(::rpc/profile-id params)
uuid/zero)
session-id (get params ::rpc/external-session-id)
event-origin (get params ::rpc/external-event-origin)
props (-> (or (::replace-props resultm)
(-> params
(merge (::props resultm))
(dissoc :profile-id)
(dissoc :type)))
(clean-props))
context (merge (::context resultm)
(prepare-context-from-request request))
token-id (::actoken/id request)
context (-> (::context resultm)
(assoc :external-session-id session-id)
(assoc :external-event-origin event-origin)
(assoc :access-token-id (some-> token-id str))
(d/without-nils))
ip-addr (inet/parse-request request)]
{::type (or (::type resultm)

View File

@@ -57,7 +57,7 @@
:uid uuid/zero})
body (t/encode {:events events})
headers {"content-type" "application/transit+json"
"origin" (str (cf/get :public-uri))
"origin" (cf/get :public-uri)
"cookie" (u/map->query-string {:auth-token token})}
params {:uri uri
:timeout 12000

View File

@@ -49,7 +49,7 @@
ctx (-> context
(assoc :tenant (cf/get :tenant))
(assoc :host (cf/get :host))
(assoc :public-uri (str (cf/get :public-uri)))
(assoc :public-uri (cf/get :public-uri))
(assoc :logger/name logger)
(assoc :logger/level level)
(dissoc :request/params :value :params :data))]

View File

@@ -21,7 +21,7 @@
[app.http.client :as-alias http.client]
[app.http.debug :as-alias http.debug]
[app.http.management :as mgmt]
[app.http.session :as session]
[app.http.session :as-alias session]
[app.http.session.tasks :as-alias session.tasks]
[app.http.websocket :as http.ws]
[app.loggers.webhooks :as-alias webhooks]
@@ -31,6 +31,7 @@
[app.redis :as-alias rds]
[app.rpc :as-alias rpc]
[app.rpc.climit :as-alias climit]
[app.rpc.doc :as-alias rpc.doc]
[app.setup :as-alias setup]
[app.srepl :as-alias srepl]
[app.storage :as-alias sto]
@@ -259,17 +260,14 @@
::oidc.providers/generic
{::http.client/client (ig/ref ::http.client/client)}
::oidc/providers
[(ig/ref ::oidc.providers/google)
(ig/ref ::oidc.providers/github)
(ig/ref ::oidc.providers/gitlab)
(ig/ref ::oidc.providers/generic)]
::oidc/routes
{::http.client/client (ig/ref ::http.client/client)
::db/pool (ig/ref ::db/pool)
::setup/props (ig/ref ::setup/props)
::oidc/providers (ig/ref ::oidc/providers)
::oidc/providers {:google (ig/ref ::oidc.providers/google)
:github (ig/ref ::oidc.providers/github)
:gitlab (ig/ref ::oidc.providers/gitlab)
:oidc (ig/ref ::oidc.providers/generic)}
::session/manager (ig/ref ::session/manager)
::email/blacklist (ig/ref ::email/blacklist)
::email/whitelist (ig/ref ::email/whitelist)}
@@ -282,6 +280,7 @@
{::session/manager (ig/ref ::session/manager)
::db/pool (ig/ref ::db/pool)
::rpc/routes (ig/ref ::rpc/routes)
::rpc.doc/routes (ig/ref ::rpc.doc/routes)
::setup/props (ig/ref ::setup/props)
::mtx/routes (ig/ref ::mtx/routes)
::oidc/routes (ig/ref ::oidc/routes)
@@ -301,7 +300,6 @@
{::db/pool (ig/ref ::db/pool)
::mtx/metrics (ig/ref ::mtx/metrics)
::mbus/msgbus (ig/ref ::mbus/msgbus)
::setup/props (ig/ref ::setup/props)
::session/manager (ig/ref ::session/manager)}
:app.http.assets/routes
@@ -339,26 +337,14 @@
::email/blacklist (ig/ref ::email/blacklist)
::email/whitelist (ig/ref ::email/whitelist)}
:app.rpc/management-methods
{::http.client/client (ig/ref ::http.client/client)
::db/pool (ig/ref ::db/pool)
::rds/pool (ig/ref ::rds/pool)
::wrk/executor (ig/ref ::wrk/netty-executor)
::session/manager (ig/ref ::session/manager)
::sto/storage (ig/ref ::sto/storage)
::mtx/metrics (ig/ref ::mtx/metrics)
::mbus/msgbus (ig/ref ::mbus/msgbus)
::rds/client (ig/ref ::rds/client)
::setup/props (ig/ref ::setup/props)}
:app.rpc.doc/routes
{:app.rpc/methods (ig/ref :app.rpc/methods)}
::rpc/routes
{::rpc/methods (ig/ref :app.rpc/methods)
::rpc/management-methods (ig/ref :app.rpc/management-methods)
;; FIXME: revisit if db/pool is necessary here
::db/pool (ig/ref ::db/pool)
::session/manager (ig/ref ::session/manager)
::setup/props (ig/ref ::setup/props)}
{::rpc/methods (ig/ref :app.rpc/methods)
::db/pool (ig/ref ::db/pool)
::session/manager (ig/ref ::session/manager)
::setup/props (ig/ref ::setup/props)}
::wrk/registry
{::mtx/metrics (ig/ref ::mtx/metrics)

View File

@@ -17,7 +17,6 @@
[app.common.time :as ct]
[app.config :as cf]
[app.db :as-alias db]
[app.http.client :as http]
[app.storage :as-alias sto]
[app.storage.tmp :as tmp]
[buddy.core.bytes :as bb]
@@ -38,9 +37,6 @@
org.im4java.core.IMOperation
org.im4java.core.Info))
(def default-max-file-size
(* 1024 1024 10)) ; 10 MiB
(def schema:upload
[:map {:title "Upload"}
[:filename :string]
@@ -245,7 +241,7 @@
(ex/raise :type :validation
:code :invalid-svg-file
:hint "uploaded svg does not provides dimensions"))
(merge input info {:ts (ct/now) :size (fs/size path)}))
(merge input info {:ts (ct/now)}))
(let [instance (Info. (str path))
mtype' (.getProperty instance "Mime type")]
@@ -265,7 +261,6 @@
(assoc input
:width width
:height height
:size (fs/size path)
:ts (ct/now)))))))
(defmethod process-error org.im4java.core.InfoException
@@ -275,54 +270,6 @@
:hint "invalid image"
:cause error))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; IMAGE HELPERS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn download-image
"Download an image from the provided URI and return the media input object"
[{:keys [::http/client]} uri]
(letfn [(parse-and-validate [{:keys [headers] :as response}]
(let [size (some-> (get headers "content-length") d/parse-integer)
mtype (get headers "content-type")
format (cm/mtype->format mtype)
max-size (cf/get :media-max-file-size default-max-file-size)]
(when-not size
(ex/raise :type :validation
:code :unknown-size
:hint "seems like the url points to resource with unknown size"))
(when (> size max-size)
(ex/raise :type :validation
:code :file-too-large
:hint (str/ffmt "the file size % is greater than the maximum %"
size
default-max-file-size)))
(when (nil? format)
(ex/raise :type :validation
:code :media-type-not-allowed
:hint "seems like the url points to an invalid media object"))
{:size size :mtype mtype :format format}))]
(let [{:keys [body] :as response} (http/req! client
{:method :get :uri uri}
{:response-type :input-stream})
{:keys [size mtype]} (parse-and-validate response)
path (tmp/tempfile :prefix "penpot.media.download.")
written (io/write* path body :size size)]
(when (not= written size)
(ex/raise :type :internal
:code :mismatch-write-size
:hint "unexpected state: unable to write to file"))
{;; :size size
:path path
:mtype mtype})))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; FONTS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

View File

@@ -450,13 +450,7 @@
:fn (mg/resource "app/migrations/sql/0141-add-idx-to-file-library-rel.sql")}
{:name "0141-add-file-data-table.sql"
:fn (mg/resource "app/migrations/sql/0141-add-file-data-table.sql")}
{:name "0142-add-sso-provider-table"
:fn (mg/resource "app/migrations/sql/0142-add-sso-provider-table.sql")}
{:name "0143-http-session-v2-table"
:fn (mg/resource "app/migrations/sql/0143-add-http-session-v2-table.sql")}])
:fn (mg/resource "app/migrations/sql/0141-add-file-data-table.sql")}])
(defn apply-migrations!
[pool name migrations]

View File

@@ -1,33 +0,0 @@
CREATE TABLE sso_provider (
id uuid PRIMARY KEY,
created_at timestamptz NOT NULL DEFAULT now(),
modified_at timestamptz NOT NULL DEFAULT now(),
is_enabled boolean NOT NULL DEFAULT true,
type text NOT NULL CHECK (type IN ('oidc')),
domain text NOT NULL,
client_id text NOT NULL,
client_secret text NOT NULL,
base_uri text NOT NULL,
token_uri text NULL,
auth_uri text NULL,
user_uri text NULL,
jwks_uri text NULL,
logout_uri text NULL,
roles_attr text NULL,
email_attr text NULL,
name_attr text NULL,
user_info_source text NOT NULL DEFAULT 'token'
CHECK (user_info_source IN ('token', 'userinfo', 'auto')),
scopes text[] NULL,
roles text[] NULL
);
CREATE UNIQUE INDEX sso_provider__domain__idx
ON sso_provider(domain);

View File

@@ -1,23 +0,0 @@
CREATE TABLE http_session_v2 (
id uuid PRIMARY KEY,
created_at timestamptz NOT NULL DEFAULT now(),
modified_at timestamptz NOT NULL DEFAULT now(),
profile_id uuid REFERENCES profile(id) ON DELETE CASCADE,
user_agent text NULL,
sso_provider_id uuid NULL REFERENCES sso_provider(id) ON DELETE CASCADE,
sso_session_id text NULL
);
CREATE INDEX http_session_v2__profile_id__idx
ON http_session_v2(profile_id);
CREATE INDEX http_session_v2__sso_provider_id__idx
ON http_session_v2(sso_provider_id)
WHERE sso_provider_id IS NOT NULL;
CREATE INDEX http_session_v2__sso_session_id__idx
ON http_session_v2(sso_session_id)
WHERE sso_session_id IS NOT NULL;

View File

@@ -13,15 +13,11 @@
[app.common.schema :as sm]
[app.common.spec :as us]
[app.common.time :as ct]
[app.common.uri :as u]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.http :as-alias http]
[app.http.access-token :as actoken]
[app.http.client :as-alias http.client]
[app.http.middleware :as mw]
[app.http.security :as sec]
[app.http.session :as session]
[app.loggers.audit :as audit]
[app.main :as-alias main]
@@ -30,7 +26,6 @@
[app.redis :as rds]
[app.rpc.climit :as climit]
[app.rpc.cond :as cond]
[app.rpc.doc :as doc]
[app.rpc.helpers :as rph]
[app.rpc.retry :as retry]
[app.rpc.rlimit :as rlimit]
@@ -41,6 +36,7 @@
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[integrant.core :as ig]
[promesa.core :as p]
[yetti.request :as yreq]
[yetti.response :as yres]))
@@ -48,7 +44,7 @@
(defn- default-handler
[_]
(ex/raise :type :not-found))
(p/rejected (ex/error :type :not-found)))
(defn- handle-response-transformation
[response request mdata]
@@ -69,61 +65,70 @@
response (if (fn? result)
(result request)
(let [result (rph/unwrap result)
status (or (::http/status mdata)
(if (nil? result)
204
200))
status (::http/status mdata 200)
headers (cond-> (::http/headers mdata {})
(yres/stream-body? result)
(assoc "content-type" "application/octet-stream"))]
{::yres/status status
::yres/headers headers
::yres/body result}))]
(-> response
(handle-response-transformation request mdata)
(handle-before-comple-hook mdata))))
(defn- make-rpc-handler
(defn get-external-session-id
[request]
(when-let [session-id (yreq/get-header request "x-external-session-id")]
(when-not (or (> (count session-id) 256)
(= session-id "null")
(str/blank? session-id))
session-id)))
(defn- get-external-event-origin
[request]
(when-let [origin (yreq/get-header request "x-event-origin")]
(when-not (or (> (count origin) 256)
(= origin "null")
(str/blank? origin))
origin)))
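A small sketch of the validation rule both header helpers apply; valid-header-value? is a hypothetical name introduced only for illustration, while the real helpers read the raw value via yreq/get-header:
(comment
  ;; Values that are blank, the literal string "null", or longer than
  ;; 256 characters are discarded, so the helper returns nil for them.
  (defn valid-header-value? [s]
    (and (string? s)
         (<= (count s) 256)
         (not= s "null")
         (not (str/blank? s))))
  (valid-header-value? "session-1234") ;; => true
  (valid-header-value? "null")         ;; => false
  (valid-header-value? "")             ;; => false
  )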
(defn- rpc-handler
"Ring handler that dispatches cmd requests and convert between
internal async flow into ring async flow."
[methods]
(let [methods (update-vals methods peek)]
(fn [{:keys [params path-params method] :as request}]
(let [handler-name (:type path-params)
etag (yreq/get-header request "if-none-match")
profile-id (or (::session/profile-id request)
(::actoken/profile-id request)
(if (::http/auth-with-shared-key request)
uuid/zero
nil))
[methods {:keys [params path-params method] :as request}]
(let [handler-name (:type path-params)
etag (yreq/get-header request "if-none-match")
profile-id (or (::session/profile-id request)
(::actoken/profile-id request))
ip-addr (inet/parse-request request)
ip-addr (inet/parse-request request)
session-id (get-external-session-id request)
event-origin (get-external-event-origin request)
data (-> params
(assoc ::handler-name handler-name)
(assoc ::ip-addr ip-addr)
(assoc ::request-at (ct/now))
(assoc ::cond/key etag)
(cond-> (uuid? profile-id)
(assoc ::profile-id profile-id)))
data (-> params
(assoc ::handler-name handler-name)
(assoc ::ip-addr ip-addr)
(assoc ::request-at (ct/now))
(assoc ::external-session-id session-id)
(assoc ::external-event-origin event-origin)
(assoc ::session/id (::session/id request))
(assoc ::cond/key etag)
(cond-> (uuid? profile-id)
(assoc ::profile-id profile-id)))
data (with-meta data
{::http/request request})
data (vary-meta data assoc ::http/request request)
handler-fn (get methods (keyword handler-name) default-handler)]
handler-fn (get methods (keyword handler-name) default-handler)]
(when (and (or (= method :get)
(= method :head))
(not (str/starts-with? handler-name "get-")))
(ex/raise :type :restriction
:code :method-not-allowed
:hint "method not allowed for this request"))
(when (and (or (= method :get)
(= method :head))
(not (str/starts-with? handler-name "get-")))
(ex/raise :type :restriction
:code :method-not-allowed
:hint "method not allowed for this request"))
;; FIXME: why we have this cond enabled here, we need to move it outside this handler
(binding [cond/*enabled* true]
(let [response (handler-fn data)]
(handle-response request response)))))))
(binding [cond/*enabled* true]
(let [response (handler-fn data)]
(handle-response request response)))))
(defn- wrap-metrics
"Wrap service method with metrics measurement."
@@ -200,7 +205,7 @@
::sm/explain (explain params)))))))
f))
(defn- wrap
(defn- wrap-all
[cfg f mdata]
(as-> f $
(wrap-db-transaction cfg $ mdata)
@@ -214,30 +219,17 @@
(wrap-params-validation cfg $ mdata)
(wrap-authentication cfg $ mdata)))
(defn- wrap-management
(defn- wrap
[cfg f mdata]
(as-> f $
(wrap-db-transaction cfg $ mdata)
(retry/wrap-retry cfg $ mdata)
(climit/wrap cfg $ mdata)
(wrap-metrics cfg $ mdata)
(wrap-audit cfg $ mdata)
(wrap-spec-conform cfg $ mdata)
(wrap-params-validation cfg $ mdata)
(wrap-authentication cfg $ mdata)))
(l/trc :hint "register method" :name (::sv/name mdata))
(let [f (wrap-all cfg f mdata)]
(partial f cfg)))
(defn- process-method
[cfg module wrap-fn [f mdata]]
(l/trc :hint "add method" :module module :name (::sv/name mdata))
(let [f (wrap-fn cfg f mdata)
k (keyword (::sv/name mdata))]
[k [mdata (partial f cfg)]]))
[cfg [vfn mdata]]
[(keyword (::sv/name mdata)) [mdata (wrap cfg vfn mdata)]])
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; API METHODS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- resolve-methods
(defn- resolve-command-methods
[cfg]
(let [cfg (assoc cfg ::type "command" ::metrics-id :rpc-command-timing)]
(->> (sv/scan-ns
@@ -266,7 +258,7 @@
'app.rpc.commands.verify-token
'app.rpc.commands.viewer
'app.rpc.commands.webhooks)
(map (partial process-method cfg "rpc" wrap))
(map (partial process-method cfg))
(into {}))))
(def ^:private schema:methods-params
@@ -290,50 +282,7 @@
(defmethod ig/init-key ::methods
[_ cfg]
(let [cfg (d/without-nils cfg)]
(resolve-methods cfg)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; MANAGEMENT METHODS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- resolve-management-methods
[cfg]
(let [cfg (assoc cfg ::type "management" ::metrics-id :rpc-management-timing)]
(->> (sv/scan-ns
'app.rpc.management.subscription
'app.rpc.management.exporter)
(map (partial process-method cfg "management" wrap-management))
(into {}))))
(def ^:private schema:management-methods-params
[:map {:title "management-methods-params"}
::session/manager
::http.client/client
::db/pool
::rds/pool
::mbus/msgbus
::sto/storage
::mtx/metrics
::setup/props])
(defmethod ig/assert-key ::management-methods
[_ params]
(assert (sm/check schema:management-methods-params params)))
(defmethod ig/init-key ::management-methods
[_ cfg]
(let [cfg (d/without-nils cfg)]
(resolve-management-methods cfg)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ROUTES
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- redirect
[href]
(fn [_]
{::yres/status 308
::yres/headers {"location" (str href)}}))
(resolve-command-methods cfg)))
(def ^:private schema:methods
[:map-of :keyword [:tuple :map ::sm/fn]])
@@ -348,50 +297,11 @@
(assert (db/pool? (::db/pool params)) "expect valid database pool")
(assert (some? (::setup/props params)))
(assert (session/manager? (::session/manager params)) "expect valid session manager")
(assert (valid-methods? (::methods params)) "expect valid methods map")
(assert (valid-methods? (::management-methods params)) "expect valid methods map"))
(assert (valid-methods? (::methods params)) "expect valid methods map"))
(defmethod ig/init-key ::routes
[_ {:keys [::methods ::management-methods ::setup/props] :as cfg}]
(let [public-uri (cf/get :public-uri)
management-key (or (cf/get :management-api-key)
(get props :management-key))]
["/api"
["/management"
["/methods/:type"
{:middleware [[mw/shared-key-auth management-key]
[session/authz cfg]]
:handler (make-rpc-handler management-methods)}]
(doc/routes :methods management-methods
:label "management"
:base-uri (u/join public-uri "/api/management")
:description "MANAGEMENT API")]
["/main"
["/methods/:type"
{:middleware [[mw/cors]
[sec/client-header-check]
[session/authz cfg]
[actoken/authz cfg]]
:handler (make-rpc-handler methods)}]
(doc/routes :methods methods
:label "main"
:base-uri (u/join public-uri "/api/main")
:description "MAIN API")]
;; BACKWARD COMPATIBILITY
["/_doc" {:handler (redirect (u/join public-uri "/api/main/doc"))}]
["/doc" {:handler (redirect (u/join public-uri "/api/main/doc"))}]
["/openapi" {:handler (redirect (u/join public-uri "/api/main/doc/openapi"))}]
["/openapi.join" {:handler (redirect (u/join public-uri "/api/main/doc/openapi.json"))}]
["/rpc/command/:type"
{:middleware [[mw/cors]
[sec/client-header-check]
[session/authz cfg]
[actoken/authz cfg]]
:handler (make-rpc-handler methods)}]]))
[_ {:keys [::methods] :as cfg}]
(let [methods (update-vals methods peek)]
[["/rpc" {:middleware [[session/authz cfg]
[actoken/authz cfg]]}
["/command/:type" {:handler (partial rpc-handler methods)}]]]))

View File

@@ -28,7 +28,6 @@
expires-at (some-> expiration (ct/in-future))
created-at (ct/now)
token (tokens/generate cfg {:iss "access-token"
:uid profile-id
:iat created-at
:tid token-id})

View File

@@ -7,24 +7,21 @@
(ns app.rpc.commands.auth
(:require
[app.auth :as auth]
[app.auth.oidc :as oidc]
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.features :as cfeat]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uri :as u]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.email :as eml]
[app.email.blacklist :as email.blacklist]
[app.email.whitelist :as email.whitelist]
[app.http :as-alias http]
[app.http.session :as session]
[app.loggers.audit :as audit]
[app.media :as media]
[app.rpc :as-alias rpc]
[app.rpc.climit :as-alias climit]
[app.rpc.commands.profile :as profile]
@@ -33,7 +30,6 @@
[app.rpc.helpers :as rph]
[app.setup :as-alias setup]
[app.setup.welcome-file :refer [create-welcome-file]]
[app.storage :as sto]
[app.tokens :as tokens]
[app.util.services :as sv]
[app.worker :as wrk]
@@ -113,7 +109,7 @@
(assoc profile :is-admin (let [admins (cf/get :admins)]
(contains? admins (:email profile)))))]
(-> response
(rph/with-transform (session/create-fn cfg profile))
(rph/with-transform (session/create-fn cfg (:id profile)))
(rph/with-meta {::audit/props (audit/profile->props profile)
::audit/profile-id (:id profile)}))))]
@@ -149,24 +145,7 @@
[cfg params]
(if (= (:profile-id params)
(::rpc/profile-id params))
(let [{:keys [claims]}
(rph/get-auth-data params)
provider
(some->> (get claims :sso-provider-id)
(oidc/get-provider cfg))
response
(if (and provider (:logout-uri provider))
(let [params {"logout_hint" (get claims :sso-session-id)
"client_id" (get provider :client-id)
"post_logout_redirect_uri" (str (cf/get :public-uri))}
uri (-> (u/uri (:logout-uri provider))
(assoc :query (u/map->query-string params)))]
{:redirect-uri uri})
{})]
(rph/with-transform response (session/delete-fn cfg)))
(rph/with-transform {} (session/delete-fn cfg))
{}))
;; ---- COMMAND: Recover Profile
@@ -292,30 +271,11 @@
;; ---- COMMAND: Register Profile
(defn import-profile-picture
[cfg uri]
(try
(let [storage (sto/resolve cfg)
input (media/download-image cfg uri)
input (media/run {:cmd :info :input input})
hash (sto/calculate-hash (:path input))
content (-> (sto/content (:path input) (:size input))
(sto/wrap-with-hash hash))
sobject (sto/put-object! storage {::sto/content content
::sto/deduplicate? true
:bucket "profile"
:content-type (:mtype input)})]
(:id sobject))
(catch Throwable cause
(l/wrn :hint "unable to import profile picture"
:uri uri
:cause cause)
nil)))
(defn create-profile
(defn create-profile!
"Create the profile entry on the database with limited set of input
attrs (all the other attrs are filled with default values)."
[{:keys [::db/conn] :as cfg} {:keys [email] :as params}]
[conn {:keys [email] :as params}]
(dm/assert! ::sm/email email)
(let [id (or (:id params) (uuid/next))
props (-> (audit/extract-utm-params params)
(merge (:props params))
@@ -323,7 +283,8 @@
:viewed-walkthrough? false
:nudge {:big 10 :small 1}
:v2-info-shown true
:release-notes-viewed (:main cf/version)}))
:release-notes-viewed (:main cf/version)})
(db/tjson))
password (or (:password params) "!")
@@ -338,12 +299,6 @@
theme (:theme params nil)
email (str/lower email)
photo-id (some->> (or (:oidc/picture props)
(:google/picture props)
(:github/picture props)
(:gitlab/picture props))
(import-profile-picture cfg))
params {:id id
:fullname (:fullname params)
:email email
@@ -351,13 +306,11 @@
:lang locale
:password password
:deleted-at (:deleted-at params)
:props (db/tjson props)
:props props
:theme theme
:photo-id photo-id
:is-active is-active
:is-muted is-muted
:is-demo is-demo}]
(try
(-> (db/insert! conn :profile params)
(profile/decode-row))
@@ -370,7 +323,7 @@
(throw cause))))))
(defn create-profile-rels
(defn create-profile-rels!
[conn {:keys [id] :as profile}]
(let [features (cfeat/get-enabled-features cf/flags)
team (teams/create-team conn
@@ -420,13 +373,12 @@
;; to detect if the profile is already registered
(or (profile/get-profile-by-email conn (:email claims))
(let [is-active (or (boolean (:is-active claims))
(boolean (:email-verified claims))
(not (contains? cf/flags :email-verification)))
params (-> params
(assoc :is-active is-active)
(update :password auth/derive-password))
profile (->> (create-profile cfg params)
(create-profile-rels conn))]
profile (->> (create-profile! conn params)
(create-profile-rels! conn))]
(vary-meta profile assoc :created true))))
created? (-> profile meta :created true?)
@@ -464,10 +416,10 @@
(and (some? invitation)
(= (:email profile)
(:member-email invitation)))
(let [invitation (assoc invitation :member-id (:id profile))
token (tokens/generate cfg invitation)]
(let [claims (assoc invitation :member-id (:id profile))
token (tokens/generate cfg claims)]
(-> {:invitation-token token}
(rph/with-transform (session/create-fn cfg profile claims))
(rph/with-transform (session/create-fn cfg (:id profile)))
(rph/with-meta {::audit/replace-props props
::audit/context {:action "accept-invitation"}
::audit/profile-id (:id profile)})))
@@ -478,7 +430,7 @@
created?
(if (:is-active profile)
(-> (profile/strip-private-attrs profile)
(rph/with-transform (session/create-fn cfg profile claims))
(rph/with-transform (session/create-fn cfg (:id profile)))
(rph/with-defer create-welcome-file-when-needed)
(rph/with-meta
{::audit/replace-props props
@@ -607,32 +559,4 @@
[cfg params]
(db/tx-run! cfg request-profile-recovery params))
;; --- COMMAND: get-sso-config
(defn- extract-domain
"Extract the domain part from email"
[email]
(let [at (str/last-index-of email "@")]
(when (and (>= at 0)
(< at (dec (count email))))
(-> (subs email (inc at))
(str/trim)
(str/lower)))))
(def ^:private schema:get-sso-provider
[:map {:title "get-sso-config"}
[:email ::sm/email]])
(def ^:private schema:get-sso-provider-result
[:map {:title "SSOProvider"}
[:id ::sm/uuid]])
(sv/defmethod ::get-sso-provider
{::rpc/auth false
::doc/added "2.12"
::sm/params schema:get-sso-provider
::sm/result schema:get-sso-provider-result}
[cfg {:keys [email]}]
(when-let [domain (extract-domain email)]
(when-let [config (db/get* cfg :sso-provider {:domain domain})]
(select-keys config [:id]))))

View File

@@ -11,9 +11,9 @@
[app.binfile.v1 :as bf.v1]
[app.binfile.v3 :as bf.v3]
[app.common.features :as cfeat]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uri :as u]
[app.config :as cf]
[app.db :as db]
[app.http.sse :as sse]
@@ -25,12 +25,10 @@
[app.rpc.commands.projects :as projects]
[app.rpc.commands.teams :as teams]
[app.rpc.doc :as-alias doc]
[app.storage :as sto]
[app.storage.tmp :as tmp]
[app.rpc.helpers :as rph]
[app.tasks.file-gc]
[app.util.services :as sv]
[app.worker :as-alias wrk]
[datoteka.fs :as fs]))
[app.worker :as-alias wrk]))
(set! *warn-on-reflection* true)
@@ -40,42 +38,52 @@
schema:export-binfile
[:map {:title "export-binfile"}
[:file-id ::sm/uuid]
[:version {:optional true} ::sm/int]
[:include-libraries ::sm/boolean]
[:embed-assets ::sm/boolean]])
(defn- export-binfile
[{:keys [::sto/storage] :as cfg} {:keys [file-id include-libraries embed-assets]}]
(let [output (tmp/tempfile*)]
(try
(-> cfg
(assoc ::bfc/ids #{file-id})
(assoc ::bfc/embed-assets embed-assets)
(assoc ::bfc/include-libraries include-libraries)
(bf.v3/export-files! output))
(defn stream-export-v1
[cfg {:keys [file-id include-libraries embed-assets] :as params}]
(rph/stream
(fn [_ output-stream]
(try
(-> cfg
(assoc ::bfc/ids #{file-id})
(assoc ::bfc/embed-assets embed-assets)
(assoc ::bfc/include-libraries include-libraries)
(bf.v1/export-files! output-stream))
(catch Throwable cause
(l/err :hint "exception on exporting file"
:file-id (str file-id)
:cause cause))))))
(let [data (sto/content output)
object (sto/put-object! storage
{::sto/content data
::sto/touched-at (ct/in-future {:minutes 60})
:content-type "application/zip"
:bucket "tempfile"})]
(-> (cf/get :public-uri)
(u/join "/assets/by-id/")
(u/join (str (:id object)))))
(finally
(fs/delete output)))))
(defn stream-export-v3
[cfg {:keys [file-id include-libraries embed-assets] :as params}]
(rph/stream
(fn [_ output-stream]
(try
(-> cfg
(assoc ::bfc/ids #{file-id})
(assoc ::bfc/embed-assets embed-assets)
(assoc ::bfc/include-libraries include-libraries)
(bf.v3/export-files! output-stream))
(catch Throwable cause
(l/err :hint "exception on exporting file"
:file-id (str file-id)
:cause cause))))))
(sv/defmethod ::export-binfile
"Export a penpot file in a binary format."
{::doc/added "1.15"
::doc/changes [["2.12" "Remove version parameter, only one version is supported"]]
::webhooks/event? true
::sm/params schema:export-binfile}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id] :as params}]
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id version file-id] :as params}]
(files/check-read-permissions! pool profile-id file-id)
(sse/response (partial export-binfile cfg params)))
(let [version (or version 1)]
(case (int version)
1 (stream-export-v1 cfg params)
2 (throw (ex-info "not-implemented" {}))
3 (stream-export-v3 cfg params))))
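An illustrative params map for the restored versioned export; the values are placeholders, :version defaults to 1 when omitted, and version 2 intentionally throws "not-implemented":
(comment
  ;; Hypothetical RPC payload; the handler picks the streaming exporter
  ;; based on :version (1 -> bf.v1, 3 -> bf.v3).
  {:file-id some-file-id           ;; a file uuid the profile can read
   :version 3
   :include-libraries true
   :embed-assets false})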
;; --- Command: import-binfile

View File

@@ -39,7 +39,7 @@
fullname (str "Demo User " sem)
password (-> (bn/random-bytes 16)
(bc/bytes->b64 true)
(bc/bytes->b64u)
(bc/bytes->str))
params {:email email
@@ -49,9 +49,9 @@
:deleted-at (ct/in-future (cf/get-deletion-delay))
:password (derive-password password)
:props {}}
profile (db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
(->> (auth/create-profile cfg params)
(auth/create-profile-rels conn))))]
profile (db/tx-run! cfg (fn [{:keys [::db/conn]}]
(->> (auth/create-profile! conn params)
(auth/create-profile-rels! conn))))]
(with-meta {:email email
:password password}
{::audit/profile-id (:id profile)})))

View File

@@ -79,14 +79,85 @@
;; --- FILE PERMISSIONS
(def ^:private sql:file-permissions
"select fpr.is_owner,
fpr.is_admin,
fpr.can_edit
from file_profile_rel as fpr
inner join file as f on (f.id = fpr.file_id)
where fpr.file_id = ?
and fpr.profile_id = ?
and f.deleted_at is null
union all
select tpr.is_owner,
tpr.is_admin,
tpr.can_edit
from team_profile_rel as tpr
inner join project as p on (p.team_id = tpr.team_id)
inner join file as f on (p.id = f.project_id)
where f.id = ?
and tpr.profile_id = ?
and f.deleted_at is null
union all
select ppr.is_owner,
ppr.is_admin,
ppr.can_edit
from project_profile_rel as ppr
inner join file as f on (f.project_id = ppr.project_id)
where f.id = ?
and ppr.profile_id = ?
and f.deleted_at is null")
(defn get-file-permissions
[conn profile-id file-id]
(when (and profile-id file-id)
(db/exec! conn [sql:file-permissions
file-id profile-id
file-id profile-id
file-id profile-id])))
(defn get-permissions
([conn profile-id file-id]
(let [rows (get-file-permissions conn profile-id file-id)
is-owner (boolean (some :is-owner rows))
is-admin (boolean (some :is-admin rows))
can-edit (boolean (some :can-edit rows))]
(when (seq rows)
{:type :membership
:is-owner is-owner
:is-admin (or is-owner is-admin)
:can-edit (or is-owner is-admin can-edit)
:can-read true
:is-logged (some? profile-id)})))
([conn profile-id file-id share-id]
(let [perms (get-permissions conn profile-id file-id)
ldata (some-> (db/get* conn :share-link {:id share-id :file-id file-id})
(dissoc :flags)
(update :pages db/decode-pgarray #{}))]
;; NOTE: in the future, when share-link becomes more powerful and
;; allows us to specify which parts of the app are available, we
;; will probably need to tweak this function in order to expose
;; these flags to the frontend.
(cond
(some? perms) perms
(some? ldata) {:type :share-link
:can-read true
:pages (:pages ldata)
:is-logged (some? profile-id)
:who-comment (:who-comment ldata)
:who-inspect (:who-inspect ldata)}))))
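A hypothetical result shape for the membership arity of get-permissions above (values depend on the matching rows; the booleans are OR-ed across all of them):
(comment
  ;; Illustrative call for an owner with direct file membership.
  (get-permissions conn profile-id file-id)
  ;; => {:type :membership
  ;;     :is-owner true
  ;;     :is-admin true
  ;;     :can-edit true
  ;;     :can-read true
  ;;     :is-logged true}
  )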
(def has-edit-permissions?
(perms/make-edition-predicate-fn bfc/get-file-permissions))
(perms/make-edition-predicate-fn get-permissions))
(def has-read-permissions?
(perms/make-read-predicate-fn bfc/get-file-permissions))
(perms/make-read-predicate-fn get-permissions))
(def has-comment-permissions?
(perms/make-comment-predicate-fn bfc/get-file-permissions))
(perms/make-comment-predicate-fn get-permissions))
(def check-edition-permissions!
(perms/make-check-fn has-edit-permissions?))
@@ -99,7 +170,7 @@
(defn check-comment-permissions!
[conn profile-id file-id share-id]
(let [perms (bfc/get-file-permissions conn profile-id file-id share-id)
(let [perms (get-permissions conn profile-id file-id share-id)
can-read (has-read-permissions? perms)
can-comment (has-comment-permissions? perms)]
(when-not (or can-read can-comment)
@@ -151,7 +222,7 @@
(defn- get-minimal-file-with-perms
[cfg {:keys [:id ::rpc/profile-id]}]
(let [mfile (get-minimal-file cfg id)
perms (bfc/get-file-permissions cfg profile-id id)]
perms (get-permissions cfg profile-id id)]
(assoc mfile :permissions perms)))
(defn get-file-etag
@@ -177,7 +248,7 @@
;; will be already prefetched and we just reuse them instead
;; of making an additional database queries.
(let [perms (or (:permissions (::cond/object params))
(bfc/get-file-permissions conn profile-id id))]
(get-permissions conn profile-id id))]
(check-read-permissions! perms)
(let [team (teams/get-team conn
@@ -240,7 +311,7 @@
::sm/result schema:file-fragment}
[cfg {:keys [::rpc/profile-id file-id fragment-id share-id]}]
(db/run! cfg (fn [cfg]
(let [perms (bfc/get-file-permissions cfg profile-id file-id share-id)]
(let [perms (get-permissions cfg profile-id file-id share-id)]
(check-read-permissions! perms)
(-> (get-file-fragment cfg file-id fragment-id)
(rph/with-http-cache long-cache-duration))))))
@@ -385,7 +456,8 @@
:code :params-validation
:hint "page-id is required when object-id is provided"))
(let [perms (bfc/get-file-permissions conn profile-id file-id share-id)
(let [perms (get-permissions conn profile-id file-id share-id)
file (bfc/get-file cfg file-id :read-only? true)
proj (db/get conn :project {:id (:project-id file)})
@@ -616,10 +688,11 @@
"Get libraries used by the specified file."
{::doc/added "1.17"
::sm/params schema:get-file-libraries}
[cfg {:keys [::rpc/profile-id file-id]}]
(bfc/check-file-exists cfg file-id)
(check-read-permissions! cfg profile-id file-id)
(bfc/get-file-libraries cfg file-id))
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id]}]
(dm/with-open [conn (db/open pool)]
(check-read-permissions! conn profile-id file-id)
(bfc/get-file-libraries conn file-id)))
;; --- COMMAND QUERY: Files that use this File library
@@ -712,7 +785,8 @@
FROM file AS f
INNER JOIN project AS p ON (p.id = f.project_id)
LEFT JOIN file_thumbnail AS ft on (ft.file_id = f.id
AND ft.revn = f.revn)
AND ft.revn = f.revn
AND ft.deleted_at is null)
WHERE p.team_id = ?
AND (p.deleted_at > ?::timestamptz OR
f.deleted_at > ?::timestamptz)
@@ -1135,7 +1209,7 @@
;; --- MUTATION COMMAND: restore-files-immediatelly
(def ^:private sql:resolve-editable-files
"SELECT f.id, f.project_id
"SELECT f.id
FROM file AS f
JOIN project AS p ON (p.id = f.project_id)
JOIN team AS t ON (t.id = p.team_id)
@@ -1176,38 +1250,18 @@
{:file-id file-id}
{::db/return-keys false}))
(def ^:private sql:restore-projects
"UPDATE project SET deleted_at = null WHERE id = ANY(?::uuid[])")
(defn- restore-projects
[conn project-ids]
(let [project-ids (db/create-array conn "uuid" project-ids)]
(->> (db/exec-one! conn [sql:restore-projects project-ids])
(db/get-update-count))))
(defn- restore-deleted-team-files
[{:keys [::db/conn]} {:keys [::rpc/profile-id team-id ids]}]
(teams/check-edition-permissions! conn profile-id team-id)
(let [total-files
(count ids)
{:keys [files projects]}
(reduce (fn [result {:keys [id project-id]}]
(let [index (-> result :files count)]
(events/tap :progress {:file-id id :index index :total total-files})
(restore-file conn id)
(-> result
(update :files conj id)
(update :projects conj project-id))))
{:files #{} :projects #{}}
(db/plan conn [sql:resolve-editable-files team-id
(db/create-array conn "uuid" ids)]))]
(restore-projects conn projects)
files))
(reduce (fn [affected {:keys [id]}]
(let [index (inc (count affected))]
(events/tap :progress {:file-id id :index index :total (count ids)})
(restore-file conn id)
(conj affected id)))
#{}
(db/plan conn [sql:resolve-editable-files team-id
(db/create-array conn "uuid" ids)])))
(def ^:private schema:restore-deleted-team-files
[:map {:title "restore-deleted-team-files"}
@@ -1215,8 +1269,8 @@
[:ids [::sm/set ::sm/uuid]]])
(sv/defmethod ::restore-deleted-team-files
"Removes the deletion mark from the specified files (and respective
projects) on the specified team."
"Removes the deletion mark from the specified files (and respective projects)."
{::doc/added "2.12"
::sse/stream? true
::sm/params schema:restore-deleted-team-files}

View File

@@ -96,7 +96,7 @@
;; loading all pages into memory to find the frame set for the thumbnail.
(defn get-file-data-for-thumbnail
[{:keys [::db/conn] :as cfg} {:keys [data id] :as file} strip-frames-with-thumbnails]
[{:keys [::db/conn] :as cfg} {:keys [data id] :as file}]
(letfn [;; function responsible for finding the frame marked to be
;; used as the thumbnail; the returned frame always has
;; the :page-id set to the page it belongs to.
@@ -173,7 +173,7 @@
;; Assoc the available thumbnails and prune non-visible shapes
;; to avoid transferring unnecessary data.
strip-frames-with-thumbnails
:always
(update :objects assoc-thumbnails page-id thumbs)))))
(def ^:private
@@ -186,8 +186,7 @@
[:map {:title "PartialFile"}
[:id ::sm/uuid]
[:revn {:min 0} ::sm/int]
[:page [:map-of :keyword ::sm/any]]
[:strip-frames-with-thumbnails {:optional true} ::sm/boolean]])
[:page [:map-of :keyword ::sm/any]]])
(sv/defmethod ::get-file-data-for-thumbnail
"Retrieves the data for generate the thumbnail of the file. Used
@@ -196,26 +195,24 @@
::doc/module :files
::sm/params schema:get-file-data-for-thumbnail
::sm/result schema:partial-file}
[cfg {:keys [::rpc/profile-id file-id strip-frames-with-thumbnails] :as params}]
[cfg {:keys [::rpc/profile-id file-id] :as params}]
(db/run! cfg (fn [{:keys [::db/conn] :as cfg}]
(files/check-read-permissions! conn profile-id file-id)
(let [team (teams/get-team conn
:profile-id profile-id
:file-id file-id)
file (bfc/get-file cfg file-id
:include-deleted? true
:realize? true
:read-only? true)
strip-frames-with-thumbnails
(or (nil? strip-frames-with-thumbnails) ;; if not present, default to true
(true? strip-frames-with-thumbnails))]
:read-only? true)]
(-> (cfeat/get-team-enabled-features cf/flags team)
(cfeat/check-file-features! (:features file)))
{:file-id file-id
:revn (:revn file)
:page (get-file-data-for-thumbnail cfg file strip-frames-with-thumbnails)}))))
:page (get-file-data-for-thumbnail cfg file)}))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; MUTATION COMMANDS
@@ -331,16 +328,12 @@
;; --- MUTATION COMMAND: create-file-thumbnail
(defn- create-file-thumbnail
[{:keys [::db/conn ::sto/storage] :as cfg} {:keys [file-id revn props media] :as params}]
(defn- create-file-thumbnail!
[{:keys [::db/conn ::sto/storage]} {:keys [file-id revn props media] :as params}]
(media/validate-media-type! media)
(media/validate-media-size! media)
(let [file (bfc/get-file cfg file-id
:include-deleted? true
:load-data? false)
props (db/tjson (or props {}))
(let [props (db/tjson (or props {}))
path (:path media)
mtype (:mtype media)
hash (sto/calculate-hash path)
@@ -369,7 +362,7 @@
(db/update! conn :file-thumbnail
{:media-id (:id media)
:deleted-at (:deleted-at file)
:deleted-at nil
:updated-at tnow
:props props}
{:file-id file-id
@@ -380,7 +373,6 @@
:revn revn
:created-at tnow
:updated-at tnow
:deleted-at (:deleted-at file)
:props props
:media-id (:id media)}))
@@ -405,8 +397,6 @@
::rtry/when rtry/conflict-exception?
::sm/params schema:create-file-thumbnail}
;; FIXME: do not run the thumbnail upload inside a transaction
[cfg {:keys [::rpc/profile-id file-id] :as params}]
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
;; TODO For now we check read permissions instead of write,
@@ -414,6 +404,6 @@
;; review this approach in the future.
(files/check-read-permissions! conn profile-id file-id)
(when-not (db/read-only? conn)
(let [media (create-file-thumbnail cfg params)]
(let [media (create-file-thumbnail! cfg params)]
{:uri (files/resolve-public-uri (:id media))
:id (:id media)})))))

View File

@@ -6,7 +6,6 @@
(ns app.rpc.commands.fonts
(:require
[app.binfile.common :as bfc]
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.schema :as sm]
@@ -67,7 +66,7 @@
(uuid? file-id)
(let [file (db/get-by-id conn :file file-id {:columns [:id :project-id]})
project (db/get-by-id conn :project (:project-id file) {:columns [:id :team-id]})
perms (bfc/get-file-permissions conn profile-id file-id share-id)]
perms (files/get-permissions conn profile-id file-id share-id)]
(files/check-read-permissions! perms)
(db/query conn :team-font-variant
{:team-id (:team-id project)

View File

@@ -66,12 +66,12 @@
:member-email (:email profile))
token (tokens/generate cfg claims)]
(-> {:invitation-token token}
(rph/with-transform (session/create-fn cfg profile))
(rph/with-transform (session/create-fn cfg (:id profile)))
(rph/with-meta {::audit/props (:props profile)
::audit/profile-id (:id profile)})))
(-> (profile/strip-private-attrs profile)
(rph/with-transform (session/create-fn cfg profile))
(rph/with-transform (session/create-fn cfg (:id profile)))
(rph/with-meta {::audit/props (:props profile)
::audit/profile-id (:id profile)}))))))
@@ -83,6 +83,6 @@
(profile/clean-email)
(profile/get-profile-by-email conn))
(->> (assoc info :is-active true :is-demo false)
(auth/create-profile cfg)
(auth/create-profile-rels conn)
(auth/create-profile! conn)
(auth/create-profile-rels! conn)
(profile/strip-private-attrs))))))

View File

@@ -7,10 +7,14 @@
(ns app.rpc.commands.media
(:require
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.media :as cm]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.http.client :as http]
[app.loggers.audit :as-alias audit]
[app.media :as media]
[app.rpc :as-alias rpc]
@@ -18,7 +22,13 @@
[app.rpc.commands.files :as files]
[app.rpc.doc :as-alias doc]
[app.storage :as sto]
[app.util.services :as sv]))
[app.storage.tmp :as tmp]
[app.util.services :as sv]
[cuerdas.core :as str]
[datoteka.io :as io]))
(def default-max-file-size
(* 1024 1024 10)) ; 10 MiB
(def thumbnail-options
{:width 100
@@ -187,12 +197,56 @@
mobj))
(defn download-image
[{:keys [::http/client]} uri]
(letfn [(parse-and-validate [{:keys [headers] :as response}]
(let [size (some-> (get headers "content-length") d/parse-integer)
mtype (get headers "content-type")
format (cm/mtype->format mtype)
max-size (cf/get :media-max-file-size default-max-file-size)]
(when-not size
(ex/raise :type :validation
:code :unknown-size
:hint "seems like the url points to resource with unknown size"))
(when (> size max-size)
(ex/raise :type :validation
:code :file-too-large
:hint (str/ffmt "the file size % is greater than the maximum %"
size
default-max-file-size)))
(when (nil? format)
(ex/raise :type :validation
:code :media-type-not-allowed
:hint "seems like the url points to an invalid media object"))
{:size size :mtype mtype :format format}))]
(let [{:keys [body] :as response} (http/req! client
{:method :get :uri uri}
{:response-type :input-stream :sync? true})
{:keys [size mtype]} (parse-and-validate response)
path (tmp/tempfile :prefix "penpot.media.download.")
written (io/write* path body :size size)]
(when (not= written size)
(ex/raise :type :internal
:code :mismatch-write-size
:hint "unexpected state: unable to write to file"))
{:filename "tempfile"
:size size
:path path
:mtype mtype})))
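An illustrative call to the relocated download-image; the URI and size are invented, the cfg map is assumed to carry ::http/client, and the real size/type checks come from the response headers:
(comment
  (download-image cfg "https://example.com/picture.png")
  ;; => {:filename "tempfile"
  ;;     :size 34567              ;; from the content-length header
  ;;     :path <temp file path>   ;; temporary file written on disk
  ;;     :mtype "image/png"}      ;; from the content-type header
  )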
(defn- create-file-media-object-from-url
[cfg {:keys [url name] :as params}]
(let [content (media/download-image cfg url)
(let [content (download-image cfg url)
params (-> params
(assoc :content content)
(assoc :name (d/nilv name "unknown")))]
(assoc :name (or name (:filename content))))]
;; NOTE: we use the climit here in a dynamic invocation because we
;; don't want to saturate the process-image limit with IO (download

View File

@@ -154,6 +154,7 @@
(declare validate-password!)
(declare update-profile-password!)
(declare invalidate-profile-session!)
(def ^:private
schema:update-profile-password
@@ -168,7 +169,8 @@
::climit/id :auth/global
::db/transaction true}
[cfg {:keys [::rpc/profile-id password] :as params}]
(let [profile (validate-password! cfg (assoc params :profile-id profile-id))]
(let [profile (validate-password! cfg (assoc params :profile-id profile-id))
session-id (::session/id params)]
(when (= (:email profile) (str/lower (:password params)))
(ex/raise :type :validation
@@ -176,13 +178,15 @@
:hint "you can't use your email as password"))
(update-profile-password! cfg (assoc profile :password password))
(->> (rph/get-request params)
(session/get-session)
(session/invalidate-others cfg))
(invalidate-profile-session! cfg profile-id session-id)
nil))
(defn- invalidate-profile-session!
"Removes all sessions except the current one."
[{:keys [::db/conn]} profile-id session-id]
(let [sql "delete from http_session where profile_id = ? and id != ?"]
(:next.jdbc/update-count (db/exec-one! conn [sql profile-id session-id]))))
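A sketch of how the new helper is used from the password-update flow above; the bindings are the ones already established in the surrounding code, and cfg is assumed to carry the open ::db/conn of the running transaction:
(comment
  ;; Deletes every http_session row for the profile except the one
  ;; identified by the current ::session/id; returns the delete count.
  (invalidate-profile-session! cfg profile-id session-id))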
(defn- validate-password!
[{:keys [::db/conn] :as cfg} {:keys [profile-id old-password] :as params}]
(let [profile (db/get-by-id conn :profile profile-id ::sql/for-update true)]
@@ -280,9 +284,9 @@
:file-path (str (:path file))
:file-mtype (:mtype file)}}))))
(defn- generate-thumbnail
[_ input]
(let [input (media/run {:cmd :info :input input})
(defn- generate-thumbnail!
[_ file]
(let [input (media/run {:cmd :info :input file})
thumb (media/run {:cmd :profile-thumbnail
:format :jpeg
:quality 85
@@ -303,7 +307,7 @@
(assoc ::climit/id [[:process-image/by-profile (:profile-id params)]
[:process-image/global]])
(assoc ::climit/label "upload-photo")
(climit/invoke! generate-thumbnail file))]
(climit/invoke! generate-thumbnail! file))]
(sto/put-object! storage params)))
;; --- MUTATION: Request Email Change

View File

@@ -169,19 +169,12 @@
;; --- MUTATION: Create Project
(defn- create-project
[{:keys [::db/conn] :as cfg} {:keys [::rpc/request-at profile-id team-id] :as params}]
(assert (ct/inst? request-at) "expect request-at assigned")
(let [params (-> params
(assoc :created-at request-at)
(assoc :modified-at request-at))
project (teams/create-project conn params)
timestamp (::rpc/request-at params)]
[{:keys [::db/conn] :as cfg} {:keys [profile-id team-id] :as params}]
(let [project (teams/create-project conn params)]
(teams/create-project-role conn profile-id (:id project) :owner)
(db/insert! conn :team-project-profile-rel
{:project-id (:id project)
:profile-id profile-id
:created-at timestamp
:modified-at timestamp
:team-id team-id
:is-pinned false})
(assoc project :is-pinned false)))

View File

@@ -73,7 +73,7 @@
{:id (:id profile)}))
(-> claims
(rph/with-transform (session/create-fn cfg profile))
(rph/with-transform (session/create-fn cfg profile-id))
(rph/with-meta {::audit/name "verify-profile-email"
::audit/props (audit/profile->props profile)
::audit/profile-id (:id profile)}))))

View File

@@ -13,6 +13,7 @@
[app.config :as cf]
[app.db :as db]
[app.rpc :as-alias rpc]
[app.rpc.commands.files :as files]
[app.rpc.commands.teams :as teams]
[app.rpc.cond :as-alias cond]
[app.rpc.doc :as-alias doc]
@@ -120,7 +121,7 @@
[system {:keys [::rpc/profile-id file-id share-id] :as params}]
(db/run! system
(fn [{:keys [::db/conn] :as system}]
(let [perms (bfc/get-file-permissions conn profile-id file-id share-id)
(let [perms (files/get-permissions conn profile-id file-id share-id)
params (-> params
(assoc ::perms perms)
(assoc :profile-id profile-id))]

View File

@@ -39,8 +39,9 @@
(defn- encode
[s]
(-> s
(bh/blake2b-256)
(bc/bytes->b64-str true)))
bh/blake2b-256
bc/bytes->b64u
bc/bytes->str))
(defn- fmt-key
[s]

View File

@@ -16,7 +16,6 @@
[app.common.schema.desc-native :as smdn]
[app.common.schema.openapi :as oapi]
[app.common.schema.registry :as sr]
[app.common.uri :as u]
[app.config :as cf]
[app.http.sse :as-alias sse]
[app.loggers.webhooks :as-alias webhooks]
@@ -26,6 +25,7 @@
[clojure.java.io :as io]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[integrant.core :as ig]
[pretty-spec.core :as ps]
[yetti.response :as-alias yres]))
@@ -33,8 +33,8 @@
;; DOC (human readable)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- context
[{:keys [methods entrypoint label openapi]}]
(defn- prepare-doc-context
[methods]
(letfn [(fmt-spec [mdata]
(when-let [spec (ex/ignoring (s/spec (::sv/spec mdata)))]
(with-out-str
@@ -62,10 +62,8 @@
:added (::added mdata)
:changes (some->> (::changes mdata) (partition-all 2) (map vec))
:spec (fmt-spec mdata)
:entrypoint (-> entrypoint
(u/ensure-path-slash)
(u/join (::sv/name mdata))
(str))
:entrypoint (str (cf/get :public-uri) "/api/rpc/command/" (::sv/name mdata))
:params-schema-js (fmt-schema :js mdata ::sm/params)
:result-schema-js (fmt-schema :js mdata ::sm/result)
:webhook-schema-js (fmt-schema :js mdata ::sm/webhook)
@@ -74,9 +72,6 @@
:webhook-schema-clj (fmt-schema :clj mdata ::sm/webhook)})]
{:version (:main cf/version)
:label label
:entrypoint (str entrypoint)
:openapi (str openapi)
:methods
(->> methods
(map val)
@@ -85,19 +80,17 @@
(map get-context)
(sort-by (juxt :module :name)))}))
(defn- handler
[& {:keys [template] :as options}]
(defn- doc-handler
[context]
(if (contains? cf/flags :backend-api-doc)
(let [context (delay (context options))
template (or template "app/templates/api-doc.tmpl")]
(fn [request]
(let [params (:query-params request)
pstyle (:type params "js")
context (assoc @context :param-style pstyle)]
(fn [request]
(let [params (:query-params request)
pstyle (:type params "js")
context (assoc @context :param-style pstyle)]
{::yres/status 200
::yres/body (-> (io/resource template)
(tmpl/render context))})))
{::yres/status 200
::yres/body (-> (io/resource "app/templates/api-doc.tmpl")
(tmpl/render context))}))
(fn [_]
{::yres/status 404})))
@@ -105,8 +98,8 @@
;; OPENAPI / SWAGGER (v3.1)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- openapi-context
[{:keys [methods entrypoint description]}]
(defn prepare-openapi-context
[methods]
(let [definitions (atom {})
options {:registry sr/default-registry
::oapi/definitions-path "#/components/schemas/"
@@ -119,9 +112,7 @@
(fn [tsx schema]
(let [schema (sm/schema schema)
example (sm/generate schema)
example (sm/encode schema example output-transformer)
example (json/encode example :key-fn json/write-camel-key)]
example (sm/encode schema example output-transformer)]
{:default
{:description "A default response"
:content
@@ -132,9 +123,7 @@
gen-params-doc
(fn [tsx schema]
(let [example (sm/generate schema)
example (sm/encode schema example output-transformer)
example (json/encode example :key-fn json/write-camel-key)]
example (sm/encode schema example output-transformer)]
{:required true
:content
{"application/json"
@@ -169,35 +158,34 @@
(map gen-method-doc)
(sort-by (juxt :module :name))
(map (fn [doc]
[(:name doc) (:repr doc)]))
[(str/ffmt "/command/%" (:name doc)) (:repr doc)]))
(into {})))]
{:openapi "3.0.0"
:info {:version (:main cf/version)}
:servers [{:url (str entrypoint)
:description (or description "")}]
:servers [{:url (str/ffmt "%/api/rpc" (cf/get :public-uri))
;; :description "penpot backend"
}]
:paths paths
:components {:schemas @definitions}}))
(defn- openapi-json-handler
[& {:as options}]
(defn openapi-json-handler
[context]
(if (contains? cf/flags :backend-openapi-doc)
(let [context (delay (openapi-context options))]
(fn [_]
{::yres/status 200
::yres/headers {"content-type" "application/json; charset=utf-8"}
::yres/body (json/encode @context)}))
(fn [_]
{::yres/status 200
::yres/headers {"content-type" "application/json; charset=utf-8"}
::yres/body (json/encode @context)})
(fn [_]
{::yres/status 404})))
(defn- openapi-handler
[& {:keys [uri label]}]
(defn openapi-handler
[]
(if (contains? cf/flags :backend-openapi-doc)
(fn [_]
(let [swagger-js (slurp (io/resource "app/assets/swagger-ui-4.18.3.js"))
swagger-cs (slurp (io/resource "app/assets/swagger-ui-4.18.3.css"))
context {:uri (str uri)
:label label
context {:public-uri (cf/get :public-uri)
:swagger-js swagger-js
:swagger-css swagger-cs}]
{::yres/status 200
@@ -208,43 +196,27 @@
{::yres/status 404})))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ROUTES HELPER
;; MODULE INIT
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn routes
[& {:keys [label base-uri description methods]}]
(let [entrypoint
(-> base-uri
(u/ensure-path-slash)
(u/join "methods"))
(defmethod ig/assert-key ::routes
[_ params]
(assert (sm/valid? ::rpc/methods (::rpc/methods params)) "expected valid methods"))
openapi
(-> base-uri
(u/ensure-path-slash)
(u/join "doc/openapi"))
(defmethod ig/init-key ::routes
[_ {:keys [::rpc/methods] :as cfg}]
[(let [context (delay (prepare-doc-context methods))]
[["/_doc"
{:handler (doc-handler context)
:allowed-methods #{:get}}]
["/doc"
{:handler (doc-handler context)
:allowed-methods #{:get}}]])
template
(case label
"management" "app/templates/management-api-doc.tmpl"
"main" "app/templates/main-api-doc.tmpl")]
["/doc"
["" {:handler (handler :methods methods
:label label
:entrypoint entrypoint
:openapi openapi
:template template)
:allowed-methods #{:get}}]
["/openapi"
{:handler (openapi-handler
:uri (u/join openapi "openapi.json")
:label label)
:allowed-methods #{:get}}]
["/openapi.json"
{:handler (openapi-json-handler {:entrypoint entrypoint
:description description
:methods methods})
:allowed-methods #{:get}}]]))
(let [context (delay (prepare-openapi-context methods))]
[["/openapi"
{:handler (openapi-handler)
:allowed-methods #{:get}}]
["/openapi.json"
{:handler (openapi-json-handler context)
:allowed-methods #{:get}}]])])

View File

@@ -83,16 +83,3 @@
"A convenience allias for yetti.response/stream-body"
[f]
(yres/stream-body f))
(defn get-request
"Get http request from RPC params"
[params]
(assert (contains? params ::rpc/request-at) "rpc params required")
(-> (meta params)
(get ::http/request)))
(defn get-auth-data
"Get http auth-data from RPC params"
[params]
(-> (get-request params)
(get ::http/auth-data)))

View File

@@ -1,49 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.rpc.management.exporter
(:require
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uri :as u]
[app.config :as cf]
[app.media :refer [schema:upload]]
[app.rpc :as-alias rpc]
[app.rpc.doc :as doc]
[app.storage :as sto]
[app.util.services :as sv]))
;; ---- RPC METHOD: UPLOAD-TEMPFILE
(def ^:private
schema:upload-tempfile-params
[:map {:title "upload-templfile-params"}
[:content schema:upload]])
(def ^:private
schema:upload-tempfile-result
[:map {:title "upload-templfile-result"}])
(sv/defmethod ::upload-tempfile
{::doc/added "2.12"
::sm/params schema:upload-tempfile-params
::sm/result schema:upload-tempfile-result}
[cfg {:keys [::rpc/profile-id content]}]
(let [storage (sto/resolve cfg)
hash (sto/calculate-hash (:path content))
data (-> (sto/content (:path content))
(sto/wrap-with-hash hash))
content {::sto/content data
::sto/deduplicate? true
::sto/touched-at (ct/in-future {:minutes 10})
:profile-id profile-id
:content-type (:mtype content)
:bucket "tempfile"}
object (sto/put-object! storage content)]
{:id (:id object)
:uri (-> (cf/get :public-uri)
(u/join "/assets/by-id/")
(u/join (str (:id object))))}))

View File

@@ -1,183 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.rpc.management.subscription
(:require
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.schema.generators :as sg]
[app.common.time :as ct]
[app.db :as db]
[app.rpc :as-alias rpc]
[app.rpc.commands.profile :as profile]
[app.rpc.doc :as doc]
[app.util.services :as sv]))
;; ---- RPC METHOD: AUTHENTICATE
(def ^:private
schema:authenticate-params
[:map {:title "authenticate-params"}])
(def ^:private
schema:authenticate-result
[:map {:title "authenticate-result"}
[:profile-id ::sm/uuid]])
(sv/defmethod ::auth
{::doc/added "2.12"
::sm/params schema:authenticate-params
::sm/result schema:authenticate-result}
[_ {:keys [::rpc/profile-id]}]
{:profile-id profile-id})
;; ---- RPC METHOD: GET-CUSTOMER
;; FIXME: move to app.common.time
(def ^:private schema:timestamp
(sm/type-schema
{:type ::timestamp
:pred ct/inst?
:type-properties
{:title "inst"
:description "The same as :app.common.time/inst but encodes to epoch"
:error/message "should be an instant"
:gen/gen (->> (sg/small-int)
(sg/fmap (fn [v] (ct/inst v))))
:decode/string #(some-> % ct/inst)
:encode/string #(some-> % inst-ms)
:decode/json #(some-> % ct/inst)
:encode/json #(some-> % inst-ms)}}))
(def ^:private schema:subscription
[:map {:title "Subscription"}
[:id ::sm/text]
[:customer-id ::sm/text]
[:type [:enum
"unlimited"
"professional"
"enterprise"]]
[:status [:enum
"active"
"canceled"
"incomplete"
"incomplete_expired"
"past_due"
"paused"
"trialing"
"unpaid"]]
[:billing-period [:enum
"month"
"day"
"week"
"year"]]
[:quantity :int]
[:description [:maybe ::sm/text]]
[:created-at schema:timestamp]
[:start-date [:maybe schema:timestamp]]
[:ended-at [:maybe schema:timestamp]]
[:trial-end [:maybe schema:timestamp]]
[:trial-start [:maybe schema:timestamp]]
[:cancel-at [:maybe schema:timestamp]]
[:canceled-at [:maybe schema:timestamp]]
[:current-period-end [:maybe schema:timestamp]]
[:current-period-start [:maybe schema:timestamp]]
[:cancel-at-period-end :boolean]
[:cancellation-details
[:map {:title "CancellationDetails"}
[:comment [:maybe ::sm/text]]
[:reason [:maybe ::sm/text]]
[:feedback [:maybe
[:enum
"customer_service"
"low_quality"
"missing_feature"
"other"
"switched_service"
"too_complex"
"too_expensive"
"unused"]]]]]])
(def ^:private sql:get-customer-slots
"WITH teams AS (
SELECT tpr.team_id AS id,
tpr.profile_id AS profile_id
FROM team_profile_rel AS tpr
WHERE tpr.is_owner IS true
AND tpr.profile_id = ?
), teams_with_slots AS (
SELECT tpr.team_id AS id,
count(*) AS total
FROM team_profile_rel AS tpr
WHERE tpr.team_id IN (SELECT id FROM teams)
AND tpr.can_edit IS true
GROUP BY 1
ORDER BY 2
)
SELECT max(total) AS total FROM teams_with_slots;")
(defn- get-customer-slots
[cfg profile-id]
(let [result (db/exec-one! cfg [sql:get-customer-slots profile-id])]
(:total result)))
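;; Editorial note (not part of the change): the query above counts, for
;; each team owned by the profile, the members with edit permission and
;; keeps the highest count; a hypothetical owner of two teams with 3 and
;; 5 editors would therefore see:
(comment
  (get-customer-slots cfg profile-id)) ;; => 5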
(def ^:private schema:get-customer-params
[:map])
(def ^:private schema:get-customer-result
[:map
[:id ::sm/uuid]
[:name :string]
[:num-editors ::sm/int]
[:subscription {:optional true} schema:subscription]])
(sv/defmethod ::get-customer
{::doc/added "2.12"
::sm/params schema:get-customer-params
::sm/result schema:get-customer-result}
[cfg {:keys [::rpc/profile-id]}]
(let [profile (profile/get-profile cfg profile-id)]
{:id (get profile :id)
:name (get profile :fullname)
:email (get profile :email)
:num-editors (get-customer-slots cfg profile-id)
:subscription (-> profile :props :subscription)}))
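;; Editorial sketch of the result shape (placeholder values, not part of
;; the change); :subscription is only present when the profile props
;; contain one:
(comment
  {:id profile-id
   :name "Example User"
   :email "user@example.com"
   :num-editors 5
   ;; remaining subscription keys from schema:subscription omitted
   :subscription {:type "professional"
                  :status "active"}})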
;; ---- RPC METHOD: UPDATE-CUSTOMER
(def ^:private schema:update-customer-params
[:map
[:subscription [:maybe schema:subscription]]])
(def ^:private schema:update-customer-result
[:map])
(sv/defmethod ::update-customer
{::doc/added "2.12"
::sm/params schema:update-customer-params
::sm/result schema:update-customer-result}
[cfg {:keys [::rpc/profile-id subscription]}]
(let [{:keys [props] :as profile}
(profile/get-profile cfg profile-id ::db/for-update true)
props
(assoc props :subscription subscription)]
(l/dbg :hint "update customer"
:profile-id (str profile-id)
:subscription-type (get subscription :type)
:subscription-status (get subscription :status)
:subscription-quantity (get subscription :quantity))
(db/update! cfg :profile
{:props (db/tjson props)}
{:id profile-id}
{::db/return-keys false})
nil))

View File

@@ -104,29 +104,28 @@
(def ^:private schema:limit
[:and
[:map
[::name :keyword]
[::name :any]
[::strategy schema:strategy]
[::key :string]
[::opts :string]
[::capacity {:optional true} ::sm/int]
[::rate {:optional true} ::sm/int]
[::interval {:optional true} ::ct/duration]
[::params {:optional true} [::sm/vec :any]]
[::permits {:optional true} ::sm/int]
[::unit {:optional true} [:enum :days :hours :minutes :seconds :weeks]]]
[:fn (fn [attrs]
(let [contains-fn (partial contains? attrs)]
(or (every? contains-fn [::capacity ::rate ::interval])
(every? contains-fn [::permits ::unit]))))]])
[::opts :string]]
[:or
[:map
[::capacity ::sm/int]
[::rate ::sm/int]
[::interval ::ct/duration]
[::params [::sm/vec :any]]]
[:map
[::nreq ::sm/int]
[::unit [:enum :days :hours :minutes :seconds :weeks]]]]])
(def ^:private schema:limits
[:map-of :keyword [::sm/vec schema:limit]])
(def ^:private valid-limit-tuple?
(sm/validator schema:limit-tuple))
(sm/lazy-validator schema:limit-tuple))
(def ^:private valid-rlimit-instance?
(sm/validator ::rpc/rlimit))
(sm/lazy-validator ::rpc/rlimit))
(defmethod parse-limit :window
[[name strategy opts :as vlimit]]
@@ -135,16 +134,16 @@
(merge
{::name name
::strategy strategy}
(if-let [[_ permits unit] (re-find window-opts-re opts)]
(let [permits (parse-long permits)]
{::permits permits
(if-let [[_ nreq unit] (re-find window-opts-re opts)]
(let [nreq (parse-long nreq)]
{::nreq nreq
::unit (case unit
"d" :days
"h" :hours
"m" :minutes
"s" :seconds
"w" :weeks)
::key (str "penpot.rlimit." (cf/get :tenant) ".window." (d/name name))
::key (str "ratelimit.window." (d/name name))
::opts opts})
(ex/raise :type :validation
:code :invalid-window-limit-opts
@@ -165,15 +164,15 @@
::interval interval
::opts opts
::params [(->seconds interval) rate capacity]
::key (str "penpot.rlimit." (cf/get :tenant) ".bucket." (d/name name))})
::key (str "ratelimit.bucket." (d/name name))})
(ex/raise :type :validation
:code :invalid-bucket-limit-opts
:hint (str/ffmt "looks like '%' does not have a valid format" opts))))
(defmethod process-limit :bucket
[rconn profile-id now {:keys [::key ::params ::service ::capacity ::interval ::rate] :as limit}]
[rconn user-id now {:keys [::key ::params ::service ::capacity ::interval ::rate] :as limit}]
(let [script (-> bucket-rate-limit-script
(assoc ::rscript/keys [(str key "." service "." profile-id)])
(assoc ::rscript/keys [(str key "." service "." user-id)])
(assoc ::rscript/vals (conj params (->seconds now))))
result (rds/eval rconn script)
allowed? (boolean (nth result 0))
@@ -193,18 +192,18 @@
(assoc ::lresult/remaining remaining))))
(defmethod process-limit :window
[rconn profile-id now {:keys [::permits ::unit ::key ::service] :as limit}]
[rconn user-id now {:keys [::nreq ::unit ::key ::service] :as limit}]
(let [ts (ct/truncate now unit)
ttl (ct/diff now (ct/plus ts {unit 1}))
script (-> window-rate-limit-script
(assoc ::rscript/keys [(str key "." service "." profile-id "." (ct/format-inst ts))])
(assoc ::rscript/vals [permits (->seconds ttl)]))
(assoc ::rscript/keys [(str key "." service "." user-id "." (ct/format-inst ts))])
(assoc ::rscript/vals [nreq (->seconds ttl)]))
result (rds/eval rconn script)
allowed? (boolean (nth result 0))
remaining (nth result 1)]
(l/trace :hint "limit processed"
:service service
:name (name (::name limit))
:limit (name (::name limit))
:strategy (name (::strategy limit))
:opts (::opts limit)
:allowed allowed?
@@ -215,8 +214,8 @@
(assoc ::lresult/reset (ct/plus ts {unit 1})))))
(defn- process-limits
[rconn profile-id limits now]
(let [results (into [] (map (partial process-limit rconn profile-id now)) limits)
[rconn user-id limits now]
(let [results (into [] (map (partial process-limit rconn user-id now)) limits)
remaining (->> results
(d/index-by ::name ::lresult/remaining)
(uri/map->query-string))
@@ -228,7 +227,7 @@
(when rejected
(l/warn :hint "rejected rate limit"
:profile-id (str profile-id)
:user-id (str user-id)
:limit-service (-> rejected ::service name)
:limit-name (-> rejected ::name name)
:limit-strategy (-> rejected ::strategy name)))
@@ -372,9 +371,12 @@
(defn- on-refresh-error
[_ cause]
(when-not (instance? java.util.concurrent.RejectedExecutionException cause)
(l/warn :hint "unexpected exception on loading config"
:cause cause
::l/sync? true)))
(if-let [explain (-> cause ex-data ex/explain)]
(l/warn ::l/raw (str "unable to refresh config, invalid format:\n" explain)
::l/sync? true)
(l/warn :hint "unexpected exception on loading config"
:cause cause
::l/sync? true))))
(defn- get-config-path
[]

View File

@@ -25,9 +25,9 @@ local allowed = filled >= requested
local newTokens = filled
if allowed then
newTokens = filled - requested
redis.call("hset", tokensKey, "tokens", newTokens, "timestamp", timestamp)
end
redis.call("hset", tokensKey, "tokens", newTokens, "timestamp", timestamp)
redis.call("expire", tokensKey, ttl)
return { allowed, newTokens }

View File

@@ -22,7 +22,8 @@
(defn- generate-random-key
[]
(-> (bn/random-bytes 64)
(bc/bytes->b64-str true)))
(bc/bytes->b64u)
(bc/bytes->str)))
(defn- get-all-props
[conn]
@@ -84,11 +85,12 @@
(l/warn :hint (str "using autogenerated secret-key, it will change on each restart and will invalidate "
"all sessions on each restart, it is highly recommended setting up the "
"PENPOT_SECRET_KEY environment variable")))
(let [secret (or key (generate-random-key))]
(-> (get-all-props conn)
(assoc :secret-key secret)
(assoc :tokens-key (keys/derive secret :salt "tokens"))
(assoc :management-key (keys/derive secret :salt "management"))
(update :instance-id handle-instance-id conn (db/read-only? pool)))))))
(sm/register! ::props [:map-of :keyword ::sm/any])
;; FIXME
(sm/register! ::props :any)

View File

@@ -8,13 +8,13 @@
"Keys derivation service."
(:refer-clojure :exclude [derive])
(:require
[app.common.spec :as us]
[buddy.core.kdf :as bk]))
(defn derive
"Derive a key from secret-key"
[secret-key & {:keys [salt size] :or {size 32}}]
(assert (string? secret-key) "expect string")
(assert (seq secret-key) "expect string")
(us/assert! ::us/not-empty-string secret-key)
(let [engine (bk/engine {:key secret-key
:salt salt
:alg :hkdf

View File

@@ -61,8 +61,8 @@
:is-active is-active
:password password
:props {}}]
(->> (cmd.auth/create-profile system params)
(cmd.auth/create-profile-rels conn)))))))
(->> (cmd.auth/create-profile! conn params)
(cmd.auth/create-profile-rels! conn)))))))
(defmethod exec-command "update-profile"
[{:keys [fullname email password is-active]}]

View File

@@ -25,7 +25,6 @@
[app.db.sql :as-alias sql]
[app.features.fdata :as fdata]
[app.features.file-snapshots :as fsnap]
[app.http.session :as session]
[app.loggers.audit :as audit]
[app.main :as main]
[app.msgbus :as mbus]
@@ -844,33 +843,10 @@
:deleted-at deleted-at
:id id})))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; SSO
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn add-sso-config
[& {:keys [base-uri client-id client-secret domain]}]
(assert (and (string? base-uri) (str/starts-with? base-uri "http")) "expected a valid base-uri")
(assert (string? client-id) "expected a valid client-id")
(assert (string? client-secret) "expected a valid client-secret")
(assert (string? domain) "expected a valid domain")
(db/insert! main/system :sso-provider
{:id (uuid/next)
:type "oidc"
:client-id client-id
:client-secret client-secret
:domain domain
:base-uri base-uri}))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; MISC
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn decode-session-token
[token]
(session/decode-token main/system token))
(defn instrument-var
[var]
(alter-var-root var (fn [f]

View File

@@ -35,16 +35,12 @@
:assets-s3 :s3
nil)))
(def default-bucket
"file-media-object")
(def valid-buckets
#{"file-media-object"
"team-font-variant"
"file-object-thumbnail"
"file-thumbnail"
"profile"
"tempfile"
"file-data"
"file-data-fragment"
"file-change"})
@@ -167,6 +163,9 @@
backend
(:metadata result))))
(def ^:private sql:retrieve-storage-object
"select * from storage_object where id = ? and (deleted_at is null or deleted_at > now())")
(defn row->storage-object [res]
(let [mdata (or (some-> (:metadata res) (db/decode-transit-pgobject)) {})]
(impl/storage-object
@@ -178,15 +177,9 @@
(keyword (:backend res))
mdata)))
(def ^:private sql:get-storage-object
"SELECT *
FROM storage_object
WHERE id = ?
AND (deleted_at IS NULL)")
(defn- get-database-object
(defn- retrieve-database-object
[conn id]
(some-> (db/exec-one! conn [sql:get-storage-object id])
(some-> (db/exec-one! conn [sql:retrieve-storage-object id])
(row->storage-object)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -209,7 +202,7 @@
(defn get-object
[{:keys [::db/connectable] :as storage} id]
(assert (valid-storage? storage))
(get-database-object connectable id))
(retrieve-database-object connectable id))
(defn put-object!
"Creates a new object with the provided content."

View File

@@ -37,6 +37,7 @@
(into #{} (map :id))
(not-empty))))
(def ^:private sql:delete-sobjects
"DELETE FROM storage_object
WHERE id = ANY(?::uuid[])")
@@ -76,37 +77,47 @@
(d/group-by (comp keyword :backend) :id #{} items))
(def ^:private sql:get-deleted-sobjects
"SELECT s.*
FROM storage_object AS s
"SELECT s.* FROM storage_object AS s
WHERE s.deleted_at IS NOT NULL
AND s.deleted_at <= ?
AND s.deleted_at < now() - ?::interval
ORDER BY s.deleted_at ASC")
(defn- get-buckets
[conn]
(let [now (ct/now)]
[conn min-age]
(let [age (db/interval min-age)]
(sequence
(comp (partition-all 25)
(mapcat group-by-backend))
(db/cursor conn [sql:get-deleted-sobjects now]))))
(db/cursor conn [sql:get-deleted-sobjects age]))))
(defn- clean-deleted!
[{:keys [::db/conn] :as cfg}]
[{:keys [::db/conn ::min-age] :as cfg}]
(reduce (fn [total [backend-id ids]]
(let [deleted (delete-in-bulk! cfg backend-id ids)]
(+ total (or deleted 0))))
0
(get-buckets conn)))
(get-buckets conn min-age)))
(defmethod ig/assert-key ::handler
[_ params]
(assert (sto/valid-storage? (::sto/storage params)) "expect valid storage")
(assert (db/pool? (::db/pool params)) "expect valid database pool"))
(defmethod ig/expand-key ::handler
[k v]
{k (assoc v ::min-age (ct/duration {:hours 2}))})
(defmethod ig/init-key ::handler
[_ cfg]
(fn [_]
(db/tx-run! cfg (fn [cfg]
(let [total (clean-deleted! cfg)]
(l/inf :hint "task finished" :total total)
{:deleted total})))))
[_ {:keys [::min-age] :as cfg}]
(fn [{:keys [props] :as task}]
(let [min-age (ct/duration (or (:min-age props) min-age))]
(db/tx-run! cfg (fn [cfg]
(let [cfg (assoc cfg ::min-age min-age)
total (clean-deleted! cfg)]
(l/inf :hint "task finished"
:min-age (ct/format-duration min-age)
:total total)
{:deleted total}))))))
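;; Editorial note (not part of the change): the default min-age comes
;; from ig/expand-key above (2 hours), but task props may override it,
;; as the backend tests do:
(comment
  (th/run-task! :storage-gc-deleted {:min-age 0}))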

View File

@@ -22,10 +22,8 @@
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.time :as ct]
[app.config :as cf]
[app.db :as db]
[app.storage :as sto]
[app.storage :as-alias sto]
[app.storage.impl :as impl]
[integrant.core :as ig]))
@@ -103,15 +101,14 @@
(def ^:private sql:mark-delete-in-bulk
"UPDATE storage_object
SET deleted_at = ?,
SET deleted_at = now(),
touched_at = NULL
WHERE id = ANY(?::uuid[])")
(defn- mark-delete-in-bulk!
[conn deletion-delay ids]
(let [ids (db/create-array conn "uuid" ids)
now (ct/plus (ct/now) deletion-delay)]
(db/exec-one! conn [sql:mark-delete-in-bulk now ids])))
[conn ids]
(let [ids (db/create-array conn "uuid" ids)]
(db/exec-one! conn [sql:mark-delete-in-bulk ids])))
;; NOTE: A getter that retrieves the key which will be used for group
;; ids; previously we have no value, then we introduced the
@@ -130,7 +127,7 @@
[{:keys [metadata]}]
(or (some-> metadata :bucket)
(some-> metadata :reference d/name)
sto/default-bucket))
"file-media-object"))
(defn- process-objects!
[conn has-refs? bucket objects]
@@ -140,20 +137,18 @@
(if-let [{:keys [id] :as object} (first objects)]
(if (has-refs? conn object)
(do
(l/dbg :id (str id)
:status "freeze"
:bucket bucket)
(l/debug :id (str id)
:status "freeze"
:bucket bucket)
(recur (conj to-freeze id) to-delete (rest objects)))
(do
(l/dbg :id (str id)
:status "delete"
:bucket bucket)
(l/debug :id (str id)
:status "delete"
:bucket bucket)
(recur to-freeze (conj to-delete id) (rest objects))))
(let [deletion-delay (if (= bucket "tempfile")
(ct/duration {:hours 2})
(cf/get-deletion-delay))]
(do
(some->> (seq to-freeze) (mark-freeze-in-bulk! conn))
(some->> (seq to-delete) (mark-delete-in-bulk! conn deletion-delay))
(some->> (seq to-delete) (mark-delete-in-bulk! conn))
[(count to-freeze) (count to-delete)]))))
(defn- process-bucket!
@@ -165,7 +160,6 @@
"file-thumbnail" (process-objects! conn has-file-thumbnails-refs? bucket objects)
"profile" (process-objects! conn has-profile-refs? bucket objects)
"file-data" (process-objects! conn has-file-data-refs? bucket objects)
"tempfile" (process-objects! conn (constantly false) bucket objects)
(ex/raise :type :internal
:code :unexpected-unknown-reference
:hint (dm/fmt "unknown reference '%'" bucket))))
@@ -179,27 +173,27 @@
[0 0]
(d/group-by lookup-bucket identity #{} chunk)))
(def ^:private sql:get-touched-storage-objects
(def ^:private
sql:get-touched-storage-objects
"SELECT so.*
FROM storage_object AS so
WHERE so.touched_at IS NOT NULL
AND so.touched_at <= ?
ORDER BY touched_at ASC
FOR UPDATE
SKIP LOCKED
LIMIT 10")
(defn get-chunk
[conn timestamp]
(->> (db/exec! conn [sql:get-touched-storage-objects timestamp])
[conn]
(->> (db/exec! conn [sql:get-touched-storage-objects])
(map impl/decode-row)
(not-empty)))
(defn- process-touched!
[{:keys [::db/pool ::timestamp] :as cfg}]
[{:keys [::db/pool] :as cfg}]
(loop [freezed 0
deleted 0]
(if-let [chunk (get-chunk pool timestamp)]
(if-let [chunk (get-chunk pool)]
(let [[nfo ndo] (db/tx-run! cfg process-chunk! chunk)]
(recur (long (+ freezed nfo))
(long (+ deleted ndo))))
@@ -215,6 +209,5 @@
(defmethod ig/init-key ::handler
[_ cfg]
(fn [_]
(process-touched! (assoc cfg ::timestamp (ct/now)))))
(fn [_] (process-touched! cfg)))

View File

@@ -79,17 +79,14 @@
;; API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn tempfile*
[& {:keys [suffix prefix]
(defn tempfile
[& {:keys [suffix prefix min-age]
:or {prefix "penpot."
suffix ".tmp"}}]
(let [attrs (fs/make-permissions "rw-r--r--")
path (fs/join default-tmp-dir (str prefix (uuid/next) suffix))]
(Files/createFile path attrs)))
(defn tempfile
[& {:keys [min-age] :as opts}]
(let [path (tempfile* opts)]
path (fs/join default-tmp-dir (str prefix (uuid/next) suffix))
path (Files/createFile path attrs)]
(fs/delete-on-exit! path)
(sp/offer! queue [path (some-> min-age ct/duration)])
path))
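;; Editorial usage sketch (not part of the change; option values are made
;; up): the returned path is removed on JVM exit and, when :min-age is
;; given, it is also offered to the cleanup queue with that duration.
(comment
  (tempfile :prefix "export." :suffix ".zip" :min-age {:hours 1}))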

View File

@@ -218,9 +218,6 @@
(when (or (nil? revn) (= revn (:revn file)))
file)))
;; FIXME: we should skip files that do not match the revn in the
;; props and add a proper schema for this task's props
(defn- process-file!
[cfg {:keys [file-id] :as props}]
(if-let [file (get-file cfg props)]

View File

@@ -8,7 +8,6 @@
"A maintenance task that is responsible of properly scheduling the
file-gc task for all files that matches the eligibility threshold."
(:require
[app.common.logging :as l]
[app.common.time :as ct]
[app.config :as cf]
[app.db :as db]
@@ -22,24 +21,25 @@
f.modified_at
FROM file AS f
WHERE f.has_media_trimmed IS false
AND f.modified_at < ?
AND f.modified_at < now() - ?::interval
AND f.deleted_at IS NULL
ORDER BY f.modified_at DESC
FOR UPDATE OF f
SKIP LOCKED")
(defn- get-candidates
[{:keys [::db/conn ::min-age] :as cfg}]
(let [min-age (db/interval min-age)]
(db/plan conn [sql:get-candidates min-age] {:fetch-size 10})))
(defn- schedule!
[{:keys [::db/conn] :as cfg} threshold]
[cfg]
(let [total (reduce (fn [total {:keys [id modified-at revn]}]
(let [params {:file-id id :revn revn}]
(l/trc :hint "schedule"
:file-id (str id)
:revn revn
:modified-at (ct/format-inst modified-at))
(let [params {:file-id id :modified-at modified-at :revn revn}]
(wrk/submit! (assoc cfg ::wrk/params params))
(inc total)))
0
(db/plan conn [sql:get-candidates threshold] {:fetch-size 10}))]
(get-candidates cfg))]
{:processed total}))
(defmethod ig/assert-key ::handler
@@ -53,12 +53,12 @@
(defmethod ig/init-key ::handler
[_ cfg]
(fn [{:keys [props] :as task}]
(let [threshold (-> (ct/duration (or (:min-age props) (::min-age cfg)))
(ct/in-past))]
(let [min-age (ct/duration (or (:min-age props) (::min-age cfg)))]
(-> cfg
(assoc ::db/rollback (:rollback? props))
(assoc ::min-age min-age)
(assoc ::wrk/task :file-gc)
(assoc ::wrk/priority 10)
(assoc ::wrk/mark-retries 0)
(assoc ::wrk/delay 10000)
(db/tx-run! schedule! threshold)))))
(assoc ::wrk/delay 1000)
(db/tx-run! schedule!)))))

View File

@@ -18,15 +18,15 @@
(def ^:private sql:get-profiles
"SELECT id, photo_id FROM profile
WHERE deleted_at IS NOT NULL
AND deleted_at <= ?
AND deleted_at < now() + ?::interval
ORDER BY deleted_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn- delete-profiles!
[{:keys [::db/conn ::timestamp ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-profiles timestamp chunk-size] {:fetch-size 5})
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-profiles deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [id photo-id]}]
(l/trc :obj "profile" :id (str id))
@@ -41,15 +41,15 @@
(def ^:private sql:get-teams
"SELECT deleted_at, id, photo_id FROM team
WHERE deleted_at IS NOT NULL
AND deleted_at <= ?
AND deleted_at < now() + ?::interval
ORDER BY deleted_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn- delete-teams!
[{:keys [::db/conn ::timestamp ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-teams timestamp chunk-size] {:fetch-size 5})
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-teams deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [id photo-id deleted-at]}]
(l/trc :obj "team"
:id (str id)
@@ -68,15 +68,15 @@
"SELECT id, team_id, deleted_at, woff1_file_id, woff2_file_id, otf_file_id, ttf_file_id
FROM team_font_variant
WHERE deleted_at IS NOT NULL
AND deleted_at <= ?
AND deleted_at < now() + ?::interval
ORDER BY deleted_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn- delete-fonts!
[{:keys [::db/conn ::timestamp ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-fonts timestamp chunk-size] {:fetch-size 5})
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-fonts deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [id team-id deleted-at] :as font}]
(l/trc :obj "font-variant"
:id (str id)
@@ -98,15 +98,15 @@
"SELECT id, deleted_at, team_id
FROM project
WHERE deleted_at IS NOT NULL
AND deleted_at <= ?
AND deleted_at < now() + ?::interval
ORDER BY deleted_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn- delete-projects!
[{:keys [::db/conn ::timestamp ::chunk-size] :as cfg}]
(->> (db/plan conn [sql:get-projects timestamp chunk-size] {:fetch-size 5})
[{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg}]
(->> (db/plan conn [sql:get-projects deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [id team-id deleted-at]}]
(l/trc :obj "project"
:id (str id)
@@ -124,15 +124,15 @@
f.project_id
FROM file AS f
WHERE f.deleted_at IS NOT NULL
AND f.deleted_at <= ?
AND f.deleted_at < now() + ?::interval
ORDER BY f.deleted_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn- delete-files!
[{:keys [::db/conn ::timestamp ::chunk-size] :as cfg}]
(->> (db/plan conn [sql:get-files timestamp chunk-size] {:fetch-size 5})
[{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg}]
(->> (db/plan conn [sql:get-files deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [id deleted-at project-id] :as file}]
(l/trc :obj "file"
:id (str id)
@@ -148,15 +148,15 @@
"SELECT file_id, revn, media_id, deleted_at
FROM file_thumbnail
WHERE deleted_at IS NOT NULL
AND deleted_at <= ?
AND deleted_at < now() + ?::interval
ORDER BY deleted_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn delete-file-thumbnails!
[{:keys [::db/conn ::timestamp ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-file-thumbnails timestamp chunk-size] {:fetch-size 5})
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-file-thumbnails deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [file-id revn media-id deleted-at]}]
(l/trc :obj "file-thumbnail"
:file-id (str file-id)
@@ -175,15 +175,15 @@
"SELECT file_id, object_id, media_id, deleted_at
FROM file_tagged_object_thumbnail
WHERE deleted_at IS NOT NULL
AND deleted_at <= ?
AND deleted_at < now() + ?::interval
ORDER BY deleted_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn delete-file-object-thumbnails!
[{:keys [::db/conn ::timestamp ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-file-object-thumbnails timestamp chunk-size] {:fetch-size 5})
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-file-object-thumbnails deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [file-id object-id media-id deleted-at]}]
(l/trc :obj "file-object-thumbnail"
:file-id (str file-id)
@@ -203,15 +203,15 @@
"SELECT id, file_id, media_id, thumbnail_id, deleted_at
FROM file_media_object
WHERE deleted_at IS NOT NULL
AND deleted_at <= ?
AND deleted_at < now() + ?::interval
ORDER BY deleted_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn- delete-file-media-objects!
[{:keys [::db/conn ::timestamp ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-file-media-objects timestamp chunk-size] {:fetch-size 5})
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-file-media-objects deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [id file-id deleted-at] :as fmo}]
(l/trc :obj "file-media-object"
:id (str id)
@@ -231,15 +231,16 @@
"SELECT file_id, id, type, deleted_at, metadata, backend
FROM file_data
WHERE deleted_at IS NOT NULL
AND deleted_at <= ?
AND deleted_at < now() + ?::interval
ORDER BY deleted_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn- delete-file-data!
[{:keys [::db/conn ::timestamp ::chunk-size] :as cfg}]
(->> (db/plan conn [sql:get-file-data timestamp chunk-size] {:fetch-size 5})
[{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg}]
(->> (db/plan conn [sql:get-file-data deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [file-id id type deleted-at metadata backend]}]
(some->> metadata
@@ -265,15 +266,15 @@
"SELECT id, file_id, deleted_at
FROM file_change
WHERE deleted_at IS NOT NULL
AND deleted_at <= ?
AND deleted_at < now() + ?::interval
ORDER BY deleted_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn- delete-file-changes!
[{:keys [::db/conn ::timestamp ::chunk-size] :as cfg}]
(->> (db/plan conn [sql:get-file-change timestamp chunk-size] {:fetch-size 5})
[{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg}]
(->> (db/plan conn [sql:get-file-change deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [id file-id deleted-at] :as xlog}]
(l/trc :obj "file-change"
:id (str id)
@@ -321,8 +322,9 @@
(defmethod ig/init-key ::handler
[_ cfg]
(fn [_]
(let [cfg (assoc cfg ::timestamp (ct/now))]
(fn [{:keys [props] :as task}]
(let [threshold (ct/duration (get props :deletion-threshold 0))
cfg (assoc cfg ::deletion-threshold (db/interval threshold))]
(loop [procs (map deref deletion-proc-vars)
total 0]
(if-let [proc-fn (first procs)]

View File

@@ -15,25 +15,19 @@
[buddy.sign.jwe :as jwe]))
(defn generate
([cfg claims] (generate cfg claims nil))
([{:keys [::setup/props] :as cfg} claims header]
(assert (contains? props :tokens-key) "expect props to have tokens-key")
[{:keys [::setup/props] :as cfg} claims]
(assert (contains? cfg ::setup/props))
(let [tokens-key
(get props :tokens-key)
(let [tokens-key
(get props :tokens-key)
payload
(-> claims
(update :iat (fn [v] (or v (ct/now))))
(d/without-nils)
(t/encode))]
payload
(-> claims
(update :iat (fn [v] (or v (ct/now))))
(d/without-nils)
(t/encode))]
(jwe/encrypt payload tokens-key {:alg :a256kw :enc :a256gcm :header header}))))
(defn decode-header
[token]
(ex/ignoring
(jwe/decode-header token)))
(jwe/encrypt payload tokens-key {:alg :a256kw :enc :a256gcm})))
(defn decode
[{:keys [::setup/props] :as cfg} token]

View File

@@ -27,7 +27,7 @@
(throw (IllegalArgumentException. "Missing arguments on `defmethod` macro.")))
(let [mdata (assoc mdata
::docstring (some-> docs str/unindent)
::docstring (some-> docs str/<<-)
::spec sname
::name (name sname))

View File

@@ -7,18 +7,10 @@
(ns app.util.template
(:require
[app.common.exceptions :as ex]
[cuerdas.core :as str]
[selmer.filters :as sf]
[selmer.parser :as sp]))
;; (sp/cache-off!)
(sf/add-filter! :abbreviate
(fn [s n]
(let [n (parse-long n)]
(str/abbreviate s n))))
(defn render
[path context]
(try

View File

@@ -77,8 +77,8 @@
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def ^:private sql:insert-new-task
"insert into task (id, name, props, queue, label, priority, max_retries, created_at, modified_at, scheduled_at)
values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
"insert into task (id, name, props, queue, label, priority, max_retries, scheduled_at)
values (?, ?, ?, ?, ?, ?, ?, now() + ?)
returning id")
(def ^:private
@@ -88,7 +88,7 @@
AND queue=?
AND label=?
AND status = 'new'
AND scheduled_at > ?")
AND scheduled_at > now()")
(def ^:private schema:options
[:map {:title "submit-options"}
@@ -111,19 +111,17 @@
(check-options! options)
(let [delay (ct/duration delay)
now (ct/now)
scheduled-at (-> (ct/plus now delay)
(ct/truncate :millisecond))
props (db/tjson params)
id (uuid/next)
tenant (cf/get :tenant)
task (d/name task)
queue (str/ffmt "%:%" tenant (d/name queue))
conn (db/get-connectable options)
deleted (when dedupe
(-> (db/exec-one! conn [sql:remove-not-started-tasks task queue label now])
(db/get-update-count)))]
(let [duration (ct/duration delay)
interval (db/interval duration)
props (db/tjson params)
id (uuid/next)
tenant (cf/get :tenant)
task (d/name task)
queue (str/ffmt "%:%" tenant (d/name queue))
conn (db/get-connectable options)
deleted (when dedupe
(-> (db/exec-one! conn [sql:remove-not-started-tasks task queue label])
:next.jdbc/update-count))]
(l/trc :hint "submit task"
:name task
@@ -131,13 +129,11 @@
:queue queue
:label label
:dedupe (boolean dedupe)
:delay (ct/format-duration delay)
:delay (ct/format-duration duration)
:replace (or deleted 0))
(db/exec-one! conn [sql:insert-new-task id task props queue
label priority max-retries
now now scheduled-at])
label priority max-retries interval])
id))
(defn invoke!

View File

@@ -137,34 +137,33 @@ RETURNING task.id, task.queue")
::wait)))
(run-batch []
(try
(let [rconn (rds/connect cfg)]
(try
(-> cfg
(assoc ::rds/conn rconn)
(db/tx-run! run-batch'))
(finally
(.close ^AutoCloseable rconn))))
(let [rconn (rds/connect cfg)]
(try
(-> cfg
(assoc ::rds/conn rconn)
(db/tx-run! run-batch'))
(catch InterruptedException cause
(throw cause))
(catch InterruptedException cause
(throw cause))
(catch Exception cause
(cond
(rds/exception? cause)
(do
(l/wrn :hint "redis exception (will retry in an instant)" :cause cause)
(px/sleep timeout))
(catch Exception cause
(cond
(rds/exception? cause)
(do
(l/wrn :hint "redis exception (will retry in an instant)" :cause cause)
(px/sleep timeout))
(db/sql-exception? cause)
(do
(l/wrn :hint "database exception (will retry in an instant)" :cause cause)
(px/sleep timeout))
(db/sql-exception? cause)
(do
(l/wrn :hint "database exception (will retry in an instant)" :cause cause)
(px/sleep timeout))
:else
(do
(l/err :hint "unhandled exception (will retry in an instant)" :cause cause)
(px/sleep timeout))))
:else
(do
(l/err :hint "unhandled exception (will retry in an instant)" :cause cause)
(px/sleep timeout))))))
(finally
(.close ^AutoCloseable rconn)))))
(dispatcher []
(l/inf :hint "started")
@@ -177,7 +176,7 @@ RETURNING task.id, task.queue")
(catch InterruptedException _
(l/trc :hint "interrupted"))
(catch Throwable cause
(l/err :hint "unexpected exception" :cause cause))
(l/err :hint " unexpected exception" :cause cause))
(finally
(l/inf :hint "terminated"))))]

View File

@@ -158,9 +158,7 @@
(inst-ms (:scheduled-at task)))
(l/wrn :hint "skiping task, rescheduled"
:task-id task-id
:runner-id id
:scheduled-at (ct/format-inst (:scheduled-at task))
:expected-scheduled-at (ct/format-inst scheduled-at))
:runner-id id)
:else
(let [result (run-task cfg task)]
@@ -181,8 +179,7 @@
{:error explain
:status "retry"
:modified-at now
:scheduled-at (-> (ct/plus now delay)
(ct/truncate :millisecond))
:scheduled-at (ct/plus now delay)
:retry-num nretry}
{:id (:id task)})
nil))

View File

@@ -30,7 +30,6 @@
[app.rpc.commands.files :as files]
[app.rpc.commands.files-create :as files.create]
[app.rpc.commands.files-update :as files.update]
[app.rpc.commands.projects :as projects]
[app.rpc.commands.teams :as teams]
[app.rpc.helpers :as rph]
[app.util.blob :as blob]
@@ -105,8 +104,13 @@
(assoc-in [:app.rpc/methods :app.setup/templates] templates)
(dissoc :app.srepl/server
:app.http/server
:app.http/route
:app.http/router
:app.auth.oidc.providers/google
:app.auth.oidc.providers/gitlab
:app.auth.oidc.providers/github
:app.auth.oidc.providers/generic
:app.setup/templates
:app.auth.oidc/routes
:app.http.oauth/handler
:app.notifications/handler
:app.loggers.mattermost/reporter
@@ -178,25 +182,23 @@
:is-demo false}
params)]
(db/run! system
(fn [{:keys [::db/conn] :as cfg}]
(fn [{:keys [::db/conn]}]
(->> params
(cmd.auth/create-profile cfg)
(cmd.auth/create-profile-rels conn)))))))
(cmd.auth/create-profile! conn)
(cmd.auth/create-profile-rels! conn)))))))
(defn create-project*
([i params] (create-project* *system* i params))
([system i {:keys [profile-id team-id] :as params}]
(us/assert uuid? profile-id)
(us/assert uuid? team-id)
(assert (uuid? profile-id))
(assert (uuid? team-id))
(let [timestamp (ct/now)]
(db/run! system
(fn [cfg]
(->> (merge {:id (mk-uuid "project" i)
:name (str "project" i)}
params
{::rpc/request-at timestamp})
(#'projects/create-project cfg)))))))
(db/run! system
(fn [{:keys [::db/conn]}]
(->> (merge {:id (mk-uuid "project" i)
:name (str "project" i)}
params)
(#'teams/create-project conn))))))
(defn create-file*
([i params]
@@ -547,44 +549,6 @@
(io/copy r sw)
(.toString sw))))
(defn parse-sse
[content]
(let [state
(reduce (fn [{:keys [events data event id] :as state} line]
(cond
;; empty line → dispatch event if we have data
(str/blank? line)
(if (seq data)
(-> state
(update :events conj {:event (or event "message")
:data (-> (str/join "\n" data))})
(assoc :data [] :event nil))
state)
;; comment line (starts with :)
(str/starts-with? line ":")
state
:else
(let [[field raw-value] (str/split line #":" 2)
value (some-> raw-value (str/replace #"^ " ""))]
(case field
"data" (update state :data conj (or value ""))
"event" (assoc state :event value)
;; ignore retry and unknown fields
state))))
{:events [] :data [] :event nil}
(str/split content #"\r?\n"))
;; handle unterminated last event (no trailing blank line)
state (if (seq (:data state))
(update state :events conj
{:event (or (:event state) "message")
:data (str/join "\n" (:data state))})
state)]
(:events state)))
(defn consume-sse
[callback]
(let [{:keys [::yres/status ::yres/body ::yres/headers] :as response} (callback {})
@@ -594,9 +558,12 @@
(try
(px/exec! :virtual #(rcp/write-body-to-stream body nil output))
(into []
(map (fn [{:keys [event data]}]
[(keyword event)
(tr/decode-str data)]))
(parse-sse (slurp' input)))
(map (fn [event]
(let [[item1 item2] (re-seq #"(.*): (.*)\n?" event)]
[(keyword (nth item1 2))
(tr/decode-str (nth item2 2))])))
(-> (slurp' input)
(str/split "\n\n")))
(finally
(.close input)))))

View File

@@ -22,6 +22,17 @@
(t/use-fixtures :once th/state-init)
(t/use-fixtures :each th/database-reset)
(t/deftest authenticate-method
(let [profile (th/create-profile* 1)
token (#'sess/gen-token th/*system* {:profile-id (:id profile)})
request {:params {:token token}}
response (#'mgmt/authenticate th/*system* request)]
(t/is (= 200 (::yres/status response)))
(t/is (= "authentication" (-> response ::yres/body :iss)))
(t/is (= (:id profile) (-> response ::yres/body :uid)))))
(t/deftest get-customer-method
(let [profile (th/create-profile* 1)
request {:params {:id (:id profile)}}
@@ -78,3 +89,7 @@
(let [subs' (-> response ::yres/body :subscription)]
(t/is (= subs' subs))))))

View File

@@ -0,0 +1,57 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns backend-tests.http-middleware-access-token-test
(:require
[app.db :as db]
[app.http.access-token]
[app.main :as-alias main]
[app.rpc :as-alias rpc]
[app.rpc.commands.access-token]
[app.tokens :as tokens]
[backend-tests.helpers :as th]
[clojure.test :as t]
[mockery.core :refer [with-mocks]]))
(t/use-fixtures :once th/state-init)
(t/use-fixtures :each th/database-reset)
(t/deftest soft-auth-middleware
(let [profile (th/create-profile* 1)
token (db/tx-run! th/*system* app.rpc.commands.access-token/create-access-token (:id profile) "test" nil)
request (volatile! nil)
handler (#'app.http.access-token/wrap-soft-auth
(fn [req] (vreset! request req))
th/*system*)]
(with-mocks [m1 {:target 'app.http.access-token/get-token
:return nil}]
(handler {})
(t/is (= {} @request)))
(with-mocks [m1 {:target 'app.http.access-token/get-token
:return (:token token)}]
(handler {})
(let [token-id (get @request :app.http.access-token/id)]
(t/is (= token-id (:id token)))))))
(t/deftest authz-middleware
(let [profile (th/create-profile* 1)
token (db/tx-run! th/*system* app.rpc.commands.access-token/create-access-token (:id profile) "test" nil)
request (volatile! {})
handler (#'app.http.access-token/wrap-authz
(fn [req] (vreset! request req))
th/*system*)]
(handler nil)
(t/is (nil? @request))
(handler {:app.http.access-token/id (:id token)})
(t/is (= #{} (:app.http.access-token/perms @request)))
(t/is (= (:id profile) (:app.http.access-token/profile-id @request)))))

View File

@@ -1,135 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns backend-tests.http-middleware-test
(:require
[app.common.time :as ct]
[app.db :as db]
[app.http :as-alias http]
[app.http.access-token]
[app.http.middleware :as mw]
[app.http.session :as session]
[app.main :as-alias main]
[app.rpc :as-alias rpc]
[app.rpc.commands.access-token]
[app.tokens :as tokens]
[backend-tests.helpers :as th]
[clojure.test :as t]
[mockery.core :refer [with-mocks]]
[yetti.request :as yreq]
[yetti.response :as yres]))
(t/use-fixtures :once th/state-init)
(t/use-fixtures :each th/database-reset)
(defrecord DummyRequest [headers cookies]
yreq/IRequestCookies
(get-cookie [_ name]
{:value (get cookies name)})
yreq/IRequest
(get-header [_ name]
(get headers name)))
(t/deftest auth-middleware-1
(let [request (volatile! nil)
handler (#'app.http.middleware/wrap-auth
(fn [req] (vreset! request req))
{})]
(handler (->DummyRequest {} {}))
(t/is (nil? (::http/auth-data @request)))
(handler (->DummyRequest {"authorization" "Token aaaa"} {}))
(let [{:keys [token claims] token-type :type} (get @request ::http/auth-data)]
(t/is (= :token token-type))
(t/is (= "aaaa" token))
(t/is (nil? claims)))))
(t/deftest auth-middleware-2
(let [request (volatile! nil)
handler (#'app.http.middleware/wrap-auth
(fn [req] (vreset! request req))
{})]
(handler (->DummyRequest {} {}))
(t/is (nil? (::http/auth-data @request)))
(handler (->DummyRequest {"authorization" "Bearer aaaa"} {}))
(let [{:keys [token claims] token-type :type} (get @request ::http/auth-data)]
(t/is (= :bearer token-type))
(t/is (= "aaaa" token))
(t/is (nil? claims)))))
(t/deftest auth-middleware-3
(let [request (volatile! nil)
handler (#'app.http.middleware/wrap-auth
(fn [req] (vreset! request req))
{})]
(handler (->DummyRequest {} {}))
(t/is (nil? (::http/auth-data @request)))
(handler (->DummyRequest {} {"auth-token" "foobar"}))
(let [{:keys [token claims] token-type :type} (get @request ::http/auth-data)]
(t/is (= :cookie token-type))
(t/is (= "foobar" token))
(t/is (nil? claims)))))
(t/deftest shared-key-auth
(let [handler (#'app.http.middleware/wrap-shared-key-auth
(fn [req] {::yres/status 200})
"secret-key")]
(let [response (handler (->DummyRequest {} {}))]
(t/is (= 403 (::yres/status response))))
(let [response (handler (->DummyRequest {"x-shared-key" "secret-key2"} {}))]
(t/is (= 403 (::yres/status response))))
(let [response (handler (->DummyRequest {"x-shared-key" "secret-key"} {}))]
(t/is (= 200 (::yres/status response))))))
(t/deftest access-token-authz
(let [profile (th/create-profile* 1)
token (db/tx-run! th/*system* app.rpc.commands.access-token/create-access-token (:id profile) "test" nil)
handler (#'app.http.access-token/wrap-authz identity th/*system*)]
(let [response (handler nil)]
(t/is (nil? response)))
(let [response (handler {::http/auth-data {:type :token :token "foobar" :claims {:tid (:id token)}}})]
(t/is (= #{} (:app.http.access-token/perms response)))
(t/is (= (:id profile) (:app.http.access-token/profile-id response))))))
(t/deftest session-authz
(let [cfg th/*system*
manager (session/inmemory-manager)
profile (th/create-profile* 1)
handler (-> (fn [req] req)
(#'session/wrap-authz {::session/manager manager})
(#'mw/wrap-auth {:bearer (partial session/decode-token cfg)
:cookie (partial session/decode-token cfg)}))
session (->> (session/create-session manager {:profile-id (:id profile)
:user-agent "user agent"})
(#'session/assign-token cfg))
response (handler (->DummyRequest {} {"auth-token" (:token session)}))
{:keys [token claims] token-type :type}
(get response ::http/auth-data)]
(t/is (= :cookie token-type))
(t/is (= (:token session) token))
(t/is (= "authentication" (:iss claims)))
(t/is (= "penpot" (:aud claims)))
(t/is (= (:id session) (:sid claims)))
(t/is (= (:id profile) (:uid claims)))))

View File

@@ -23,7 +23,7 @@
(smt/check!
(smt/for [context (->> sg/int
(sg/fmap (fn [_]
(#'rpc.doc/openapi-context (::rpc/methods th/*system*)))))]
(rpc.doc/prepare-openapi-context (::rpc/methods th/*system*)))))]
(try
(json/encode context)
true

View File

@@ -9,7 +9,6 @@
[app.common.features :as cfeat]
[app.common.pprint :as pp]
[app.common.thumbnails :as thc]
[app.common.time :as ct]
[app.common.types.shape :as cts]
[app.common.uuid :as uuid]
[app.config :as cf]
@@ -17,7 +16,6 @@
[app.db.sql :as sql]
[app.http :as http]
[app.rpc :as-alias rpc]
[app.setup.clock :as clock]
[app.storage :as sto]
[backend-tests.helpers :as th]
[clojure.test :as t]
@@ -134,10 +132,9 @@
;; this will run pending task triggered by deleting user snapshot
(th/run-pending-tasks!)
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [res (th/run-task! :objects-gc {})]
;; delete 2 snapshots and 2 file data entries
(t/is (= 4 (:processed res)))))))))
(let [res (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
;; delete 2 snapshots and 2 file data entries
(t/is (= 4 (:processed res))))))))
(t/deftest snapshots-locking
(let [profile-1 (th/create-profile* 1 {:is-active true})

View File

@@ -313,7 +313,7 @@
;; freeze because of the deduplication (we have uploaded the same
;; files twice).
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 2 (:freeze res)))
(t/is (= 0 (:delete res))))
@@ -372,14 +372,14 @@
(th/db-exec! ["update file_change set deleted_at = now() where file_id = ? and label is not null" (:id file)])
(th/db-exec! ["update file set has_media_trimmed = false where id = ?" (:id file)])
(let [res (th/run-task! :objects-gc {})]
(let [res (th/run-task! :objects-gc {:deletion-threshold 0})]
;; this will remove the file change and file data entries for two snapshots
(t/is (= 4 (:processed res))))
;; Rerun the file-gc and objects-gc
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
(let [res (th/run-task! :objects-gc {})]
(let [res (th/run-task! :objects-gc {:deletion-threshold 0})]
;; this will remove the file media objects marked as deleted
;; on prev file-gc
(t/is (= 2 (:processed res))))
@@ -387,7 +387,7 @@
;; Now that file-gc has deleted the file-media-object usage,
;; let's execute the touched-gc task; we should see that two of
;; them are marked to be deleted
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 0 (:freeze res)))
(t/is (= 2 (:delete res))))
@@ -572,7 +572,7 @@
;; Now that file-gc has deleted the file-media-object usage,
;; let's execute the touched-gc task; we should see that two of
;; them are marked to be deleted.
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 0 (:freeze res)))
(t/is (= 2 (:delete res))))
@@ -665,7 +665,7 @@
;; because of the deduplication (we have uploaded the same
;; files twice).
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 1 (:freeze res)))
(t/is (= 0 (:delete res))))
@@ -715,7 +715,7 @@
;; Now that objects-gc has deleted the object thumbnail, let's
;; execute the touched-gc task
(let [res (th/run-task! "storage-gc-touched" {})]
(let [res (th/run-task! "storage-gc-touched" {:min-age 0})]
(t/is (= 1 (:freeze res))))
;; check file media objects
@@ -750,7 +750,7 @@
;; Now that file-gc has deleted the object thumbnail, let's
;; execute the touched-gc task
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 1 (:delete res))))
;; check file media objects
@@ -922,9 +922,8 @@
(t/is (= 0 (:processed result))))
;; run permanent deletion
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [result (th/run-task! :objects-gc {})]
(t/is (= 3 (:processed result)))))
(let [result (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
(t/is (= 3 (:processed result))))
;; query the list of file libraries after hard deletion
(let [data {::th/type :get-file-libraries
@@ -1135,7 +1134,7 @@
(th/sleep 300)
;; run the task
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
;; check that object thumbnails are still here
(let [rows (th/db-query :file-tagged-object-thumbnail {:file-id (:id file)})]
@@ -1164,7 +1163,7 @@
(t/is (= 2 (count rows))))
;; run the task again
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
;; check that we have all object thumbnails
(let [rows (th/db-query :file-tagged-object-thumbnail {:file-id (:id file)})]
@@ -1227,7 +1226,7 @@
(t/is (= 2 (count rows)))))
(t/testing "gc task"
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
(let [rows (th/db-query :file-thumbnail {:file-id (:id file)})]
(t/is (= 2 (count rows)))
@@ -1274,7 +1273,7 @@
;; The FileGC task will schedule an inner taskq
(th/run-pending-tasks!)
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 2 (:freeze res)))
(t/is (= 0 (:delete res))))
@@ -1368,7 +1367,7 @@
;; we ensure that, once objects-gc has passed, two storage
;; objects are marked for deletion
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 0 (:freeze res)))
(t/is (= 2 (:delete res))))
@@ -1490,7 +1489,7 @@
(t/is (some? (not-empty (:objects component))))))
;; Re-run the file-gc task
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
(let [row (th/db-get :file {:id (:id file)})]
(t/is (true? (:has-media-trimmed row))))
@@ -1520,7 +1519,7 @@
;; Now that we have deleted the usage of the component, if we run
;; file-gc that component should be deleted
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
;; Check that component is properly removed
(let [data {::th/type :get-file
@@ -1611,8 +1610,8 @@
:component-id c-id})}])
;; Run the file-gc on file and library
(t/is (true? (th/run-task! :file-gc {:file-id (:id file-1)})))
(t/is (true? (th/run-task! :file-gc {:file-id (:id file-2)})))
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file-1)})))
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file-2)})))
;; Check that component exists
(let [data {::th/type :get-file
@@ -1685,7 +1684,7 @@
;; Now that we have deleted the usage of the component, if we run
;; file-gc that component should be deleted
(t/is (true? (th/run-task! :file-gc {:file-id (:id file-1)})))
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file-1)})))
;; Check that component is properly removed
(let [data {::th/type :get-file
@@ -1834,8 +1833,8 @@
(t/is (not= (:id fill) (:id fmedia)))))
;; Run the file-gc on file and library
(t/is (true? (th/run-task! :file-gc {:file-id (:id file-1)})))
(t/is (true? (th/run-task! :file-gc {:file-id (:id file-2)})))
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file-1)})))
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file-2)})))
;; Now proceed to delete file and absorb it
(let [data {::th/type :delete-file
@@ -1926,7 +1925,7 @@
(let [row (th/db-exec-one! ["select * from file where id = ?" file-id])]
(t/is (= (:deleted-at row) now)))))))
(t/deftest restore-deleted-files
(t/deftest deleted-files-restore
(let [prof (th/create-profile* 1 {:is-active true})
team-id (:default-team-id prof)
proj-id (:default-project-id prof)
@@ -1989,78 +1988,3 @@
(let [row (th/db-exec-one! ["select * from file where id = ?" file-id])]
(t/is (nil? (:deleted-at row)))))))
(t/deftest restore-deleted-files-and-projects
(let [profile (th/create-profile* 1 {:is-active true})
team-id (:default-team-id profile)
now (ct/inst "2025-10-31T00:00:00Z")]
(binding [ct/*clock* (clock/fixed now)]
(let [project (th/create-project* 1 {:profile-id (:id profile)
:team-id team-id})
file (th/create-file* 1 {:profile-id (:id profile)
:project-id (:id project)})
data {::th/type :delete-project
:id (:id project)
::rpc/profile-id (:id profile)}
out (th/command! data)]
;; (th/print-result! out)
(t/is (nil? (:error out)))
(t/is (nil? (:result out)))
(th/run-pending-tasks!)
;; get deleted files
(let [data {::th/type :get-team-deleted-files
::rpc/profile-id (:id profile)
:team-id team-id}
out (th/command! data)]
;; (th/print-result! out)
(t/is (nil? (:error out)))
(let [[row1 :as result] (:result out)]
(t/is (= 1 (count result)))
(t/is (= (:will-be-deleted-at row1) #penpot/inst "2025-11-07T00:00:00Z"))
(t/is (= (:created-at row1) #penpot/inst "2025-10-31T00:00:00Z"))
(t/is (= (:modified-at row1) #penpot/inst "2025-10-31T00:00:00Z"))))
;; Check if project is deleted
(let [[row1 :as rows] (th/db-query :project {:id (:id project)})]
;; (pp/pprint rows)
(t/is (= 1 (count rows)))
(t/is (= (:deleted-at row1) #penpot/inst "2025-11-07T00:00:00Z"))
(t/is (= (:created-at row1) #penpot/inst "2025-10-31T00:00:00Z"))
(t/is (= (:modified-at row1) #penpot/inst "2025-10-31T00:00:00Z")))
;; Restore files
(let [data {::th/type :restore-deleted-team-files
::rpc/profile-id (:id profile)
:team-id team-id
:ids #{(:id file)}}
out (th/command! data)]
;; (th/print-result! out)
(t/is (nil? (:error out)))
(let [result (:result out)]
(t/is (fn? result))
(let [events (th/consume-sse result)]
;; (pp/pprint events)
(t/is (= 2 (count events)))
(t/is (= :end (first (last events))))
(t/is (= (:ids data) (last (last events)))))))
(let [[row1 :as rows] (th/db-query :file {:project-id (:id project)})]
;; (pp/pprint rows)
(t/is (= 1 (count rows)))
(t/is (= (:created-at row1) #penpot/inst "2025-10-31T00:00:00Z"))
(t/is (nil? (:deleted-at row1))))
;; Check if project is restored
(let [[row1 :as rows] (th/db-query :project {:id (:id project)})]
;; (pp/pprint rows)
(t/is (= 1 (count rows)))
(t/is (= (:created-at row1) #penpot/inst "2025-10-31T00:00:00Z"))
(t/is (nil? (:deleted-at row1))))))))

View File

@@ -8,14 +8,12 @@
(:require
[app.common.pprint :as pp]
[app.common.thumbnails :as thc]
[app.common.time :as ct]
[app.common.types.shape :as cts]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.rpc :as-alias rpc]
[app.rpc.commands.auth :as cauth]
[app.setup.clock :as clock]
[app.storage :as sto]
[app.tokens :as tokens]
[backend-tests.helpers :as th]
@@ -85,8 +83,7 @@
(t/is (map? (:result out))))
;; run the task again
(let [res (binding [ct/*clock* (clock/fixed (ct/in-future {:minutes 31}))]
(th/run-task! "storage-gc-touched" {}))]
(let [res (th/run-task! "storage-gc-touched" {:min-age 0})]
(t/is (= 2 (:freeze res))))
(let [[row1 row2 :as rows] (th/db-query :file-tagged-object-thumbnail
@@ -117,9 +114,9 @@
;; Run the File GC task that should remove unused file object
;; thumbnails
(th/run-task! :file-gc {:file-id (:id file)})
(th/run-task! :file-gc {:min-age 0 :file-id (:id file)})
(let [result (th/run-task! :objects-gc {})]
(let [result (th/run-task! :objects-gc {:min-age 0})]
(t/is (= 3 (:processed result))))
;; check if row2 related thumbnail row still exists
@@ -136,8 +133,7 @@
(t/is (some? (sto/get-object storage (:media-id row2))))
;; run the task again
(let [res (binding [ct/*clock* (clock/fixed (ct/in-future {:minutes 31}))]
(th/run-task! :storage-gc-touched {}))]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 1 (:delete res)))
(t/is (= 0 (:freeze res))))
@@ -147,9 +143,8 @@
;; Run the storage gc deleted task, it should permanently delete
;; all storage objects related to the deleted thumbnails
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [res (th/run-task! :storage-gc-deleted {})]
(t/is (= 1 (:deleted res)))))
(let [result (th/run-task! :storage-gc-deleted {:min-age 0})]
(t/is (= 1 (:deleted result))))
(t/is (nil? (sto/get-object storage (:media-id row1))))
(t/is (some? (sto/get-object storage (:media-id row2))))
@@ -221,9 +216,9 @@
;; Run the File GC task that should remove unused file object
;; thumbnails
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
(let [result (th/run-task! :objects-gc {})]
(let [result (th/run-task! :objects-gc {:min-age 0})]
(t/is (= 2 (:processed result))))
;; check if row1 related thumbnail row still exists
@@ -235,7 +230,7 @@
(t/is (= (:object-id data1) (:object-id row)))
(t/is (uuid? (:media-id row1))))
(let [result (th/run-task! :storage-gc-touched {})]
(let [result (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 1 (:delete result))))
;; Check if storage objects still exists after file-gc
@@ -247,9 +242,8 @@
;; Run the storage gc deleted task, it should permanently delete
;; all storage objects related to the deleted thumbnails
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [result (th/run-task! :storage-gc-deleted {})]
(t/is (= 1 (:deleted result)))))
(let [result (th/run-task! :storage-gc-deleted {:min-age 0})]
(t/is (= 1 (:deleted result))))
(t/is (some? (sto/get-object storage (:media-id row2)))))))

View File

@@ -6,13 +6,11 @@
(ns backend-tests.rpc-font-test
(:require
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.http :as http]
[app.rpc :as-alias rpc]
[app.setup.clock :as clock]
[app.storage :as sto]
[backend-tests.helpers :as th]
[clojure.test :as t]
@@ -131,7 +129,7 @@
;; (th/print-result! out)
(t/is (nil? (:error out))))
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 6 (:freeze res))))
(let [params {::th/type :delete-font
@@ -143,17 +141,16 @@
(t/is (nil? (:error out)))
(t/is (nil? (:result out))))
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 0 (:freeze res)))
(t/is (= 0 (:delete res))))
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [res (th/run-task! :objects-gc {})]
(t/is (= 2 (:processed res))))
(let [res (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
(t/is (= 2 (:processed res))))
(let [res (th/run-task! :storage-gc-touched {})]
(t/is (= 0 (:freeze res)))
(t/is (= 6 (:delete res)))))))
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 0 (:freeze res)))
(t/is (= 6 (:delete res))))))
(t/deftest font-deletion-2
(let [prof (th/create-profile* 1 {:is-active true})
@@ -192,7 +189,7 @@
;; (th/print-result! out)
(t/is (nil? (:error out))))
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 6 (:freeze res))))
(let [params {::th/type :delete-font
@@ -204,17 +201,16 @@
(t/is (nil? (:error out)))
(t/is (nil? (:result out))))
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 0 (:freeze res)))
(t/is (= 0 (:delete res))))
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [res (th/run-task! :objects-gc {})]
(t/is (= 1 (:processed res))))
(let [res (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
(t/is (= 1 (:processed res))))
(let [res (th/run-task! :storage-gc-touched {})]
(t/is (= 0 (:freeze res)))
(t/is (= 3 (:delete res)))))))
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 0 (:freeze res)))
(t/is (= 3 (:delete res))))))
(t/deftest font-deletion-3
(let [prof (th/create-profile* 1 {:is-active true})
@@ -252,7 +248,7 @@
(t/is (nil? (:error out1)))
(t/is (nil? (:error out2)))
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 6 (:freeze res))))
(let [params {::th/type :delete-font-variant
@@ -264,14 +260,13 @@
(t/is (nil? (:error out)))
(t/is (nil? (:result out))))
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 0 (:freeze res)))
(t/is (= 0 (:delete res))))
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [res (th/run-task! :objects-gc {})]
(t/is (= 1 (:processed res))))
(let [res (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
(t/is (= 1 (:processed res))))
(let [res (th/run-task! :storage-gc-touched {})]
(t/is (= 0 (:freeze res)))
(t/is (= 3 (:delete res)))))))
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 0 (:freeze res)))
(t/is (= 3 (:delete res))))))

View File

@@ -6,13 +6,11 @@
(ns backend-tests.rpc-project-test
(:require
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.http :as http]
[app.rpc :as-alias rpc]
[app.setup.clock :as clock]
[backend-tests.helpers :as th]
[clojure.test :as t]))
@@ -228,9 +226,8 @@
(t/is (= 0 (count result)))))
;; run permanent deletion
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [result (th/run-task! :objects-gc {})]
(t/is (= 1 (:processed result)))))
(let [result (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
(t/is (= 1 (:processed result))))
;; query the list of files of a project after hard deletion
(let [data {::th/type :get-project-files

View File

@@ -13,7 +13,6 @@
[app.db :as db]
[app.http :as http]
[app.rpc :as-alias rpc]
[app.setup.clock :as clock]
[app.storage :as sto]
[app.tokens :as tokens]
[backend-tests.helpers :as th]
@@ -526,9 +525,8 @@
(t/is (= :not-found (:type edata)))))
;; run permanent deletion
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [result (th/run-task! :objects-gc {})]
(t/is (= 2 (:processed result)))))
(let [result (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
(t/is (= 2 (:processed result))))
;; query the list of projects of a team after hard deletion
(let [data {::th/type :get-projects
@@ -583,9 +581,8 @@
(t/is (= 1 (count rows)))
(t/is (ct/inst? (:deleted-at (first rows)))))
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [result (th/run-task! :objects-gc {})]
(t/is (= 7 (:processed result)))))))
(let [result (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
(t/is (= 7 (:processed result))))))
(t/deftest create-team-access-request
(with-mocks [mock {:target 'app.email/send! :return nil}]

View File

@@ -11,7 +11,6 @@
[app.common.uuid :as uuid]
[app.db :as db]
[app.rpc :as-alias rpc]
[app.setup.clock :as clock]
[app.storage :as sto]
[backend-tests.helpers :as th]
[clojure.test :as t]
@@ -54,13 +53,19 @@
(configure-storage-backend))
content (sto/content "content")
object (sto/put-object! storage {::sto/content content
::sto/expired-at (ct/in-future {:hours 1})
::sto/expired-at (ct/in-future {:seconds 1})
:content-type "text/plain"})]
(t/is (sto/object? object))
(t/is (ct/inst? (:expired-at object)))
(t/is (ct/is-after? (:expired-at object) (ct/now)))
(t/is (nil? (sto/get-object storage (:id object))))))
(t/is (= object (sto/get-object storage (:id object))))
(th/sleep 1000)
(t/is (nil? (sto/get-object storage (:id object))))
(t/is (nil? (sto/get-object-data storage object)))
(t/is (nil? (sto/get-object-url storage object)))
(t/is (nil? (sto/get-object-path storage object)))))
(t/deftest put-and-delete-object
(let [storage (-> (:app.storage/storage th/*system*)
@@ -93,25 +98,20 @@
::sto/expired-at (ct/now)
:content-type "text/plain"})
object2 (sto/put-object! storage {::sto/content content2
::sto/expired-at (ct/in-future {:hours 2})
::sto/expired-at (ct/in-past {:hours 2})
:content-type "text/plain"})
object3 (sto/put-object! storage {::sto/content content3
::sto/expired-at (ct/in-future {:hours 1})
::sto/expired-at (ct/in-past {:hours 1})
:content-type "text/plain"})]
(binding [ct/*clock* (clock/fixed (ct/in-future {:minutes 0}))]
(let [res (th/run-task! :storage-gc-deleted {})]
(t/is (= 1 (:deleted res)))))
(th/sleep 200)
(let [res (th/run-task! :storage-gc-deleted {})]
(t/is (= 1 (:deleted res))))
(let [res (th/db-exec-one! ["select count(*) from storage_object;"])]
(t/is (= 2 (:count res))))
(binding [ct/*clock* (clock/fixed (ct/in-future {:minutes 61}))]
(let [res (th/run-task! :storage-gc-deleted {})]
(t/is (= 1 (:deleted res)))))
(let [res (th/db-exec-one! ["select count(*) from storage_object;"])]
(t/is (= 1 (:count res))))))
(t/is (= 2 (:count res))))))
(t/deftest touched-gc-task-1
(let [storage (-> (:app.storage/storage th/*system*)
@@ -158,7 +158,7 @@
{:id (:id result-1)})
;; run the objects gc task for permanent deletion
(let [res (th/run-task! :objects-gc {})]
(let [res (th/run-task! :objects-gc {:min-age 0})]
(t/is (= 1 (:processed res))))
;; check that we still have all the storage objects
@@ -182,6 +182,7 @@
(let [res (th/db-exec-one! ["select count(*) from storage_object where deleted_at is not null"])]
(t/is (= 0 (:count res)))))))
(t/deftest touched-gc-task-2
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))
@@ -242,12 +243,11 @@
{:id (:id result-2)})
;; run the objects gc task for permanent deletion
(let [res (th/run-task! :objects-gc {})]
(let [res (th/run-task! :objects-gc {:min-age 0})]
(t/is (= 1 (:processed res))))
;; revert touched state to all storage objects
(th/db-exec-one! ["update storage_object set touched_at=?" (ct/now)])
(th/db-exec-one! ["update storage_object set touched_at=now()"])
;; Run the task again
(let [res (th/run-task! :storage-gc-touched {})]
@@ -293,10 +293,10 @@
result-2 (:result out2)]
;; now we proceed to manually mark all storage objects touched
(th/db-exec! ["update storage_object set touched_at=?" (ct/now)])
(th/db-exec! ["update storage_object set touched_at=now()"])
;; run the touched gc task
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! "storage-gc-touched" {:min-age 0})]
(t/is (= 2 (:freeze res)))
(t/is (= 0 (:delete res))))
@@ -305,48 +305,16 @@
(t/is (= 2 (count rows)))))
;; now we proceed to manually delete all file_media_object
(th/db-exec! ["update file_media_object set deleted_at = ?" (ct/now)])
(th/db-exec! ["update file_media_object set deleted_at = now()"])
(let [res (th/run-task! :objects-gc {})]
(let [res (th/run-task! "objects-gc" {:min-age 0})]
(t/is (= 2 (:processed res))))
;; run the touched gc task
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! "storage-gc-touched" {:min-age 0})]
(t/is (= 0 (:freeze res)))
(t/is (= 2 (:delete res))))
;; check that we have no objects left
(let [rows (th/db-exec! ["select * from storage_object where deleted_at is null"])]
(t/is (= 0 (count rows))))))
(t/deftest tempfile-bucket-test
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))
content1 (sto/content "content1")
now (ct/now)
object1 (sto/put-object! storage {::sto/content content1
::sto/touched-at (ct/plus now {:minutes 1})
:bucket "tempfile"
:content-type "text/plain"})]
(binding [ct/*clock* (clock/fixed now)]
(let [res (th/run-task! :storage-gc-touched {})]
(t/is (= 0 (:freeze res)))
(t/is (= 0 (:delete res)))))
(binding [ct/*clock* (clock/fixed (ct/plus now {:minutes 1}))]
(let [res (th/run-task! :storage-gc-touched {})]
(t/is (= 0 (:freeze res)))
(t/is (= 1 (:delete res)))))
(binding [ct/*clock* (clock/fixed (ct/plus now {:hours 1}))]
(let [res (th/run-task! :storage-gc-deleted {})]
(t/is (= 0 (:deleted res)))))
(binding [ct/*clock* (clock/fixed (ct/plus now {:hours 2}))]
(let [res (th/run-task! :storage-gc-deleted {})]
(t/is (= 0 (:deleted res)))))))

View File

@@ -17,7 +17,7 @@
org.slf4j/slf4j-api {:mvn/version "2.0.17"}
pl.tkowalcz.tjahzi/log4j2-appender {:mvn/version "0.9.40"}
selmer/selmer {:mvn/version "1.12.69"}
selmer/selmer {:mvn/version "1.12.62"}
criterium/criterium {:mvn/version "0.4.6"}
metosin/jsonista {:mvn/version "0.3.13"}
@@ -29,7 +29,8 @@
java-http-clj/java-http-clj {:mvn/version "0.4.3"}
integrant/integrant {:mvn/version "1.0.0"}
funcool/cuerdas {:mvn/version "2026.415"}
funcool/tubax {:mvn/version "2021.05.20-0"}
funcool/cuerdas {:mvn/version "2025.06.16-414"}
funcool/promesa
{:git/sha "46048fc0d4bf5466a2a4121f5d52aefa6337f2e8"
:git/url "https://github.com/funcool/promesa"}
@@ -47,8 +48,12 @@
com.sun.mail/jakarta.mail {:mvn/version "2.0.2"}
org.la4j/la4j {:mvn/version "0.6.0"}
;; exception printing
fipp/fipp {:mvn/version "0.6.29"}
me.flowthing/pp {:mvn/version "2024-11-13.77"}
io.aviso/pretty {:mvn/version "1.4.4"}
environ/environ {:mvn/version "1.2.0"}}
:paths ["src" "vendor" "target/classes"]

View File

@@ -1,7 +0,0 @@
#!/usr/bin/env bash
set -ex
corepack enable;
corepack install;
yarn install;
yarn run test;

View File

@@ -1048,12 +1048,6 @@
(into [elem])
(into (subvec without-elem insert-pos)))))))
(defn invert-map
"Returns a map with keys and values swapped.
If the input map has duplicate values, later entries overwrite earlier ones."
[m]
(into {} (map (fn [[k v]] [v k]) m)))
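;; Illustrative REPL sketch of the behaviour documented in the invert-map
;; docstring above (results assume small literal maps, whose entry order
;; is insertion order):
(invert-map {:a 1 :b 2})   ;; => {1 :a, 2 :b}
(invert-map {:a 1 :b 1})   ;; => {1 :b}   the later entry overwrites the earlier one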
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; String Functions
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

View File

@@ -9,10 +9,10 @@
(:refer-clojure :exclude [get-in select-keys str with-open max])
#?(:cljs (:require-macros [app.common.data.macros]))
(:require
#?(:clj [cljs.analyzer.api :as aapi])
#?(:clj [clojure.core :as c]
:cljs [cljs.core :as c])
[app.common.data :as d]
[cljs.analyzer.api :as aapi]
[cuerdas.core :as str]))
(defmacro select-keys
@@ -44,43 +44,42 @@
[& params]
`(str/concat ~@params))
#?(:clj
(defmacro export
"A helper macro that allows reexport a var in a current namespace."
[v]
(if (boolean (:ns &env))
(defmacro export
"A helper macro that allows reexport a var in a current namespace."
[v]
(if (boolean (:ns &env))
;; Code for ClojureScript
(let [mdata (aapi/resolve &env v)
arglists (second (get-in mdata [:meta :arglists]))
sym (symbol (c/name v))
andsym (symbol "&")
procarg #(if (= % andsym) % (gensym "param"))]
(if (pos? (count arglists))
`(def
~(with-meta sym (:meta mdata))
(fn ~@(for [args arglists]
(let [args (map procarg args)]
(if (some #(= andsym %) args)
(let [[sargs dargs] (split-with #(not= andsym %) args)]
`([~@sargs ~@dargs] (apply ~v ~@sargs ~@(rest dargs))))
`([~@args] (~v ~@args)))))))
`(def ~(with-meta sym (:meta mdata)) ~v)))
;; Code for ClojureScript
(let [mdata (aapi/resolve &env v)
arglists (second (get-in mdata [:meta :arglists]))
sym (symbol (c/name v))
andsym (symbol "&")
procarg #(if (= % andsym) % (gensym "param"))]
(if (pos? (count arglists))
`(def
~(with-meta sym (:meta mdata))
(fn ~@(for [args arglists]
(let [args (map procarg args)]
(if (some #(= andsym %) args)
(let [[sargs dargs] (split-with #(not= andsym %) args)]
`([~@sargs ~@dargs] (apply ~v ~@sargs ~@(rest dargs))))
`([~@args] (~v ~@args)))))))
`(def ~(with-meta sym (:meta mdata)) ~v)))
;; Code for Clojure
(let [vr (resolve v)
m (meta vr)
n (:name m)
n (with-meta n
(cond-> {}
(:dynamic m) (assoc :dynamic true)
(:protocol m) (assoc :protocol (:protocol m))))]
`(let [m# (meta ~vr)]
(def ~n (deref ~vr))
(alter-meta! (var ~n) merge (dissoc m# :name))
;; (when (:macro m#)
;; (.setMacro (var ~n)))
~vr)))))
;; Code for Clojure
(let [vr (resolve v)
m (meta vr)
n (:name m)
n (with-meta n
(cond-> {}
(:dynamic m) (assoc :dynamic true)
(:protocol m) (assoc :protocol (:protocol m))))]
`(let [m# (meta ~vr)]
(def ~n (deref ~vr))
(alter-meta! (var ~n) merge (dissoc m# :name))
;; (when (:macro m#)
;; (.setMacro (var ~n)))
~vr))))
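;; A hedged usage sketch for the export macro above. The namespace below is
;; hypothetical; d/index-of is taken from elsewhere in this diff, and the
;; expansion shown in the comments is simplified (the real macro also copies
;; the original var's metadata).
(ns my.example
  (:require
   [app.common.data :as d]
   [app.common.data.macros :as dm]))

(dm/export d/index-of)
;; On the Clojure side this is roughly:
;;   (def index-of (deref #'app.common.data/index-of))
;; so the var can now be called locally:
;;   (index-of [:a :b :c] :b)   ;; => 1, assuming the usual index-of semantics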
(defmacro fmt
"String interpolation helper. Can only be used with strings known at

View File

@@ -14,7 +14,8 @@
[app.common.schema :as sm]
[clojure.core :as c]
[clojure.spec.alpha :as s]
[cuerdas.core :as str])
[cuerdas.core :as str]
[expound.alpha :as expound])
#?(:clj
(:import
clojure.lang.IPersistentMap)))
@@ -109,6 +110,13 @@
(contains? data :explain))
(explain (:explain data) opts)
(and (contains? data ::s/problems)
(contains? data ::s/value)
(contains? data ::s/spec))
(binding [s/*explain-out* expound/printer]
(with-out-str
(s/explain-out (update data ::s/problems #(take (:length opts 10) %)))))
(contains? data ::sm/explain)
(sm/humanize-explain (::sm/explain data) opts)))
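;; For context, the expound branch above follows the standard pattern for
;; rendering spec explain-data as human-readable text; a minimal standalone
;; sketch with a hypothetical spec:
(require '[clojure.spec.alpha :as s]
         '[expound.alpha :as expound])

(s/def ::age int?)

(binding [s/*explain-out* expound/printer]
  (with-out-str
    (s/explain-out (s/explain-data ::age "forty"))))
;; => a readable explanation string instead of the default spec output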

View File

@@ -53,7 +53,6 @@
"plugins/runtime"
"tokens/numeric-input"
"design-tokens/v1"
"text-editor/v2-html-paste"
"text-editor/v2"
"render-wasm/v1"
"variants/v1"})
@@ -76,7 +75,6 @@
(def frontend-only-features
#{"styles/v2"
"plugins/runtime"
"text-editor/v2-html-paste"
"text-editor/v2"
"tokens/numeric-input"
"render-wasm/v1"})
@@ -126,7 +124,6 @@
:feature-plugins "plugins/runtime"
:feature-design-tokens "design-tokens/v1"
:feature-text-editor-v2 "text-editor/v2"
:feature-text-editor-v2-html-paste "text-editor/v2-html-paste"
:feature-render-wasm "render-wasm/v1"
:feature-variants "variants/v1"
:feature-token-input "tokens/numeric-input"

View File

@@ -485,13 +485,6 @@
(commit-change change1)
(commit-change change2))))
(defn add-tokens-lib
[state tokens-lib]
(-> state
(commit-change
{:type :set-tokens-lib
:tokens-lib tokens-lib})))
(defn delete-shape
[file id]
(commit-change

View File

@@ -371,7 +371,7 @@
[:set-tokens-lib
[:map {:title "SetTokensLib"}
[:type [:= :set-tokens-lib]]
[:tokens-lib [:maybe ctob/schema:tokens-lib]]]]
[:tokens-lib ::sm/any]]] ;; TODO: we should define a plain object schema for tokens-lib
[:set-token
[:map {:title "SetTokenChange"}
@@ -463,16 +463,35 @@
;; Changes Processing Impl
#_:clj-kondo/ignore
(defn- validate-shape
[{:keys [id] :as shape} page-id]
(when-not (cts/valid-shape? shape)
(ex/raise :type :assertion
:code :data-validation
:hint (str "invalid shape found '" id "'")
:page-id page-id
:shape-id id
::sm/explain (cts/explain-shape shape))))
(defn validate-shapes!
[data-old data-new items]
(letfn [(validate-shape! [[page-id id]]
(let [shape-old (dm/get-in data-old [:pages-index page-id :objects id])
shape-new (dm/get-in data-new [:pages-index page-id :objects id])]
;; If the object has changed or is new, verify it is correct
(when (and (some? shape-new)
(not= shape-old shape-new))
(when-not (and (cts/valid-shape? shape-new)
(cts/shape? shape-new))
(ex/raise :type :assertion
:code :data-validation
:hint (str "invalid shape found after applying changes on file "
(:id data-new))
:file-id (:id data-new)
::sm/explain (cts/explain-shape shape-new))))))]
(->> (into #{} (map :page-id) items)
(mapcat (fn [page-id]
(filter #(= page-id (:page-id %)) items)))
(mapcat (fn [{:keys [type id page-id] :as item}]
(sequence
(map (partial vector page-id))
(case type
(:add-obj :mod-obj :del-obj) (cons id nil)
(:mov-objects :reg-objects) (:shapes item)
nil))))
(run! validate-shape!))))
(defn- process-touched-change
[data {:keys [id page-id component-id]}]
@@ -499,8 +518,14 @@
(check-changes items))
(binding [*touched-changes* (volatile! #{})]
(let [result (reduce #(or (process-change %1 %2) %1) data items)]
(reduce process-touched-change result @*touched-changes*)))))
(let [result (reduce #(or (process-change %1 %2) %1) data items)
result (reduce process-touched-change result @*touched-changes*)]
;; Validate result shapes (only on the backend)
;;
;; TODO: (PERF) add changed shapes tracking and only validate
;; the tracked changes instead of iterating over all shapes
#?(:clj (validate-shapes! data result items))
result))))
;; --- Comment Threads
@@ -588,10 +613,9 @@
(defmethod process-change :add-obj
[data {:keys [id obj page-id component-id frame-id parent-id index ignore-touched]}]
;; NOTE: we only perform hard validation on backend
#?(:clj (validate-shape obj page-id))
(let [update-container #(ctst/add-shape id obj % frame-id parent-id index ignore-touched)]
(let [update-container
(fn [container]
(ctst/add-shape id obj container frame-id parent-id index ignore-touched))]
(when *state*
(swap! *state* collect-shape-media-refs obj page-id))
@@ -614,9 +638,6 @@
(when (and *state* page-id)
(swap! *state* collect-shape-media-refs shape page-id))
;; NOTE: we only perform hard validation on backend
#?(:clj (validate-shape shape page-id))
(assoc objects id shape))
objects))
@@ -671,6 +692,8 @@
(d/update-in-when data [:pages-index page-id] fix-container)
(d/update-in-when data [:components component-id] fix-container))))
;; FIXME: remove, seems like this method is already unused
;; reg-objects operation "regenerates" the geometry and selrect of the parent groups
(defmethod process-change :reg-objects
[data {:keys [page-id component-id shapes]}]
;; FIXME: Improve performance
@@ -699,60 +722,48 @@
(update-group [group objects]
(let [lookup (d/getf objects)
children (get group :shapes)
group (cond
;; If the group is empty we don't make any changes. Will be removed by a later process
(empty? children)
group
children (get group :shapes)]
(cond
;; If the group is empty we don't make any changes. Will be removed by a later process
(empty? children)
group
(= :bool (:type group))
(path/update-bool-shape group objects)
(= :bool (:type group))
(path/update-bool-shape group objects)
(:masked-group group)
(->> (map lookup children)
(set-mask-selrect group))
(:masked-group group)
(->> (map lookup children)
(set-mask-selrect group))
:else
(->> (map lookup children)
(gsh/update-group-selrect group)))]
#?(:clj (validate-shape group page-id))
group))]
:else
(->> (map lookup children)
(gsh/update-group-selrect group)))))]
(if page-id
(d/update-in-when data [:pages-index page-id :objects] reg-objects)
(d/update-in-when data [:components component-id :objects] reg-objects))))
(defmethod process-change :mov-objects
;; FIXME: ignore-touched is no longer used, so we can consider it deprecated
[data {:keys [parent-id shapes index page-id component-id #_ignore-touched after-shape allow-altering-copies syncing]}]
[data {:keys [parent-id shapes index page-id component-id ignore-touched after-shape allow-altering-copies syncing]}]
(letfn [(calculate-invalid-targets [objects shape-id]
(let [reduce-fn #(into %1 (calculate-invalid-targets objects %2))]
(->> (get-in objects [shape-id :shapes])
(reduce reduce-fn #{shape-id}))))
;; Avoid placing a shape as a direct or indirect child of itself, or
;; inside its main component if it's in a copy, or inside a copy, or
;; from a copy
;; Avoid placing a shape as a direct or indirect child of itself,
;; or inside its main component if it's in a copy,
;; or inside a copy, or from a copy
(is-valid-move? [objects shape-id]
(let [invalid-targets (calculate-invalid-targets objects shape-id)
shape (get objects shape-id)]
(and shape
(not (invalid-targets parent-id))
(not (cfh/components-nesting-loop? objects shape-id parent-id))
(or
;; In some cases (like a component
;; swap) it's allowed to change the
;; structure of a copy
allow-altering-copies
;; DEPRECATED, remove once v2.12 released
syncing
(and
;; We don't want to change the structure of component copies
(not (ctk/in-component-copy? (get objects (:parent-id shape))))
;; We need to check the origin and target frames
(not (ctk/in-component-copy? (get objects parent-id))))))))
(or allow-altering-copies ;; In some cases (like a component swap) it's allowed to change the structure of a copy
syncing ;; If we are syncing the changes of a main component, it's allowed to change the structure of a copy
(and
(not (ctk/in-component-copy? (get objects (:parent-id shape)))) ;; We don't want to change the structure of component copies
(not (ctk/in-component-copy? (get objects parent-id)))))))) ;; We need to check the origin and target frames
(insert-items [prev-shapes index shapes]
(let [prev-shapes (or prev-shapes [])]
@@ -761,13 +772,17 @@
(cfh/append-at-the-end prev-shapes shapes))))
(add-to-parent [parent index shapes]
(update parent :shapes
(fn [parent-shapes]
(-> parent-shapes
(insert-items index shapes)
;; We need to ensure that no `nil` values remain in the shapes list
;; after adding all the incoming shapes to the parent.
(d/vec-without-nils)))))
(let [parent (-> parent
(update :shapes insert-items index shapes)
;; We need to ensure that no `nil`
;; values remain in the shapes list after
;; adding all the incoming shapes to the parent.
(update :shapes d/vec-without-nils))]
(cond-> parent
(and (:shape-ref parent)
(#{:group :frame} (:type parent))
(not ignore-touched))
(dissoc :remote-synced))))
(remove-from-old-parent [old-objects objects shape-id]
(let [prev-parent-id (dm/get-in old-objects [shape-id :parent-id])]
@@ -775,63 +790,58 @@
;; the new destination target parent id.
(if (= prev-parent-id parent-id)
objects
(d/update-in-when objects [prev-parent-id :shapes]
(fn [shapes]
(-> shapes
(d/without-obj shape-id)
(d/vec-without-nils)))))))
(let [sid shape-id
pid prev-parent-id
obj (get objects pid)
component? (and (:shape-ref obj)
(= (:type obj) :group)
(not ignore-touched))]
(-> objects
(d/update-in-when [pid :shapes] d/without-obj sid)
(d/update-in-when [pid :shapes] d/vec-without-nils)
(cond-> component? (d/update-when pid #(dissoc % :remote-synced))))))))
(update-parent-id [objects id]
(d/update-when objects id assoc :parent-id parent-id))
(-> objects
(d/update-when id assoc :parent-id parent-id)))
;; Updates the frame-id references that might be outdated
(update-frame-id [frame-id objects id]
(let [obj (some-> (get objects id)
(assoc :frame-id frame-id))]
(assign-frame-id [frame-id objects id]
(let [objects (d/update-when objects id assoc :frame-id frame-id)
obj (get objects id)]
(cond-> objects
(some? obj)
(assoc id obj)
;; If we are moving a frame, we DO NOT NEED to update
;; its children because they already point correctly
;; to the frame we are currently moving
(not (cfh/frame-shape? obj))
(as-> $$ (reduce (partial update-frame-id frame-id) $$ (:shapes obj))))))
(validate-shape [objects #_:clj-kondo/ignore shape-id]
#?(:clj (when-let [shape (get objects shape-id)]
(validate-shape shape page-id)))
objects)
;; If we are moving a frame, the parent frame is the root
;; and we DO NOT NEED to update its children because they
;; already point correctly to the frame we are currently
;; moving
(not= :frame (:type obj))
(as-> $$ (reduce (partial assign-frame-id frame-id) $$ (:shapes obj))))))
(move-objects [objects]
(let [parent (get objects parent-id)]
;; Do not proceed with the move if the parent does not
;; exist; this can happen on a race condition when an
;; in-flight move operation lands after the parent is deleted
(if (and (seq shapes) (every? (partial is-valid-move? objects) shapes) parent)
(let [index (or (some-> (d/index-of (:shapes parent) after-shape) inc) index)
frame-id (if (cfh/frame-shape? parent)
(:id parent)
(:frame-id parent))]
(as-> objects $
;; Add the new shapes to the parent object.
(d/update-when $ parent-id #(add-to-parent % index shapes))
(let [valid? (every? (partial is-valid-move? objects) shapes)
parent (get objects parent-id)
after-shape-index (d/index-of (:shapes parent) after-shape)
index (if (nil? after-shape-index) index (inc after-shape-index))
frame-id (if (= :frame (:type parent))
(:id parent)
(:frame-id parent))]
;; Update each individual shape link to the new parent
(reduce update-parent-id $ shapes)
(if (and valid? (seq shapes))
(as-> objects $
;; Add the new shapes to the parent object.
(d/update-when $ parent-id #(add-to-parent % index shapes))
;; Analyze the old parents and clear the old links
;; only if the new parent is different from the old
;; parent.
(reduce (partial remove-from-old-parent objects) $ shapes)
;; Update each individual shape link to the new parent
(reduce update-parent-id $ shapes)
;; Ensure that all shapes of the new parent have a
;; correct link to the topside frame.
(reduce (partial update-frame-id frame-id) $ shapes)
;; Perform validation of the affected shapes
(reduce validate-shape $ shapes)))
;; Analyze the old parents and clear the old links
;; only if the new parent is different from the old
;; parent.
(reduce (partial remove-from-old-parent objects) $ shapes)
;; Ensure that all shapes of the new parent have a
;; correct link to the topside frame.
(reduce (partial assign-frame-id frame-id) $ shapes))
objects)))]
(if page-id

View File

@@ -72,11 +72,9 @@
(= :bool (dm/get-prop shape :type))))
(defn text-shape?
([shape]
(and (some? shape)
(= :text (dm/get-prop shape :type))))
([objects id]
(text-shape? (get objects id))))
[shape]
(and (some? shape)
(= :text (dm/get-prop shape :type))))
(defn rect-shape?
[shape]

View File

@@ -1357,6 +1357,38 @@
(update :pages-index d/update-vals update-container)
(d/update-when :components d/update-vals update-container))))
(defmethod migrate-data "0004-clean-shadow-color"
[data _]
(let [decode-color (sm/decoder types.color/schema:color sm/json-transformer)
clean-shadow-color
(fn [color]
(let [ref-id (get color :id)
ref-file (get color :file-id)]
(-> (d/without-qualified color)
(select-keys [:opacity :color :gradient :image :ref-id :ref-file])
(cond-> ref-id
(assoc :ref-id ref-id))
(cond-> ref-file
(assoc :ref-file ref-file))
(decode-color))))
clean-shadow
(fn [shadow]
(update shadow :color clean-shadow-color))
update-object
(fn [object]
(d/update-when object :shadow #(mapv clean-shadow %)))
update-container
(fn [container]
(d/update-when container :objects d/update-vals update-object))]
(-> data
(update :pages-index d/update-vals update-container)
(d/update-when :components d/update-vals update-container))))
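;; Illustrative before/after for the clean-shadow-color step above
;; (hypothetical color map; real reference ids are uuids, strings are used
;; here only for brevity, and the final decode step also coerces values to
;; match the color schema):
{:color "#000000"
 :opacity 0.25
 :id "library-color-id"
 :file-id "library-file-id"
 :internal/cached true}
;; becomes (qualified keys dropped, :id/:file-id renamed to :ref-id/:ref-file,
;; anything outside the allowed color attributes removed):
{:color "#000000"
 :opacity 0.25
 :ref-id "library-color-id"
 :ref-file "library-file-id"}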
(defmethod migrate-data "0005-deprecate-image-type"
[data _]
(letfn [(update-object [object]
@@ -1665,45 +1697,6 @@
(update :pages-index d/update-vals update-container)
(d/update-when :components d/update-vals update-container))))
(defmethod migrate-data "0015-clean-shadow-color"
[data _]
(let [decode-shadow-color
(sm/decoder ctss/schema:color sm/json-transformer)
clean-shadow-color
(fn [color]
(let [ref-id (get color :id)
ref-file (get color :file-id)]
(-> (d/without-qualified color)
(select-keys ctss/color-attrs)
(cond-> ref-id
(assoc :ref-id ref-id))
(cond-> ref-file
(assoc :ref-file ref-file))
(decode-shadow-color)
(d/without-nils))))
clean-shadow
(fn [shadow]
(update shadow :color clean-shadow-color))
clean-xform
(comp
(keep clean-shadow)
(filter ctss/valid-shadow?))
update-object
(fn [object]
(d/update-when object :shadow #(into [] clean-xform %)))
update-container
(fn [container]
(d/update-when container :objects d/update-vals update-object))]
(-> data
(update :pages-index d/update-vals update-container)
(d/update-when :components d/update-vals update-container))))
;; Copy fills from position-data to text nodes when all text nodes lack fills,
;; all position-data have fills, and the counts match
(defmethod migrate-data "0016-copy-fills-from-position-data-to-text-node"
@@ -1825,6 +1818,7 @@
"0002-clean-shape-interactions"
"0003-fix-root-shape"
"0003-convert-path-content-v2"
"0004-clean-shadow-color"
"0005-deprecate-image-type"
"0006-fix-old-texts-fills"
"0008-fix-library-colors-v4"
@@ -1838,5 +1832,4 @@
"0014-fix-tokens-lib-duplicate-ids"
"0014-clear-components-nil-objects"
"0015-fix-text-attrs-blank-strings"
"0015-clean-shadow-color"
"0016-copy-fills-from-position-data-to-text-node"]))

View File

@@ -82,113 +82,6 @@
(declare create-svg-children)
(declare parse-svg-element)
(defn- process-gradient-stops
"Processes gradient stops to extract stop-color and stop-opacity from style attributes
and convert them to direct attributes. This ensures stops with style='stop-color:#...;stop-opacity:1'
are properly converted to stop-color and stop-opacity attributes."
[stops]
(mapv (fn [stop]
(let [stop-attrs (:attrs stop)
stop-style (get stop-attrs :style)
;; Parse style if it's a string using csvg/parse-style utility
parsed-style (when (and (string? stop-style) (seq stop-style))
(csvg/parse-style stop-style))
;; Extract stop-color and stop-opacity from style
style-stop-color (when parsed-style (:stop-color parsed-style))
style-stop-opacity (when parsed-style (:stop-opacity parsed-style))
;; Merge: use direct attributes first, then style values as fallback
final-attrs (cond-> stop-attrs
(and style-stop-color (not (contains? stop-attrs :stop-color)))
(assoc :stop-color style-stop-color)
(and style-stop-opacity (not (contains? stop-attrs :stop-opacity)))
(assoc :stop-opacity style-stop-opacity)
;; Remove style attribute if we've extracted its values
(or style-stop-color style-stop-opacity)
(dissoc :style))]
(assoc stop :attrs final-attrs)))
stops))
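;; Illustrative before/after for the stop normalization above (hypothetical
;; parsed <stop> node; the map shape mirrors the nodes this function receives):
[{:tag :stop
  :attrs {:offset "0" :style "stop-color:#f9dd67;stop-opacity:1"}}]
;; becomes (style values promoted to direct attributes, style removed)
[{:tag :stop
  :attrs {:offset "0" :stop-color "#f9dd67" :stop-opacity "1"}}]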
(defn- resolve-gradient-href
"Resolves xlink:href references in gradients by merging the referenced gradient's
stops and attributes with the referencing gradient. This ensures gradients that
reference other gradients (like linearGradient3550 referencing linearGradient3536)
inherit the stops from the base gradient.
According to SVG spec, when a gradient has xlink:href:
- It inherits all attributes from the referenced gradient
- It inherits all stops from the referenced gradient
- The referencing gradient's attributes override the base ones
- If the referencing gradient has stops, they replace the base stops
Returns the defs map with all gradient href references resolved."
[defs]
(letfn [(resolve-gradient [gradient-id gradient-node defs visited]
(if (contains? visited gradient-id)
(do
#?(:cljs (js/console.warn "[resolve-gradient] Circular reference detected for" gradient-id)
:clj nil)
gradient-node) ;; Avoid circular references
(let [attrs (:attrs gradient-node)
href-id (or (:href attrs) (:xlink:href attrs))
href-id (when (and (string? href-id) (pos? (count href-id)))
(subs href-id 1)) ;; Remove leading #
base-gradient (when (and href-id (contains? defs href-id))
(get defs href-id))
resolved-base (when base-gradient (resolve-gradient href-id base-gradient defs (conj visited gradient-id)))]
(if resolved-base
;; Merge: base gradient attributes + referencing gradient attributes
;; Use referencing gradient's stops if present, otherwise use base stops
(let [base-attrs (:attrs resolved-base)
ref-attrs (:attrs gradient-node)
;; Start with base attributes (without id), then merge with ref attributes
;; This ensures ref attributes override base ones
base-attrs-clean (dissoc base-attrs :id)
ref-attrs-clean (dissoc ref-attrs :href :xlink:href :id)
;; Special handling for gradientTransform: if both have it, combine them
base-transform (get base-attrs :gradientTransform)
ref-transform (get ref-attrs :gradientTransform)
combined-transform (cond
(and base-transform ref-transform)
(str base-transform " " ref-transform) ;; Apply base first, then ref
:else (or ref-transform base-transform))
;; Merge attributes: base first, then ref (ref overrides)
merged-attrs (-> (d/deep-merge base-attrs-clean ref-attrs-clean)
(cond-> combined-transform
(assoc :gradientTransform combined-transform)))
;; If referencing gradient has content (stops), use it; otherwise use base content
final-content (if (seq (:content gradient-node))
(:content gradient-node)
(:content resolved-base))
;; Process stops to extract stop-color and stop-opacity from style attributes
processed-content (process-gradient-stops final-content)
result {:tag (:tag gradient-node)
:attrs (assoc merged-attrs :id gradient-id)
:content processed-content}]
result)
;; Process stops even for gradients without references to extract style attributes
(let [processed-content (process-gradient-stops (:content gradient-node))]
(assoc gradient-node :content processed-content))))))]
(let [gradient-tags #{:linearGradient :radialGradient}
result (reduce-kv
(fn [acc id node]
(if (contains? gradient-tags (:tag node))
(assoc acc id (resolve-gradient id node defs #{}))
(assoc acc id node)))
{}
defs)]
result)))
(defn create-svg-shapes
([svg-data pos objects frame-id parent-id selected center?]
(create-svg-shapes (uuid/next) svg-data pos objects frame-id parent-id selected center?))
@@ -219,9 +112,6 @@
(csvg/fix-percents)
(csvg/extract-defs))
;; Resolve gradient href references in all defs before processing shapes
def-nodes (resolve-gradient-href def-nodes)
;; In Penpot, groups have the size of their children. To
;; respect the imported svg size and empty space, let's create
;; a transparent shape as background to respect the imported
@@ -252,23 +142,12 @@
(reduce (partial create-svg-children objects selected frame-id root-id svg-data)
[unames []]
(d/enumerate (->> (:content svg-data)
(mapv #(csvg/inherit-attributes root-attrs %)))))
(mapv #(csvg/inherit-attributes root-attrs %)))))]
;; Collect all defs from children and merge into root shape
all-defs-from-children (reduce (fn [acc child]
(if-let [child-defs (:svg-defs child)]
(merge acc child-defs)
acc))
{}
children)
;; Merge defs from svg-data and children into root shape
root-shape-with-defs (assoc root-shape :svg-defs (merge def-nodes all-defs-from-children))]
[root-shape-with-defs children])))
[root-shape children])))
(defn create-raw-svg
[name frame-id {:keys [x y width height offset-x offset-y defs] :as svg-data} {:keys [attrs] :as data}]
[name frame-id {:keys [x y width height offset-x offset-y]} {:keys [attrs] :as data}]
(let [props (csvg/attrs->props attrs)
vbox (grc/make-rect offset-x offset-y width height)]
(cts/setup-shape
@@ -281,11 +160,10 @@
:y y
:content data
:svg-attrs props
:svg-viewbox vbox
:svg-defs defs})))
:svg-viewbox vbox})))
(defn create-svg-root
[id frame-id parent-id {:keys [name x y width height offset-x offset-y attrs defs] :as svg-data}]
[id frame-id parent-id {:keys [name x y width height offset-x offset-y attrs]}]
(let [props (-> (dissoc attrs :viewBox :view-box :xmlns)
(d/without-keys csvg/inheritable-props)
(csvg/attrs->props))]
@@ -299,8 +177,7 @@
:height height
:x (+ x offset-x)
:y (+ y offset-y)
:svg-attrs props
:svg-defs defs})))
:svg-attrs props})))
(defn create-svg-children
[objects selected frame-id parent-id svg-data [unames children] [_index svg-element]]
@@ -321,7 +198,7 @@
(defn create-group
[name frame-id {:keys [x y width height offset-x offset-y defs] :as svg-data} {:keys [attrs]}]
[name frame-id {:keys [x y width height offset-x offset-y] :as svg-data} {:keys [attrs]}]
(let [transform (csvg/parse-transform (:transform attrs))
attrs (-> attrs
(d/without-keys csvg/inheritable-props)
@@ -337,8 +214,7 @@
:height height
:svg-transform transform
:svg-attrs attrs
:svg-viewbox vbox
:svg-defs defs})))
:svg-viewbox vbox})))
(defn create-path-shape [name frame-id svg-data {:keys [attrs] :as data}]
(when (and (contains? attrs :d) (seq (:d attrs)))
@@ -647,21 +523,6 @@
:else (dm/str tag))]
(dm/str "svg-" suffix)))
(defn- filter-valid-def-references
"Filters out false positive references that are not valid def IDs.
Filters out:
- Colors in style attributes (hex colors like #f9dd67)
- Style fragments that contain CSS keywords (like stop-opacity)
- References that don't exist in defs"
[ref-ids defs]
(let [is-style-fragment? (fn [ref-id]
(or (clr/hex-color-string? (str "#" ref-id))
(str/includes? ref-id ";") ;; Contains CSS separator
(str/includes? ref-id "stop-opacity") ;; CSS keyword
(str/includes? ref-id "stop-color")))] ;; CSS keyword
(->> ref-ids
(remove is-style-fragment?) ;; Filter style fragments and hex colors
(filter #(contains? defs %))))) ;; Only existing defs
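;; Illustrative call to the helper above (hypothetical ids; the defs map is
;; keyed by def id, as in the surrounding code):
(filter-valid-def-references
 ["gradient3550" "f9dd67" "stop-opacity:1;stop-color:#fff"]
 {"gradient3550" {:tag :linearGradient}})
;; => ("gradient3550")  the hex-color and style fragments are filtered out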
(defn parse-svg-element
[frame-id svg-data {:keys [tag attrs hidden] :as element} unames]
@@ -673,11 +534,7 @@
(let [name (or (:id attrs) (tag->name tag))
att-refs (csvg/find-attr-references attrs)
defs (get svg-data :defs)
valid-refs (filter-valid-def-references att-refs defs)
all-refs (csvg/find-def-references defs valid-refs)
;; Filter the final result to ensure all references are valid defs
;; This prevents false positives from style attributes in gradient stops
references (filter-valid-def-references all-refs defs)
references (csvg/find-def-references defs att-refs)
href-id (or (:href attrs) (:xlink:href attrs) " ")
href-id (if (and (string? href-id)

Some files were not shown because too many files have changed in this diff.