Compare commits

..

1 Commit

Author: Andrey Antukh · SHA1: c37f5dd286 · Message: WIP · Date: 2025-11-12 13:01:09 +01:00
471 changed files with 30408 additions and 38323 deletions

View File

@@ -114,7 +114,7 @@ jobs:
# uses the same cache as this task so we prepopulate it
command: |
yarn install
yarn run playwright install chromium --with-deps
yarn run playwright install chromium
- run:
name: "lint scss on frontend"
@@ -207,6 +207,51 @@ jobs:
"npx http-server storybook-static --port 6006 --silent" \
"npx wait-on tcp:6006 && yarn test:storybook"
test-integration:
docker:
- image: penpotapp/devenv:latest
working_directory: ~/repo
resource_class: large
environment:
JAVA_OPTS: -Xmx6g -Xms2g
NODE_OPTIONS: --max-old-space-size=4096
steps:
- checkout
# Download and cache dependencies
- restore_cache:
keys:
- v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}
# Build frontend
- run:
name: "frontend build"
working_directory: "./frontend"
command: |
yarn install
yarn run build:app:assets
yarn run build:app
yarn run build:app:libs
# Build the wasm bundle
- run:
name: "wasm build"
working_directory: "./render-wasm"
command: |
EMSDK_QUIET=1 . /opt/emsdk/emsdk_env.sh
./build release
# Run integration tests
- run:
name: "integration tests"
working_directory: "./frontend"
command: |
yarn run playwright install chromium
yarn run test:e2e -x --workers=4
test-backend:
docker:
- image: penpotapp/devenv:latest
@@ -302,4 +347,5 @@ workflows:
- lint: success
- lint
- test-integration
- test-render-wasm

View File

@@ -1,14 +0,0 @@
name: _STAGING RENDER
on:
schedule:
- cron: '36 5-20 * * 1-5'
jobs:
build-bundle:
uses: ./.github/workflows/build-bundle.yml
secrets: inherit
with:
gh_ref: "staging-render"
build_wasm: "yes"
build_storybook: "yes"

View File

@@ -11,7 +11,7 @@ jobs:
secrets: inherit
with:
gh_ref: ${{ github.ref_name }}
build_wasm: "yes"
build_wasm: "no"
build_storybook: "yes"
build-docker:
@@ -21,22 +21,6 @@ jobs:
with:
gh_ref: ${{ github.ref_name }}
notify:
name: Notifications
runs-on: ubuntu-24.04
needs: build-docker
steps:
- name: Notify Mattermost
uses: mattermost/action-mattermost-notify@master
with:
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
MATTERMOST_CHANNEL: bot-alerts-cicd
TEXT: |
🐳 *[PENPOT] Docker image available.*
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
@infra
publish-final-tag:
if: ${{ !contains(github.ref_name, '-RC') && !contains(github.ref_name, '-alpha') && !contains(github.ref_name, '-beta') && contains(github.ref_name, '.') }}
needs: build-docker

View File

@@ -68,12 +68,12 @@ jobs:
for image in "${IMAGES[@]}"; do
skopeo copy --all \
docker://$DOCKER_REGISTRY/$image:$TAG \
docker://docker.io/penpotapp/$image:$TAG
docker://docker.io/$PUB_DOCKER_USERNAME/$image:$TAG
for alias in main latest; do
skopeo copy --all \
docker://$DOCKER_REGISTRY/$image:$TAG \
docker://docker.io/penpotapp/$image:$alias
docker://docker.io/$PUB_DOCKER_USERNAME/$image:$alias
done
done

View File

@@ -1,298 +0,0 @@
name: "CI"
defaults:
run:
shell: bash
on:
pull_request:
types:
- opened
- synchronize
push:
branches:
- develop
- staging
concurrency:
group: ${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
lint:
name: "Linter"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Check clojure code format
run: |
./scripts/lint
test-common:
name: "Common Tests"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Run tests on JVM
working-directory: ./common
run: |
clojure -M:dev:test
- name: Run tests on NODE
working-directory: ./common
run: |
./scripts/test
test-frontend:
name: "Frontend Tests"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Unit Tests
working-directory: ./frontend
run: |
./scripts/test
- name: Component Tests
working-directory: ./frontend
run: |
./scripts/test-components
test-render-wasm:
name: "Render WASM Tests"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Format
working-directory: ./render-wasm
run: |
cargo fmt --check
- name: Lint
working-directory: ./render-wasm
run: |
./lint
- name: Test
working-directory: ./render-wasm
run: |
./test
test-backend:
name: "Backend Tests"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
services:
postgres:
image: postgres:17
# Provide the password for postgres
env:
POSTGRES_USER: penpot_test
POSTGRES_PASSWORD: penpot_test
POSTGRES_DB: penpot_test
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
redis:
image: valkey/valkey:9
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Run tests
working-directory: ./backend
env:
PENPOT_TEST_DATABASE_URI: "postgresql://postgres/penpot_test"
PENPOT_TEST_DATABASE_USERNAME: penpot_test
PENPOT_TEST_DATABASE_PASSWORD: penpot_test
PENPOT_TEST_REDIS_URI: "redis://redis/1"
run: |
clojure -M:dev:test --reporter kaocha.report/documentation
test-library:
name: "Library Tests"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Run tests
working-directory: ./library
run: |
./scripts/test
build-integration:
name: "Build Integration Bundle"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Build Bundle
working-directory: ./frontend
run: |
corepack enable;
corepack install;
yarn install
yarn run build:app:assets
yarn run build:app
yarn run build:app:libs
- name: Build WASM
working-directory: "./render-wasm"
run: |
./build release
- name: Store Bundle Cache
uses: actions/cache@v4
with:
key: "integration-bundle-${{ github.sha }}"
path: frontend/resources/public
test-integration-1:
name: "Integration Tests 1/4"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
needs: build-integration
steps:
- name: Checkout Repository
uses: actions/checkout@v4
- name: Restore Cache
uses: actions/cache/restore@v4
with:
key: "integration-bundle-${{ github.sha }}"
path: frontend/resources/public
- name: Run Tests
working-directory: ./frontend
run: |
./scripts/test-e2e --shard="1/4";
- name: Upload test result
uses: actions/upload-artifact@v4
if: always()
with:
name: integration-tests-result-1
path: frontend/test-results/
overwrite: true
retention-days: 3
test-integration-2:
name: "Integration Tests 2/4"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
needs: build-integration
steps:
- name: Checkout Repository
uses: actions/checkout@v4
- name: Restore Cache
uses: actions/cache/restore@v4
with:
key: "integration-bundle-${{ github.sha }}"
path: frontend/resources/public
- name: Run Tests
working-directory: ./frontend
run: |
./scripts/test-e2e --shard="2/4";
- name: Upload test result
uses: actions/upload-artifact@v4
if: always()
with:
name: integration-tests-result-2
path: frontend/test-results/
overwrite: true
retention-days: 3
test-integration-3:
name: "Integration Tests 3/4"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
needs: build-integration
steps:
- name: Checkout Repository
uses: actions/checkout@v4
- name: Restore Cache
uses: actions/cache/restore@v4
with:
key: "integration-bundle-${{ github.sha }}"
path: frontend/resources/public
- name: Run Tests
working-directory: ./frontend
run: |
./scripts/test-e2e --shard="3/4";
- name: Upload test result
uses: actions/upload-artifact@v4
if: always()
with:
name: integration-tests-result-3
path: frontend/test-results/
overwrite: true
retention-days: 3
test-integration-4:
name: "Integration Tests 4/4"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
needs: build-integration
steps:
- name: Checkout Repository
uses: actions/checkout@v4
- name: Restore Cache
uses: actions/cache/restore@v4
with:
key: "integration-bundle-${{ github.sha }}"
path: frontend/resources/public
- name: Run Tests
working-directory: ./frontend
run: |
./scripts/test-e2e --shard="4/4";
- name: Upload test result
uses: actions/upload-artifact@v4
if: always()
with:
name: integration-tests-result-4
path: frontend/test-results/
overwrite: true
retention-days: 3

.gitignore
View File

@@ -80,4 +80,3 @@ node_modules
/playwright/.cache/
/render-wasm/target/
/**/.yarn/*
/.pnpm-store

View File

@@ -1,87 +1,23 @@
# CHANGELOG
## 2.12.1
### :bug: Bugs fixed
- Fix setting a portion of text as bold or underline messes things up [Github #7980](https://github.com/penpot/penpot/issues/7980)
- Fix problem with style in fonts input [Taiga #12935](https://tree.taiga.io/project/penpot/issue/12935)
- Fix problem with path editor and right click [Github #7917](https://github.com/penpot/penpot/issues/7917)
## 2.12.0
## 2.12.0 (Unreleased)
### :boom: Breaking changes & Deprecations
#### Backend RPC API changes
The backend RPC API URLs are changed from `/api/rpc/command/<name>` to
`/api/main/methods/<name>`. The previous PATH is preserved for backward
compatibility; however, if you are a user of this API, it is strongly
recommended that you adapt your code to use the new PATH.
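For example, a call to a method (here using `get-profile` purely as an illustration) moves from the old path to the new one:
```
Old: https://<your_domain>/api/rpc/command/get-profile
New: https://<your_domain>/api/main/methods/get-profile
```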
#### Updated SSO Callback URL
The OAuth / Single Sign-On (SSO) callback endpoint has changed to
align with the new OpenID Connect (OIDC) implementation.
Old callback URL:
```
https://<your_domain>/api/auth/oauth/<oauth_provider>/callback
```
New callback URL:
```
https://<your_domain>/api/auth/oidc/callback
```
**Action required:**
If you have SSO/Social-Auth configured on your on-premise instance,
the following actions are required before updating:
Update your OAuth or SSO provider configuration (e.g., Okta, Google,
Azure AD, etc.) to use the new callback URL. Failure to update may
result in authentication failures after upgrading.
**Reason for change:**
This update standardizes all authentication flows under a single URL
and makes the implementation more modular, enabling SSO auth providers
to be configured dynamically.
#### Changes on default docker compose
We have updated the `docker/images/docker-compose.yaml` with a small
change related to `PENPOT_SECRET_KEY`. Since this version, this
environment variable is also required by the exporter, so if you are
running Penpot on-premise you will need to apply the same change to
your own `docker-compose.yaml` file.
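As a minimal sketch of what this looks like (the `penpot-exporter` service name and the placeholder value below are illustrative; reuse the same secret you already configured for the backend):
```
penpot-exporter:
  image: "penpotapp/exporter:latest"
  environment:
    # must match the PENPOT_SECRET_KEY configured on the backend service
    PENPOT_SECRET_KEY: my-insecure-secret-key
```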
We have removed the Minio server from the `docker/images/docker-compose.yml`
example. It is still usable as before; we have only removed it from the
example.
- The backend RPC API URLs are changed from `/api/rpc/command/<name>`
to `/api/main/methods/<name>` (the previous PATH is preserved for
backward compatibility; however, if you are a user of this API, it
is strongly recommended that you adapt your code to use the new
PATH).
### :rocket: Epics and highlights
### :heart: Community contributions (Thank you!)
- Ensure consistent snap behavior across all zoom levels [Github #7774](https://github.com/penpot/penpot/pull/7774) by [@Tokytome](https://github.com/Tokytome)
- Fix crash in token grid view due to tooltip validation (by @dfelinto) [Github #7887](https://github.com/penpot/penpot/pull/7887)
- Enable Hindi translations on the application
### :sparkles: New features & Enhancements
- Add the ability to select boards to export as PDF [Taiga #12320](https://tree.taiga.io/project/penpot/issue/12320)
- Add toggle for switching boolean property values [Taiga #12341](https://tree.taiga.io/project/penpot/us/12341)
- Make the file export process more reliable [Taiga #12555](https://tree.taiga.io/project/penpot/us/12555)
- Add auth flow changes [Taiga #12333](https://tree.taiga.io/project/penpot/us/12333)
- Add new shape validation mechanism for shapes [Github #7696](https://github.com/penpot/penpot/pull/7696)
- Apply color tokens from sidebar [Taiga #11353](https://tree.taiga.io/project/penpot/us/11353)
- Display tokens in the inspect tab [Taiga #9313](https://tree.taiga.io/project/penpot/us/9313)
- Refactor clipboard behavior to address some minor inconsistencies and make pasting binary data faster. [Taiga #12571](https://tree.taiga.io/project/penpot/task/12571)
- Select boards to export as PDF [Taiga #12320](https://tree.taiga.io/project/penpot/issue/12320)
- Toggle for switching boolean property values [Taiga #12341](https://tree.taiga.io/project/penpot/us/12341)
### :bug: Bugs fixed
@@ -96,19 +32,6 @@ example. It's still usable as before, we just removed the example.
- Fix shortcut conflict in text editor (increase/decrease font size vs word selection)
- Fix problem with plugins generating code for pages different than current one [Taiga #12312](https://tree.taiga.io/project/penpot/issue/12312)
- Fix input confirmation behavior is not uniform [Taiga #12294](https://tree.taiga.io/project/penpot/issue/12294)
- Fix copy/pasting application/transit+json [Taiga #12721](https://tree.taiga.io/project/penpot/issue/12721)
- Fix problem with plugins content attribute [Plugins #209](https://github.com/penpot/penpot-plugins/issues/209)
- Fix U and E icon displayed in project list [Taiga #12806](https://tree.taiga.io/project/penpot/issue/12806)
- Fix unpublish library modal not scrolling a long file list [Taiga #12285](https://tree.taiga.io/project/penpot/issue/12285)
- Fix incorrect interaction between hover and scroll on assets sidebar [Taiga #12389](https://tree.taiga.io/project/penpot/issue/12389)
- Fix switch variants with paths [Taiga #12841](https://tree.taiga.io/project/penpot/issue/12841)
- Fix referencing typography tokens on font-family tokens [Taiga #12492](https://tree.taiga.io/project/penpot/issue/12492)
- Fix horizontal scroll on layer panel [Taiga #12843](https://tree.taiga.io/project/penpot/issue/12843)
- Fix unicode handling on email template abbreviation filter [Github #7966](https://github.com/penpot/penpot/pull/7966)
## 2.11.1
- Fix WEBP shape export on docker images [Taiga #3838](https://tree.taiga.io/project/penpot/issue/3838)
## 2.11.0

View File

@@ -1,8 +1,7 @@
From: {{profile.fullname}} <{{profile.email}}> / {{profile.id}}
Subject: {{feedback-subject}}
Type: {{feedback-type}}
{% if feedback-error-href %}
{%- if feedback-error-href %}
HREF: {{feedback-error-href}}
{% endif -%}

View File

@@ -240,4 +240,4 @@
</div>
</body>
</html>
</html>

View File

@@ -3,7 +3,7 @@
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Tokens%20starter%20kit.penpot"}
{:id "penpot-design-system"
:name "Penpot Design System | Pencil"
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Pencil-Penpot-Design-System.penpot"}
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/penpot-app.penpot"}
{:id "wireframing-kit"
:name "Wireframe library"
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Wireframing%20kit%20v1.1.penpot"}

View File

@@ -25,7 +25,8 @@
<Logger name="app.storage.tmp" level="info" />
<Logger name="app.worker" level="trace" />
<Logger name="app.msgbus" level="info" />
<Logger name="app.http" level="info" />
<Logger name="app.http.websocket" level="info" />
<Logger name="app.http.sse" level="info" />
<Logger name="app.util.websocket" level="info" />
<Logger name="app.redis" level="info" />
<Logger name="app.rpc.rlimit" level="info" />

View File

@@ -25,7 +25,8 @@
<Logger name="app.storage.tmp" level="info" />
<Logger name="app.worker" level="trace" />
<Logger name="app.msgbus" level="info" />
<Logger name="app.http" level="info" />
<Logger name="app.http.websocket" level="info" />
<Logger name="app.http.sse" level="info" />
<Logger name="app.util.websocket" level="info" />
<Logger name="app.redis" level="info" />
<Logger name="app.rpc.rlimit" level="info" />

View File

@@ -1,18 +1,18 @@
#!/usr/bin/env bash
export PENPOT_MANAGEMENT_API_KEY=super-secret-management-api-key
export PENPOT_MANAGEMENT_API_SHARED_KEY=super-secret-management-api-key
export PENPOT_SECRET_KEY=super-secret-devenv-key
export PENPOT_HOST=devenv
export PENPOT_PUBLIC_URI=https://localhost:3449
export PENPOT_FLAGS="\
$PENPOT_FLAGS \
enable-login-with-ldap \
enable-login-with-password
disable-login-with-ldap \
disable-login-with-oidc \
disable-login-with-google \
disable-login-with-github \
disable-login-with-gitlab \
enable-login-with-oidc \
enable-login-with-google \
enable-login-with-github \
enable-login-with-gitlab \
enable-backend-worker \
enable-backend-asserts \
disable-feature-fdata-pointer-map \

View File

File diff suppressed because it is too large.

View File

@@ -255,8 +255,6 @@
(write-entry! output path params)
(events/tap :progress {:section :storage-object :id id})
(with-open [input (sto/get-object-data storage sobject)]
(.putNextEntry ^ZipOutputStream output (ZipEntry. (str "objects/" id ext)))
(io/copy input output :size (:size sobject))
@@ -281,8 +279,6 @@
thumbnails (bfc/get-file-object-thumbnails cfg file-id)]
(events/tap :progress {:section :file :id file-id})
(vswap! bfc/*state* update :files assoc file-id
{:id file-id
:name (:name file)
@@ -821,10 +817,9 @@
entries (keep (match-storage-entry-fn) entries)]
(doseq [{:keys [id entry]} entries]
(let [object (-> (read-entry input entry)
(decode-storage-object)
(update :bucket d/nilv sto/default-bucket)
(validate-storage-object))
(let [object (->> (read-entry input entry)
(decode-storage-object)
(validate-storage-object))
ext (cmedia/mtype->extension (:content-type object))
path (str "objects/" id ext)

View File

@@ -5,6 +5,7 @@
;; Copyright (c) KALEIDOS INC
(ns app.config
"A configuration management."
(:refer-clojure :exclude [get])
(:require
[app.common.data :as d]
@@ -102,7 +103,7 @@
[:http-server-io-threads {:optional true} ::sm/int]
[:http-server-max-worker-threads {:optional true} ::sm/int]
[:management-api-key {:optional true} :string]
[:management-api-shared-key {:optional true} :string]
[:telemetry-uri {:optional true} :string]
[:telemetry-with-taiga {:optional true} ::sm/boolean] ;; DELETE
@@ -167,7 +168,7 @@
[:google-client-id {:optional true} :string]
[:google-client-secret {:optional true} :string]
[:oidc-client-id {:optional true} :string]
[:oidc-user-info-source {:optional true} [:enum "auto" "userinfo" "token"]]
[:oidc-user-info-source {:optional true} :keyword]
[:oidc-client-secret {:optional true} :string]
[:oidc-base-uri {:optional true} :string]
[:oidc-token-uri {:optional true} :string]

View File

@@ -106,17 +106,17 @@
(let [content-part (MimeBodyPart.)
alternative-mpart (MimeMultipart. "alternative")]
(when-let [content (get body "text/plain")]
(let [text-part (MimeBodyPart.)]
(.setText text-part ^String content ^String charset)
(.addBodyPart alternative-mpart text-part)))
(when-let [content (get body "text/html")]
(let [html-part (MimeBodyPart.)]
(.setContent html-part ^String content
(str "text/html; charset=" charset))
(.addBodyPart alternative-mpart html-part)))
(when-let [content (get body "text/plain")]
(let [text-part (MimeBodyPart.)]
(.setText text-part ^String content ^String charset)
(.addBodyPart alternative-mpart text-part)))
(.setContent content-part alternative-mpart)
(.addBodyPart mixed-mpart content-part))

View File

@@ -9,7 +9,7 @@
[app.common.logging :as l]
[app.config :as cf]
[app.db :as db]
[app.http :as-alias http]
[app.http.auth :as-alias http.auth]
[app.main :as-alias main]
[app.setup :as-alias setup]
[app.tokens :as tokens]))
@@ -33,26 +33,25 @@
(defn- get-token-data
[pool claims]
(when-not (db/read-only? pool)
(when-let [token-id (get claims :tid)]
(when-let [token-id (-> (deref claims) (get :tid))]
(some-> (db/exec-one! pool [sql:get-token-data token-id])
(update :perms db/decode-pgarray #{})))))
(defn- wrap-authz
[handler {:keys [::db/pool]}]
(fn [request]
(let [{:keys [type claims]} (get request ::http/auth-data)]
(if (= :token type)
(let [{:keys [perms profile-id expires-at]} (some->> claims (get-token-data pool))]
;; FIXME: revisit this, this data looks unused
(handler (cond-> request
(some? perms)
(assoc ::perms perms)
(some? profile-id)
(assoc ::profile-id profile-id)
(some? expires-at)
(assoc ::expires-at expires-at))))
(fn [{:keys [::http.auth/token-type] :as request}]
(if (= :token token-type)
(let [{:keys [perms profile-id expires-at]} (some->> (get request ::http.auth/claims)
(get-token-data pool))]
(handler (cond-> request
(some? perms)
(assoc ::perms perms)
(some? profile-id)
(assoc ::profile-id profile-id)
(some? expires-at)
(assoc ::expires-at expires-at))))
(handler request)))))
(handler request))))
(def authz
{:name ::authz

View File

@@ -9,7 +9,8 @@
(:require
[app.common.schema :as sm]
[integrant.core :as ig]
[java-http-clj.core :as http])
[java-http-clj.core :as http]
[promesa.core :as p])
(:import
java.net.http.HttpClient))
@@ -28,9 +29,14 @@
(defn send!
([client req] (send! client req {}))
([client req {:keys [response-type] :or {response-type :string}}]
([client req {:keys [response-type sync?] :or {response-type :string sync? false}}]
(assert (client? client) "expected valid http client")
(http/send req {:client client :as response-type})))
(if sync?
(http/send req {:client client :as response-type})
(try
(http/send-async req {:client client :as response-type})
(catch Throwable cause
(p/rejected cause))))))
(defn- resolve-client
[params]
@@ -50,8 +56,8 @@
([cfg-or-client request]
(let [client (resolve-client cfg-or-client)
request (update request :uri str)]
(send! client request {})))
(send! client request {:sync? true})))
([cfg-or-client request options]
(let [client (resolve-client cfg-or-client)
request (update request :uri str)]
(send! client request options))))
(send! client request (merge {:sync? true} options)))))

View File

@@ -23,7 +23,7 @@
(defn request->context
"Extracts error report relevant context data from request."
[request]
(let [{:keys [claims] :as auth} (get request ::http/auth-data)]
(let [claims (some-> (get request ::auth/claims) deref)]
(-> (cf/logging-context)
(assoc :request/path (:path request))
(assoc :request/method (:method request))
@@ -31,7 +31,6 @@
(assoc :request/user-agent (yreq/get-header request "user-agent"))
(assoc :request/ip-addr (inet/parse-request request))
(assoc :request/profile-id (get claims :uid))
(assoc :request/auth-data auth)
(assoc :version/frontend (or (yreq/get-header request "x-frontend-version") "unknown")))))
(defmulti handle-error
@@ -60,6 +59,7 @@
::yres/body data}
(binding [l/*context* (request->context request)]
(l/wrn :hint "restriction error" :cause err)
{::yres/status 400
::yres/body data}))))

View File

@@ -50,27 +50,23 @@
(db/tx-run! cfg handler request)))))})
(defmethod ig/init-key ::routes
[_ {:keys [::setup/props] :as cfg}]
[_ cfg]
["" {:middleware [[mw/shared-key-auth (cf/get :management-api-shared-key)]
[default-system cfg]
[transaction]]}
["/authenticate"
{:handler authenticate
:allowed-methods #{:post}}]
(let [management-key (or (cf/get :management-api-key)
(get props :management-key))]
["/get-customer"
{:handler get-customer
:transaction true
:allowed-methods #{:post}}]
["" {:middleware [[mw/shared-key-auth management-key]
[default-system cfg]
[transaction]]}
["/authenticate"
{:handler authenticate
:allowed-methods #{:post}}]
["/get-customer"
{:handler get-customer
:transaction true
:allowed-methods #{:post}}]
["/update-customer"
{:handler update-customer
:allowed-methods #{:post}
:transaction true}]]))
["/update-customer"
{:handler update-customer
:allowed-methods #{:post}
:transaction true}]])
;; ---- HELPERS

View File

@@ -12,11 +12,9 @@
[app.common.schema :as-alias sm]
[app.common.transit :as t]
[app.config :as cf]
[app.http :as-alias http]
[app.http.auth :as-alias auth]
[app.http.errors :as errors]
[app.tokens :as tokens]
[app.util.pointer-map :as pmap]
[buddy.core.codecs :as bc]
[cuerdas.core :as str]
[yetti.adapter :as yt]
[yetti.middleware :as ymw]
@@ -254,43 +252,30 @@
(when-let [[_ token-type token] (some->> (yreq/get-header request "authorization")
(re-matches token-re))]
(if (= "token" (str/lower token-type))
{:type :token
:token token}
{:type :bearer
:token token})))
[:token token]
[:bearer token])))
get-token-from-cookie
(fn [request]
(let [cname (cf/get :auth-token-cookie-name)
token (some-> (yreq/get-cookie request cname) :value)]
(when-not (str/empty? token)
{:type :cookie
:token token})))
[:cookie token])))
get-token
(some-fn get-token-from-cookie get-token-from-authorization)
process-request
(fn [request]
(if-let [{:keys [type token] :as auth} (get-token request)]
(let [decode-fn (get decoders type)]
(if (or (= type :cookie) (= type :bearer))
(let [metadata (tokens/decode-header token)]
;; NOTE: we only proceed to decode claims on new
;; cookie tokens. The old cookies dont need to be
;; decoded because they use the token string as ID
(if (and (= (:kid metadata) 1)
(= (:ver metadata) 1)
(some? decode-fn))
(assoc request ::http/auth-data (assoc auth
:claims (decode-fn token)
:metadata metadata))
(assoc request ::http/auth-data (assoc auth :metadata {:ver 0}))))
(if decode-fn
(assoc request ::http/auth-data (assoc auth :claims (decode-fn token)))
(assoc request ::http/auth-data auth))))
(if-let [[token-type token] (get-token request)]
(let [request (-> request
(assoc ::auth/token token)
(assoc ::auth/token-type token-type))
decoder (get decoders token-type)]
(if (fn? decoder)
(assoc request ::auth/claims (delay (decoder token)))
request))
request))]
(fn [request]
@@ -303,14 +288,11 @@
(defn- wrap-shared-key-auth
[handler shared-key]
(if shared-key
(let [shared-key (if (string? shared-key)
shared-key
(bc/bytes->b64-str shared-key true))]
(fn [request]
(let [key (yreq/get-header request "x-shared-key")]
(if (= key shared-key)
(handler request)
{::yres/status 403}))))
(fn [request]
(let [key (yreq/get-header request "x-shared-key")]
(if (= key shared-key)
(handler request)
{::yres/status 403})))
(fn [_ _]
{::yres/status 403})))

View File

@@ -11,19 +11,17 @@
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.db.sql :as sql]
[app.http :as-alias http]
[app.http.auth :as-alias http.auth]
[app.http.session.tasks :as-alias tasks]
[app.main :as-alias main]
[app.setup :as-alias setup]
[app.tokens :as tokens]
[cuerdas.core :as str]
[integrant.core :as ig]
[yetti.request :as yreq]
[yetti.response :as yres]))
[yetti.request :as yreq]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; DEFAULTS
@@ -40,10 +38,10 @@
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defprotocol ISessionManager
(read-session [_ id])
(create-session [_ params])
(update-session [_ session])
(delete-session [_ id]))
(read [_ key])
(write! [_ key data])
(update! [_ data])
(delete! [_ key]))
(defn manager?
[o]
@@ -58,82 +56,71 @@
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def ^:private schema:params
[:map {:title "SessionParams" :closed true}
[:map {:title "session-params"}
[:user-agent ::sm/text]
[:profile-id ::sm/uuid]
[:user-agent {:optional true} ::sm/text]
[:sso-provider-id {:optional true} ::sm/uuid]
[:sso-session-id {:optional true} :string]])
[:created-at ::ct/inst]])
(def ^:private valid-params?
(sm/validator schema:params))
(defn- prepare-session-params
[params key]
(assert (string? key) "expected key to be a string")
(assert (not (str/blank? key)) "expected key to be not empty")
(assert (valid-params? params) "expected valid params")
{:user-agent (:user-agent params)
:profile-id (:profile-id params)
:created-at (:created-at params)
:updated-at (:created-at params)
:id key})
(defn- database-manager
[pool]
(reify ISessionManager
(read-session [_ id]
(if (string? id)
;; Backward compatibility
(let [session (db/exec-one! pool (sql/select :http-session {:id id}))]
(-> session
(assoc :modified-at (:updated-at session))
(dissoc :updated-at)))
(db/exec-one! pool (sql/select :http-session-v2 {:id id}))))
(read [_ token]
(db/exec-one! pool (sql/select :http-session {:id token})))
(create-session [_ params]
(assert (valid-params? params) "expect valid session params")
(write! [_ key params]
(let [params (-> params
(assoc :created-at (ct/now))
(prepare-session-params key))]
(db/insert! pool :http-session params)
params))
(let [now (ct/now)
params (-> params
(assoc :id (uuid/next))
(assoc :created-at now)
(assoc :modified-at now))]
(db/insert! pool :http-session-v2 params
{::db/return-keys true})))
(update! [_ params]
(let [updated-at (ct/now)]
(db/update! pool :http-session
{:updated-at updated-at}
{:id (:id params)})
(assoc params :updated-at updated-at)))
(update-session [_ session]
(let [modified-at (ct/now)]
(if (string? (:id session))
(db/insert! pool :http-session-v2
(-> session
(assoc :id (uuid/next))
(assoc :created-at modified-at)
(assoc :modified-at modified-at)))
(db/update! pool :http-session-v2
{:modified-at modified-at}
{:id (:id session)}
{::db/return-keys true}))))
(delete-session [_ id]
(if (string? id)
(db/delete! pool :http-session {:id id} {::db/return-keys false})
(db/delete! pool :http-session-v2 {:id id} {::db/return-keys false}))
(delete! [_ token]
(db/delete! pool :http-session {:id token})
nil)))
(defn inmemory-manager
[]
(let [cache (atom {})]
(reify ISessionManager
(read-session [_ id]
(get @cache id))
(read [_ token]
(get @cache token))
(create-session [_ params]
(assert (valid-params? params) "expect valid session params")
(write! [_ key params]
(let [params (-> params
(assoc :created-at (ct/now))
(prepare-session-params key))]
(swap! cache assoc key params)
params))
(let [now (ct/now)
session (-> params
(assoc :id (uuid/next))
(assoc :created-at now)
(assoc :modified-at now))]
(swap! cache assoc (:id session) session)
session))
(update! [_ params]
(let [updated-at (ct/now)]
(swap! cache update (:id params) assoc :updated-at updated-at)
(assoc params :updated-at updated-at)))
(update-session [_ session]
(let [modified-at (ct/now)]
(swap! cache update (:id session) assoc :modified-at modified-at)
(assoc session :modified-at modified-at)))
(delete-session [_ id]
(swap! cache dissoc id)
(delete! [_ token]
(swap! cache dissoc token)
nil))))
(defmethod ig/assert-key ::manager
@@ -153,49 +140,43 @@
;; MANAGER IMPL
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(declare ^:private assign-session-cookie)
(declare ^:private clear-session-cookie)
(defn- assign-token
[cfg session]
(let [claims {:iss "authentication"
:aud "penpot"
:sid (:id session)
:iat (:modified-at session)
:uid (:profile-id session)
:sso-provider-id (:sso-provider-id session)
:sso-session-id (:sso-session-id session)}
header {:kid 1 :ver 1}
token (tokens/generate cfg claims header)]
(assoc session :token token)))
(declare ^:private assign-auth-token-cookie)
(declare ^:private clear-auth-token-cookie)
(declare ^:private gen-token)
(defn create-fn
[{:keys [::manager] :as cfg} {profile-id :id :as profile}
& {:keys [sso-provider-id sso-session-id]}]
[{:keys [::manager] :as cfg} profile-id]
(assert (manager? manager) "expected valid session manager")
(assert (uuid? profile-id) "expected valid uuid for profile-id")
(fn [request response]
(let [uagent (yreq/get-header request "user-agent")
session (->> {:user-agent uagent
:profile-id profile-id
:sso-provider-id sso-provider-id
:sso-session-id sso-session-id}
(d/without-nils)
(create-session manager)
(assign-token cfg))]
(l/trc :hint "create" :id (str (:id session)) :profile-id (str profile-id))
(assign-session-cookie response session))))
params {:profile-id profile-id
:user-agent uagent}
token (gen-token cfg params)
session (write! manager token params)]
(l/trc :hint "create" :profile-id (str profile-id))
(-> response
(assign-auth-token-cookie session)))))
(defn delete-fn
[{:keys [::manager]}]
(assert (manager? manager) "expected valid session manager")
(fn [request response]
(some->> (get request ::id) (delete-session manager))
(clear-session-cookie response)))
(let [cname (cf/get :auth-token-cookie-name)
cookie (yreq/get-cookie request cname)]
(l/trc :hint "delete" :profile-id (:profile-id request))
(some->> (:value cookie) (delete! manager))
(-> response
(assoc :status 204)
(assoc :body nil)
(clear-auth-token-cookie)))))
(defn- gen-token
[cfg {:keys [profile-id created-at]}]
(tokens/generate cfg {:iss "authentication"
:iat created-at
:uid profile-id}))
(defn decode-token
[cfg token]
(try
@@ -205,64 +186,44 @@
:token token
:cause cause))))
(defn get-session
[request]
(get request ::session))
(defn invalidate-others
[cfg session]
(let [sql "delete from http_session_v2 where profile_id = ? and id != ?"]
(-> (db/exec-one! cfg [sql (:profile-id session) (:id session)])
(db/get-update-count))))
(defn- renew-session?
[{:keys [id modified-at] :as session}]
(or (string? id)
(and (ct/inst? modified-at)
(let [elapsed (ct/diff modified-at (ct/now))]
(neg? (compare default-renewal-max-age elapsed))))))
[{:keys [updated-at] :as session}]
(and (ct/inst? updated-at)
(let [elapsed (ct/diff updated-at (ct/now))]
(neg? (compare default-renewal-max-age elapsed)))))
(defn- wrap-authz
[handler {:keys [::manager] :as cfg}]
(assert (manager? manager) "expected valid session manager")
(fn [request]
(let [{:keys [type token claims metadata]} (get request ::http/auth-data)]
(cond
(= type :cookie)
(let [session (case (:ver metadata)
;; BACKWARD COMPATIBILITY WITH OLD TOKENS
0 (read-session manager token)
1 (some->> (:sid claims) (read-session manager))
nil)
(fn [{:keys [::http.auth/token-type] :as request}]
(cond
(= token-type :cookie)
(let [session (some->> (get request ::http.auth/token)
(read manager))
request (cond-> request
(some? session)
(-> (assoc ::profile-id (:profile-id session))
(assoc ::id (:id session))))
request (cond-> request
(some? session)
(-> (assoc ::profile-id (:profile-id session))
(assoc ::session session)))
response (handler request)]
response (handler request)]
(if (renew-session? session)
(let [session (update! manager session)]
(-> response
(assign-auth-token-cookie session)))
response))
(if (and session (renew-session? session))
(let [session (->> session
(update-session manager)
(assign-token cfg))]
(assign-session-cookie response session))
response))
(= token-type :bearer)
(let [session (some->> (get request ::http.auth/token)
(read manager))
request (cond-> request
(some? session)
(-> (assoc ::profile-id (:profile-id session))
(assoc ::id (:id session))))]
(handler request))
(= type :bearer)
(let [session (case (:ver metadata)
;; BACKWARD COMPATIBILITY WITH OLD TOKENS
0 (read-session manager token)
1 (some->> (:sid claims) (read-session manager))
nil)
request (cond-> request
(some? session)
(-> (assoc ::profile-id (:profile-id session))
(assoc ::session session)))]
(handler request))
:else
(handler request)))))
:else
(handler request))))
(def authz
{:name ::authz
@@ -270,10 +231,10 @@
;; --- IMPL
(defn- assign-session-cookie
[response {token :token modified-at :modified-at}]
(defn- assign-auth-token-cookie
[response {token :id updated-at :updated-at}]
(let [max-age (cf/get :auth-token-cookie-max-age default-cookie-max-age)
created-at modified-at
created-at updated-at
renewal (ct/plus created-at default-renewal-max-age)
expires (ct/plus created-at max-age)
secure? (contains? cf/flags :secure-session-cookies)
@@ -288,12 +249,12 @@
:comment comment
:same-site (if cors? :none (if strict? :strict :lax))
:secure secure?}]
(update response ::yres/cookies assoc name cookie)))
(update response :cookies assoc name cookie)))
(defn- clear-session-cookie
(defn- clear-auth-token-cookie
[response]
(let [cname (cf/get :auth-token-cookie-name)]
(update response ::yres/cookies assoc cname {:path "/" :value "" :max-age 0})))
(update response :cookies assoc cname {:path "/" :value "" :max-age 0})))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; TASK: SESSION GC

View File

@@ -25,8 +25,7 @@
[app.util.inet :as inet]
[app.util.services :as-alias sv]
[app.worker :as wrk]
[cuerdas.core :as str]
[yetti.request :as yreq]))
[cuerdas.core :as str]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; HELPERS
@@ -79,32 +78,17 @@
(remove #(contains? reserved-props (key %))))
props))
(defn get-external-session-id
[request]
(when-let [session-id (yreq/get-header request "x-external-session-id")]
(when-not (or (> (count session-id) 256)
(= session-id "null")
(str/blank? session-id))
session-id)))
(defn- get-client-event-origin
[request]
(when-let [origin (yreq/get-header request "x-event-origin")]
(when-not (or (= origin "null")
(str/blank? origin))
(str/prune origin 200))))
(defn get-client-user-agent
[request]
(when-let [user-agent (yreq/get-header request "user-agent")]
(str/prune user-agent 500)))
(defn- get-client-version
[request]
(when-let [origin (yreq/get-header request "x-frontend-version")]
(when-not (or (= origin "null")
(str/blank? origin))
(str/prune origin 100))))
(defn event-from-rpc-params
"Create a base event skeleton with pre-filled some important
data that can be extracted from RPC params object"
[params]
(let [context {:external-session-id (::rpc/external-session-id params)
:external-event-origin (::rpc/external-event-origin params)
:triggered-by (::rpc/handler-name params)}]
{::type "action"
::profile-id (::rpc/profile-id params)
::ip-addr (::rpc/ip-addr params)
::context (d/without-nils context)}))
;; --- SPECS
@@ -133,33 +117,6 @@
(def ^:private check-event
(sm/check-fn schema:event))
(defn- prepare-context-from-request
[request]
(let [client-event-origin (get-client-event-origin request)
client-version (get-client-version request)
client-user-agent (get-client-user-agent request)
session-id (get-external-session-id request)
token-id (::actoken/id request)]
(d/without-nils
{:external-session-id session-id
:access-token-id (some-> token-id str)
:client-event-origin client-event-origin
:client-user-agent client-user-agent
:client-version client-version
:version (:full cf/version)})))
(defn event-from-rpc-params
"Create a base event skeleton with pre-filled some important
data that can be extracted from RPC params object"
[params]
(let [context (some-> params meta ::http/request prepare-context-from-request)
event {::type "action"
::profile-id (or (::rpc/profile-id params) uuid/zero)
::ip-addr (::rpc/ip-addr params)}]
(cond-> event
(some? context)
(assoc ::context context))))
(defn prepare-event
[cfg mdata params result]
(let [resultm (meta result)
@@ -169,15 +126,23 @@
(::rpc/profile-id params)
uuid/zero)
session-id (get params ::rpc/external-session-id)
event-origin (get params ::rpc/external-event-origin)
props (-> (or (::replace-props resultm)
(-> params
(merge (::props resultm))
(dissoc :profile-id)
(dissoc :type)))
(clean-props))
context (merge (::context resultm)
(prepare-context-from-request request))
token-id (::actoken/id request)
context (-> (::context resultm)
(assoc :external-session-id session-id)
(assoc :external-event-origin event-origin)
(assoc :access-token-id (some-> token-id str))
(d/without-nils))
ip-addr (inet/parse-request request)]
{::type (or (::type resultm)

View File

@@ -57,7 +57,7 @@
:uid uuid/zero})
body (t/encode {:events events})
headers {"content-type" "application/transit+json"
"origin" (str (cf/get :public-uri))
"origin" (cf/get :public-uri)
"cookie" (u/map->query-string {:auth-token token})}
params {:uri uri
:timeout 12000

View File

@@ -49,7 +49,7 @@
ctx (-> context
(assoc :tenant (cf/get :tenant))
(assoc :host (cf/get :host))
(assoc :public-uri (str (cf/get :public-uri)))
(assoc :public-uri (cf/get :public-uri))
(assoc :logger/name logger)
(assoc :logger/level level)
(dissoc :request/params :value :params :data))]

View File

@@ -259,17 +259,14 @@
::oidc.providers/generic
{::http.client/client (ig/ref ::http.client/client)}
::oidc/providers
[(ig/ref ::oidc.providers/google)
(ig/ref ::oidc.providers/github)
(ig/ref ::oidc.providers/gitlab)
(ig/ref ::oidc.providers/generic)]
::oidc/routes
{::http.client/client (ig/ref ::http.client/client)
::db/pool (ig/ref ::db/pool)
::setup/props (ig/ref ::setup/props)
::oidc/providers (ig/ref ::oidc/providers)
::oidc/providers {:google (ig/ref ::oidc.providers/google)
:github (ig/ref ::oidc.providers/github)
:gitlab (ig/ref ::oidc.providers/gitlab)
:oidc (ig/ref ::oidc.providers/generic)}
::session/manager (ig/ref ::session/manager)
::email/blacklist (ig/ref ::email/blacklist)
::email/whitelist (ig/ref ::email/whitelist)}
@@ -301,7 +298,6 @@
{::db/pool (ig/ref ::db/pool)
::mtx/metrics (ig/ref ::mtx/metrics)
::mbus/msgbus (ig/ref ::mbus/msgbus)
::setup/props (ig/ref ::setup/props)
::session/manager (ig/ref ::session/manager)}
:app.http.assets/routes
@@ -323,6 +319,7 @@
{::http.client/client (ig/ref ::http.client/client)
::db/pool (ig/ref ::db/pool)
::rds/pool (ig/ref ::rds/pool)
:app.nitrate/instance (ig/ref :app.nitrate/instance)
::wrk/executor (ig/ref ::wrk/netty-executor)
::session/manager (ig/ref ::session/manager)
::ldap/provider (ig/ref ::ldap/provider)
@@ -339,6 +336,9 @@
::email/blacklist (ig/ref ::email/blacklist)
::email/whitelist (ig/ref ::email/whitelist)}
:app.nitrate/instance
{::http.client/client (ig/ref ::http.client/client)}
:app.rpc/management-methods
{::http.client/client (ig/ref ::http.client/client)
::db/pool (ig/ref ::db/pool)

View File

@@ -17,7 +17,6 @@
[app.common.time :as ct]
[app.config :as cf]
[app.db :as-alias db]
[app.http.client :as http]
[app.storage :as-alias sto]
[app.storage.tmp :as tmp]
[buddy.core.bytes :as bb]
@@ -38,9 +37,6 @@
org.im4java.core.IMOperation
org.im4java.core.Info))
(def default-max-file-size
(* 1024 1024 10)) ; 10 MiB
(def schema:upload
[:map {:title "Upload"}
[:filename :string]
@@ -245,7 +241,7 @@
(ex/raise :type :validation
:code :invalid-svg-file
:hint "uploaded svg does not provides dimensions"))
(merge input info {:ts (ct/now) :size (fs/size path)}))
(merge input info {:ts (ct/now)}))
(let [instance (Info. (str path))
mtype' (.getProperty instance "Mime type")]
@@ -265,7 +261,6 @@
(assoc input
:width width
:height height
:size (fs/size path)
:ts (ct/now)))))))
(defmethod process-error org.im4java.core.InfoException
@@ -275,54 +270,6 @@
:hint "invalid image"
:cause error))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; IMAGE HELPERS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn download-image
"Download an image from the provided URI and return the media input object"
[{:keys [::http/client]} uri]
(letfn [(parse-and-validate [{:keys [headers] :as response}]
(let [size (some-> (get headers "content-length") d/parse-integer)
mtype (get headers "content-type")
format (cm/mtype->format mtype)
max-size (cf/get :media-max-file-size default-max-file-size)]
(when-not size
(ex/raise :type :validation
:code :unknown-size
:hint "seems like the url points to resource with unknown size"))
(when (> size max-size)
(ex/raise :type :validation
:code :file-too-large
:hint (str/ffmt "the file size % is greater than the maximum %"
size
default-max-file-size)))
(when (nil? format)
(ex/raise :type :validation
:code :media-type-not-allowed
:hint "seems like the url points to an invalid media object"))
{:size size :mtype mtype :format format}))]
(let [{:keys [body] :as response} (http/req! client
{:method :get :uri uri}
{:response-type :input-stream})
{:keys [size mtype]} (parse-and-validate response)
path (tmp/tempfile :prefix "penpot.media.download.")
written (io/write* path body :size size)]
(when (not= written size)
(ex/raise :type :internal
:code :mismatch-write-size
:hint "unexpected state: unable to write to file"))
{;; :size size
:path path
:mtype mtype})))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; FONTS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

View File

@@ -450,13 +450,7 @@
:fn (mg/resource "app/migrations/sql/0141-add-idx-to-file-library-rel.sql")}
{:name "0141-add-file-data-table.sql"
:fn (mg/resource "app/migrations/sql/0141-add-file-data-table.sql")}
{:name "0142-add-sso-provider-table"
:fn (mg/resource "app/migrations/sql/0142-add-sso-provider-table.sql")}
{:name "0143-http-session-v2-table"
:fn (mg/resource "app/migrations/sql/0143-add-http-session-v2-table.sql")}])
:fn (mg/resource "app/migrations/sql/0141-add-file-data-table.sql")}])
(defn apply-migrations!
[pool name migrations]

View File

@@ -1,33 +0,0 @@
CREATE TABLE sso_provider (
id uuid PRIMARY KEY,
created_at timestamptz NOT NULL DEFAULT now(),
modified_at timestamptz NOT NULL DEFAULT now(),
is_enabled boolean NOT NULL DEFAULT true,
type text NOT NULL CHECK (type IN ('oidc')),
domain text NOT NULL,
client_id text NOT NULL,
client_secret text NOT NULL,
base_uri text NOT NULL,
token_uri text NULL,
auth_uri text NULL,
user_uri text NULL,
jwks_uri text NULL,
logout_uri text NULL,
roles_attr text NULL,
email_attr text NULL,
name_attr text NULL,
user_info_source text NOT NULL DEFAULT 'token'
CHECK (user_info_source IN ('token', 'userinfo', 'auto')),
scopes text[] NULL,
roles text[] NULL
);
CREATE UNIQUE INDEX sso_provider__domain__idx
ON sso_provider(domain);

View File

@@ -1,23 +0,0 @@
CREATE TABLE http_session_v2 (
id uuid PRIMARY KEY,
created_at timestamptz NOT NULL DEFAULT now(),
modified_at timestamptz NOT NULL DEFAULT now(),
profile_id uuid REFERENCES profile(id) ON DELETE CASCADE,
user_agent text NULL,
sso_provider_id uuid NULL REFERENCES sso_provider(id) ON DELETE CASCADE,
sso_session_id text NULL
);
CREATE INDEX http_session_v2__profile_id__idx
ON http_session_v2(profile_id);
CREATE INDEX http_session_v2__sso_provider_id__idx
ON http_session_v2(sso_provider_id)
WHERE sso_provider_id IS NOT NULL;
CREATE INDEX http_session_v2__sso_session_id__idx
ON http_session_v2(sso_session_id)
WHERE sso_session_id IS NOT NULL;

View File

@@ -68,21 +68,33 @@
response (if (fn? result)
(result request)
(let [result (rph/unwrap result)
status (or (::http/status mdata)
(if (nil? result)
204
200))
status (::http/status mdata 200)
headers (cond-> (::http/headers mdata {})
(yres/stream-body? result)
(assoc "content-type" "application/octet-stream"))]
{::yres/status status
::yres/headers headers
::yres/body result}))]
(-> response
(handle-response-transformation request mdata)
(handle-before-comple-hook mdata))))
(defn get-external-session-id
[request]
(when-let [session-id (yreq/get-header request "x-external-session-id")]
(when-not (or (> (count session-id) 256)
(= session-id "null")
(str/blank? session-id))
session-id)))
(defn- get-external-event-origin
[request]
(when-let [origin (yreq/get-header request "x-event-origin")]
(when-not (or (> (count origin) 256)
(= origin "null")
(str/blank? origin))
origin)))
(defn- make-rpc-handler
"Ring handler that dispatches cmd requests and convert between
internal async flow into ring async flow."
@@ -93,19 +105,23 @@
etag (yreq/get-header request "if-none-match")
profile-id (or (::session/profile-id request)
(::actoken/profile-id request))
ip-addr (inet/parse-request request)
session-id (get-external-session-id request)
event-origin (get-external-event-origin request)
data (-> params
(assoc ::handler-name handler-name)
(assoc ::ip-addr ip-addr)
(assoc ::request-at (ct/now))
(assoc ::external-session-id session-id)
(assoc ::external-event-origin event-origin)
(assoc ::session/id (::session/id request))
(assoc ::cond/key etag)
(cond-> (uuid? profile-id)
(assoc ::profile-id profile-id)))
data (with-meta data
{::http/request request})
data (vary-meta data assoc ::http/request request)
handler-fn (get methods (keyword handler-name) default-handler)]
(when (and (or (= method :get)
@@ -295,8 +311,7 @@
[cfg]
(let [cfg (assoc cfg ::type "management" ::metrics-id :rpc-management-timing)]
(->> (sv/scan-ns
'app.rpc.management.subscription
'app.rpc.management.exporter)
'app.rpc.management.subscription)
(map (partial process-method cfg "management" wrap-management))
(into {}))))
@@ -347,16 +362,15 @@
(assert (valid-methods? (::management-methods params)) "expect valid methods map"))
(defmethod ig/init-key ::routes
[_ {:keys [::methods ::management-methods ::setup/props] :as cfg}]
(let [public-uri (cf/get :public-uri)
management-key (or (cf/get :management-api-key)
(get props :management-key))]
[_ {:keys [::methods ::management-methods] :as cfg}]
(let [public-uri (cf/get :public-uri)]
["/api"
["/management"
["/methods/:type"
{:middleware [[mw/shared-key-auth management-key]
{:middleware [[mw/shared-key-auth (cf/get :management-api-shared-key)]
[session/authz cfg]]
:handler (make-rpc-handler management-methods)}]

View File

@@ -7,24 +7,21 @@
(ns app.rpc.commands.auth
(:require
[app.auth :as auth]
[app.auth.oidc :as oidc]
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.features :as cfeat]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uri :as u]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.email :as eml]
[app.email.blacklist :as email.blacklist]
[app.email.whitelist :as email.whitelist]
[app.http :as-alias http]
[app.http.session :as session]
[app.loggers.audit :as audit]
[app.media :as media]
[app.rpc :as-alias rpc]
[app.rpc.climit :as-alias climit]
[app.rpc.commands.profile :as profile]
@@ -33,7 +30,6 @@
[app.rpc.helpers :as rph]
[app.setup :as-alias setup]
[app.setup.welcome-file :refer [create-welcome-file]]
[app.storage :as sto]
[app.tokens :as tokens]
[app.util.services :as sv]
[app.worker :as wrk]
@@ -113,7 +109,7 @@
(assoc profile :is-admin (let [admins (cf/get :admins)]
(contains? admins (:email profile)))))]
(-> response
(rph/with-transform (session/create-fn cfg profile))
(rph/with-transform (session/create-fn cfg (:id profile)))
(rph/with-meta {::audit/props (audit/profile->props profile)
::audit/profile-id (:id profile)}))))]
@@ -149,24 +145,7 @@
[cfg params]
(if (= (:profile-id params)
(::rpc/profile-id params))
(let [{:keys [claims]}
(rph/get-auth-data params)
provider
(some->> (get claims :sso-provider-id)
(oidc/get-provider cfg))
response
(if (and provider (:logout-uri provider))
(let [params {"logout_hint" (get claims :sso-session-id)
"client_id" (get provider :client-id)
"post_logout_redirect_uri" (str (cf/get :public-uri))}
uri (-> (u/uri (:logout-uri provider))
(assoc :query (u/map->query-string params)))]
{:redirect-uri uri})
{})]
(rph/with-transform response (session/delete-fn cfg)))
(rph/with-transform {} (session/delete-fn cfg))
{}))
;; ---- COMMAND: Recover Profile
@@ -292,30 +271,11 @@
;; ---- COMMAND: Register Profile
(defn import-profile-picture
[cfg uri]
(try
(let [storage (sto/resolve cfg)
input (media/download-image cfg uri)
input (media/run {:cmd :info :input input})
hash (sto/calculate-hash (:path input))
content (-> (sto/content (:path input) (:size input))
(sto/wrap-with-hash hash))
sobject (sto/put-object! storage {::sto/content content
::sto/deduplicate? true
:bucket "profile"
:content-type (:mtype input)})]
(:id sobject))
(catch Throwable cause
(l/wrn :hint "unable to import profile picture"
:uri uri
:cause cause)
nil)))
(defn create-profile
(defn create-profile!
"Create the profile entry on the database with limited set of input
attrs (all the other attrs are filled with default values)."
[{:keys [::db/conn] :as cfg} {:keys [email] :as params}]
[conn {:keys [email] :as params}]
(dm/assert! ::sm/email email)
(let [id (or (:id params) (uuid/next))
props (-> (audit/extract-utm-params params)
(merge (:props params))
@@ -323,7 +283,8 @@
:viewed-walkthrough? false
:nudge {:big 10 :small 1}
:v2-info-shown true
:release-notes-viewed (:main cf/version)}))
:release-notes-viewed (:main cf/version)})
(db/tjson))
password (or (:password params) "!")
@@ -338,12 +299,6 @@
theme (:theme params nil)
email (str/lower email)
photo-id (some->> (or (:oidc/picture props)
(:google/picture props)
(:github/picture props)
(:gitlab/picture props))
(import-profile-picture cfg))
params {:id id
:fullname (:fullname params)
:email email
@@ -351,13 +306,11 @@
:lang locale
:password password
:deleted-at (:deleted-at params)
:props (db/tjson props)
:props props
:theme theme
:photo-id photo-id
:is-active is-active
:is-muted is-muted
:is-demo is-demo}]
(try
(-> (db/insert! conn :profile params)
(profile/decode-row))
@@ -370,7 +323,7 @@
(throw cause))))))
(defn create-profile-rels
(defn create-profile-rels!
[conn {:keys [id] :as profile}]
(let [features (cfeat/get-enabled-features cf/flags)
team (teams/create-team conn
@@ -420,13 +373,12 @@
;; to detect if the profile is already registered
(or (profile/get-profile-by-email conn (:email claims))
(let [is-active (or (boolean (:is-active claims))
(boolean (:email-verified claims))
(not (contains? cf/flags :email-verification)))
params (-> params
(assoc :is-active is-active)
(update :password auth/derive-password))
profile (->> (create-profile cfg params)
(create-profile-rels conn))]
profile (->> (create-profile! conn params)
(create-profile-rels! conn))]
(vary-meta profile assoc :created true))))
created? (-> profile meta :created true?)
@@ -464,10 +416,10 @@
(and (some? invitation)
(= (:email profile)
(:member-email invitation)))
(let [invitation (assoc invitation :member-id (:id profile))
token (tokens/generate cfg invitation)]
(let [claims (assoc invitation :member-id (:id profile))
token (tokens/generate cfg claims)]
(-> {:invitation-token token}
(rph/with-transform (session/create-fn cfg profile claims))
(rph/with-transform (session/create-fn cfg (:id profile)))
(rph/with-meta {::audit/replace-props props
::audit/context {:action "accept-invitation"}
::audit/profile-id (:id profile)})))
@@ -478,7 +430,7 @@
created?
(if (:is-active profile)
(-> (profile/strip-private-attrs profile)
(rph/with-transform (session/create-fn cfg profile claims))
(rph/with-transform (session/create-fn cfg (:id profile)))
(rph/with-defer create-welcome-file-when-needed)
(rph/with-meta
{::audit/replace-props props
@@ -607,32 +559,4 @@
[cfg params]
(db/tx-run! cfg request-profile-recovery params))
;; --- COMMAND: get-sso-config
(defn- extract-domain
"Extract the domain part from email"
[email]
(let [at (str/last-index-of email "@")]
(when (and (>= at 0)
(< at (dec (count email))))
(-> (subs email (inc at))
(str/trim)
(str/lower)))))
(def ^:private schema:get-sso-provider
[:map {:title "get-sso-config"}
[:email ::sm/email]])
(def ^:private schema:get-sso-provider-result
[:map {:title "SSOProvider"}
[:id ::sm/uuid]])
(sv/defmethod ::get-sso-provider
{::rpc/auth false
::doc/added "2.12"
::sm/params schema:get-sso-provider
::sm/result schema:get-sso-provider-result}
[cfg {:keys [email]}]
(when-let [domain (extract-domain email)]
(when-let [config (db/get* cfg :sso-provider {:domain domain})]
(select-keys config [:id]))))

View File

@@ -11,9 +11,9 @@
[app.binfile.v1 :as bf.v1]
[app.binfile.v3 :as bf.v3]
[app.common.features :as cfeat]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uri :as u]
[app.config :as cf]
[app.db :as db]
[app.http.sse :as sse]
@@ -25,12 +25,10 @@
[app.rpc.commands.projects :as projects]
[app.rpc.commands.teams :as teams]
[app.rpc.doc :as-alias doc]
[app.storage :as sto]
[app.storage.tmp :as tmp]
[app.rpc.helpers :as rph]
[app.tasks.file-gc]
[app.util.services :as sv]
[app.worker :as-alias wrk]
[datoteka.fs :as fs]))
[app.worker :as-alias wrk]))
(set! *warn-on-reflection* true)
@@ -40,42 +38,52 @@
schema:export-binfile
[:map {:title "export-binfile"}
[:file-id ::sm/uuid]
[:version {:optional true} ::sm/int]
[:include-libraries ::sm/boolean]
[:embed-assets ::sm/boolean]])
(defn- export-binfile
[{:keys [::sto/storage] :as cfg} {:keys [file-id include-libraries embed-assets]}]
(let [output (tmp/tempfile*)]
(try
(-> cfg
(assoc ::bfc/ids #{file-id})
(assoc ::bfc/embed-assets embed-assets)
(assoc ::bfc/include-libraries include-libraries)
(bf.v3/export-files! output))
(defn stream-export-v1
[cfg {:keys [file-id include-libraries embed-assets] :as params}]
(rph/stream
(fn [_ output-stream]
(try
(-> cfg
(assoc ::bfc/ids #{file-id})
(assoc ::bfc/embed-assets embed-assets)
(assoc ::bfc/include-libraries include-libraries)
(bf.v1/export-files! output-stream))
(catch Throwable cause
(l/err :hint "exception on exporting file"
:file-id (str file-id)
:cause cause))))))
(let [data (sto/content output)
object (sto/put-object! storage
{::sto/content data
::sto/touched-at (ct/in-future {:minutes 60})
:content-type "application/zip"
:bucket "tempfile"})]
(-> (cf/get :public-uri)
(u/join "/assets/by-id/")
(u/join (str (:id object)))))
(finally
(fs/delete output)))))
(defn stream-export-v3
[cfg {:keys [file-id include-libraries embed-assets] :as params}]
(rph/stream
(fn [_ output-stream]
(try
(-> cfg
(assoc ::bfc/ids #{file-id})
(assoc ::bfc/embed-assets embed-assets)
(assoc ::bfc/include-libraries include-libraries)
(bf.v3/export-files! output-stream))
(catch Throwable cause
(l/err :hint "exception on exporting file"
:file-id (str file-id)
:cause cause))))))
(sv/defmethod ::export-binfile
"Export a penpot file in a binary format."
{::doc/added "1.15"
::doc/changes [["2.12" "Remove version parameter, only one version is supported"]]
::webhooks/event? true
::sm/params schema:export-binfile}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id] :as params}]
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id version file-id] :as params}]
(files/check-read-permissions! pool profile-id file-id)
(sse/response (partial export-binfile cfg params)))
(let [version (or version 1)]
(case (int version)
1 (stream-export-v1 cfg params)
2 (throw (ex-info "not-implemented" {}))
3 (stream-export-v3 cfg params))))
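A minimal sketch of the version dispatch shown in one variant of ::export-binfile above: a missing :version falls back to 1, version 2 is rejected, and only 1 and 3 resolve to a streaming exporter. The keywords stand in for the real stream-export-* functions.

(defn- pick-exporter
  [version]
  (case (int (or version 1))
    1 :stream-export-v1
    2 (throw (ex-info "not-implemented" {}))
    3 :stream-export-v3))

(pick-exporter nil) ;; => :stream-export-v1
(pick-exporter 3)   ;; => :stream-export-v3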
;; --- Command: import-binfile

View File

@@ -39,7 +39,7 @@
fullname (str "Demo User " sem)
password (-> (bn/random-bytes 16)
(bc/bytes->b64 true)
(bc/bytes->b64u)
(bc/bytes->str))
params {:email email
@@ -49,9 +49,9 @@
:deleted-at (ct/in-future (cf/get-deletion-delay))
:password (derive-password password)
:props {}}
profile (db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
(->> (auth/create-profile cfg params)
(auth/create-profile-rels conn))))]
profile (db/tx-run! cfg (fn [{:keys [::db/conn]}]
(->> (auth/create-profile! conn params)
(auth/create-profile-rels! conn))))]
(with-meta {:email email
:password password}
{::audit/profile-id (:id profile)})))

View File

@@ -1209,7 +1209,7 @@
;; --- MUTATION COMMAND: restore-files-immediatelly
(def ^:private sql:resolve-editable-files
"SELECT f.id, f.project_id
"SELECT f.id
FROM file AS f
JOIN project AS p ON (p.id = f.project_id)
JOIN team AS t ON (t.id = p.team_id)
@@ -1250,38 +1250,18 @@
{:file-id file-id}
{::db/return-keys false}))
(def ^:private sql:restore-projects
"UPDATE project SET deleted_at = null WHERE id = ANY(?::uuid[])")
(defn- restore-projects
[conn project-ids]
(let [project-ids (db/create-array conn "uuid" project-ids)]
(->> (db/exec-one! conn [sql:restore-projects project-ids])
(db/get-update-count))))
(defn- restore-deleted-team-files
[{:keys [::db/conn]} {:keys [::rpc/profile-id team-id ids]}]
(teams/check-edition-permissions! conn profile-id team-id)
(let [total-files
(count ids)
{:keys [files projects]}
(reduce (fn [result {:keys [id project-id]}]
(let [index (-> result :files count)]
(events/tap :progress {:file-id id :index index :total total-files})
(restore-file conn id)
(-> result
(update :files conj id)
(update :projects conj project-id))))
{:files #{} :projects #{}}
(db/plan conn [sql:resolve-editable-files team-id
(db/create-array conn "uuid" ids)]))]
(restore-projects conn projects)
files))
(reduce (fn [affected {:keys [id]}]
(let [index (inc (count affected))]
(events/tap :progress {:file-id id :index index :total (count ids)})
(restore-file conn id)
(conj affected id)))
#{}
(db/plan conn [sql:resolve-editable-files team-id
(db/create-array conn "uuid" ids)])))
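A reduced sketch of the accumulation performed by the variant above that also collects project ids; the rows are hypothetical stand-ins for what db/plan yields, and the restore-file and events/tap side effects are omitted.

(def rows
  [{:id "f1" :project-id "p1"}
   {:id "f2" :project-id "p1"}])

(reduce (fn [result {:keys [id project-id]}]
          (-> result
              (update :files conj id)
              (update :projects conj project-id)))
        {:files #{} :projects #{}}
        rows)
;; => {:files #{"f1" "f2"}, :projects #{"p1"}}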
(def ^:private schema:restore-deleted-team-files
[:map {:title "restore-deleted-team-files"}
@@ -1289,8 +1269,8 @@
[:ids [::sm/set ::sm/uuid]]])
(sv/defmethod ::restore-deleted-team-files
"Removes the deletion mark from the specified files (and respective
projects) on the specified team."
"Removes the deletion mark from the specified files (and respective projects)."
{::doc/added "2.12"
::sse/stream? true
::sm/params schema:restore-deleted-team-files}

View File

@@ -96,7 +96,7 @@
;; loading all pages into memory to find the frame set for the thumbnail.
(defn get-file-data-for-thumbnail
[{:keys [::db/conn] :as cfg} {:keys [data id] :as file} strip-frames-with-thumbnails]
[{:keys [::db/conn] :as cfg} {:keys [data id] :as file}]
(letfn [;; function responsible for finding the frame marked to be
;; used as the thumbnail; the returned frame always has
;; the :page-id set to the page it belongs to.
@@ -173,7 +173,7 @@
;; Assoc the available thumbnails and prune non-visible shapes
;; to avoid transferring unnecessary data.
strip-frames-with-thumbnails
:always
(update :objects assoc-thumbnails page-id thumbs)))))
(def ^:private
@@ -186,8 +186,7 @@
[:map {:title "PartialFile"}
[:id ::sm/uuid]
[:revn {:min 0} ::sm/int]
[:page [:map-of :keyword ::sm/any]]
[:strip-frames-with-thumbnails {:optional true} ::sm/boolean]])
[:page [:map-of :keyword ::sm/any]]])
(sv/defmethod ::get-file-data-for-thumbnail
"Retrieves the data for generate the thumbnail of the file. Used
@@ -196,7 +195,7 @@
::doc/module :files
::sm/params schema:get-file-data-for-thumbnail
::sm/result schema:partial-file}
[cfg {:keys [::rpc/profile-id file-id strip-frames-with-thumbnails] :as params}]
[cfg {:keys [::rpc/profile-id file-id] :as params}]
(db/run! cfg (fn [{:keys [::db/conn] :as cfg}]
(files/check-read-permissions! conn profile-id file-id)
@@ -206,18 +205,14 @@
file (bfc/get-file cfg file-id
:realize? true
:read-only? true)
strip-frames-with-thumbnails
(or (nil? strip-frames-with-thumbnails) ;; if not present, default to true
(true? strip-frames-with-thumbnails))]
:read-only? true)]
(-> (cfeat/get-team-enabled-features cf/flags team)
(cfeat/check-file-features! (:features file)))
{:file-id file-id
:revn (:revn file)
:page (get-file-data-for-thumbnail cfg file strip-frames-with-thumbnails)}))))
:page (get-file-data-for-thumbnail cfg file)}))))
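The defaulting rule from the variant above that still accepts strip-frames-with-thumbnails, extracted as a plain predicate: an absent flag behaves as true, and only an explicit false disables the stripping.

(defn- strip-frames?
  [flag]
  (or (nil? flag) (true? flag)))

(strip-frames? nil)   ;; => true
(strip-frames? true)  ;; => true
(strip-frames? false) ;; => false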
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; MUTATION COMMANDS

View File

@@ -66,12 +66,12 @@
:member-email (:email profile))
token (tokens/generate cfg claims)]
(-> {:invitation-token token}
(rph/with-transform (session/create-fn cfg profile))
(rph/with-transform (session/create-fn cfg (:id profile)))
(rph/with-meta {::audit/props (:props profile)
::audit/profile-id (:id profile)})))
(-> (profile/strip-private-attrs profile)
(rph/with-transform (session/create-fn cfg profile))
(rph/with-transform (session/create-fn cfg (:id profile)))
(rph/with-meta {::audit/props (:props profile)
::audit/profile-id (:id profile)}))))))
@@ -83,6 +83,6 @@
(profile/clean-email)
(profile/get-profile-by-email conn))
(->> (assoc info :is-active true :is-demo false)
(auth/create-profile cfg)
(auth/create-profile-rels conn)
(auth/create-profile! conn)
(auth/create-profile-rels! conn)
(profile/strip-private-attrs))))))

View File

@@ -7,10 +7,14 @@
(ns app.rpc.commands.media
(:require
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.media :as cm]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.http.client :as http]
[app.loggers.audit :as-alias audit]
[app.media :as media]
[app.rpc :as-alias rpc]
@@ -18,7 +22,13 @@
[app.rpc.commands.files :as files]
[app.rpc.doc :as-alias doc]
[app.storage :as sto]
[app.util.services :as sv]))
[app.storage.tmp :as tmp]
[app.util.services :as sv]
[cuerdas.core :as str]
[datoteka.io :as io]))
(def default-max-file-size
(* 1024 1024 10)) ; 10 MiB
(def thumbnail-options
{:width 100
@@ -187,12 +197,56 @@
mobj))
(defn download-image
[{:keys [::http/client]} uri]
(letfn [(parse-and-validate [{:keys [headers] :as response}]
(let [size (some-> (get headers "content-length") d/parse-integer)
mtype (get headers "content-type")
format (cm/mtype->format mtype)
max-size (cf/get :media-max-file-size default-max-file-size)]
(when-not size
(ex/raise :type :validation
:code :unknown-size
:hint "seems like the url points to resource with unknown size"))
(when (> size max-size)
(ex/raise :type :validation
:code :file-too-large
:hint (str/ffmt "the file size % is greater than the maximum %"
size
default-max-file-size)))
(when (nil? format)
(ex/raise :type :validation
:code :media-type-not-allowed
:hint "seems like the url points to an invalid media object"))
{:size size :mtype mtype :format format}))]
(let [{:keys [body] :as response} (http/req! client
{:method :get :uri uri}
{:response-type :input-stream :sync? true})
{:keys [size mtype]} (parse-and-validate response)
path (tmp/tempfile :prefix "penpot.media.download.")
written (io/write* path body :size size)]
(when (not= written size)
(ex/raise :type :internal
:code :mismatch-write-size
:hint "unexpected state: unable to write to file"))
{:filename "tempfile"
:size size
:path path
:mtype mtype})))
(defn- create-file-media-object-from-url
[cfg {:keys [url name] :as params}]
(let [content (media/download-image cfg url)
(let [content (download-image cfg url)
params (-> params
(assoc :content content)
(assoc :name (d/nilv name "unknown")))]
(assoc :name (or name (:filename content))))]
;; NOTE: we use the climit here in a dynamic invocation because we
;; don't want saturate the process-image limit with IO (download

View File

@@ -18,6 +18,7 @@
[app.db.sql :as-alias sql]
[app.email :as eml]
[app.http.session :as session]
[app.nitrate :as nitrate]
[app.loggers.audit :as audit]
[app.main :as-alias main]
[app.media :as media]
@@ -98,9 +99,14 @@
;; no profile-id is in session, and when db call raises not found. In all other
;; cases we need to reraise the exception.
(try
(-> (get-profile pool profile-id)
(strip-private-attrs)
(update :props filter-props))
(let [nitrate (get cfg ::nitrate/instance)
;; org ((get nitrate :get-organization) profile-id)
;; org (nitrate/call cfg :get-organization {:profile-id profile-id})
(-> (get-profile pool profile-id)
(strip-private-attrs)
(update :props filter-props)))
(catch Throwable _
{:id uuid/zero :fullname "Anonymous User"})))
@@ -154,6 +160,7 @@
(declare validate-password!)
(declare update-profile-password!)
(declare invalidate-profile-session!)
(def ^:private
schema:update-profile-password
@@ -168,7 +175,8 @@
::climit/id :auth/global
::db/transaction true}
[cfg {:keys [::rpc/profile-id password] :as params}]
(let [profile (validate-password! cfg (assoc params :profile-id profile-id))]
(let [profile (validate-password! cfg (assoc params :profile-id profile-id))
session-id (::session/id params)]
(when (= (:email profile) (str/lower (:password params)))
(ex/raise :type :validation
@@ -176,13 +184,15 @@
:hint "you can't use your email as password"))
(update-profile-password! cfg (assoc profile :password password))
(->> (rph/get-request params)
(session/get-session)
(session/invalidate-others cfg))
(invalidate-profile-session! cfg profile-id session-id)
nil))
(defn- invalidate-profile-session!
"Removes all sessions except the current one."
[{:keys [::db/conn]} profile-id session-id]
(let [sql "delete from http_session where profile_id = ? and id != ?"]
(:next.jdbc/update-count (db/exec-one! conn [sql profile-id session-id]))))
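An in-memory illustration of the "all sessions except the current one" rule enforced by the SQL above; the session rows and ids are made up.

(def sessions
  [{:id "s1" :profile-id "p1"}
   {:id "s2" :profile-id "p1"}
   {:id "s3" :profile-id "p2"}])

;; delete from http_session where profile_id = 'p1' and id != 's1'
(remove (fn [{:keys [id profile-id]}]
          (and (= profile-id "p1") (not= id "s1")))
        sessions)
;; => ({:id "s1", :profile-id "p1"} {:id "s3", :profile-id "p2"})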
(defn- validate-password!
[{:keys [::db/conn] :as cfg} {:keys [profile-id old-password] :as params}]
(let [profile (db/get-by-id conn :profile profile-id ::sql/for-update true)]
@@ -280,9 +290,9 @@
:file-path (str (:path file))
:file-mtype (:mtype file)}}))))
(defn- generate-thumbnail
[_ input]
(let [input (media/run {:cmd :info :input input})
(defn- generate-thumbnail!
[_ file]
(let [input (media/run {:cmd :info :input file})
thumb (media/run {:cmd :profile-thumbnail
:format :jpeg
:quality 85
@@ -303,7 +313,7 @@
(assoc ::climit/id [[:process-image/by-profile (:profile-id params)]
[:process-image/global]])
(assoc ::climit/label "upload-photo")
(climit/invoke! generate-thumbnail file))]
(climit/invoke! generate-thumbnail! file))]
(sto/put-object! storage params)))
;; --- MUTATION: Request Email Change

View File

@@ -169,19 +169,12 @@
;; --- MUTATION: Create Project
(defn- create-project
[{:keys [::db/conn] :as cfg} {:keys [::rpc/request-at profile-id team-id] :as params}]
(assert (ct/inst? request-at) "expect request-at assigned")
(let [params (-> params
(assoc :created-at request-at)
(assoc :modified-at request-at))
project (teams/create-project conn params)
timestamp (::rpc/request-at params)]
[{:keys [::db/conn] :as cfg} {:keys [profile-id team-id] :as params}]
(let [project (teams/create-project conn params)]
(teams/create-project-role conn profile-id (:id project) :owner)
(db/insert! conn :team-project-profile-rel
{:project-id (:id project)
:profile-id profile-id
:created-at timestamp
:modified-at timestamp
:team-id team-id
:is-pinned false})
(assoc project :is-pinned false)))

View File

@@ -73,7 +73,7 @@
{:id (:id profile)}))
(-> claims
(rph/with-transform (session/create-fn cfg profile))
(rph/with-transform (session/create-fn cfg profile-id))
(rph/with-meta {::audit/name "verify-profile-email"
::audit/props (audit/profile->props profile)
::audit/profile-id (:id profile)}))))

View File

@@ -39,8 +39,9 @@
(defn- encode
[s]
(-> s
(bh/blake2b-256)
(bc/bytes->b64-str true)))
bh/blake2b-256
bc/bytes->b64u
bc/bytes->str))
(defn- fmt-key
[s]

View File

@@ -83,16 +83,3 @@
"A convenience allias for yetti.response/stream-body"
[f]
(yres/stream-body f))
(defn get-request
"Get http request from RPC params"
[params]
(assert (contains? params ::rpc/request-at) "rpc params required")
(-> (meta params)
(get ::http/request)))
(defn get-auth-data
"Get http auth-data from RPC params"
[params]
(-> (get-request params)
(get ::http/auth-data)))

View File

@@ -1,49 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.rpc.management.exporter
(:require
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uri :as u]
[app.config :as cf]
[app.media :refer [schema:upload]]
[app.rpc :as-alias rpc]
[app.rpc.doc :as doc]
[app.storage :as sto]
[app.util.services :as sv]))
;; ---- RPC METHOD: UPLOAD-TEMPFILE
(def ^:private
schema:upload-tempfile-params
[:map {:title "upload-templfile-params"}
[:content schema:upload]])
(def ^:private
schema:upload-tempfile-result
[:map {:title "upload-templfile-result"}])
(sv/defmethod ::upload-tempfile
{::doc/added "2.12"
::sm/params schema:upload-tempfile-params
::sm/result schema:upload-tempfile-result}
[cfg {:keys [::rpc/profile-id content]}]
(let [storage (sto/resolve cfg)
hash (sto/calculate-hash (:path content))
data (-> (sto/content (:path content))
(sto/wrap-with-hash hash))
content {::sto/content data
::sto/deduplicate? true
::sto/touched-at (ct/in-future {:minutes 10})
:profile-id profile-id
:content-type (:mtype content)
:bucket "tempfile"}
object (sto/put-object! storage content)]
{:id (:id object)
:uri (-> (cf/get :public-uri)
(u/join "/assets/by-id/")
(u/join (str (:id object))))}))

View File

@@ -104,29 +104,28 @@
(def ^:private schema:limit
[:and
[:map
[::name :keyword]
[::name :any]
[::strategy schema:strategy]
[::key :string]
[::opts :string]
[::capacity {:optional true} ::sm/int]
[::rate {:optional true} ::sm/int]
[::interval {:optional true} ::ct/duration]
[::params {:optional true} [::sm/vec :any]]
[::permits {:optional true} ::sm/int]
[::unit {:optional true} [:enum :days :hours :minutes :seconds :weeks]]]
[:fn (fn [attrs]
(let [contains-fn (partial contains? attrs)]
(or (every? contains-fn [::capacity ::rate ::interval])
(every? contains-fn [::permits ::unit]))))]])
[::opts :string]]
[:or
[:map
[::capacity ::sm/int]
[::rate ::sm/int]
[::internal ::ct/duration]
[::params [::sm/vec :any]]]
[:map
[::nreq ::sm/int]
[::unit [:enum :days :hours :minutes :seconds :weeks]]]]])
(def ^:private schema:limits
[:map-of :keyword [::sm/vec schema:limit]])
(def ^:private valid-limit-tuple?
(sm/validator schema:limit-tuple))
(sm/lazy-validator schema:limit-tuple))
(def ^:private valid-rlimit-instance?
(sm/validator ::rpc/rlimit))
(sm/lazy-validator ::rpc/rlimit))
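The either/or constraint expressed by the [:fn ...] branch of schema:limit above, extracted as a plain predicate. The namespaced keywords resolve to the example's own namespace, so treat the maps as illustrative rather than real limit configs.

(defn- complete-limit?
  [attrs]
  (let [contains-fn (partial contains? attrs)]
    (or (every? contains-fn [::capacity ::rate ::interval])
        (every? contains-fn [::permits ::unit]))))

(complete-limit? {::permits 1000 ::unit :hours}) ;; => true  (window limit)
(complete-limit? {::capacity 10 ::rate 5})       ;; => false (bucket limit missing ::interval)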
(defmethod parse-limit :window
[[name strategy opts :as vlimit]]
@@ -135,16 +134,16 @@
(merge
{::name name
::strategy strategy}
(if-let [[_ permits unit] (re-find window-opts-re opts)]
(let [permits (parse-long permits)]
{::permits permits
(if-let [[_ nreq unit] (re-find window-opts-re opts)]
(let [nreq (parse-long nreq)]
{::nreq nreq
::unit (case unit
"d" :days
"h" :hours
"m" :minutes
"s" :seconds
"w" :weeks)
::key (str "penpot.rlimit." (cf/get :tenant) ".window." (d/name name))
::key (str "ratelimit.window." (d/name name))
::opts opts})
(ex/raise :type :validation
:code :invalid-window-limit-opts
@@ -165,15 +164,15 @@
::interval interval
::opts opts
::params [(->seconds interval) rate capacity]
::key (str "penpot.rlimit." (cf/get :tenant) ".bucket." (d/name name))})
::key (str "ratelimit.bucket." (d/name name))})
(ex/raise :type :validation
:code :invalid-bucket-limit-opts
:hint (str/ffmt "looks like '%' does not have a valid format" opts))))
(defmethod process-limit :bucket
[rconn profile-id now {:keys [::key ::params ::service ::capacity ::interval ::rate] :as limit}]
[rconn user-id now {:keys [::key ::params ::service ::capacity ::interval ::rate] :as limit}]
(let [script (-> bucket-rate-limit-script
(assoc ::rscript/keys [(str key "." service "." profile-id)])
(assoc ::rscript/keys [(str key "." service "." user-id)])
(assoc ::rscript/vals (conj params (->seconds now))))
result (rds/eval rconn script)
allowed? (boolean (nth result 0))
@@ -193,18 +192,18 @@
(assoc ::lresult/remaining remaining))))
(defmethod process-limit :window
[rconn profile-id now {:keys [::permits ::unit ::key ::service] :as limit}]
[rconn user-id now {:keys [::nreq ::unit ::key ::service] :as limit}]
(let [ts (ct/truncate now unit)
ttl (ct/diff now (ct/plus ts {unit 1}))
script (-> window-rate-limit-script
(assoc ::rscript/keys [(str key "." service "." profile-id "." (ct/format-inst ts))])
(assoc ::rscript/vals [permits (->seconds ttl)]))
(assoc ::rscript/keys [(str key "." service "." user-id "." (ct/format-inst ts))])
(assoc ::rscript/vals [nreq (->seconds ttl)]))
result (rds/eval rconn script)
allowed? (boolean (nth result 0))
remaining (nth result 1)]
(l/trace :hint "limit processed"
:service service
:name (name (::name limit))
:limit (name (::name limit))
:strategy (name (::strategy limit))
:opts (::opts limit)
:allowed allowed?
@@ -215,8 +214,8 @@
(assoc ::lresult/reset (ct/plus ts {unit 1})))))
(defn- process-limits
[rconn profile-id limits now]
(let [results (into [] (map (partial process-limit rconn profile-id now)) limits)
[rconn user-id limits now]
(let [results (into [] (map (partial process-limit rconn user-id now)) limits)
remaining (->> results
(d/index-by ::name ::lresult/remaining)
(uri/map->query-string))
@@ -228,7 +227,7 @@
(when rejected
(l/warn :hint "rejected rate limit"
:profile-id (str profile-id)
:user-id (str user-id)
:limit-service (-> rejected ::service name)
:limit-name (-> rejected ::name name)
:limit-strategy (-> rejected ::strategy name)))
@@ -372,9 +371,12 @@
(defn- on-refresh-error
[_ cause]
(when-not (instance? java.util.concurrent.RejectedExecutionException cause)
(l/warn :hint "unexpected exception on loading config"
:cause cause
::l/sync? true)))
(if-let [explain (-> cause ex-data ex/explain)]
(l/warn ::l/raw (str "unable to refresh config, invalid format:\n" explain)
::l/sync? true)
(l/warn :hint "unexpected exception on loading config"
:cause cause
::l/sync? true))))
(defn- get-config-path
[]

View File

@@ -25,9 +25,9 @@ local allowed = filled >= requested
local newTokens = filled
if allowed then
newTokens = filled - requested
redis.call("hset", tokensKey, "tokens", newTokens, "timestamp", timestamp)
end
redis.call("hset", tokensKey, "tokens", newTokens, "timestamp", timestamp)
redis.call("expire", tokensKey, ttl)
return { allowed, newTokens }
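A pure-Clojure sketch of the decision the Lua script above makes: the take succeeds only when enough tokens have refilled, and only a successful take should persist the decremented count.

(defn- take-tokens
  [filled requested]
  (let [allowed? (>= filled requested)]
    {:allowed allowed?
     :tokens  (if allowed? (- filled requested) filled)}))

(take-tokens 3 1) ;; => {:allowed true, :tokens 2}
(take-tokens 0 1) ;; => {:allowed false, :tokens 0}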

View File

@@ -22,7 +22,8 @@
(defn- generate-random-key
[]
(-> (bn/random-bytes 64)
(bc/bytes->b64-str true)))
(bc/bytes->b64u)
(bc/bytes->str)))
(defn- get-all-props
[conn]
@@ -84,11 +85,12 @@
(l/warn :hint (str "using autogenerated secret-key, it will change on each restart and will invalidate "
"all sessions on each restart, it is highly recommended setting up the "
"PENPOT_SECRET_KEY environment variable")))
(let [secret (or key (generate-random-key))]
(-> (get-all-props conn)
(assoc :secret-key secret)
(assoc :tokens-key (keys/derive secret :salt "tokens"))
(assoc :management-key (keys/derive secret :salt "management"))
(update :instance-id handle-instance-id conn (db/read-only? pool)))))))
(sm/register! ::props [:map-of :keyword ::sm/any])
;; FIXME
(sm/register! ::props :any)
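A hypothetical REPL sketch of the key split performed above: one shared secret, two purpose-scoped keys derived with the repo's keys/derive helper (aliased here as keys); the secret value is obviously made up.

(def secret "changeme-not-a-real-secret")

(def tokens-key     (keys/derive secret :salt "tokens"))
(def management-key (keys/derive secret :salt "management"))

;; the two keys differ even though the secret is shared, so rotating or
;; leaking one does not directly affect the other subsystem
(= (seq tokens-key) (seq management-key)) ;; => false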

View File

@@ -8,13 +8,13 @@
"Keys derivation service."
(:refer-clojure :exclude [derive])
(:require
[app.common.spec :as us]
[buddy.core.kdf :as bk]))
(defn derive
"Derive a key from secret-key"
[secret-key & {:keys [salt size] :or {size 32}}]
(assert (string? secret-key) "expect string")
(assert (seq secret-key) "expect string")
(us/assert! ::us/not-empty-string secret-key)
(let [engine (bk/engine {:key secret-key
:salt salt
:alg :hkdf

View File

@@ -61,8 +61,8 @@
:is-active is-active
:password password
:props {}}]
(->> (cmd.auth/create-profile system params)
(cmd.auth/create-profile-rels conn)))))))
(->> (cmd.auth/create-profile! conn params)
(cmd.auth/create-profile-rels! conn)))))))
(defmethod exec-command "update-profile"
[{:keys [fullname email password is-active]}]

View File

@@ -25,7 +25,6 @@
[app.db.sql :as-alias sql]
[app.features.fdata :as fdata]
[app.features.file-snapshots :as fsnap]
[app.http.session :as session]
[app.loggers.audit :as audit]
[app.main :as main]
[app.msgbus :as mbus]
@@ -844,33 +843,10 @@
:deleted-at deleted-at
:id id})))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; SSO
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn add-sso-config
[& {:keys [base-uri client-id client-secret domain]}]
(assert (and (string? base-uri) (str/starts-with? base-uri "http")) "expected a valid base-uri")
(assert (string? client-id) "expected a valid client-id")
(assert (string? client-secret) "expected a valid client-secret")
(assert (string? domain) "expected a valid domain")
(db/insert! main/system :sso-provider
{:id (uuid/next)
:type "oidc"
:client-id client-id
:client-secret client-secret
:domain domain
:base-uri base-uri}))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; MISC
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn decode-session-token
[token]
(session/decode-token main/system token))
(defn instrument-var
[var]
(alter-var-root var (fn [f]

View File

@@ -35,16 +35,12 @@
:assets-s3 :s3
nil)))
(def default-bucket
"file-media-object")
(def valid-buckets
#{"file-media-object"
"team-font-variant"
"file-object-thumbnail"
"file-thumbnail"
"profile"
"tempfile"
"file-data"
"file-data-fragment"
"file-change"})
@@ -167,6 +163,9 @@
backend
(:metadata result))))
(def ^:private sql:retrieve-storage-object
"select * from storage_object where id = ? and (deleted_at is null or deleted_at > now())")
(defn row->storage-object [res]
(let [mdata (or (some-> (:metadata res) (db/decode-transit-pgobject)) {})]
(impl/storage-object
@@ -178,15 +177,9 @@
(keyword (:backend res))
mdata)))
(def ^:private sql:get-storage-object
"SELECT *
FROM storage_object
WHERE id = ?
AND (deleted_at IS NULL)")
(defn- get-database-object
(defn- retrieve-database-object
[conn id]
(some-> (db/exec-one! conn [sql:get-storage-object id])
(some-> (db/exec-one! conn [sql:retrieve-storage-object id])
(row->storage-object)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -209,7 +202,7 @@
(defn get-object
[{:keys [::db/connectable] :as storage} id]
(assert (valid-storage? storage))
(get-database-object connectable id))
(retrieve-database-object connectable id))
(defn put-object!
"Creates a new object with the provided content."

View File

@@ -37,6 +37,7 @@
(into #{} (map :id))
(not-empty))))
(def ^:private sql:delete-sobjects
"DELETE FROM storage_object
WHERE id = ANY(?::uuid[])")
@@ -76,37 +77,47 @@
(d/group-by (comp keyword :backend) :id #{} items))
(def ^:private sql:get-deleted-sobjects
"SELECT s.*
FROM storage_object AS s
"SELECT s.* FROM storage_object AS s
WHERE s.deleted_at IS NOT NULL
AND s.deleted_at <= ?
AND s.deleted_at < now() - ?::interval
ORDER BY s.deleted_at ASC")
(defn- get-buckets
[conn]
(let [now (ct/now)]
[conn min-age]
(let [age (db/interval min-age)]
(sequence
(comp (partition-all 25)
(mapcat group-by-backend))
(db/cursor conn [sql:get-deleted-sobjects now]))))
(db/cursor conn [sql:get-deleted-sobjects age]))))
(defn- clean-deleted!
[{:keys [::db/conn] :as cfg}]
[{:keys [::db/conn ::min-age] :as cfg}]
(reduce (fn [total [backend-id ids]]
(let [deleted (delete-in-bulk! cfg backend-id ids)]
(+ total (or deleted 0))))
0
(get-buckets conn)))
(get-buckets conn min-age)))
(defmethod ig/assert-key ::handler
[_ params]
(assert (sto/valid-storage? (::sto/storage params)) "expect valid storage")
(assert (db/pool? (::db/pool params)) "expect valid database pool"))
(defmethod ig/expand-key ::handler
[k v]
{k (assoc v ::min-age (ct/duration {:hours 2}))})
(defmethod ig/init-key ::handler
[_ cfg]
(fn [_]
(db/tx-run! cfg (fn [cfg]
(let [total (clean-deleted! cfg)]
(l/inf :hint "task finished" :total total)
{:deleted total})))))
[_ {:keys [::min-age] :as cfg}]
(fn [{:keys [props] :as task}]
(let [min-age (ct/duration (or (:min-age props) min-age))]
(db/tx-run! cfg (fn [cfg]
(let [cfg (assoc cfg ::min-age min-age)
total (clean-deleted! cfg)]
(l/inf :hint "task finished"
:min-age (ct/format-duration min-age)
:total total)
{:deleted total}))))))

View File

@@ -22,10 +22,8 @@
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.time :as ct]
[app.config :as cf]
[app.db :as db]
[app.storage :as sto]
[app.storage :as-alias sto]
[app.storage.impl :as impl]
[integrant.core :as ig]))
@@ -103,15 +101,14 @@
(def ^:private sql:mark-delete-in-bulk
"UPDATE storage_object
SET deleted_at = ?,
SET deleted_at = now(),
touched_at = NULL
WHERE id = ANY(?::uuid[])")
(defn- mark-delete-in-bulk!
[conn deletion-delay ids]
(let [ids (db/create-array conn "uuid" ids)
now (ct/plus (ct/now) deletion-delay)]
(db/exec-one! conn [sql:mark-delete-in-bulk now ids])))
[conn ids]
(let [ids (db/create-array conn "uuid" ids)]
(db/exec-one! conn [sql:mark-delete-in-bulk ids])))
;; NOTE: A getter that retrieves the key which will be used for grouping
;; ids; previously we had no value, then we introduced the
@@ -130,7 +127,7 @@
[{:keys [metadata]}]
(or (some-> metadata :bucket)
(some-> metadata :reference d/name)
sto/default-bucket))
"file-media-object"))
(defn- process-objects!
[conn has-refs? bucket objects]
@@ -140,20 +137,18 @@
(if-let [{:keys [id] :as object} (first objects)]
(if (has-refs? conn object)
(do
(l/dbg :id (str id)
:status "freeze"
:bucket bucket)
(l/debug :id (str id)
:status "freeze"
:bucket bucket)
(recur (conj to-freeze id) to-delete (rest objects)))
(do
(l/dbg :id (str id)
:status "delete"
:bucket bucket)
(l/debug :id (str id)
:status "delete"
:bucket bucket)
(recur to-freeze (conj to-delete id) (rest objects))))
(let [deletion-delay (if (= bucket "tempfile")
(ct/duration {:hours 2})
(cf/get-deletion-delay))]
(do
(some->> (seq to-freeze) (mark-freeze-in-bulk! conn))
(some->> (seq to-delete) (mark-delete-in-bulk! conn deletion-delay))
(some->> (seq to-delete) (mark-delete-in-bulk! conn))
[(count to-freeze) (count to-delete)]))))
(defn- process-bucket!
@@ -165,7 +160,6 @@
"file-thumbnail" (process-objects! conn has-file-thumbnails-refs? bucket objects)
"profile" (process-objects! conn has-profile-refs? bucket objects)
"file-data" (process-objects! conn has-file-data-refs? bucket objects)
"tempfile" (process-objects! conn (constantly false) bucket objects)
(ex/raise :type :internal
:code :unexpected-unknown-reference
:hint (dm/fmt "unknown reference '%'" bucket))))
@@ -179,27 +173,27 @@
[0 0]
(d/group-by lookup-bucket identity #{} chunk)))
(def ^:private sql:get-touched-storage-objects
(def ^:private
sql:get-touched-storage-objects
"SELECT so.*
FROM storage_object AS so
WHERE so.touched_at IS NOT NULL
AND so.touched_at <= ?
ORDER BY touched_at ASC
FOR UPDATE
SKIP LOCKED
LIMIT 10")
(defn get-chunk
[conn timestamp]
(->> (db/exec! conn [sql:get-touched-storage-objects timestamp])
[conn]
(->> (db/exec! conn [sql:get-touched-storage-objects])
(map impl/decode-row)
(not-empty)))
(defn- process-touched!
[{:keys [::db/pool ::timestamp] :as cfg}]
[{:keys [::db/pool] :as cfg}]
(loop [freezed 0
deleted 0]
(if-let [chunk (get-chunk pool timestamp)]
(if-let [chunk (get-chunk pool)]
(let [[nfo ndo] (db/tx-run! cfg process-chunk! chunk)]
(recur (long (+ freezed nfo))
(long (+ deleted ndo))))
@@ -215,6 +209,5 @@
(defmethod ig/init-key ::handler
[_ cfg]
(fn [_]
(process-touched! (assoc cfg ::timestamp (ct/now)))))
(fn [_] (process-touched! cfg)))

View File

@@ -79,17 +79,14 @@
;; API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn tempfile*
[& {:keys [suffix prefix]
(defn tempfile
[& {:keys [suffix prefix min-age]
:or {prefix "penpot."
suffix ".tmp"}}]
(let [attrs (fs/make-permissions "rw-r--r--")
path (fs/join default-tmp-dir (str prefix (uuid/next) suffix))]
(Files/createFile path attrs)))
(defn tempfile
[& {:keys [min-age] :as opts}]
(let [path (tempfile* opts)]
path (fs/join default-tmp-dir (str prefix (uuid/next) suffix))
path (Files/createFile path attrs)]
(fs/delete-on-exit! path)
(sp/offer! queue [path (some-> min-age ct/duration)])
path))

View File

@@ -18,15 +18,15 @@
(def ^:private sql:get-profiles
"SELECT id, photo_id FROM profile
WHERE deleted_at IS NOT NULL
AND deleted_at <= ?
AND deleted_at < now() + ?::interval
ORDER BY deleted_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn- delete-profiles!
[{:keys [::db/conn ::timestamp ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-profiles timestamp chunk-size] {:fetch-size 5})
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-profiles deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [id photo-id]}]
(l/trc :obj "profile" :id (str id))
@@ -41,15 +41,15 @@
(def ^:private sql:get-teams
"SELECT deleted_at, id, photo_id FROM team
WHERE deleted_at IS NOT NULL
AND deleted_at <= ?
AND deleted_at < now() + ?::interval
ORDER BY deleted_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn- delete-teams!
[{:keys [::db/conn ::timestamp ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-teams timestamp chunk-size] {:fetch-size 5})
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-teams deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [id photo-id deleted-at]}]
(l/trc :obj "team"
:id (str id)
@@ -68,15 +68,15 @@
"SELECT id, team_id, deleted_at, woff1_file_id, woff2_file_id, otf_file_id, ttf_file_id
FROM team_font_variant
WHERE deleted_at IS NOT NULL
AND deleted_at <= ?
AND deleted_at < now() + ?::interval
ORDER BY deleted_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn- delete-fonts!
[{:keys [::db/conn ::timestamp ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-fonts timestamp chunk-size] {:fetch-size 5})
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-fonts deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [id team-id deleted-at] :as font}]
(l/trc :obj "font-variant"
:id (str id)
@@ -98,15 +98,15 @@
"SELECT id, deleted_at, team_id
FROM project
WHERE deleted_at IS NOT NULL
AND deleted_at <= ?
AND deleted_at < now() + ?::interval
ORDER BY deleted_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn- delete-projects!
[{:keys [::db/conn ::timestamp ::chunk-size] :as cfg}]
(->> (db/plan conn [sql:get-projects timestamp chunk-size] {:fetch-size 5})
[{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg}]
(->> (db/plan conn [sql:get-projects deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [id team-id deleted-at]}]
(l/trc :obj "project"
:id (str id)
@@ -124,15 +124,15 @@
f.project_id
FROM file AS f
WHERE f.deleted_at IS NOT NULL
AND f.deleted_at <= ?
AND f.deleted_at < now() + ?::interval
ORDER BY f.deleted_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn- delete-files!
[{:keys [::db/conn ::timestamp ::chunk-size] :as cfg}]
(->> (db/plan conn [sql:get-files timestamp chunk-size] {:fetch-size 5})
[{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg}]
(->> (db/plan conn [sql:get-files deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [id deleted-at project-id] :as file}]
(l/trc :obj "file"
:id (str id)
@@ -148,15 +148,15 @@
"SELECT file_id, revn, media_id, deleted_at
FROM file_thumbnail
WHERE deleted_at IS NOT NULL
AND deleted_at <= ?
AND deleted_at < now() + ?::interval
ORDER BY deleted_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn delete-file-thumbnails!
[{:keys [::db/conn ::timestamp ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-file-thumbnails timestamp chunk-size] {:fetch-size 5})
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-file-thumbnails deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [file-id revn media-id deleted-at]}]
(l/trc :obj "file-thumbnail"
:file-id (str file-id)
@@ -175,15 +175,15 @@
"SELECT file_id, object_id, media_id, deleted_at
FROM file_tagged_object_thumbnail
WHERE deleted_at IS NOT NULL
AND deleted_at <= ?
AND deleted_at < now() + ?::interval
ORDER BY deleted_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn delete-file-object-thumbnails!
[{:keys [::db/conn ::timestamp ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-file-object-thumbnails timestamp chunk-size] {:fetch-size 5})
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-file-object-thumbnails deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [file-id object-id media-id deleted-at]}]
(l/trc :obj "file-object-thumbnail"
:file-id (str file-id)
@@ -203,15 +203,15 @@
"SELECT id, file_id, media_id, thumbnail_id, deleted_at
FROM file_media_object
WHERE deleted_at IS NOT NULL
AND deleted_at <= ?
AND deleted_at < now() + ?::interval
ORDER BY deleted_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn- delete-file-media-objects!
[{:keys [::db/conn ::timestamp ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-file-media-objects timestamp chunk-size] {:fetch-size 5})
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-file-media-objects deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [id file-id deleted-at] :as fmo}]
(l/trc :obj "file-media-object"
:id (str id)
@@ -231,15 +231,16 @@
"SELECT file_id, id, type, deleted_at, metadata, backend
FROM file_data
WHERE deleted_at IS NOT NULL
AND deleted_at <= ?
AND deleted_at < now() + ?::interval
ORDER BY deleted_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn- delete-file-data!
[{:keys [::db/conn ::timestamp ::chunk-size] :as cfg}]
(->> (db/plan conn [sql:get-file-data timestamp chunk-size] {:fetch-size 5})
[{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg}]
(->> (db/plan conn [sql:get-file-data deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [file-id id type deleted-at metadata backend]}]
(some->> metadata
@@ -265,15 +266,15 @@
"SELECT id, file_id, deleted_at
FROM file_change
WHERE deleted_at IS NOT NULL
AND deleted_at <= ?
AND deleted_at < now() + ?::interval
ORDER BY deleted_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn- delete-file-changes!
[{:keys [::db/conn ::timestamp ::chunk-size] :as cfg}]
(->> (db/plan conn [sql:get-file-change timestamp chunk-size] {:fetch-size 5})
[{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg}]
(->> (db/plan conn [sql:get-file-change deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [id file-id deleted-at] :as xlog}]
(l/trc :obj "file-change"
:id (str id)
@@ -321,8 +322,9 @@
(defmethod ig/init-key ::handler
[_ cfg]
(fn [_]
(let [cfg (assoc cfg ::timestamp (ct/now))]
(fn [{:keys [props] :as task}]
(let [threshold (ct/duration (get props :deletion-threshold 0))
cfg (assoc cfg ::deletion-threshold (db/interval threshold))]
(loop [procs (map deref deletion-proc-vars)
total 0]
(if-let [proc-fn (first procs)]

View File

@@ -15,25 +15,19 @@
[buddy.sign.jwe :as jwe]))
(defn generate
([cfg claims] (generate cfg claims nil))
([{:keys [::setup/props] :as cfg} claims header]
(assert (contains? props :tokens-key) "expect props to have tokens-key")
[{:keys [::setup/props] :as cfg} claims]
(assert (contains? cfg ::setup/props))
(let [tokens-key
(get props :tokens-key)
(let [tokens-key
(get props :tokens-key)
payload
(-> claims
(update :iat (fn [v] (or v (ct/now))))
(d/without-nils)
(t/encode))]
payload
(-> claims
(update :iat (fn [v] (or v (ct/now))))
(d/without-nils)
(t/encode))]
(jwe/encrypt payload tokens-key {:alg :a256kw :enc :a256gcm :header header}))))
(defn decode-header
[token]
(ex/ignoring
(jwe/decode-header token)))
(jwe/encrypt payload tokens-key {:alg :a256kw :enc :a256gcm})))
(defn decode
[{:keys [::setup/props] :as cfg} token]

View File

@@ -27,7 +27,7 @@
(throw (IllegalArgumentException. "Missing arguments on `defmethod` macro.")))
(let [mdata (assoc mdata
::docstring (some-> docs str/unindent)
::docstring (some-> docs str/<<-)
::spec sname
::name (name sname))

View File

@@ -7,17 +7,9 @@
(ns app.util.template
(:require
[app.common.exceptions :as ex]
[cuerdas.core :as str]
[selmer.filters :as sf]
[selmer.parser :as sp]))
;; (sp/cache-off!)
(sf/add-filter! :abbreviate
(fn [s n]
(let [n (parse-long n)]
(str/abbreviate s n))))
(sp/cache-off!)
(defn render
[path context]

View File

@@ -137,34 +137,33 @@ RETURNING task.id, task.queue")
::wait)))
(run-batch []
(try
(let [rconn (rds/connect cfg)]
(try
(-> cfg
(assoc ::rds/conn rconn)
(db/tx-run! run-batch'))
(finally
(.close ^AutoCloseable rconn))))
(let [rconn (rds/connect cfg)]
(try
(-> cfg
(assoc ::rds/conn rconn)
(db/tx-run! run-batch'))
(catch InterruptedException cause
(throw cause))
(catch InterruptedException cause
(throw cause))
(catch Exception cause
(cond
(rds/exception? cause)
(do
(l/wrn :hint "redis exception (will retry in an instant)" :cause cause)
(px/sleep timeout))
(catch Exception cause
(cond
(rds/exception? cause)
(do
(l/wrn :hint "redis exception (will retry in an instant)" :cause cause)
(px/sleep timeout))
(db/sql-exception? cause)
(do
(l/wrn :hint "database exception (will retry in an instant)" :cause cause)
(px/sleep timeout))
(db/sql-exception? cause)
(do
(l/wrn :hint "database exception (will retry in an instant)" :cause cause)
(px/sleep timeout))
:else
(do
(l/err :hint "unhandled exception (will retry in an instant)" :cause cause)
(px/sleep timeout))))
:else
(do
(l/err :hint "unhandled exception (will retry in an instant)" :cause cause)
(px/sleep timeout))))))
(finally
(.close ^AutoCloseable rconn)))))
(dispatcher []
(l/inf :hint "started")
@@ -177,7 +176,7 @@ RETURNING task.id, task.queue")
(catch InterruptedException _
(l/trc :hint "interrupted"))
(catch Throwable cause
(l/err :hint "unexpected exception" :cause cause))
(l/err :hint " unexpected exception" :cause cause))
(finally
(l/inf :hint "terminated"))))]

View File

@@ -30,7 +30,6 @@
[app.rpc.commands.files :as files]
[app.rpc.commands.files-create :as files.create]
[app.rpc.commands.files-update :as files.update]
[app.rpc.commands.projects :as projects]
[app.rpc.commands.teams :as teams]
[app.rpc.helpers :as rph]
[app.util.blob :as blob]
@@ -105,8 +104,13 @@
(assoc-in [:app.rpc/methods :app.setup/templates] templates)
(dissoc :app.srepl/server
:app.http/server
:app.http/route
:app.http/router
:app.auth.oidc.providers/google
:app.auth.oidc.providers/gitlab
:app.auth.oidc.providers/github
:app.auth.oidc.providers/generic
:app.setup/templates
:app.auth.oidc/routes
:app.http.oauth/handler
:app.notifications/handler
:app.loggers.mattermost/reporter
@@ -178,25 +182,23 @@
:is-demo false}
params)]
(db/run! system
(fn [{:keys [::db/conn] :as cfg}]
(fn [{:keys [::db/conn]}]
(->> params
(cmd.auth/create-profile cfg)
(cmd.auth/create-profile-rels conn)))))))
(cmd.auth/create-profile! conn)
(cmd.auth/create-profile-rels! conn)))))))
(defn create-project*
([i params] (create-project* *system* i params))
([system i {:keys [profile-id team-id] :as params}]
(us/assert uuid? profile-id)
(us/assert uuid? team-id)
(assert (uuid? profile-id))
(assert (uuid? team-id))
(let [timestamp (ct/now)]
(db/run! system
(fn [cfg]
(->> (merge {:id (mk-uuid "project" i)
:name (str "project" i)}
params
{::rpc/request-at timestamp})
(#'projects/create-project cfg)))))))
(db/run! system
(fn [{:keys [::db/conn]}]
(->> (merge {:id (mk-uuid "project" i)
:name (str "project" i)}
params)
(#'teams/create-project conn))))))
(defn create-file*
([i params]

View File

@@ -22,6 +22,17 @@
(t/use-fixtures :once th/state-init)
(t/use-fixtures :each th/database-reset)
(t/deftest authenticate-method
(let [profile (th/create-profile* 1)
token (#'sess/gen-token th/*system* {:profile-id (:id profile)})
request {:params {:token token}}
response (#'mgmt/authenticate th/*system* request)]
(t/is (= 200 (::yres/status response)))
(t/is (= "authentication" (-> response ::yres/body :iss)))
(t/is (= (:id profile) (-> response ::yres/body :uid)))))
(t/deftest get-customer-method
(let [profile (th/create-profile* 1)
request {:params {:id (:id profile)}}
@@ -78,3 +89,7 @@
(let [subs' (-> response ::yres/body :subscription)]
(t/is (= subs' subs))))))

View File

@@ -8,8 +8,8 @@
(:require
[app.common.time :as ct]
[app.db :as db]
[app.http :as-alias http]
[app.http.access-token]
[app.http.auth :as-alias auth]
[app.http.middleware :as mw]
[app.http.session :as session]
[app.main :as-alias main]
@@ -42,14 +42,13 @@
(handler (->DummyRequest {} {}))
(t/is (nil? (::http/auth-data @request)))
(t/is (nil? (::auth/token-type @request)))
(t/is (nil? (::auth/token @request)))
(handler (->DummyRequest {"authorization" "Token aaaa"} {}))
(let [{:keys [token claims] token-type :type} (get @request ::http/auth-data)]
(t/is (= :token token-type))
(t/is (= "aaaa" token))
(t/is (nil? claims)))))
(t/is (= :token (::auth/token-type @request)))
(t/is (= "aaaa" (::auth/token @request)))))
(t/deftest auth-middleware-2
(let [request (volatile! nil)
@@ -58,14 +57,16 @@
{})]
(handler (->DummyRequest {} {}))
(t/is (nil? (::http/auth-data @request)))
(t/is (nil? (::auth/token-type @request)))
(t/is (nil? (::auth/token @request)))
(t/is (nil? (::auth/claims @request)))
(handler (->DummyRequest {"authorization" "Bearer aaaa"} {}))
(let [{:keys [token claims] token-type :type} (get @request ::http/auth-data)]
(t/is (= :bearer token-type))
(t/is (= "aaaa" token))
(t/is (nil? claims)))))
(t/is (= :bearer (::auth/token-type @request)))
(t/is (= "aaaa" (::auth/token @request)))
(t/is (nil? (::auth/claims @request)))))
(t/deftest auth-middleware-3
(let [request (volatile! nil)
@@ -74,14 +75,35 @@
{})]
(handler (->DummyRequest {} {}))
(t/is (nil? (::http/auth-data @request)))
(t/is (nil? (::auth/token-type @request)))
(t/is (nil? (::auth/token @request)))
(t/is (nil? (::auth/claims @request)))
(handler (->DummyRequest {} {"auth-token" "foobar"}))
(let [{:keys [token claims] token-type :type} (get @request ::http/auth-data)]
(t/is (= :cookie token-type))
(t/is (= "foobar" token))
(t/is (nil? claims)))))
(t/is (= :cookie (::auth/token-type @request)))
(t/is (= "foobar" (::auth/token @request)))
(t/is (nil? (::auth/claims @request)))))
(t/deftest auth-middleware-4
(let [request (volatile! nil)
handler (#'app.http.middleware/wrap-auth
(fn [req] (vreset! request req))
{:cookie (fn [_] "foobaz")})]
(handler (->DummyRequest {} {}))
(t/is (nil? (::auth/token-type @request)))
(t/is (nil? (::auth/token @request)))
(t/is (nil? (::auth/claims @request)))
(handler (->DummyRequest {} {"auth-token" "foobar"}))
(t/is (= :cookie (::auth/token-type @request)))
(t/is (= "foobar" (::auth/token @request)))
(t/is (delay? (::auth/claims @request)))
(t/is (= "foobaz" (-> @request ::auth/claims deref)))))
(t/deftest shared-key-auth
(let [handler (#'app.http.middleware/wrap-shared-key-auth
@@ -100,36 +122,40 @@
(t/deftest access-token-authz
(let [profile (th/create-profile* 1)
token (db/tx-run! th/*system* app.rpc.commands.access-token/create-access-token (:id profile) "test" nil)
handler (#'app.http.access-token/wrap-authz identity th/*system*)]
request (volatile! {})
(let [response (handler nil)]
(t/is (nil? response)))
handler (#'app.http.access-token/wrap-authz
(fn [req] (vreset! request req))
th/*system*)]
(let [response (handler {::http/auth-data {:type :token :token "foobar" :claims {:tid (:id token)}}})]
(t/is (= #{} (:app.http.access-token/perms response)))
(t/is (= (:id profile) (:app.http.access-token/profile-id response))))))
(handler nil)
(t/is (nil? @request))
(handler {::auth/claims (delay {:tid (:id token)})
::auth/token-type :token})
(t/is (= #{} (:app.http.access-token/perms @request)))
(t/is (= (:id profile) (:app.http.access-token/profile-id @request)))))
(t/deftest session-authz
(let [cfg th/*system*
manager (session/inmemory-manager)
profile (th/create-profile* 1)
handler (-> (fn [req] req)
(#'session/wrap-authz {::session/manager manager})
(#'mw/wrap-auth {:bearer (partial session/decode-token cfg)
:cookie (partial session/decode-token cfg)}))
(let [manager (session/inmemory-manager)
profile (th/create-profile* 1)
handler (-> (fn [req] req)
(#'session/wrap-authz {::session/manager manager})
(#'mw/wrap-auth {}))]
session (->> (session/create-session manager {:profile-id (:id profile)
:user-agent "user agent"})
(#'session/assign-token cfg))
response (handler (->DummyRequest {} {"auth-token" (:token session)}))
(let [response (handler (->DummyRequest {} {"auth-token" "foobar"}))]
(t/is (= :cookie (::auth/token-type response)))
(t/is (= "foobar" (::auth/token response))))
{:keys [token claims] token-type :type}
(get response ::http/auth-data)]
(t/is (= :cookie token-type))
(t/is (= (:token session) token))
(t/is (= "authentication" (:iss claims)))
(t/is (= "penpot" (:aud claims)))
(t/is (= (:id session) (:sid claims)))
(t/is (= (:id profile) (:uid claims)))))
(session/write! manager "foobar" {:profile-id (:id profile)
:user-agent "user agent"
:created-at (ct/now)})
(let [response (handler (->DummyRequest {} {"auth-token" "foobar"}))]
(t/is (= :cookie (::auth/token-type response)))
(t/is (= "foobar" (::auth/token response)))
(t/is (= (:id profile) (::session/profile-id response)))
(t/is (= "foobar" (::session/id response))))))

View File

@@ -9,7 +9,6 @@
[app.common.features :as cfeat]
[app.common.pprint :as pp]
[app.common.thumbnails :as thc]
[app.common.time :as ct]
[app.common.types.shape :as cts]
[app.common.uuid :as uuid]
[app.config :as cf]
@@ -17,7 +16,6 @@
[app.db.sql :as sql]
[app.http :as http]
[app.rpc :as-alias rpc]
[app.setup.clock :as clock]
[app.storage :as sto]
[backend-tests.helpers :as th]
[clojure.test :as t]
@@ -134,10 +132,9 @@
;; this will run pending task triggered by deleting user snapshot
(th/run-pending-tasks!)
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [res (th/run-task! :objects-gc {})]
;; delete 2 snapshots and 2 file data entries
(t/is (= 4 (:processed res)))))))))
(let [res (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
;; delete 2 snapshots and 2 file data entries
(t/is (= 4 (:processed res))))))))
(t/deftest snapshots-locking
(let [profile-1 (th/create-profile* 1 {:is-active true})

View File

@@ -313,7 +313,7 @@
;; freeze because of the deduplication (we have uploaded 2 times
;; the same files).
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 2 (:freeze res)))
(t/is (= 0 (:delete res))))
@@ -372,14 +372,14 @@
(th/db-exec! ["update file_change set deleted_at = now() where file_id = ? and label is not null" (:id file)])
(th/db-exec! ["update file set has_media_trimmed = false where id = ?" (:id file)])
(let [res (th/run-task! :objects-gc {})]
(let [res (th/run-task! :objects-gc {:deletion-threshold 0})]
;; this will remove the file change and file data entries for two snapshots
(t/is (= 4 (:processed res))))
;; Rerun the file-gc and objects-gc
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
(let [res (th/run-task! :objects-gc {})]
(let [res (th/run-task! :objects-gc {:deletion-threshold 0})]
;; this will remove the file media objects marked as deleted
;; on prev file-gc
(t/is (= 2 (:processed res))))
@@ -387,7 +387,7 @@
;; Now that file-gc have deleted the file-media-object usage,
;; lets execute the touched-gc task, we should see that two of
;; them are marked to be deleted
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 0 (:freeze res)))
(t/is (= 2 (:delete res))))
@@ -572,7 +572,7 @@
;; Now that file-gc have deleted the file-media-object usage,
;; lets execute the touched-gc task, we should see that two of
;; them are marked to be deleted.
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 0 (:freeze res)))
(t/is (= 2 (:delete res))))
@@ -665,7 +665,7 @@
;; because of the deduplication (we have uploaded 2 times the
;; same files).
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 1 (:freeze res)))
(t/is (= 0 (:delete res))))
@@ -715,7 +715,7 @@
;; Now that objects-gc have deleted the object thumbnail lets
;; execute the touched-gc task
(let [res (th/run-task! "storage-gc-touched" {})]
(let [res (th/run-task! "storage-gc-touched" {:min-age 0})]
(t/is (= 1 (:freeze res))))
;; check file media objects
@@ -750,7 +750,7 @@
;; Now that file-gc have deleted the object thumbnail lets
;; execute the touched-gc task
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 1 (:delete res))))
;; check file media objects
@@ -922,9 +922,8 @@
(t/is (= 0 (:processed result))))
;; run permanent deletion
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [result (th/run-task! :objects-gc {})]
(t/is (= 3 (:processed result)))))
(let [result (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
(t/is (= 3 (:processed result))))
;; query the list of file libraries of a after hard deletion
(let [data {::th/type :get-file-libraries
@@ -1135,7 +1134,7 @@
(th/sleep 300)
;; run the task
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
;; check that object thumbnails are still here
(let [rows (th/db-query :file-tagged-object-thumbnail {:file-id (:id file)})]
@@ -1164,7 +1163,7 @@
(t/is (= 2 (count rows))))
;; run the task again
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
;; check that we have all object thumbnails
(let [rows (th/db-query :file-tagged-object-thumbnail {:file-id (:id file)})]
@@ -1227,7 +1226,7 @@
(t/is (= 2 (count rows)))))
(t/testing "gc task"
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
(let [rows (th/db-query :file-thumbnail {:file-id (:id file)})]
(t/is (= 2 (count rows)))
@@ -1274,7 +1273,7 @@
;; The FileGC task will schedule an inner taskq
(th/run-pending-tasks!)
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 2 (:freeze res)))
(t/is (= 0 (:delete res))))
@@ -1368,7 +1367,7 @@
;; we ensure that once object-gc is passed and marked two storage
;; objects to delete
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 0 (:freeze res)))
(t/is (= 2 (:delete res))))
@@ -1490,7 +1489,7 @@
(t/is (some? (not-empty (:objects component))))))
;; Re-run the file-gc task
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
(let [row (th/db-get :file {:id (:id file)})]
(t/is (true? (:has-media-trimmed row))))
@@ -1520,7 +1519,7 @@
;; Now, we have deleted the usage of component if we pass file-gc,
;; that component should be deleted
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
;; Check that component is properly removed
(let [data {::th/type :get-file
@@ -1611,8 +1610,8 @@
:component-id c-id})}])
;; Run the file-gc on file and library
(t/is (true? (th/run-task! :file-gc {:file-id (:id file-1)})))
(t/is (true? (th/run-task! :file-gc {:file-id (:id file-2)})))
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file-1)})))
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file-2)})))
;; Check that component exists
(let [data {::th/type :get-file
@@ -1685,7 +1684,7 @@
;; Now, we have deleted the usage of component if we pass file-gc,
;; that component should be deleted
(t/is (true? (th/run-task! :file-gc {:file-id (:id file-1)})))
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file-1)})))
;; Check that component is properly removed
(let [data {::th/type :get-file
@@ -1834,8 +1833,8 @@
(t/is (not= (:id fill) (:id fmedia)))))
;; Run the file-gc on file and library
(t/is (true? (th/run-task! :file-gc {:file-id (:id file-1)})))
(t/is (true? (th/run-task! :file-gc {:file-id (:id file-2)})))
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file-1)})))
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file-2)})))
;; Now proceed to delete file and absorb it
(let [data {::th/type :delete-file
@@ -1926,7 +1925,7 @@
(let [row (th/db-exec-one! ["select * from file where id = ?" file-id])]
(t/is (= (:deleted-at row) now)))))))
(t/deftest restore-deleted-files
(t/deftest deleted-files-restore
(let [prof (th/create-profile* 1 {:is-active true})
team-id (:default-team-id prof)
proj-id (:default-project-id prof)
@@ -1989,78 +1988,3 @@
(let [row (th/db-exec-one! ["select * from file where id = ?" file-id])]
(t/is (nil? (:deleted-at row)))))))
(t/deftest restore-deleted-files-and-projets
(let [profile (th/create-profile* 1 {:is-active true})
team-id (:default-team-id profile)
now (ct/inst "2025-10-31T00:00:00Z")]
(binding [ct/*clock* (clock/fixed now)]
(let [project (th/create-project* 1 {:profile-id (:id profile)
:team-id team-id})
file (th/create-file* 1 {:profile-id (:id profile)
:project-id (:id project)})
data {::th/type :delete-project
:id (:id project)
::rpc/profile-id (:id profile)}
out (th/command! data)]
;; (th/print-result! out)
(t/is (nil? (:error out)))
(t/is (nil? (:result out)))
(th/run-pending-tasks!)
;; get deleted files
(let [data {::th/type :get-team-deleted-files
::rpc/profile-id (:id profile)
:team-id team-id}
out (th/command! data)]
;; (th/print-result! out)
(t/is (nil? (:error out)))
(let [[row1 :as result] (:result out)]
(t/is (= 1 (count result)))
(t/is (= (:will-be-deleted-at row1) #penpot/inst "2025-11-07T00:00:00Z"))
(t/is (= (:created-at row1) #penpot/inst "2025-10-31T00:00:00Z"))
(t/is (= (:modified-at row1) #penpot/inst "2025-10-31T00:00:00Z"))))
;; Check if project is deleted
(let [[row1 :as rows] (th/db-query :project {:id (:id project)})]
;; (pp/pprint rows)
(t/is (= 1 (count rows)))
(t/is (= (:deleted-at row1) #penpot/inst "2025-11-07T00:00:00Z"))
(t/is (= (:created-at row1) #penpot/inst "2025-10-31T00:00:00Z"))
(t/is (= (:modified-at row1) #penpot/inst "2025-10-31T00:00:00Z")))
;; Restore files
(let [data {::th/type :restore-deleted-team-files
::rpc/profile-id (:id profile)
:team-id team-id
:ids #{(:id file)}}
out (th/command! data)]
;; (th/print-result! out)
(t/is (nil? (:error out)))
(let [result (:result out)]
(t/is (fn? result))
(let [events (th/consume-sse result)]
;; (pp/pprint events)
(t/is (= 2 (count events)))
(t/is (= :end (first (last events))))
(t/is (= (:ids data) (last (last events)))))))
(let [[row1 :as rows] (th/db-query :file {:project-id (:id project)})]
;; (pp/pprint rows)
(t/is (= 1 (count rows)))
(t/is (= (:created-at row1) #penpot/inst "2025-10-31T00:00:00Z"))
(t/is (nil? (:deleted-at row1))))
;; Check if project is restored
(let [[row1 :as rows] (th/db-query :project {:id (:id project)})]
;; (pp/pprint rows)
(t/is (= 1 (count rows)))
(t/is (= (:created-at row1) #penpot/inst "2025-10-31T00:00:00Z"))
(t/is (nil? (:deleted-at row1))))))))

View File

@@ -8,14 +8,12 @@
(:require
[app.common.pprint :as pp]
[app.common.thumbnails :as thc]
[app.common.time :as ct]
[app.common.types.shape :as cts]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.rpc :as-alias rpc]
[app.rpc.commands.auth :as cauth]
[app.setup.clock :as clock]
[app.storage :as sto]
[app.tokens :as tokens]
[backend-tests.helpers :as th]
@@ -85,8 +83,7 @@
(t/is (map? (:result out))))
;; run the task again
(let [res (binding [ct/*clock* (clock/fixed (ct/in-future {:minutes 31}))]
(th/run-task! "storage-gc-touched" {}))]
(let [res (th/run-task! "storage-gc-touched" {:min-age 0})]
(t/is (= 2 (:freeze res))))
(let [[row1 row2 :as rows] (th/db-query :file-tagged-object-thumbnail
@@ -117,9 +114,9 @@
;; Run the File GC task that should remove unused file object
;; thumbnails
(th/run-task! :file-gc {:file-id (:id file)})
(th/run-task! :file-gc {:min-age 0 :file-id (:id file)})
(let [result (th/run-task! :objects-gc {})]
(let [result (th/run-task! :objects-gc {:min-age 0})]
(t/is (= 3 (:processed result))))
;; check if row2 related thumbnail row still exists
@@ -136,8 +133,7 @@
(t/is (some? (sto/get-object storage (:media-id row2))))
;; run the task again
(let [res (binding [ct/*clock* (clock/fixed (ct/in-future {:minutes 31}))]
(th/run-task! :storage-gc-touched {}))]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 1 (:delete res)))
(t/is (= 0 (:freeze res))))
@@ -147,9 +143,8 @@
;; Run the storage gc deleted task, it should permanently delete
;; all storage objects related to the deleted thumbnails
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [res (th/run-task! :storage-gc-deleted {})]
(t/is (= 1 (:deleted res)))))
(let [result (th/run-task! :storage-gc-deleted {:min-age 0})]
(t/is (= 1 (:deleted result))))
(t/is (nil? (sto/get-object storage (:media-id row1))))
(t/is (some? (sto/get-object storage (:media-id row2))))
@@ -221,9 +216,9 @@
;; Run the File GC task that should remove unused file object
;; thumbnails
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
(let [result (th/run-task! :objects-gc {})]
(let [result (th/run-task! :objects-gc {:min-age 0})]
(t/is (= 2 (:processed result))))
;; check if row1 related thumbnail row still exists
@@ -235,7 +230,7 @@
(t/is (= (:object-id data1) (:object-id row)))
(t/is (uuid? (:media-id row1))))
(let [result (th/run-task! :storage-gc-touched {})]
(let [result (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 1 (:delete result))))
;; Check if storage objects still exists after file-gc
@@ -247,9 +242,8 @@
;; Run the storage gc deleted task, it should permanently delete
;; all storage objects related to the deleted thumbnails
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [result (th/run-task! :storage-gc-deleted {})]
(t/is (= 1 (:deleted result)))))
(let [result (th/run-task! :storage-gc-deleted {:min-age 0})]
(t/is (= 1 (:deleted result))))
(t/is (some? (sto/get-object storage (:media-id row2)))))))

View File

@@ -6,13 +6,11 @@
(ns backend-tests.rpc-font-test
(:require
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.http :as http]
[app.rpc :as-alias rpc]
[app.setup.clock :as clock]
[app.storage :as sto]
[backend-tests.helpers :as th]
[clojure.test :as t]
@@ -131,7 +129,7 @@
;; (th/print-result! out)
(t/is (nil? (:error out))))
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 6 (:freeze res))))
(let [params {::th/type :delete-font
@@ -143,17 +141,16 @@
(t/is (nil? (:error out)))
(t/is (nil? (:result out))))
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 0 (:freeze res)))
(t/is (= 0 (:delete res))))
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [res (th/run-task! :objects-gc {})]
(t/is (= 2 (:processed res))))
(let [res (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
(t/is (= 2 (:processed res))))
(let [res (th/run-task! :storage-gc-touched {})]
(t/is (= 0 (:freeze res)))
(t/is (= 6 (:delete res)))))))
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 0 (:freeze res)))
(t/is (= 6 (:delete res))))))
(t/deftest font-deletion-2
(let [prof (th/create-profile* 1 {:is-active true})
@@ -192,7 +189,7 @@
;; (th/print-result! out)
(t/is (nil? (:error out))))
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 6 (:freeze res))))
(let [params {::th/type :delete-font
@@ -204,17 +201,16 @@
(t/is (nil? (:error out)))
(t/is (nil? (:result out))))
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 0 (:freeze res)))
(t/is (= 0 (:delete res))))
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [res (th/run-task! :objects-gc {})]
(t/is (= 1 (:processed res))))
(let [res (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
(t/is (= 1 (:processed res))))
(let [res (th/run-task! :storage-gc-touched {})]
(t/is (= 0 (:freeze res)))
(t/is (= 3 (:delete res)))))))
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 0 (:freeze res)))
(t/is (= 3 (:delete res))))))
(t/deftest font-deletion-3
(let [prof (th/create-profile* 1 {:is-active true})
@@ -252,7 +248,7 @@
(t/is (nil? (:error out1)))
(t/is (nil? (:error out2)))
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 6 (:freeze res))))
(let [params {::th/type :delete-font-variant
@@ -264,14 +260,13 @@
(t/is (nil? (:error out)))
(t/is (nil? (:result out))))
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 0 (:freeze res)))
(t/is (= 0 (:delete res))))
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [res (th/run-task! :objects-gc {})]
(t/is (= 1 (:processed res))))
(let [res (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
(t/is (= 1 (:processed res))))
(let [res (th/run-task! :storage-gc-touched {})]
(t/is (= 0 (:freeze res)))
(t/is (= 3 (:delete res)))))))
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
(t/is (= 0 (:freeze res)))
(t/is (= 3 (:delete res))))))

View File

@@ -6,13 +6,11 @@
(ns backend-tests.rpc-project-test
(:require
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.http :as http]
[app.rpc :as-alias rpc]
[app.setup.clock :as clock]
[backend-tests.helpers :as th]
[clojure.test :as t]))
@@ -228,9 +226,8 @@
(t/is (= 0 (count result)))))
;; run permanent deletion
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [result (th/run-task! :objects-gc {})]
(t/is (= 1 (:processed result)))))
(let [result (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
(t/is (= 1 (:processed result))))
;; query the list of files of a after hard deletion
(let [data {::th/type :get-project-files

View File

@@ -13,7 +13,6 @@
[app.db :as db]
[app.http :as http]
[app.rpc :as-alias rpc]
[app.setup.clock :as clock]
[app.storage :as sto]
[app.tokens :as tokens]
[backend-tests.helpers :as th]
@@ -526,9 +525,8 @@
(t/is (= :not-found (:type edata)))))
;; run permanent deletion
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [result (th/run-task! :objects-gc {})]
(t/is (= 2 (:processed result)))))
(let [result (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
(t/is (= 2 (:processed result))))
;; query the list of projects of a after hard deletion
(let [data {::th/type :get-projects
@@ -583,9 +581,8 @@
(t/is (= 1 (count rows)))
(t/is (ct/inst? (:deleted-at (first rows)))))
(binding [ct/*clock* (clock/fixed (ct/in-future {:days 8}))]
(let [result (th/run-task! :objects-gc {})]
(t/is (= 7 (:processed result)))))))
(let [result (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
(t/is (= 7 (:processed result))))))
(t/deftest create-team-access-request
(with-mocks [mock {:target 'app.email/send! :return nil}]

View File

@@ -11,7 +11,6 @@
[app.common.uuid :as uuid]
[app.db :as db]
[app.rpc :as-alias rpc]
[app.setup.clock :as clock]
[app.storage :as sto]
[backend-tests.helpers :as th]
[clojure.test :as t]
@@ -54,13 +53,19 @@
(configure-storage-backend))
content (sto/content "content")
object (sto/put-object! storage {::sto/content content
::sto/expired-at (ct/in-future {:hours 1})
::sto/expired-at (ct/in-future {:seconds 1})
:content-type "text/plain"})]
(t/is (sto/object? object))
(t/is (ct/inst? (:expired-at object)))
(t/is (ct/is-after? (:expired-at object) (ct/now)))
(t/is (nil? (sto/get-object storage (:id object))))))
(t/is (= object (sto/get-object storage (:id object))))
(th/sleep 1000)
(t/is (nil? (sto/get-object storage (:id object))))
(t/is (nil? (sto/get-object-data storage object)))
(t/is (nil? (sto/get-object-url storage object)))
(t/is (nil? (sto/get-object-path storage object)))))
(t/deftest put-and-delete-object
(let [storage (-> (:app.storage/storage th/*system*)
@@ -93,25 +98,20 @@
::sto/expired-at (ct/now)
:content-type "text/plain"})
object2 (sto/put-object! storage {::sto/content content2
::sto/expired-at (ct/in-future {:hours 2})
::sto/expired-at (ct/in-past {:hours 2})
:content-type "text/plain"})
object3 (sto/put-object! storage {::sto/content content3
::sto/expired-at (ct/in-future {:hours 1})
::sto/expired-at (ct/in-past {:hours 1})
:content-type "text/plain"})]
(binding [ct/*clock* (clock/fixed (ct/in-future {:minutes 0}))]
(let [res (th/run-task! :storage-gc-deleted {})]
(t/is (= 1 (:deleted res)))))
(th/sleep 200)
(let [res (th/run-task! :storage-gc-deleted {})]
(t/is (= 1 (:deleted res))))
(let [res (th/db-exec-one! ["select count(*) from storage_object;"])]
(t/is (= 2 (:count res))))
(binding [ct/*clock* (clock/fixed (ct/in-future {:minutes 61}))]
(let [res (th/run-task! :storage-gc-deleted {})]
(t/is (= 1 (:deleted res)))))
(let [res (th/db-exec-one! ["select count(*) from storage_object;"])]
(t/is (= 1 (:count res))))))
(t/is (= 2 (:count res))))))
(t/deftest touched-gc-task-1
(let [storage (-> (:app.storage/storage th/*system*)
@@ -158,7 +158,7 @@
{:id (:id result-1)})
;; run the objects gc task for permanent deletion
(let [res (th/run-task! :objects-gc {})]
(let [res (th/run-task! :objects-gc {:min-age 0})]
(t/is (= 1 (:processed res))))
;; check that we still have all the storage objects
@@ -182,6 +182,7 @@
(let [res (th/db-exec-one! ["select count(*) from storage_object where deleted_at is not null"])]
(t/is (= 0 (:count res)))))))
(t/deftest touched-gc-task-2
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))
@@ -242,12 +243,11 @@
{:id (:id result-2)})
;; run the objects gc task for permanent deletion
(let [res (th/run-task! :objects-gc {})]
(let [res (th/run-task! :objects-gc {:min-age 0})]
(t/is (= 1 (:processed res))))
;; revert touched state to all storage objects
(th/db-exec-one! ["update storage_object set touched_at=?" (ct/now)])
(th/db-exec-one! ["update storage_object set touched_at=now()"])
;; Run the task again
(let [res (th/run-task! :storage-gc-touched {})]
@@ -293,10 +293,10 @@
result-2 (:result out2)]
;; now we proceed to manually mark all storage objects touched
(th/db-exec! ["update storage_object set touched_at=?" (ct/now)])
(th/db-exec! ["update storage_object set touched_at=now()"])
;; run the touched gc task
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! "storage-gc-touched" {:min-age 0})]
(t/is (= 2 (:freeze res)))
(t/is (= 0 (:delete res))))
@@ -305,48 +305,16 @@
(t/is (= 2 (count rows)))))
;; now we proceed to manually delete all file_media_object
(th/db-exec! ["update file_media_object set deleted_at = ?" (ct/now)])
(th/db-exec! ["update file_media_object set deleted_at = now()"])
(let [res (th/run-task! :objects-gc {})]
(let [res (th/run-task! "objects-gc" {:min-age 0})]
(t/is (= 2 (:processed res))))
;; run the touched gc task
(let [res (th/run-task! :storage-gc-touched {})]
(let [res (th/run-task! "storage-gc-touched" {:min-age 0})]
(t/is (= 0 (:freeze res)))
(t/is (= 2 (:delete res))))
;; check that we have no remaining objects
(let [rows (th/db-exec! ["select * from storage_object where deleted_at is null"])]
(t/is (= 0 (count rows))))))
(t/deftest tempfile-bucket-test
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))
content1 (sto/content "content1")
now (ct/now)
object1 (sto/put-object! storage {::sto/content content1
::sto/touched-at (ct/plus now {:minutes 1})
:bucket "tempfile"
:content-type "text/plain"})]
(binding [ct/*clock* (clock/fixed now)]
(let [res (th/run-task! :storage-gc-touched {})]
(t/is (= 0 (:freeze res)))
(t/is (= 0 (:delete res)))))
(binding [ct/*clock* (clock/fixed (ct/plus now {:minutes 1}))]
(let [res (th/run-task! :storage-gc-touched {})]
(t/is (= 0 (:freeze res)))
(t/is (= 1 (:delete res)))))
(binding [ct/*clock* (clock/fixed (ct/plus now {:hours 1}))]
(let [res (th/run-task! :storage-gc-deleted {})]
(t/is (= 0 (:deleted res)))))
(binding [ct/*clock* (clock/fixed (ct/plus now {:hours 2}))]
(let [res (th/run-task! :storage-gc-deleted {})]
(t/is (= 0 (:deleted res)))))))

View File

@@ -17,7 +17,7 @@
org.slf4j/slf4j-api {:mvn/version "2.0.17"}
pl.tkowalcz.tjahzi/log4j2-appender {:mvn/version "0.9.40"}
selmer/selmer {:mvn/version "1.12.69"}
selmer/selmer {:mvn/version "1.12.62"}
criterium/criterium {:mvn/version "0.4.6"}
metosin/jsonista {:mvn/version "0.3.13"}
@@ -30,7 +30,7 @@
integrant/integrant {:mvn/version "1.0.0"}
funcool/tubax {:mvn/version "2021.05.20-0"}
funcool/cuerdas {:mvn/version "2026.415"}
funcool/cuerdas {:mvn/version "2025.06.16-414"}
funcool/promesa
{:git/sha "46048fc0d4bf5466a2a4121f5d52aefa6337f2e8"
:git/url "https://github.com/funcool/promesa"}
@@ -48,8 +48,12 @@
com.sun.mail/jakarta.mail {:mvn/version "2.0.2"}
org.la4j/la4j {:mvn/version "0.6.0"}
;; exception printing
fipp/fipp {:mvn/version "0.6.29"}
me.flowthing/pp {:mvn/version "2024-11-13.77"}
io.aviso/pretty {:mvn/version "1.4.4"}
environ/environ {:mvn/version "1.2.0"}}
:paths ["src" "vendor" "target/classes"]

View File

@@ -1,7 +0,0 @@
#!/usr/bin/env bash
set -ex
corepack enable;
corepack install;
yarn install;
yarn run test;

View File

@@ -9,10 +9,10 @@
(:refer-clojure :exclude [get-in select-keys str with-open max])
#?(:cljs (:require-macros [app.common.data.macros]))
(:require
#?(:clj [cljs.analyzer.api :as aapi])
#?(:clj [clojure.core :as c]
:cljs [cljs.core :as c])
[app.common.data :as d]
[cljs.analyzer.api :as aapi]
[cuerdas.core :as str]))
(defmacro select-keys
@@ -44,43 +44,42 @@
[& params]
`(str/concat ~@params))
#?(:clj
(defmacro export
"A helper macro that allows reexporting a var in the current namespace."
[v]
(if (boolean (:ns &env))
(defmacro export
"A helper macro that allows reexporting a var in the current namespace."
[v]
(if (boolean (:ns &env))
;; Code for ClojureScript
(let [mdata (aapi/resolve &env v)
arglists (second (get-in mdata [:meta :arglists]))
sym (symbol (c/name v))
andsym (symbol "&")
procarg #(if (= % andsym) % (gensym "param"))]
(if (pos? (count arglists))
`(def
~(with-meta sym (:meta mdata))
(fn ~@(for [args arglists]
(let [args (map procarg args)]
(if (some #(= andsym %) args)
(let [[sargs dargs] (split-with #(not= andsym %) args)]
`([~@sargs ~@dargs] (apply ~v ~@sargs ~@(rest dargs))))
`([~@args] (~v ~@args)))))))
`(def ~(with-meta sym (:meta mdata)) ~v)))
;; Code for ClojureScript
(let [mdata (aapi/resolve &env v)
arglists (second (get-in mdata [:meta :arglists]))
sym (symbol (c/name v))
andsym (symbol "&")
procarg #(if (= % andsym) % (gensym "param"))]
(if (pos? (count arglists))
`(def
~(with-meta sym (:meta mdata))
(fn ~@(for [args arglists]
(let [args (map procarg args)]
(if (some #(= andsym %) args)
(let [[sargs dargs] (split-with #(not= andsym %) args)]
`([~@sargs ~@dargs] (apply ~v ~@sargs ~@(rest dargs))))
`([~@args] (~v ~@args)))))))
`(def ~(with-meta sym (:meta mdata)) ~v)))
;; Code for Clojure
(let [vr (resolve v)
m (meta vr)
n (:name m)
n (with-meta n
(cond-> {}
(:dynamic m) (assoc :dynamic true)
(:protocol m) (assoc :protocol (:protocol m))))]
`(let [m# (meta ~vr)]
(def ~n (deref ~vr))
(alter-meta! (var ~n) merge (dissoc m# :name))
;; (when (:macro m#)
;; (.setMacro (var ~n)))
~vr)))))
;; Code for Clojure
(let [vr (resolve v)
m (meta vr)
n (:name m)
n (with-meta n
(cond-> {}
(:dynamic m) (assoc :dynamic true)
(:protocol m) (assoc :protocol (:protocol m))))]
`(let [m# (meta ~vr)]
(def ~n (deref ~vr))
(alter-meta! (var ~n) merge (dissoc m# :name))
;; (when (:macro m#)
;; (.setMacro (var ~n)))
~vr))))
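To make the reexport behaviour concrete, here is a hedged usage sketch; the consuming namespace and the full name behind the gtr alias are assumptions based on the dm/export calls that appear later in this diff, only the macro itself comes from the code above:

(ns app.common.geom.shapes
  (:require
   [app.common.data.macros :as dm]
   ;; assumed namespace behind the gtr alias
   [app.common.geom.shapes.transforms :as gtr]))

;; defines update-group-selrect in this namespace, delegating to the
;; original var and intended to carry over its metadata (arglists, etc.)
(dm/export gtr/update-group-selrect)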
(defmacro fmt
"String interpolation helper. Can only be used with strings known at

View File

@@ -53,7 +53,6 @@
"plugins/runtime"
"tokens/numeric-input"
"design-tokens/v1"
"text-editor/v2-html-paste"
"text-editor/v2"
"render-wasm/v1"
"variants/v1"})
@@ -76,7 +75,6 @@
(def frontend-only-features
#{"styles/v2"
"plugins/runtime"
"text-editor/v2-html-paste"
"text-editor/v2"
"tokens/numeric-input"
"render-wasm/v1"})
@@ -126,7 +124,6 @@
:feature-plugins "plugins/runtime"
:feature-design-tokens "design-tokens/v1"
:feature-text-editor-v2 "text-editor/v2"
:feature-text-editor-v2-html-paste "text-editor/v2-html-paste"
:feature-render-wasm "render-wasm/v1"
:feature-variants "variants/v1"
:feature-token-input "tokens/numeric-input"

View File

@@ -485,13 +485,6 @@
(commit-change change1)
(commit-change change2))))
(defn add-tokens-lib
[state tokens-lib]
(-> state
(commit-change
{:type :set-tokens-lib
:tokens-lib tokens-lib})))
(defn delete-shape
[file id]
(commit-change

View File

@@ -371,7 +371,7 @@
[:set-tokens-lib
[:map {:title "SetTokensLib"}
[:type [:= :set-tokens-lib]]
[:tokens-lib [:maybe ctob/schema:tokens-lib]]]]
[:tokens-lib ::sm/any]]] ;; TODO: we should define a plain object schema for tokens-lib
[:set-token
[:map {:title "SetTokenChange"}
@@ -463,16 +463,35 @@
;; Changes Processing Impl
#_:clj-kondo/ignore
(defn- validate-shape
[{:keys [id] :as shape} page-id]
(when-not (cts/valid-shape? shape)
(ex/raise :type :assertion
:code :data-validation
:hint (str "invalid shape found '" id "'")
:page-id page-id
:shape-id id
::sm/explain (cts/explain-shape shape))))
(defn validate-shapes!
[data-old data-new items]
(letfn [(validate-shape! [[page-id id]]
(let [shape-old (dm/get-in data-old [:pages-index page-id :objects id])
shape-new (dm/get-in data-new [:pages-index page-id :objects id])]
;; If the object has changed or is new, verify it is correct
(when (and (some? shape-new)
(not= shape-old shape-new))
(when-not (and (cts/valid-shape? shape-new)
(cts/shape? shape-new))
(ex/raise :type :assertion
:code :data-validation
:hint (str "invalid shape found after applying changes on file "
(:id data-new))
:file-id (:id data-new)
::sm/explain (cts/explain-shape shape-new))))))]
(->> (into #{} (map :page-id) items)
(mapcat (fn [page-id]
(filter #(= page-id (:page-id %)) items)))
(mapcat (fn [{:keys [type id page-id] :as item}]
(sequence
(map (partial vector page-id))
(case type
(:add-obj :mod-obj :del-obj) (cons id nil)
(:mov-objects :reg-objects) (:shapes item)
nil))))
(run! validate-shape!))))
(defn- process-touched-change
[data {:keys [id page-id component-id]}]
@@ -499,8 +518,14 @@
(check-changes items))
(binding [*touched-changes* (volatile! #{})]
(let [result (reduce #(or (process-change %1 %2) %1) data items)]
(reduce process-touched-change result @*touched-changes*)))))
(let [result (reduce #(or (process-change %1 %2) %1) data items)
result (reduce process-touched-change result @*touched-changes*)]
;; Validate result shapes (only on the backend)
;;
;; TODO: (PERF) add changed shapes tracking and only validate
;; the tracked changes instead of iterate over all shapes
#?(:clj (validate-shapes! data result items))
result))))
;; --- Comment Threads
@@ -588,10 +613,9 @@
(defmethod process-change :add-obj
[data {:keys [id obj page-id component-id frame-id parent-id index ignore-touched]}]
;; NOTE: we only perform hard validation on backend
#?(:clj (validate-shape obj page-id))
(let [update-container #(ctst/add-shape id obj % frame-id parent-id index ignore-touched)]
(let [update-container
(fn [container]
(ctst/add-shape id obj container frame-id parent-id index ignore-touched))]
(when *state*
(swap! *state* collect-shape-media-refs obj page-id))
@@ -614,9 +638,6 @@
(when (and *state* page-id)
(swap! *state* collect-shape-media-refs shape page-id))
;; NOTE: we only perform hard validation on backend
#?(:clj (validate-shape shape page-id))
(assoc objects id shape))
objects))
@@ -671,6 +692,8 @@
(d/update-in-when data [:pages-index page-id] fix-container)
(d/update-in-when data [:components component-id] fix-container))))
;; FIXME: remove, seems like this method is already unused
;; reg-objects operation "regenerates" the geometry and selrect of the parent groups
(defmethod process-change :reg-objects
[data {:keys [page-id component-id shapes]}]
;; FIXME: Improve performance
@@ -699,60 +722,48 @@
(update-group [group objects]
(let [lookup (d/getf objects)
children (get group :shapes)
group (cond
;; If the group is empty we don't make any changes. Will be removed by a later process
(empty? children)
group
children (get group :shapes)]
(cond
;; If the group is empty we don't make any changes. Will be removed by a later process
(empty? children)
group
(= :bool (:type group))
(path/update-bool-shape group objects)
(= :bool (:type group))
(path/update-bool-shape group objects)
(:masked-group group)
(->> (map lookup children)
(set-mask-selrect group))
(:masked-group group)
(->> (map lookup children)
(set-mask-selrect group))
:else
(->> (map lookup children)
(gsh/update-group-selrect group)))]
#?(:clj (validate-shape group page-id))
group))]
:else
(->> (map lookup children)
(gsh/update-group-selrect group)))))]
(if page-id
(d/update-in-when data [:pages-index page-id :objects] reg-objects)
(d/update-in-when data [:components component-id :objects] reg-objects))))
(defmethod process-change :mov-objects
;; FIXME: ignore-touched is no longer used, so we can consider it deprecated
[data {:keys [parent-id shapes index page-id component-id #_ignore-touched after-shape allow-altering-copies syncing]}]
[data {:keys [parent-id shapes index page-id component-id ignore-touched after-shape allow-altering-copies syncing]}]
(letfn [(calculate-invalid-targets [objects shape-id]
(let [reduce-fn #(into %1 (calculate-invalid-targets objects %2))]
(->> (get-in objects [shape-id :shapes])
(reduce reduce-fn #{shape-id}))))
;; Avoid placing a shape as a direct or indirect child of itself, or
;; inside its main component if it's in a copy, or inside a copy, or
;; from a copy
;; Avoid placing a shape as a direct or indirect child of itself,
;; or inside its main component if it's in a copy,
;; or inside a copy, or from a copy
(is-valid-move? [objects shape-id]
(let [invalid-targets (calculate-invalid-targets objects shape-id)
shape (get objects shape-id)]
(and shape
(not (invalid-targets parent-id))
(not (cfh/components-nesting-loop? objects shape-id parent-id))
(or
;; In some cases (like a component
;; swap) it's allowed to change the
;; structure of a copy
allow-altering-copies
;; DEPRECATED, remove once v2.12 released
syncing
(and
;; We don't want to change the structure of component copies
(not (ctk/in-component-copy? (get objects (:parent-id shape))))
;; We need to check the origin and target frames
(not (ctk/in-component-copy? (get objects parent-id))))))))
(or allow-altering-copies ;; In some cases (like a component swap) it's allowed to change the structure of a copy
syncing ;; If we are syncing the changes of a main component, it's allowed to change the structure of a copy
(and
(not (ctk/in-component-copy? (get objects (:parent-id shape)))) ;; We don't want to change the structure of component copies
(not (ctk/in-component-copy? (get objects parent-id)))))))) ;; We need to check the origin and target frames
(insert-items [prev-shapes index shapes]
(let [prev-shapes (or prev-shapes [])]
@@ -761,13 +772,17 @@
(cfh/append-at-the-end prev-shapes shapes))))
(add-to-parent [parent index shapes]
(update parent :shapes
(fn [parent-shapes]
(-> parent-shapes
(insert-items index shapes)
;; We need to ensure that there is no `nil` in the shapes list
;; after adding all the incoming shapes to the parent.
(d/vec-without-nils)))))
(let [parent (-> parent
(update :shapes insert-items index shapes)
;; We need to ensure that there is no `nil` in the
;; shapes list after adding all the
;; incoming shapes to the parent.
(update :shapes d/vec-without-nils))]
(cond-> parent
(and (:shape-ref parent)
(#{:group :frame} (:type parent))
(not ignore-touched))
(dissoc :remote-synced))))
(remove-from-old-parent [old-objects objects shape-id]
(let [prev-parent-id (dm/get-in old-objects [shape-id :parent-id])]
@@ -775,63 +790,58 @@
;; the new destination target parent id.
(if (= prev-parent-id parent-id)
objects
(d/update-in-when objects [prev-parent-id :shapes]
(fn [shapes]
(-> shapes
(d/without-obj shape-id)
(d/vec-without-nils)))))))
(let [sid shape-id
pid prev-parent-id
obj (get objects pid)
component? (and (:shape-ref obj)
(= (:type obj) :group)
(not ignore-touched))]
(-> objects
(d/update-in-when [pid :shapes] d/without-obj sid)
(d/update-in-when [pid :shapes] d/vec-without-nils)
(cond-> component? (d/update-when pid #(dissoc % :remote-synced))))))))
(update-parent-id [objects id]
(d/update-when objects id assoc :parent-id parent-id))
(-> objects
(d/update-when id assoc :parent-id parent-id)))
;; Updates the frame-id references that might be outdated
(update-frame-id [frame-id objects id]
(let [obj (some-> (get objects id)
(assoc :frame-id frame-id))]
(assign-frame-id [frame-id objects id]
(let [objects (d/update-when objects id assoc :frame-id frame-id)
obj (get objects id)]
(cond-> objects
(some? obj)
(assoc id obj)
;; If we are moving a frame, we DO NOT NEED to update the
;; children because they will point correctly
;; to the frame that we are currently moving
(not (cfh/frame-shape? obj))
(as-> $$ (reduce (partial update-frame-id frame-id) $$ (:shapes obj))))))
(validate-shape [objects #_:clj-kondo/ignore shape-id]
#?(:clj (when-let [shape (get objects shape-id)]
(validate-shape shape page-id)))
objects)
;; If we are moving a frame, the parent frame is the root
;; and we DO NOT NEED to update the children because the
;; children will point correctly to the frame that we
;; are currently moving
(not= :frame (:type obj))
(as-> $$ (reduce (partial assign-frame-id frame-id) $$ (:shapes obj))))))
(move-objects [objects]
(let [parent (get objects parent-id)]
;; Do not proceed with the move if the parent does not
;; exist; this can happen on a race condition when an
;; in-flight move operation lands after the parent is deleted
(if (and (seq shapes) (every? (partial is-valid-move? objects) shapes) parent)
(let [index (or (some-> (d/index-of (:shapes parent) after-shape) inc) index)
frame-id (if (cfh/frame-shape? parent)
(:id parent)
(:frame-id parent))]
(as-> objects $
;; Add the new shapes to the parent object.
(d/update-when $ parent-id #(add-to-parent % index shapes))
(let [valid? (every? (partial is-valid-move? objects) shapes)
parent (get objects parent-id)
after-shape-index (d/index-of (:shapes parent) after-shape)
index (if (nil? after-shape-index) index (inc after-shape-index))
frame-id (if (= :frame (:type parent))
(:id parent)
(:frame-id parent))]
;; Update each individual shape link to the new parent
(reduce update-parent-id $ shapes)
(if (and valid? (seq shapes))
(as-> objects $
;; Add the new shapes to the parent object.
(d/update-when $ parent-id #(add-to-parent % index shapes))
;; Analyze the old parents and clear the old links
;; only if the new parent is different form old
;; parent.
(reduce (partial remove-from-old-parent objects) $ shapes)
;; Update each individual shape link to the new parent
(reduce update-parent-id $ shapes)
;; Ensure that all shapes of the new parent have a
;; correct link to the topside frame.
(reduce (partial update-frame-id frame-id) $ shapes)
;; Perform validation of the affected shapes
(reduce validate-shape $ shapes)))
;; Analyze the old parents and clear the old links
;; only if the new parent is different from the old
;; parent.
(reduce (partial remove-from-old-parent objects) $ shapes)
;; Ensure that all shapes of the new parent have a
;; correct link to the topside frame.
(reduce (partial assign-frame-id frame-id) $ shapes))
objects)))]
(if page-id

View File

@@ -72,11 +72,9 @@
(= :bool (dm/get-prop shape :type))))
(defn text-shape?
([shape]
(and (some? shape)
(= :text (dm/get-prop shape :type))))
([objects id]
(text-shape? (get objects id))))
[shape]
(and (some? shape)
(= :text (dm/get-prop shape :type))))
(defn rect-shape?
[shape]

View File

@@ -33,9 +33,7 @@
:login-with-ldap
;; Uses any generic authentication provider that implements OIDC protocol as credentials.
:login-with-oidc
;; Enables custom SSO flow
:login-with-custom-sso
;; Allows registration with OIDC (takes effect only when general `registration` is disabled)
;; Allows registration with Open ID
:oidc-registration
;; This logs the invitation tokens to the console. It's useful when SMTP is not configured.
:log-invitation-tokens})
@@ -120,8 +118,11 @@
:terms-and-privacy-checkbox
;; Only for development.
:tiered-file-data-storage
:token-units
:token-base-font-size
:token-color
:token-typography-types
:token-typography-composite
:token-shadow
:transit-readable-response
:user-feedback
@@ -129,6 +130,7 @@
:v2-migration
:webhooks
;; TODO: deprecate this flag and consolidate the code
:export-file-v3
:render-wasm-dpr
:hide-release-modal
:subscriptions
@@ -168,8 +170,9 @@
:enable-google-fonts-provider
:enable-component-thumbnails
:enable-render-wasm-dpr
:enable-token-color
:enable-inspect-styles
:enable-token-units
:enable-token-typography-types
:enable-token-typography-composite
:enable-feature-fdata-objects-map])
(defn parse

View File

@@ -162,7 +162,6 @@
(dm/export gtr/inverse-transform-matrix)
(dm/export gtr/transform-rect)
(dm/export gtr/calculate-geometry)
(dm/export gtr/calculate-selrect)
(dm/export gtr/update-group-selrect)
(dm/export gtr/update-mask-selrect)
(dm/export gtr/apply-transform)

View File

@@ -12,11 +12,8 @@
[app.common.files.changes-builder :as pcb]
[app.common.files.helpers :as cfh]
[app.common.files.variant :as cfv]
[app.common.geom.matrix :as gmt]
[app.common.geom.point :as gpt]
[app.common.geom.rect :as grc]
[app.common.geom.shapes :as gsh]
[app.common.geom.shapes.common :as gco]
[app.common.logging :as log]
[app.common.logic.shapes :as cls]
[app.common.logic.variant-properties :as clvp]
@@ -29,7 +26,6 @@
[app.common.types.library :as ctl]
[app.common.types.page :as ctp]
[app.common.types.pages-list :as ctpl]
[app.common.types.path.segment :as segment]
[app.common.types.shape :as cts]
[app.common.types.shape-tree :as ctst]
[app.common.types.shape.interactions :as ctsi]
@@ -1516,7 +1512,7 @@
:shapes [(:id shape)]
:index index-after
:ignore-touched true
:allow-altering-copies true}))
:syncing true}))
(update :undo-changes conj (make-change
container
{:type :mov-objects
@@ -1524,7 +1520,7 @@
:shapes [(:id shape)]
:index index-before
:ignore-touched true
:allow-altering-copies true})))]
:syncing true})))]
(if (and (ctk/touched-group? parent :shapes-group) omit-touched?)
changes
@@ -1880,44 +1876,6 @@
roperations'
uoperations')))))))
(defn- set-path-new-values
[current-shape prev-shape transform]
(let [new-content (segment/transform-content
(:content current-shape)
(gmt/transform-in (gpt/point 0 0) transform))
new-points (-> (segment/content->selrect new-content)
(grc/rect->points))
points-center (gco/points->center new-points)
new-selrect (gsh/calculate-selrect new-points points-center)
shape (assoc current-shape
:content new-content
:points new-points
:selrect new-selrect)
prev-center (segment/content-center (:content prev-shape))
delta (gpt/subtract points-center (first new-points))
new-pos (gpt/subtract prev-center delta)]
(gsh/absolute-move shape new-pos)))
(defn- switch-path-change-value
[prev-shape ;; The shape before the switch
current-shape ;; The shape after the switch (a clean copy)
ref-shape ;; The referenced shape on the main component
;; before the switch
attr]
(let [old-width (-> ref-shape :selrect :width)
new-width (-> prev-shape :selrect :width)
old-height (-> ref-shape :selrect :height)
new-height (-> prev-shape :selrect :height)
transform (-> (gpt/point (/ new-width old-width)
(/ new-height old-height))
(gmt/scale-matrix))
shape (set-path-new-values current-shape prev-shape transform)]
(get shape attr)))
(defn- switch-text-change-value
[prev-content ;; The :content of the text before the switch
@@ -2069,10 +2027,6 @@
(= :content attr)
(touched attr-group))
path-change?
(and (= :path (:type current-shape))
(contains? #{:points :selrect :content} attr))
;; position-data is a special case because it can be affected by :geometry-group and :content-group
;; so, if the position-data changes but the geometry is touched, we need to reset the position-data
;; so that it's calculated again
@@ -2101,12 +2055,6 @@
(:content origin-ref-shape)
touched)
path-change?
(switch-path-change-value previous-shape
current-shape
origin-ref-shape
attr)
:else
(get previous-shape attr)))

View File

@@ -9,7 +9,7 @@
[app.common.files.changes-builder :as pcb]
[app.common.types.tokens-lib :as ctob]))
(defn- generate-update-active-sets
(defn generate-update-active-sets
"Copy the active sets from the currently active themes and move them
to the hidden token theme and update the theme with
`update-theme-fn`.
@@ -28,45 +28,12 @@
(pcb/set-token-theme (ctob/get-id hidden-theme)
hidden-theme'))))
(defn generate-set-enabled-token-set
"Enable or disable a token set at `set-name` in `tokens-lib` without modifying a user theme."
[changes tokens-lib set-name enabled?]
(if enabled?
(generate-update-active-sets changes tokens-lib #(ctob/enable-set % set-name))
(generate-update-active-sets changes tokens-lib #(ctob/disable-set % set-name))))
(defn generate-toggle-token-set
"Toggle a token set at `set-name` in `tokens-lib` without modifying a user theme."
"Toggle a token set at `set-name` in `tokens-lib` without modifying a
user theme."
[changes tokens-lib set-name]
(generate-update-active-sets changes tokens-lib #(ctob/toggle-set % set-name)))
(defn- generate-update-active-token-theme
"Change the active state of a theme in `tokens-lib`. If after the change there is
any active theme other than the hidden one, deactivate the hidden theme."
[changes tokens-lib update-fn]
(let [active-token-themes (some-> tokens-lib
(update-fn)
(ctob/get-active-theme-paths))
active-token-themes' (if (= active-token-themes #{ctob/hidden-theme-path})
active-token-themes
(disj active-token-themes ctob/hidden-theme-path))]
(pcb/set-active-token-themes changes active-token-themes')))
(defn generate-set-active-token-theme
"Activate or deactivate a token theme in `tokens-lib`."
[changes tokens-lib id active?]
(if active?
(generate-update-active-token-theme changes tokens-lib
#(ctob/activate-theme % id))
(generate-update-active-token-theme changes tokens-lib
#(ctob/deactivate-theme % id))))
(defn generate-toggle-token-theme
"Toggle the active state of a token theme in `tokens-lib`."
[changes tokens-lib id]
(generate-update-active-token-theme changes tokens-lib
#(ctob/toggle-theme-active % id)))
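A hedged usage sketch for the helpers above; pcb/empty-changes and the concrete set name and theme id are assumptions used only for illustration:

;; assumed constructor from app.common.files.changes-builder (aliased as pcb here)
(-> (pcb/empty-changes)
    ;; enable a set without modifying any user-defined theme
    (generate-set-enabled-token-set tokens-lib "core/colors" true)
    ;; flip the active state of a theme
    (generate-toggle-token-theme tokens-lib theme-id))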
(defn toggle-token-set-group
"Toggle a token set group at `group-path` in `tokens-lib` for a `tokens-lib-theme`."
[group-path tokens-lib tokens-lib-theme]

View File

@@ -281,20 +281,7 @@
(defn check-fn
"Create a predefined check function"
[s & {:keys [hint type code]}]
(let [s #?(:clj
(schema s)
:cljs
(try
(schema s)
(catch :default cause
(let [data (ex-data cause)]
(if (= :malli.core/invalid-schema (:type data))
(throw (ex-info
(str "Invalid schema\n"
(pp/pprint-str (:data data)))
{}))
(throw cause))))))
(let [s (schema s)
validator* (delay (m/validator s))
explainer* (delay (m/explainer s))
hint (or ^boolean hint "check error")
@@ -855,6 +842,38 @@
choices))]
{:pred pred}))})
;; (register!
;; {:type ::inst
;; :pred tm/instant?
;; :type-properties
;; {:title "inst"
;; :description "Satisfies Inst protocol"
;; :error/message "should be an instant"
;; :gen/gen (->> (sg/small-int :min 0 :max 100000)
;; (sg/fmap (fn [v] (tm/parse-inst v))))
;; :decode/string tm/parse-inst
;; :encode/string tm/format-inst
;; :decode/json tm/parse-inst
;; :encode/json tm/format-inst
;; ::oapi/type "string"
;; ::oapi/format "iso"}})
;; (register!
;; {:type ::timestamp
;; :pred tm/instant?
;; :type-properties
;; {:title "inst"
;; :description "Satisfies Inst protocol, the same as ::inst but encodes to epoch"
;; :error/message "should be an instant"
;; :gen/gen (->> (sg/small-int)
;; (sg/fmap (fn [v] (tm/parse-inst v))))
;; :decode/string tm/parse-inst
;; :encode/string inst-ms
;; :decode/json tm/parse-inst
;; :encode/json inst-ms
;; ::oapi/type "string"
;; ::oapi/format "number"}})
#?(:clj
(register!
@@ -932,7 +951,7 @@
:pred #(and (string? %) (not (str/blank? %)))
:property-pred
(fn [{:keys [min max] :as props}]
(if (or min max)
(if (seq props)
(fn [value]
(let [size (count value)]
(cond
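For context, check-fn (defined earlier in this file) is used elsewhere in this diff to build validation helpers such as check-fill and check-content. A minimal sketch of that pattern follows; the schema literal and hint are made up, and the return/raise behaviour is assumed from how those helpers are used:

(def check-non-empty-string
  (sm/check-fn [:string {:min 1}] :hint "expected a non-empty string"))

;; presumably returns the value unchanged when it conforms and raises
;; an assertion-style error carrying the hint above when it does not
(check-non-empty-string "penpot")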

View File

@@ -362,24 +362,24 @@
component (ctkl/get-component component-file (:component-id top-instance) true)
remote-shape (get-ref-shape component-file component shape)
component-container (get-component-container component-file component)
[remote-shape component-container component-file]
[remote-shape component-container]
(if (some? remote-shape)
[remote-shape component-container component-file]
[remote-shape component-container]
;; If not found, try the case of this being a fostered or swapped children
(let [head-instance (ctn/get-head-shape (:objects container) shape)
component-file (get-in libraries [(:component-file head-instance) :data])
head-component (ctkl/get-component component-file (:component-id head-instance) true)
remote-shape' (get-ref-shape component-file head-component shape)
component-container' (get-component-container component-file head-component)]
[remote-shape' component-container' component-file]))]
(let [head-instance (ctn/get-head-shape (:objects container) shape)
component-file (get-in libraries [(:component-file head-instance) :data])
head-component (ctkl/get-component component-file (:component-id head-instance) true)
remote-shape' (get-ref-shape component-file head-component shape)
component-container (get-component-container component-file component)]
[remote-shape' component-container]))]
(if (nil? remote-shape)
nil
(if (nil? (:shape-ref remote-shape))
(cond-> remote-shape
(and remote-shape with-context?)
(with-meta {:file {:id (:id component-file)
:data component-file}
(with-meta {:file {:id (:id file-data)
:data file-data}
:container component-container}))
(find-remote-shape component-container libraries remote-shape :with-context? with-context?)))))
@@ -1082,35 +1082,33 @@
detach-shape
(fn [objects shape]
(let [shape' (cond-> shape
(not= file-id (:fill-color-ref-file shape))
(dissoc :fill-color-ref-id :fill-color-ref-file)
(l/debug :hint "detach-shape"
:file-id file-id
:component-ref-file (get-component-ref-file objects shape)
::l/sync? true)
(cond-> shape
(not= file-id (:fill-color-ref-file shape))
(dissoc :fill-color-ref-id :fill-color-ref-file)
(not= file-id (:stroke-color-ref-file shape))
(dissoc :stroke-color-ref-id :stroke-color-ref-file)
(not= file-id (:stroke-color-ref-file shape))
(dissoc :stroke-color-ref-id :stroke-color-ref-file)
(not= file-id (get-component-ref-file objects shape))
(dissoc :component-id :component-file :shape-ref :component-root)
(not= file-id (get-component-ref-file objects shape))
(dissoc :component-id :component-file :shape-ref :component-root)
(= :text (:type shape))
(update :content detach-text))]
(when (not= shape shape')
(l/dbg :hint "detach shape"
:file-id (str file-id)
:shape-id (str (:id shape))))
shape'))
(= :text (:type shape))
(update :content detach-text)))
detach-objects
(fn [objects]
(d/update-vals objects #(detach-shape objects %)))
(update-vals objects #(detach-shape objects %)))
detach-pages
(fn [pages-index]
(d/update-vals pages-index #(update % :objects detach-objects)))]
(update-vals pages-index #(update % :objects detach-objects)))]
(update-in file [:data :pages-index] detach-pages)))
(-> file
(update-in [:data :pages-index] detach-pages))))
;; Base font size

View File

@@ -11,7 +11,6 @@
[app.common.exceptions :as ex]
[app.common.flags :as flags]
[app.common.schema :as sm]
[app.common.schema.generators :as sg]
[app.common.types.color :as types.color]
[app.common.types.fills.impl :as impl]
[clojure.core :as c]
@@ -50,19 +49,12 @@
(= 1 (count result))))
(def schema:fill
[:and schema:fill-attrs [:fn has-valid-fill-attrs?]])
[:and schema:fill-attrs
[:fn has-valid-fill-attrs?]])
(def check-fill
(sm/check-fn schema:fill))
(def ^:private schema:fills-as-vector
[:vector {:gen/max 2} schema:fill])
(def schema:fills
[:or {:gen/gen (sg/generator schema:fills-as-vector)}
schema:fills-as-vector
[:fn impl/fills?]])
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; CONSTRUCTORS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
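An illustrative value accepted by the fills schema above; the attribute names follow the default fills that appear later in this diff, the concrete color is made up:

;; matches schema:fills-as-vector, and therefore schema:fills
[{:fill-color "#000000"
  :fill-opacity 1}]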

View File

@@ -732,89 +732,89 @@
[shape scale-text-content value]
(update shape :content scale-text-content value))
(defn scale-text-content
[content value]
(->> content
(txt/transform-nodes txt/is-text-node? (partial transform-text-node value))
(txt/transform-nodes txt/is-paragraph-node? (partial transform-paragraph-node value))))
(defn apply-scale-content
[shape value]
;; Scale can only be positive
(let [value (mth/abs value)]
(cond-> shape
(cfh/text-shape? shape)
(update-text-content scale-text-content value)
:always
(gsc/update-corners-scale value)
(d/not-empty? (:strokes shape))
(gss/update-strokes-width value)
(d/not-empty? (:shadow shape))
(gse/update-shadows-scale value)
(some? (:blur shape))
(gse/update-blur-scale value)
(ctl/flex-layout? shape)
(ctl/update-flex-scale value)
(ctl/grid-layout? shape)
(ctl/update-grid-scale value)
:always
(ctl/update-flex-child value))))
(defn remove-children-set
[shapes children-to-remove]
(let [remove? (set children-to-remove)]
(d/removev remove? shapes)))
(defn apply-modifier
[shape operation]
(let [type (dm/get-prop operation :type)]
(case type
:rotation
(let [rotation (dm/get-prop operation :value)]
(update shape :rotation #(mod (+ (or % 0) rotation) 360)))
:add-children
(let [value (dm/get-prop operation :value)
index (dm/get-prop operation :index)
shape
(if (some? index)
(update shape :shapes
(fn [shapes]
(if (vector? shapes)
(d/insert-at-index shapes index value)
(d/concat-vec shapes value))))
(update shape :shapes d/concat-vec value))]
;; Remove duplication
(update shape :shapes #(into [] (apply d/ordered-set %))))
:remove-children
(let [value (dm/get-prop operation :value)]
(update shape :shapes remove-children-set value))
:scale-content
(let [value (dm/get-prop operation :value)]
(apply-scale-content shape value))
:change-property
(let [property (dm/get-prop operation :property)
value (dm/get-prop operation :value)]
(assoc shape property value))
;; :default => no change to shape
shape)))
(defn apply-structure-modifiers
"Apply structure changes to a shape"
[shape modifiers]
(as-> shape $
(reduce apply-modifier $ (dm/get-prop modifiers :structure-parent))
(reduce apply-modifier $ (dm/get-prop modifiers :structure-child))))
(letfn [(scale-text-content
[content value]
(->> content
(txt/transform-nodes txt/is-text-node? (partial transform-text-node value))
(txt/transform-nodes txt/is-paragraph-node? (partial transform-paragraph-node value))))
(apply-scale-content
[shape value]
;; Scale can only be positive
(let [value (mth/abs value)]
(cond-> shape
(cfh/text-shape? shape)
(update-text-content scale-text-content value)
:always
(gsc/update-corners-scale value)
(d/not-empty? (:strokes shape))
(gss/update-strokes-width value)
(d/not-empty? (:shadow shape))
(gse/update-shadows-scale value)
(some? (:blur shape))
(gse/update-blur-scale value)
(ctl/flex-layout? shape)
(ctl/update-flex-scale value)
(ctl/grid-layout? shape)
(ctl/update-grid-scale value)
:always
(ctl/update-flex-child value))))]
(let [remove-children
(fn [shapes children-to-remove]
(let [remove? (set children-to-remove)]
(d/removev remove? shapes)))
apply-modifier
(fn [shape operation]
(let [type (dm/get-prop operation :type)]
(case type
:rotation
(let [rotation (dm/get-prop operation :value)]
(update shape :rotation #(mod (+ (or % 0) rotation) 360)))
:add-children
(let [value (dm/get-prop operation :value)
index (dm/get-prop operation :index)
shape
(if (some? index)
(update shape :shapes
(fn [shapes]
(if (vector? shapes)
(d/insert-at-index shapes index value)
(d/concat-vec shapes value))))
(update shape :shapes d/concat-vec value))]
;; Remove duplication
(update shape :shapes #(into [] (apply d/ordered-set %))))
:remove-children
(let [value (dm/get-prop operation :value)]
(update shape :shapes remove-children value))
:scale-content
(let [value (dm/get-prop operation :value)]
(apply-scale-content shape value))
:change-property
(let [property (dm/get-prop operation :property)
value (dm/get-prop operation :value)]
(assoc shape property value))
;; :default => no change to shape
shape)))]
(as-> shape $
(reduce apply-modifier $ (dm/get-prop modifiers :structure-parent))
(reduce apply-modifier $ (dm/get-prop modifiers :structure-child))))))
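For illustration, the structure operations consumed by apply-modifier above look roughly like the maps below; the keys mirror the dm/get-prop accesses in the code and the concrete values are invented:

;; rotate the shape by 90 degrees on top of its current rotation
{:type :rotation
 :value 90}

;; set an arbitrary property on the shape
{:type :change-property
 :property :name
 :value "Button / Primary"}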

View File

@@ -267,4 +267,3 @@
(-> (stp/convert-to-path shape objects)
(update :content impl/path-data))))
(dm/export impl/decode-segments)

View File

@@ -565,9 +565,6 @@
(def check-content
(sm/check-fn schema:content))
(def decode-segments
(sm/lazy-decoder schema:segments sm/json-transformer))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; CONSTRUCTORS & PREDICATES
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

View File

@@ -19,7 +19,7 @@
[app.common.schema.generators :as sg]
[app.common.transit :as t]
[app.common.types.color :as clr]
[app.common.types.fills :refer [schema:fills fill->color]]
[app.common.types.fills :refer [schema:fill fill->color]]
[app.common.types.grid :as ctg]
[app.common.types.path :as path]
[app.common.types.plugins :as ctpg]
@@ -192,7 +192,8 @@
[:locked {:optional true} :boolean]
[:hidden {:optional true} :boolean]
[:masked-group {:optional true} :boolean]
[:fills {:optional true} schema:fills]
[:fills {:optional true}
[:vector {:gen/max 2} schema:fill]]
[:proportion {:optional true} ::sm/safe-number]
[:proportion-lock {:optional true} :boolean]
[:constraints-h {:optional true}

View File

@@ -7,7 +7,7 @@
(ns app.common.types.shape.text
(:require
[app.common.schema :as sm]
[app.common.types.fills :refer [schema:fills]]))
[app.common.types.fills :refer [schema:fill]]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; SCHEMA
@@ -32,7 +32,8 @@
[:type [:= "paragraph"]]
[:key {:optional true} :string]
[:fills {:optional true}
[:maybe schema:fills]]
[:maybe
[:vector {:gen/max 2} schema:fill]]]
[:font-family {:optional true} ::sm/text]
[:font-size {:optional true} ::sm/text]
[:font-style {:optional true} ::sm/text]
@@ -48,7 +49,8 @@
[:text :string]
[:key {:optional true} :string]
[:fills {:optional true}
[:maybe schema:fills]]
[:maybe
[:vector {:gen/max 2} schema:fill]]]
[:font-family {:optional true} ::sm/text]
[:font-size {:optional true} ::sm/text]
[:font-style {:optional true} ::sm/text]
@@ -69,7 +71,7 @@
[:y ::sm/safe-number]
[:width ::sm/safe-number]
[:height ::sm/safe-number]
[:fills schema:fills]
[:fills [:vector {:gen/max 2} schema:fill]]
[:font-family {:optional true} ::sm/text]
[:font-size {:optional true} ::sm/text]
[:font-style {:optional true} ::sm/text]

View File

@@ -90,10 +90,6 @@
[{:fill-color clr/black
:fill-opacity 1}])
(def default-paragraph-attrs
{:text-align "left"
:text-direction "ltr"})
(def default-text-attrs
{:font-id "sourcesanspro"
:font-family "sourcesanspro"

View File

@@ -266,6 +266,10 @@
typography-token-keys
#{:line-height}))
;; TODO: Created to extract the font-size feature from the typography feature flag.
;; Delete this once the typography feature flag is removed.
(def ff-typography-keys (set/difference typography-keys font-size-keys))
(def ^:private schema:number
(-> (reduce mu/union [[:map [:line-height {:optional true} token-name-ref]]
schema:rotation])
@@ -306,10 +310,6 @@
schema:text-decoration
schema:dimensions])
(defn token-attr?
[attr]
(contains? all-keys attr))
(defn shape-attr->token-attrs
([shape-attr] (shape-attr->token-attrs shape-attr nil))
([shape-attr changed-sub-attr]
@@ -403,15 +403,15 @@
:text text-attributes
nil))
(defn appliable-attrs-for-shape
(defn appliable-attrs
"Returns intersection of shape `attributes` for `shape-type`."
[attributes shape-type is-layout]
(set/intersection attributes (shape-type->attributes shape-type is-layout)))
(defn any-appliable-attr-for-shape?
(defn any-appliable-attr?
"Checks if `token-type` supports given shape `attributes`."
[attributes token-type is-layout]
(d/not-empty? (appliable-attrs-for-shape attributes token-type is-layout)))
(seq (appliable-attrs attributes token-type is-layout)))
;; Token attrs that are set inside content blocks of text shapes, instead
;; of at the shape level.

View File

@@ -7,11 +7,10 @@
(ns app.common.types.tokens-lib
(:require
#?(:clj [app.common.fressian :as fres])
#?(:clj [clojure.data.json :as c.json])
#?(:clj [clojure.data.json :as json])
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.files.helpers :as cfh]
[app.common.json :as json]
[app.common.path-names :as cpn]
[app.common.schema :as sm]
[app.common.schema.generators :as sg]
@@ -199,8 +198,8 @@
:tokens tokens})
#?@(:clj
[c.json/JSONWriter
(-write [this writter options] (c.json/-write (datafy this) writter options))])
[json/JSONWriter
(-write [this writter options] (json/-write (datafy this) writter options))])
INamedItem
(get-id [_]
@@ -759,7 +758,7 @@
(theme-active? [_ id] "predicate if token theme is active")
(activate-theme [_ id] "adds theme from the active-themes")
(deactivate-theme [_ id] "removes theme from the active-themes")
(toggle-theme-active [_ id] "toggles theme in the active-themes")
(toggle-theme-active? [_ id] "toggles theme in the active-themes")
(get-hidden-theme [_] "get the hidden temporary theme"))
(def schema:token-themes
@@ -902,7 +901,6 @@
(delete-token [_ set-id token-id] "delete a token from a set")
(toggle-set-in-theme [_ theme-id set-name] "toggle a set used / not used in a theme")
(get-active-themes-set-names [_] "set of set names that are active in the active themes")
(token-set-active? [_ set-name] "if a set is active in any of the active themes")
(sets-at-path-all-active? [_ group-path] "compute active state for child sets at `group-path`.
Will return a value that matches this schema:
`:none` None of the nested sets are active
@@ -913,7 +911,6 @@ Will return a value that matches this schema:
(get-tokens [_ set-id] "return a map of tokens in the set, indexed by token-name"))
(declare parse-multi-set-dtcg-json)
(declare read-multi-set-dtcg)
(declare export-dtcg-json)
(deftype TokensLib [sets themes active-themes]
@@ -925,23 +922,23 @@ Will return a value that matches this schema:
:active-themes active-themes})
#?@(:clj
[c.json/JSONWriter
(-write [this writter options] (c.json/-write (export-dtcg-json this) writter options))])
[json/JSONWriter
(-write [this writter options] (json/-write (export-dtcg-json this) writter options))])
ITokenSets
;; Naming conventions:
;; (TODO: this will disappear after refactoring the internal structure of TokensLib).
;; Set name: the complete name as a string, without prefix \"some-group/some-subgroup/some-set\".
;; Set final name or fname: the last part of the name \"some-set\".
;; Set path: the groups part of the name, as a vector [\"some-group\" \"some-subgroup\"].
;; Set path str: the set path as a string \"some-group/some-subgroup\".
;; Set full path: the path including the fname, as a vector [\"some-group\", \"some-subgroup\", \"some-set\"].
;; Set full path str: the set full path as a string \"some-group/some-subgroup/some-set\".
;
;; Set prefix: the two-character prefix added to a full path item \"G-\" / \"S-\".
;; Prefixed set path or ppath: a path with added prefixes [\"G-some-group\", \"G-some-subgroup\"].
;; Prefixed set full path or pfpath: a full path with prefixes [\"G-some-group\", \"G-some-subgroup\", \"S-some-set\"].
;; Prefixed set final name or pfname: a final name with prefix \"S-some-set\".
; Naming conventions:
; (TODO: this will disappear after refactoring the internal structure of TokensLib).
; Set name: the complete name as a string, without prefix \"some-group/some-subgroup/some-set\".
; Set final name or fname: the last part of the name \"some-set\".
; Set path: the groups part of the name, as a vector [\"some-group\" \"some-subgroup\"].
; Set path str: the set path as a string \"some-group/some-subgroup\".
; Set full path: the path including the fname, as a vector [\"some-group\", \"some-subgroup\", \"some-set\"].
; Set full path str: the set full path as a string \"some-group/some-subgroup/some-set\".
; Set prefix: the two-character prefix added to a full path item \"G-\" / \"S-\".
; Prefixed set path or ppath: a path with added prefixes [\"G-some-group\", \"G-some-subgroup\"].
; Prefixed set full path or pfpath: a full path with prefixes [\"G-some-group\", \"G-some-subgroup\", \"S-some-set\"].
; Prefixed set final name or pfname: a final name with prefix \"S-some-set\".
(add-set [_ token-set]
(assert (token-set? token-set) "expected valid token-set")
(let [path (get-set-prefixed-path token-set)]
@@ -1209,7 +1206,7 @@ Will return a value that matches this schema:
(when-let [theme (get-theme this id)]
(contains? active-themes (get-theme-path theme))))
(toggle-theme-active [this id]
(toggle-theme-active? [this id]
(if (theme-active? this id)
(deactivate-theme this id)
(activate-theme this id)))
@@ -1273,10 +1270,6 @@ Will return a value that matches this schema:
(mapcat :sets)
(get-active-themes this)))
(token-set-active? [this set-name]
(let [set-names (get-active-themes-set-names this)]
(contains? set-names set-name)))
(sets-at-path-all-active? [this group-path]
(let [active-set-names (get-active-themes-set-names this)
prefixed-path-str (set-group-path->set-group-prefixed-path-str group-path)]
@@ -1410,12 +1403,8 @@ Will return a value that matches this schema:
;; NOTE: we can't assign statically at eval time the value of a
;; function that is declared but not defined; so we need to pass
;; an anonymous function and delegate the resolution to runtime
{:encode/json #(some-> % export-dtcg-json)
:decode/json #(some-> % read-multi-set-dtcg)
;; FIXME: add a better, more realistic generator
:gen/gen (->> (sg/small-int)
(sg/fmap (fn [_]
(make-tokens-lib))))}}))
{:encode/json #(export-dtcg-json %)
:decode/json #(parse-multi-set-dtcg-json %)}}))
(defn duplicate-set
"Make a new set with a unique name, copying data from the given set in the lib."
@@ -1459,23 +1448,18 @@ Will return a value that matches this schema:
["value" :map]
["type" :string]]]))
(def ^:private schema:dtcg-node
[:schema {:registry
{::simple-value
[:or :string :int :double]
::value
[:or
[:ref ::simple-value]
[:vector ::simple-value]
[:map-of :string [:or
[:ref ::simple-value]
[:vector ::simple-value]]]]}}
[:map
["$type" :string]
["$value" [:ref ::value]]]])
(def ^:private dtcg-node?
(sm/validator schema:dtcg-node))
(sm/validator
[:or
[:map
["$value" :string]
["$type" :string]]
[:map
["$value" [:sequential [:map ["$type" :string]]]]
["$type" :string]]
[:map
["$value" :map]
["$type" :string]]]))
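As a concrete reference, a single DTCG token node of the shape validated above looks like this; the type and value are illustrative only:

{"$type" "color"
 "$value" "#ff0000"}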
(defn- get-json-format
"Searches through decoded token file and returns:
@@ -1662,43 +1646,6 @@ Will return a value that matches this schema:
(assert (= (get-json-format decoded-json-tokens) :json-format/legacy) "expected a legacy format for `decoded-json-tokens`")
(parse-single-set-dtcg-json set-name (legacy-json->dtcg-json decoded-json-tokens)))
(def ^:private schema:multi-set-dtcg
"Schema for penpot multi-set dtcg json decoded data.
Mainly used to validate the structure of the incoming data before
proceeding to parse it into our internal data structures."
[:schema {:registry
{::node
[:or
[:map-of :string [:ref ::node]]
schema:dtcg-node]}}
[:map
["$themes" {:optional true}
[:vector
[:map {:title "Theme"}
["id" {:optional true} :string]
["name" :string]
["description" :string]
["isSource" {:optional true} :boolean]
["selectedTokenSets"
[:map-of :string [:enum "enabled" "disabled"]]]]]]
["$metadata" {:optional true}
[:map {:title "Metadata"}
["tokenSetOrder" {:optional true} [:vector :string]]
["activeThemes" {:optional true} [:vector :string]]
["activeSets" {:optional true} [:vector :string]]]]
[:malli.core/default
[:map-of :string [:ref ::node]]]]])
(def ^:private check-multi-set-dtcg-data
(sm/check-fn schema:multi-set-dtcg))
(def ^:private decode-multi-set-dtcg-data
(sm/decoder schema:multi-set-dtcg
sm/json-transformer))
;; FIXME: remove `-json` suffix
(defn parse-multi-set-dtcg-json
"Parse a decoded json file with multi sets in DTCG format into a TokensLib."
[decoded-json]
@@ -1738,10 +1685,10 @@ Will return a value that matches this schema:
(uuid/next))
:name (get theme "name")
:group (get theme "group")
:is-source (or (get theme "isSource")
;; NOTE: backward compatibility
(get theme "is-source"))
:is-source (get theme "is-source")
:external-id (get theme "id")
:modified-at (some-> (get theme "modified-at")
(ct/inst))
:sets (into #{}
(comp (map key)
xf-normalize-set-name
@@ -1789,25 +1736,6 @@ Will return a value that matches this schema:
library))
(defn read-multi-set-dtcg
"Read penpot multi-set DTCG tokens. Accepts a string or JSON-decoded
data (without any case transformation). Used as a schema decoder and
in the SDK."
[data]
(if (instance? TokensLib data)
data
(let [data (if (string? data)
(json/decode data :key-fn identity)
data)
data #?(:cljs (if (object? data)
(json/->clj data :key-fn identity)
data)
:clj data)
data (decode-multi-set-dtcg-data data)]
(-> (check-multi-set-dtcg-data data)
(parse-multi-set-dtcg-json)))))
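A minimal usage sketch, assuming the `ctob` namespace alias used in the tests further down (`json-string` and `decoded-data` are placeholder bindings):
;; sketch only — both forms end up in the same check/parse path
(ctob/read-multi-set-dtcg json-string)   ;; raw JSON string
(ctob/read-multi-set-dtcg decoded-data)  ;; already-decoded data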
(defn- parse-multi-set-legacy-json
"Parse a decoded JSON file with multiple sets in legacy format into a TokensLib."
[decoded-json]
@@ -1820,7 +1748,6 @@ Will return a value that matches this schema:
(parse-multi-set-dtcg-json (merge other-data
dtcg-sets-data))))
;; FIXME: remove `-json` suffix
(defn parse-decoded-json
"Guess the format and content type of the decoded JSON file and parse it into a TokensLib.
The `file-name` is used to determine the set name when the JSON file contains a single set."
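A hedged usage sketch, assuming the two-argument shape implied by the docstring (`decoded-data` is a placeholder for any JSON-decoded token file; the set name is illustrative):
;; sketch only
(parse-decoded-json decoded-data "my-tokens")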
@@ -1890,15 +1817,15 @@ Will return a value that matches this schema:
(filter #(and (instance? TokenTheme %)
(not (hidden-theme? %))))
(map (fn [token-theme]
;; NOTE: this probably can be implemented as a type method
(d/without-nils
{"id" (:external-id token-theme)
"name" (:name token-theme)
"group" (:group token-theme)
"description" (:description token-theme)
"isSource" (:is-source token-theme)
"selectedTokenSets" (reduce #(assoc %1 %2 "enabled") {} (:sets token-theme))}))))
(let [theme-map (->> token-theme
(into {})
walk/stringify-keys)]
(-> theme-map
(set/rename-keys {"sets" "selectedTokenSets"
"external-id" "id"})
(update "selectedTokenSets" (fn [sets]
(->> (for [s sets] [s "enabled"])
(into {})))))))))
themes
(->> (get-theme-tree tokens-lib)
(tree-seq d/ordered-map? vals)
@@ -1908,34 +1835,29 @@ Will return a value that matches this schema:
active-themes
(-> (get-active-theme-paths tokens-lib)
(disj hidden-theme-path))]
[themes active-themes]))
{:themes themes
:active-themes active-themes}))
(defn export-dtcg-multi-file
"Convert a TokensLib into a plain Clojure map, suitable to be encoded as multiple JSON files, each in DTCG format."
[tokens-lib]
(let [[themes active-themes]
(dtcg-export-themes tokens-lib)
sets
(->> (get-sets tokens-lib)
(map (fn [token-set]
(let [name (get-name token-set)
tokens (get-tokens- token-set)]
[(str name ".json") (tokens-tree tokens :update-token-fn token->dtcg-token)])))
(into {}))]
(let [{:keys [themes active-themes]} (dtcg-export-themes tokens-lib)
sets (->> (get-sets tokens-lib)
(map (fn [token-set]
(let [name (get-name token-set)
tokens (get-tokens- token-set)]
[(str name ".json") (tokens-tree tokens :update-token-fn token->dtcg-token)])))
(into {}))]
(-> sets
(assoc "$themes.json" themes)
(assoc "$metadata.json"
{"tokenSetOrder" (get-set-names tokens-lib)
"activeThemes" active-themes
"activeSets" (get-active-themes-set-names tokens-lib)}))))
(assoc "$metadata.json" {"tokenSetOrder" (get-set-names tokens-lib)
"activeThemes" active-themes
"activeSets" (get-active-themes-set-names tokens-lib)}))))
(defn export-dtcg-json
"Convert a TokensLib into a plain Clojure map, suitable to be encoded as a multi-set JSON string in DTCG format."
[tokens-lib]
(let [[themes active-themes]
(dtcg-export-themes tokens-lib)
(let [{:keys [themes active-themes]} (dtcg-export-themes tokens-lib)
name-set-tuples
(->> (get-set-tree tokens-lib)

View File

@@ -311,22 +311,16 @@
[variant]
(cpn/merge-path-item (:name variant) (str/replace (:variant-name variant) #", " " / ")))
(def ^:private boolean-pairs
[["on" "off"]
["yes" "no"]
["true" "false"]])
(defn find-boolean-pair
"Given a vector, return a map with the boolean equivalents if the values match
any of the boolean pairs. Returns nil if none match."
[[a b :as v]]
(let [a' (-> a str/trim str/lower)
b' (-> b str/trim str/lower)]
"Given a vector, return the map from 'bool-values' that contains both values as keys.
Returns nil if none match."
[v]
(let [bool-values [{"on" true "off" false}
{"yes" true "no" false}
{"true" true "false" false}]]
(when (= (count v) 2)
(some (fn [[t f]]
(cond (and (= a' t)
(= b' f)) {a true b false}
(and (= b' t)
(= a' f)) {b true a false}
:else nil))
boolean-pairs))))
(some (fn [b]
(when (and (contains? b (first v))
(contains? b (last v)))
b))
bool-values))))
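A minimal usage sketch, mirroring the test expectations further down:
;; sketch only
(find-boolean-pair ["off" "on"])    ;; => {"on" true "off" false}
(find-boolean-pair ["hello" "bye"]) ;; => nil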

View File

@@ -1440,7 +1440,8 @@
result (ctob/export-dtcg-json tokens-lib)
expected {"$themes" [{"description" ""
"group" "group-1"
"isSource" false
"is-source" false
"modified-at" now
"id" "test-id-00"
"name" "theme-1"
"selectedTokenSets" {"core" "enabled"}}]
@@ -1557,11 +1558,12 @@
:external-id "test-id-01"
:modified-at now
:sets #{"core"}))
(ctob/toggle-theme-active (thi/id :theme-1)))
(ctob/toggle-theme-active? (thi/id :theme-1)))
result (ctob/export-dtcg-json tokens-lib)
expected {"$themes" [{"description" ""
"group" "group-1"
"isSource" false
"is-source" false
"modified-at" now
"id" "test-id-01"
"name" "theme-1"
"selectedTokenSets" {"core" "enabled"}}]
@@ -1610,11 +1612,12 @@
:external-id "test-id-01"
:modified-at now
:sets #{"some/set"}))
(ctob/toggle-theme-active (thi/id :theme-1)))
(ctob/toggle-theme-active? (thi/id :theme-1)))
result (ctob/export-dtcg-multi-file tokens-lib)
expected {"$themes.json" [{"description" ""
"group" "group-1"
"isSource" false
"is-source" false
"modified-at" now
"id" "test-id-01"
"name" "theme-1"
"selectedTokenSets" {"some/set" "enabled"}}]

View File

@@ -163,11 +163,9 @@
(t/deftest find-boolean-pair
(t/testing "find-boolean-pair"
(t/is (= (ctv/find-boolean-pair ["off" "on"]) {"on" true "off" false}))
(t/is (= (ctv/find-boolean-pair ["OfF" "oN"]) {"oN" true "OfF" false}))
(t/is (= (ctv/find-boolean-pair [" ofF" "oN "]) {"oN " true " ofF" false}))
(t/is (= (ctv/find-boolean-pair ["on" "off"]) {"on" true "off" false}))
(t/is (= (ctv/find-boolean-pair ["yes" "no"]) {"yes" true "no" false}))
(t/is (= (ctv/find-boolean-pair ["false" "true"]) {"true" true "false" false}))
(t/is (= (ctv/find-boolean-pair ["off" "on"]) {"on" true "off" false}))
(t/is (= (ctv/find-boolean-pair ["on" "off"]) {"on" true "off" false}))
(t/is (= (ctv/find-boolean-pair ["off" "on" "other"]) nil))
(t/is (= (ctv/find-boolean-pair ["hello" "bye"]) nil))))
(t/is (= (ctv/find-boolean-pair ["yes" "no"]) {"yes" true "no" false}))
(t/is (= (ctv/find-boolean-pair ["false" "true"]) {"true" true "false" false}))
(t/is (= (ctv/find-boolean-pair ["hello" "bye"]) nil))))

View File

@@ -101,45 +101,13 @@ RUN set -eux; \
corepack enable; \
rm -rf /tmp/nodejs.tar.gz;
################################################################################
## CADDYSERVER SETUP
################################################################################
FROM base AS setup-caddy
ENV CADDY_VERSION=2.10.2
RUN set -eux; \
ARCH="$(dpkg --print-architecture)"; \
case "${ARCH}" in \
aarch64|arm64) \
BINARY_URL="https://github.com/caddyserver/caddy/releases/download/v${CADDY_VERSION}/caddy_${CADDY_VERSION}_linux_arm64.tar.gz"; \
;; \
amd64|x86_64) \
BINARY_URL="https://github.com/caddyserver/caddy/releases/download/v${CADDY_VERSION}/caddy_${CADDY_VERSION}_linux_amd64.tar.gz"; \
;; \
*) \
echo "Unsupported arch: ${ARCH}"; \
exit 1; \
;; \
esac; \
curl -LfsSo /tmp/caddy.tar.gz ${BINARY_URL}; \
mkdir -p /tmp/caddy; \
cd /tmp/caddy; \
tar -xf /tmp/caddy.tar.gz; \
chown -R root /tmp/caddy; \
mv /tmp/caddy/caddy /usr/bin/; \
rm -rf /tmp/caddy.tar.gz; \
rm -rf /tmp/caddy;
################################################################################
## JVM SETUP
################################################################################
FROM base AS setup-jvm
ENV CLOJURE_VERSION=1.12.3.1577
ENV CLOJURE_VERSION=1.12.2.1565
RUN set -eux; \
ARCH="$(dpkg --print-architecture)"; \
@@ -425,7 +393,6 @@ COPY --from=setup-utils /opt/utils /opt/utils
COPY --from=setup-rust /opt/cargo /opt/cargo
COPY --from=setup-rust /opt/rustup /opt/rustup
COPY --from=setup-rust /opt/emsdk /opt/emsdk
COPY --from=setup-caddy /usr/bin/caddy /usr/bin/caddy
COPY files/nginx.conf /etc/nginx/nginx.conf
COPY files/nginx-mime.types /etc/nginx/mime.types
@@ -436,9 +403,6 @@ COPY files/vimrc /root/.vimrc
COPY files/tmux.conf /root/.tmux.conf
COPY files/sudoers /etc/sudoers
COPY files/Caddyfile /home/
COPY files/selfsigned.crt /home/
COPY files/selfsigned.key /home/
COPY files/start-tmux.sh /home/start-tmux.sh
COPY files/start-tmux-back.sh /home/start-tmux-back.sh
COPY files/entrypoint.sh /home/entrypoint.sh

View File

@@ -33,8 +33,6 @@ services:
- 3447:3447
- 3448:3448
- 3449:3449
- 3449:3449/udp
- 3450:3450
- 6006:6006
- 6060:6060
- 6061:6061
@@ -84,11 +82,6 @@ services:
- 9000:9000
- 9001:9001
networks:
default:
aliases:
- minio
postgres:
image: postgres:16.8
command: postgres -c config_file=/etc/postgresql.conf
@@ -117,11 +110,6 @@ services:
volumes:
- "valkey_data:/data"
networks:
default:
aliases:
- redis
mailer:
image: sj26/mailcatcher:latest
restart: always
@@ -130,12 +118,6 @@ services:
ports:
- "1080:1080"
networks:
default:
aliases:
- mailer
# https://github.com/rroemhild/docker-test-openldap
ldap:
image: rroemhild/test-openldap:2.1
@@ -149,9 +131,3 @@ services:
nofile:
soft: 1024
hard: 1024
networks:
default:
aliases:
- ldap

View File

@@ -1,12 +0,0 @@
{
auto_https off
}
localhost:3449 {
reverse_proxy localhost:4449
tls /home/selfsigned.crt /home/selfsigned.key
}
http://localhost:3450 {
reverse_proxy localhost:4449
}

View File

@@ -2,5 +2,4 @@
set -e
nginx
caddy start -c /home/Caddyfile
tail -f /dev/null;
tail -f /dev/null

View File

@@ -12,7 +12,7 @@ http {
sendfile on;
tcp_nopush on;
tcp_nodelay on;
keepalive_timeout 100;
keepalive_timeout 65;
types_hash_max_size 2048;
server_tokens off;
@@ -55,7 +55,7 @@ http {
proxy_cache_key "$host$request_uri";
server {
listen 4449 default_server;
listen 3449 default_server;
server_name _;
client_max_body_size 300M;
@@ -141,10 +141,6 @@ http {
proxy_pass http://127.0.0.1:5000;
}
location /nitrate/ {
proxy_pass http://127.0.0.1:3000/;
}
location /playground {
alias /home/penpot/penpot/experiments/;
add_header Cache-Control "no-cache, max-age=0";
@@ -223,19 +219,26 @@ http {
add_header X-Cache-Status $upstream_cache_status;
}
location ~* \.(jpg|png|svg|ttf|woff|woff2)$ {
add_header Cache-Control "public, max-age=604800" always; # 7 days
location ~ ^/js/config.js$ {
add_header Cache-Control "no-store, no-cache, max-age=0" always;
}
location ~* \.(js|css|wasm)$ {
add_header Cache-Control "no-store" always;
location ~* \.(js|css|jpg|svg|png|mjs|map)$ {
# We set no cache only on devenv
add_header Cache-Control "no-store, no-cache, max-age=0" always;
# add_header Cache-Control "max-age=604800" always; # 7 days
}
location ~ ^/(/|css|fonts|images|js|wasm|mjs|map) {
}
location ~ ^/[^/]+/(.*)$ {
return 301 " /404";
}
add_header Cache-Control "no-store" always;
add_header Last-Modified $date_gmt;
add_header Cache-Control "no-store, no-cache, max-age=0" always;
if_modified_since off;
try_files $uri /index.html$is_args$args /index.html =404;
}
}

Some files were not shown because too many files have changed in this diff.