Compare commits

..

42 Commits

Author SHA1 Message Date
Belén Albeza
32fe91398a further adjustments to record video 2024-10-23 13:07:29 +02:00
Belén Albeza
b36c8cd52a modify stress test (rs) to record video 2024-10-22 17:25:55 +02:00
Belén Albeza
f5acfd0787 stashed 2024-10-22 17:25:55 +02:00
AzazelN28
4939bc06ac wip: 20_000 test 2024-10-11 12:27:14 +02:00
Belén Albeza
cd63fb78d2 render 20k rects (rust) 2024-10-11 12:15:25 +02:00
Belén Albeza
3298785436 rust benchmark wip 2024-10-11 11:44:22 +02:00
Alejandro Alonso
eeb0d21013 Playing with pdf render 2024-10-11 09:07:41 +02:00
Alejandro Alonso
a11c2af542 Playing with image export and svg generation 2024-10-10 14:23:32 +02:00
Alejandro Alonso
6d5b0204e9 Playing with image export and svg generation 2024-10-10 14:12:38 +02:00
Alejandro Alonso
dfe5d861f2 Playing with image export and svg generation 2024-10-10 14:06:16 +02:00
Alejandro Alonso
445691430b Refactoring texts 2024-10-10 09:36:56 +02:00
Alejandro Alonso
88722bcf4f Refactoring texts 2024-10-10 09:35:50 +02:00
Alejandro Alonso
43903014c6 Path test 2024-10-10 09:09:53 +02:00
AzazelN28
cf8b62f1a8 wip: renderer redone completely 2024-10-09 15:18:36 +02:00
Alejandro Alonso
39b627cb1a Rendering some texts 2024-10-09 13:28:19 +02:00
AzazelN28
81680cffe9 wip: draw rect 2024-10-08 15:49:11 +02:00
Alejandro Alonso
dc014bd4eb Draw colors from memory too 2024-10-08 13:22:11 +02:00
Alejandro Alonso
0027e77861 Draw shapes from memory 2024-10-08 11:32:25 +02:00
Belén Albeza
fa9004d12c Pass struct to wasm (rust) 2024-10-04 12:50:16 +02:00
AzazelN28
c7f801dd44 wip: add build script for rust module 2024-10-03 16:05:08 +02:00
Alejandro Alonso
0f0b23e38b WIP: improve flush 2024-10-03 11:45:29 +02:00
Alejandro Alonso
1f8fe2dc4c WIP: basic color support 2024-10-03 08:00:32 +02:00
Belén Albeza
e84622061d Fix wrong order for args for draw_react (rust) 2024-10-02 17:04:29 +02:00
Belén Albeza
305de33200 fix zoom drawing glitch (rust) 2024-10-02 16:10:56 +02:00
Belén Albeza
80bbfe7a6f draw shapes with zoom (rust) 2024-10-02 15:40:38 +02:00
Belén Albeza
26ab39a45d re-render canvas when panning (rust) 2024-10-02 15:25:01 +02:00
Belén Albeza
739b8d7c02 fix vbox being nil when calling translate 2024-10-02 14:50:38 +02:00
Belén Albeza
e0a9f63015 add gitignore for rust project 2024-10-02 14:49:35 +02:00
Alejandro Alonso
928709a0f2 WIP: exposing translate 2024-10-02 14:16:51 +02:00
Alejandro Alonso
579b157ab7 WIP: exposing translate 2024-10-02 14:11:19 +02:00
Alejandro Alonso
0bf442e626 WIP Fix typo render-v2 2024-10-02 12:38:07 +02:00
Alejandro Alonso
2184af6602 WIP refactor rerender render-v2 2024-10-02 12:35:05 +02:00
AzazelN28
78fb938d16 wip: fix wrong namespace 2024-10-02 12:06:19 +02:00
Alejandro Alonso
dd9185e058 WIP refactor rerender render-v2 2024-10-02 12:01:49 +02:00
Alejandro Alonso
5f8d56b366 WIP: proper initialization 2024-10-02 11:17:36 +02:00
AzazelN28
bc0fde68c7 wip: add common namespace and fix cpp errors 2024-10-02 10:53:34 +02:00
AzazelN28
024a2ae848 wip: fix build script 2024-10-02 10:06:33 +02:00
AzazelN28
4d56bf66f4 wip: fix README and build script 2024-10-02 09:46:11 +02:00
Belén Albeza
c83ef201a1 wip: target emscripten for rust poc 2024-10-02 07:43:07 +02:00
AzazelN28
6d26abb9e3 wip: fix wrong call to renderer instead of renderer-cpp 2024-10-01 15:59:35 +02:00
AzazelN28
1b1f08388f wip: fix renderer-cpp config flag 2024-10-01 15:44:00 +02:00
AzazelN28
472c769c9a wip: c++ initial version 2024-10-01 15:35:41 +02:00
2818 changed files with 211613 additions and 636557 deletions

.circleci/config.yml (new file, 131 lines added)

@@ -0,0 +1,131 @@
version: 2
jobs:
build:
docker:
- image: penpotapp/devenv:latest
- image: cimg/postgres:14.5
environment:
POSTGRES_USER: penpot_test
POSTGRES_PASSWORD: penpot_test
POSTGRES_DB: penpot_test
- image: cimg/redis:7.0.5
working_directory: ~/repo
resource_class: medium+
environment:
JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
NODE_OPTIONS: --max-old-space-size=4096
steps:
- checkout
# Download and cache dependencies
- restore_cache:
keys:
- v1-dependencies-{{ checksum "backend/deps.edn" }}-{{ checksum "frontend/deps.edn"}}-{{ checksum "common/deps.edn"}}
# fallback to using the latest cache if no exact match is found
- v1-dependencies-
- run: cd .clj-kondo && cat config.edn
- run: cat .cljfmt.edn
- run: clj-kondo --version
- run:
name: "backend fmt check"
working_directory: "./backend"
command: |
yarn install
yarn run fmt:clj:check
- run:
name: "exporter fmt check"
working_directory: "./exporter"
command: |
yarn install
yarn run fmt:clj:check
- run:
name: "common fmt check"
working_directory: "./common"
command: |
yarn install
yarn run fmt:clj:check
- run:
name: "frontend fmt check"
working_directory: "./frontend"
command: |
yarn install
yarn run fmt:clj:check
yarn run fmt:js:check
- run:
name: "common linter check"
working_directory: "./common"
command: |
yarn install
yarn run lint:clj
- run:
name: "frontend linter check"
working_directory: "./frontend"
command: |
yarn install
yarn run lint:scss
yarn run lint:clj
- run:
name: "backend linter check"
working_directory: "./backend"
command: |
yarn install
yarn run lint:clj
- run:
name: "exporter linter check"
working_directory: "./exporter"
command: |
yarn install
yarn run lint:clj
- run:
name: "common tests"
working_directory: "./common"
command: |
yarn test
clojure -M:dev:test
- run:
name: "frontend tests"
working_directory: "./frontend"
command: |
yarn install
yarn test
- run:
name: "frontend integration tests"
working_directory: "./frontend"
command: |
yarn install
yarn run build:app:assets
clojure -M:dev:shadow-cljs release main
yarn playwright install --with-deps chromium
yarn e2e:test
- run:
name: "backend tests"
working_directory: "./backend"
command: |
clojure -M:dev:test
environment:
PENPOT_TEST_DATABASE_URI: "postgresql://localhost/penpot_test"
PENPOT_TEST_DATABASE_USERNAME: penpot_test
PENPOT_TEST_DATABASE_PASSWORD: penpot_test
PENPOT_TEST_REDIS_URI: "redis://localhost/1"
- save_cache:
paths:
- ~/.m2
key: v1-dependencies-{{ checksum "backend/deps.edn" }}-{{ checksum "frontend/deps.edn"}}-{{ checksum "common/deps.edn"}}
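
The formatting and lint steps above can be reproduced outside CI. A minimal local sketch, assuming the yarn scripts named in the config (`fmt:clj:check`, `lint:clj`, and so on) exist in each subproject's `package.json`:

```bash
# Mirror the "frontend fmt check" and "frontend linter check" steps locally.
cd frontend
yarn install
yarn run fmt:clj:check
yarn run fmt:js:check
yarn run lint:scss
yarn run lint:clj
```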


@@ -45,16 +45,10 @@
:potok/reify-type
{:level :error}
:missing-protocol-method
{:level :off}
:unresolved-namespace
{:level :warning
:exclude [data_readers]}
:unused-value
{:level :off}
:single-key-in
{:level :warning}
@@ -64,15 +58,6 @@
:redundant-do
{:level :off}
:redundant-ignore
{:level :off}
:redundant-nested-call
{:level :off}
:redundant-str-call
{:level :off}
:earmuffed-var-not-dynamic
{:level :off}


@@ -4,6 +4,7 @@
:remove-consecutive-blank-lines? false
:extra-indents {rumext.v2/fnc [[:inner 0]]
cljs.test/async [[:inner 0]]
app.common.schema/register! [[:inner 0] [:inner 1]]
promesa.exec/thread [[:inner 0]]
specify! [[:inner 0] [:inner 1]]}
}


@@ -11,9 +11,3 @@ end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
[*.{rs}]
indent_size = 4
indent_style = space
end_of_line = lf


@@ -1,20 +1,29 @@
### Related Ticket
<!--
<!-- Reference the related GitHub/Taiga ticket. -->
Some key notes before you open a PR:
### Summary
1. Select which branch should this PR be merged in? By default, you should always merge to the develop branch.
2. PR name follows [convention](http://karma-runner.github.io/4.0/dev/git-commit-msg.html)
3. All tests pass locally, UI and Unit tests
4. All business logic and validations must be on the server-side
5. Update necessary Documentation
6. Put `closes #XXXX` in your comment to auto-close the issue that your PR fixes
### Steps to reproduce
### Checklist
Also, if you're new here
- [ ] Choose the correct target branch; use `develop` by default.
- [ ] Provide a brief summary of the changes introduced.
- [ ] Add a detailed explanation of how to reproduce the issue and/or verify the fix, if applicable.
- [ ] Include screenshots or videos, if applicable.
- [ ] Add or modify existing integration tests in case of bugs or new features, if applicable.
- [ ] Refactor any modified SCSS files following the refactor guide.
- [ ] Check CI passes successfully.
- [ ] Update the `CHANGES.md` file, referencing the related GitHub issue, if applicable.
- Contribution Guide => https://github.com/uxbox/uxbox/blob/develop/CONTRIBUTING.md
<!-- For more details, check the contribution guidelines: https://github.com/penpot/penpot/blob/develop/CONTRIBUTING.md -->
-->
> Please provide enough information so that others can review your pull request:
<!-- You can skip this if you're fixing a typo or updating existing documentation -->
> Explain the **details** for making this change. What existing problem does the pull request solve?
<!-- Example: When "Adding a function to do X", explain why it is necessary to have a way to do X. -->
> Screenshots/GIFs
<!-- Add images/recordings to better visualize the change: expected/current behviour -->


@@ -1,93 +0,0 @@
name: Bundles Builder
on:
# Create bundle from manual action
workflow_dispatch:
inputs:
gh_ref:
description: 'Name of the branch or ref'
type: string
required: true
default: 'develop'
build_wasm:
description: 'BUILD_WASM. Valid values: yes, no'
type: string
required: false
default: 'yes'
build_storybook:
description: 'BUILD_STORYBOOK. Valid values: yes, no'
type: string
required: false
default: 'yes'
workflow_call:
inputs:
gh_ref:
description: 'Name of the branch or ref'
type: string
required: true
default: 'develop'
build_wasm:
description: 'BUILD_WASM. Valid values: yes, no'
type: string
required: false
default: 'yes'
build_storybook:
description: 'BUILD_STORYBOOK. Valid values: yes, no'
type: string
required: false
default: 'yes'
jobs:
build-bundle:
name: Build and Upload Penpot Bundle
runs-on: ubuntu-24.04
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0
ref: ${{ inputs.gh_ref }}
- name: Extract some useful variables
id: vars
run: |
echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT
echo "bundle_version=$(git describe --tags --always)" >> $GITHUB_OUTPUT
- name: Build bundle
env:
BUILD_WASM: ${{ inputs.build_wasm }}
BUILD_STORYBOOK: ${{ inputs.build_storybook }}
run: ./manage.sh build-bundle
- name: Prepare directories for zipping
run: |
mkdir zips
mv bundles penpot
- name: Create zip bundle
run: |
echo "📦 Packaging Penpot bundle..."
zip -r zips/penpot.zip penpot
- name: Upload Penpot bundle to S3
run: |
aws s3 cp zips/penpot.zip s3://${{ secrets.S3_BUCKET }}/penpot-${{ steps.vars.outputs.gh_ref }}.zip --metadata bundle-version=${{ steps.vars.outputs.bundle_version }}
- name: Notify Mattermost
if: failure()
uses: mattermost/action-mattermost-notify@master
with:
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
MATTERMOST_CHANNEL: bot-alerts-cicd
TEXT: |
❌ 📦 *[PENPOT] Error building penpot bundles.*
📄 Triggered from ref: `${{ steps.vars.outputs.gh_ref }}`
Bundle version: `${{ steps.vars.outputs.bundle_version }}`
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
@infra
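
The bundle itself is produced by `manage.sh`, with the two workflow inputs forwarded as environment variables. Run by hand it would look roughly like this (a sketch based only on the "Build bundle" step above):

```bash
# Same invocation as the workflow's "Build bundle" step, with both optional artifacts enabled.
BUILD_WASM=yes BUILD_STORYBOOK=yes ./manage.sh build-bundle
```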


@@ -1,21 +0,0 @@
name: _DEVELOP
on:
schedule:
- cron: '16 5-20 * * 1-5'
jobs:
build-bundle:
uses: ./.github/workflows/build-bundle.yml
secrets: inherit
with:
gh_ref: "develop"
build_wasm: "yes"
build_storybook: "yes"
build-docker:
needs: build-bundle
uses: ./.github/workflows/build-docker.yml
secrets: inherit
with:
gh_ref: "develop"


@@ -1,36 +0,0 @@
name: DevEnv Docker Image Builder
on:
workflow_dispatch:
jobs:
build-and-push:
name: Build and push DevEnv Docker image
environment: release-admins
runs-on: ubuntu-24.04
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Registry
uses: docker/login-action@v3
with:
username: ${{ secrets.PUB_DOCKER_USERNAME }}
password: ${{ secrets.PUB_DOCKER_PASSWORD }}
- name: Build and push DevEnv Docker image
uses: docker/build-push-action@v6
env:
DOCKER_IMAGE: 'penpotapp/devenv'
with:
context: ./docker/devenv/
file: ./docker/devenv/Dockerfile
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ env.DOCKER_IMAGE }}:latest
cache-from: type=registry,ref=${{ env.DOCKER_IMAGE }}:buildcache
cache-to: type=registry,ref=${{ env.DOCKER_IMAGE }}:buildcache,mode=max


@@ -1,152 +0,0 @@
name: Docker Images Builder
on:
workflow_dispatch:
inputs:
gh_ref:
description: 'Name of the branch or ref'
type: string
required: true
default: 'develop'
workflow_call:
inputs:
gh_ref:
description: 'Name of the branch or ref'
type: string
required: true
default: 'develop'
jobs:
build-and-push:
name: Build and Push Penpot Docker Images
runs-on: ubuntu-24.04-arm
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
ref: ${{ inputs.gh_ref }}
- name: Extract some useful variables
id: vars
run: |
echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT
- name: Download Penpot Bundles
id: bundles
env:
FILE_NAME: penpot-${{ steps.vars.outputs.gh_ref }}.zip
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }}
run: |
tmp=$(aws s3api head-object \
--bucket ${{ secrets.S3_BUCKET }} \
--key "$FILE_NAME" \
--query 'Metadata."bundle-version"' \
--output text)
echo "bundle_version=$tmp" >> $GITHUB_OUTPUT
pushd docker/images
aws s3 cp s3://${{ secrets.S3_BUCKET }}/$FILE_NAME .
unzip $FILE_NAME > /dev/null
mv penpot/backend bundle-backend
mv penpot/frontend bundle-frontend
mv penpot/exporter bundle-exporter
mv penpot/storybook bundle-storybook
popd
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Registry
uses: docker/login-action@v3
with:
registry: ${{ secrets.DOCKER_REGISTRY }}
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Extract metadata (tags, labels)
id: meta
uses: docker/metadata-action@v5
with:
images:
frontend
backend
exporter
storybook
labels: |
bundle_version=${{ steps.bundles.outputs.bundle_version }}
- name: Build and push Backend Docker image
uses: docker/build-push-action@v6
env:
DOCKER_IMAGE: 'backend'
BUNDLE_PATH: './bundle-backend'
with:
context: ./docker/images/
file: ./docker/images/Dockerfile.backend
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max
- name: Build and push Frontend Docker image
uses: docker/build-push-action@v6
env:
DOCKER_IMAGE: 'frontend'
BUNDLE_PATH: './bundle-frontend'
with:
context: ./docker/images/
file: ./docker/images/Dockerfile.frontend
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max
- name: Build and push Exporter Docker image
uses: docker/build-push-action@v6
env:
DOCKER_IMAGE: 'exporter'
BUNDLE_PATH: './bundle-exporter'
with:
context: ./docker/images/
file: ./docker/images/Dockerfile.exporter
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max
- name: Build and push Storybook Docker image
uses: docker/build-push-action@v6
env:
DOCKER_IMAGE: 'storybook'
BUNDLE_PATH: './bundle-storybook'
with:
context: ./docker/images/
file: ./docker/images/Dockerfile.storybook
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max
- name: Notify Mattermost
if: failure()
uses: mattermost/action-mattermost-notify@master
with:
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
MATTERMOST_CHANNEL: bot-alerts-cicd
TEXT: |
❌ 🐳 *[PENPOT] Error building penpot docker images.*
📄 Triggered from ref: `${{ steps.vars.outputs.gh_ref }}`
📦 Bundle: `${{ steps.bundles.outputs.bundle_version }}`
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
@infra


@@ -1,21 +0,0 @@
name: _NITRATE MODULE
on:
schedule:
- cron: '36 5-20 * * 1-5'
jobs:
build-bundle:
uses: ./.github/workflows/build-bundle.yml
secrets: inherit
with:
gh_ref: "nitrate-module"
build_wasm: "yes"
build_storybook: "yes"
build-docker:
needs: build-bundle
uses: ./.github/workflows/build-docker.yml
secrets: inherit
with:
gh_ref: "nitrate-module"


@@ -1,21 +0,0 @@
name: _STAGING
on:
schedule:
- cron: '36 5-20 * * 1-5'
jobs:
build-bundle:
uses: ./.github/workflows/build-bundle.yml
secrets: inherit
with:
gh_ref: "staging"
build_wasm: "yes"
build_storybook: "yes"
build-docker:
needs: build-bundle
uses: ./.github/workflows/build-docker.yml
secrets: inherit
with:
gh_ref: "staging"


@@ -1,46 +0,0 @@
name: _TAG
on:
push:
tags:
- '*'
jobs:
build-bundle:
uses: ./.github/workflows/build-bundle.yml
secrets: inherit
with:
gh_ref: ${{ github.ref_name }}
build_wasm: "yes"
build_storybook: "yes"
build-docker:
needs: build-bundle
uses: ./.github/workflows/build-docker.yml
secrets: inherit
with:
gh_ref: ${{ github.ref_name }}
notify:
name: Notifications
runs-on: ubuntu-24.04
needs: build-docker
steps:
- name: Notify Mattermost
uses: mattermost/action-mattermost-notify@master
with:
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
MATTERMOST_CHANNEL: bot-alerts-cicd
TEXT: |
🐳 *[PENPOT] Docker image available.*
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
@infra
publish-final-tag:
if: ${{ !contains(github.ref_name, '-RC') && !contains(github.ref_name, '-alpha') && !contains(github.ref_name, '-beta') && contains(github.ref_name, '.') }}
needs: build-docker
uses: ./.github/workflows/release.yml
secrets: inherit
with:
gh_ref: ${{ github.ref_name }}


@@ -1,50 +0,0 @@
name: 'Commit Message Check'
on:
pull_request:
types:
- opened
- edited
- reopened
- synchronize
pull_request_target:
types:
- opened
- edited
- reopened
- synchronize
push:
branches:
- main
- develop
- staging
jobs:
check-commit-message:
name: Check Commit Message
runs-on: ubuntu-latest
steps:
- name: Check Commit Type
uses: gsactions/commit-message-checker@v2
with:
pattern: '^(((:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle|rewind|construction_worker):)\s[A-Z].*[^.])|(Merge|Revert).+[^.])$'
flags: 'gm'
error: 'Commit should match CONTRIBUTING.md guideline'
checkAllCommitMessages: 'true' # optional: this checks all commits associated with a pull request
accessToken: ${{ secrets.GITHUB_TOKEN }} # github access token is only required if checkAllCommitMessages is true
# - name: Check Line Length
# uses: gsactions/commit-message-checker@v2
# with:
# pattern: '^[^#].{74}'
# error: 'The maximum line length of 74 characters is exceeded.'
# excludeDescription: 'true' # optional: this excludes the description body of a pull request
# excludeTitle: 'true' # optional: this excludes the title of a pull request
# checkAllCommitMessages: 'true' # optional: this checks all commits associated with a pull request
# accessToken: ${{ secrets.GITHUB_TOKEN }} # github access token is only required if checkAllCommitMessages is ue
# - name: Check for Resolves / Fixes
# uses: gsactions/commit-message-checker@v2
# with:
# pattern: '^.+(Resolves|Fixes): \#[0-9]+$'
# error: 'You need at least one "Resolves|Fixes: #<issue number>" line.'
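
The commit pattern can be tested locally before pushing. A rough check, assuming GNU grep built with PCRE support (`-P`); the regex is copied verbatim from the checker step above:

```bash
# Exits 0 for a subject that follows the guideline...
echo ':bug: Fix panning glitch on canvas' | \
  grep -P '^(((:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle|rewind|construction_worker):)\s[A-Z].*[^.])|(Merge|Revert).+[^.])$'

# ...and 1 for one that does not (no type emoji, lowercase, trailing period).
echo 'fixed some stuff.' | \
  grep -P '^(((:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle|rewind|construction_worker):)\s[A-Z].*[^.])|(Merge|Revert).+[^.])$'
```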


@@ -1,114 +0,0 @@
name: Release Publisher
on:
workflow_dispatch:
inputs:
gh_ref:
description: 'Tag to release'
type: string
required: true
workflow_call:
inputs:
gh_ref:
description: 'Tag to release'
type: string
required: true
permissions:
contents: write
jobs:
release:
environment: release-admins
runs-on: ubuntu-24.04
outputs:
version: ${{ steps.vars.outputs.gh_ref }}
release_notes: ${{ steps.extract_release_notes.outputs.release_notes }}
steps:
- name: Extract some useful variables
id: vars
run: |
echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
ref: ${{ steps.vars.outputs.gh_ref }}
# --- Publicly release the docker images ---
- name: Configure ECR credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.DOCKER_USERNAME }}
aws-secret-access-key: ${{ secrets.DOCKER_PASSWORD }}
aws-region: ${{ secrets.AWS_REGION }}
- name: Install Skopeo
run: |
sudo apt-get update -y
sudo apt-get install -y skopeo
- name: Copy images from AWS ECR to Docker Hub
env:
AWS_REGION: ${{ secrets.AWS_REGION }}
DOCKER_REGISTRY: ${{ secrets.DOCKER_REGISTRY }}
PUB_DOCKER_USERNAME: ${{ secrets.PUB_DOCKER_USERNAME }}
PUB_DOCKER_PASSWORD: ${{ secrets.PUB_DOCKER_PASSWORD }}
TAG: ${{ steps.vars.outputs.gh_ref }}
run: |
aws ecr get-login-password --region $AWS_REGION | \
skopeo login --username AWS --password-stdin \
$DOCKER_REGISTRY
echo "$PUB_DOCKER_PASSWORD" | skopeo login --username "$PUB_DOCKER_USERNAME" --password-stdin docker.io
IMAGES=("frontend" "backend" "exporter" "storybook")
for image in "${IMAGES[@]}"; do
skopeo copy --all \
docker://$DOCKER_REGISTRY/$image:$TAG \
docker://docker.io/penpotapp/$image:$TAG
for alias in main latest; do
skopeo copy --all \
docker://$DOCKER_REGISTRY/$image:$TAG \
docker://docker.io/penpotapp/$image:$alias
done
done
# --- Release notes extraction ---
- name: Extract release notes from CHANGES.md
id: extract_release_notes
env:
TAG: ${{ steps.vars.outputs.gh_ref }}
run: |
RELEASE_NOTES=$(awk "/^## $TAG$/{flag=1; next} /^## /{flag=0} flag" CHANGES.md | awk '{$1=$1};1')
if [ -z "$RELEASE_NOTES" ]; then
RELEASE_NOTES="No changes for $TAG according to CHANGES.md"
fi
echo "release_notes<<EOF" >> $GITHUB_OUTPUT
echo "$RELEASE_NOTES" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
# --- Create GitHub release ---
- name: Create GitHub release
uses: softprops/action-gh-release@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: ${{ steps.vars.outputs.gh_ref }}
name: ${{ steps.vars.outputs.gh_ref }}
body: ${{ steps.extract_release_notes.outputs.release_notes }}
- name: Notify Mattermost
if: failure()
uses: mattermost/action-mattermost-notify@master
with:
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
MATTERMOST_CHANNEL: bot-alerts-cicd
TEXT: |
❌ 🚀 *[PENPOT] Error releasing penpot.*
📄 Triggered from ref: `${{ steps.vars.outputs.gh_ref }}`
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
@infra
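
The release-notes step slices CHANGES.md between the heading for the released tag and the next `##` heading. A small illustration of that awk pipeline on a made-up file (the tag and entries are hypothetical; the awk commands are the ones from the workflow):

```bash
TAG=2.1.0   # hypothetical tag
cat > /tmp/CHANGES.md <<'EOF'
## 2.2.0
- Upcoming work
## 2.1.0
- Fix zoom drawing glitch
- Add gitignore for rust project
## 2.0.0
- Older entries
EOF

# Same two-stage extraction as "Extract release notes from CHANGES.md".
awk "/^## $TAG$/{flag=1; next} /^## /{flag=0} flag" /tmp/CHANGES.md | awk '{$1=$1};1'
# Prints only the two bullet lines under "## 2.1.0".
```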


@@ -1,289 +0,0 @@
name: "CI"
defaults:
run:
shell: bash
on:
pull_request:
types:
- opened
- synchronize
push:
branches:
- develop
- staging
concurrency:
group: ${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
lint:
name: "Linter"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Check clojure code format
run: |
./scripts/lint
test-common:
name: "Common Tests"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Run tests on JVM
working-directory: ./common
run: |
clojure -M:dev:test
- name: Run tests on NODE
working-directory: ./common
run: |
./scripts/test
test-frontend:
name: "Frontend Tests"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Unit Tests
working-directory: ./frontend
run: |
./scripts/test
- name: Component Tests
working-directory: ./frontend
run: |
./scripts/test-components
test-render-wasm:
name: "Render WASM Tests"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Format
working-directory: ./render-wasm
run: |
cargo fmt --check
- name: Lint
working-directory: ./render-wasm
run: |
./lint
- name: Test
working-directory: ./render-wasm
run: |
./test
test-backend:
name: "Backend Tests"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
services:
postgres:
image: postgres:17
# Provide the password for postgres
env:
POSTGRES_USER: penpot_test
POSTGRES_PASSWORD: penpot_test
POSTGRES_DB: penpot_test
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
redis:
image: valkey/valkey:9
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Run tests
working-directory: ./backend
env:
PENPOT_TEST_DATABASE_URI: "postgresql://postgres/penpot_test"
PENPOT_TEST_DATABASE_USERNAME: penpot_test
PENPOT_TEST_DATABASE_PASSWORD: penpot_test
PENPOT_TEST_REDIS_URI: "redis://redis/1"
run: |
clojure -M:dev:test --reporter kaocha.report/documentation
test-library:
name: "Library Tests"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Run tests
working-directory: ./library
run: |
./scripts/test
build-integration:
name: "Build Integration Bundle"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Build Bundle
working-directory: ./frontend
run: |
./scripts/build 0.0.0
- name: Store Bundle Cache
uses: actions/cache@v4
with:
key: "integration-bundle-${{ github.sha }}"
path: frontend/resources/public
test-integration-1:
name: "Integration Tests 1/4"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
needs: build-integration
steps:
- name: Checkout Repository
uses: actions/checkout@v4
- name: Restore Cache
uses: actions/cache/restore@v4
with:
key: "integration-bundle-${{ github.sha }}"
path: frontend/resources/public
- name: Run Tests
working-directory: ./frontend
run: |
./scripts/test-e2e --shard="1/4";
- name: Upload test result
uses: actions/upload-artifact@v4
if: always()
with:
name: integration-tests-result-1
path: frontend/test-results/
overwrite: true
retention-days: 3
test-integration-2:
name: "Integration Tests 2/4"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
needs: build-integration
steps:
- name: Checkout Repository
uses: actions/checkout@v4
- name: Restore Cache
uses: actions/cache/restore@v4
with:
key: "integration-bundle-${{ github.sha }}"
path: frontend/resources/public
- name: Run Tests
working-directory: ./frontend
run: |
./scripts/test-e2e --shard="2/4";
- name: Upload test result
uses: actions/upload-artifact@v4
if: always()
with:
name: integration-tests-result-2
path: frontend/test-results/
overwrite: true
retention-days: 3
test-integration-3:
name: "Integration Tests 3/4"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
needs: build-integration
steps:
- name: Checkout Repository
uses: actions/checkout@v4
- name: Restore Cache
uses: actions/cache/restore@v4
with:
key: "integration-bundle-${{ github.sha }}"
path: frontend/resources/public
- name: Run Tests
working-directory: ./frontend
run: |
./scripts/test-e2e --shard="3/4";
- name: Upload test result
uses: actions/upload-artifact@v4
if: always()
with:
name: integration-tests-result-3
path: frontend/test-results/
overwrite: true
retention-days: 3
test-integration-4:
name: "Integration Tests 4/4"
runs-on: ubuntu-24.04
container: penpotapp/devenv:latest
needs: build-integration
steps:
- name: Checkout Repository
uses: actions/checkout@v4
- name: Restore Cache
uses: actions/cache/restore@v4
with:
key: "integration-bundle-${{ github.sha }}"
path: frontend/resources/public
- name: Run Tests
working-directory: ./frontend
run: |
./scripts/test-e2e --shard="4/4";
- name: Upload test result
uses: actions/upload-artifact@v4
if: always()
with:
name: integration-tests-result-4
path: frontend/test-results/
overwrite: true
retention-days: 3
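
Most of these jobs wrap a handful of commands that can also be run from a checkout. For example, the "Render WASM Tests" job above reduces to the following (a sketch, assuming a Rust toolchain and the repo's `./lint` and `./test` wrappers in `render-wasm/`):

```bash
# Local equivalent of the test-render-wasm job.
cd render-wasm
cargo fmt --check   # formatting check
./lint              # project lint wrapper
./test              # project test wrapper
```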

.gitignore (13 changed lines)

@@ -30,8 +30,6 @@
/*.zip
/.clj-kondo/.cache
/_dump
/notes
/playground/
/backend/*.md
/backend/*.sql
/backend/*.txt
@@ -42,7 +40,6 @@
/backend/resources/public/assets
/backend/resources/public/media
/backend/target/
/backend/experiments
/bundle*
/cd.md
/clj-profiler/
@@ -53,6 +50,9 @@
/exporter/target
/frontend/.storybook/preview-body.html
/frontend/.storybook/preview-head.html
/frontend/cypress/fixtures/validuser.json
/frontend/cypress/videos/*/
/frontend/cypress/videos/*/
/frontend/dist/
/frontend/npm-debug.log
/frontend/out/
@@ -68,16 +68,9 @@
/vendor/**/target
/vendor/svgclean/bundle*.js
/web
/library/target/
/library/*.zip
/external
clj-profiler/
node_modules
/test-results/
/playwright-report/
/blob-report/
/playwright/.cache/
/render-wasm/target/
/**/.yarn/*
/.pnpm-store

.nvmrc (1 line removed)

@@ -1 +0,0 @@
v22.19.0


File diff suppressed because it is too large.


@@ -1,3 +0,0 @@
# Penpot's Code of Conduct
Check it at: https://help.penpot.app/contributing-guide/coc/


@@ -1,59 +1,62 @@
# Contributing Guide #
Thank you for your interest in contributing to Penpot. This is a
generic guide that details how to contribute to the project in a way that
is efficient for everyone. If you are looking for specific documentation on
different parts of the platform, please refer to the `docs/` directory,
or the rendered version at the [Help Center](https://help.penpot.app/).
generic guide that details how to contribute to Penpot in a way that
is efficient for everyone. If you want a specific documentation for
different parts of the platform, please refer to `docs/` directory.
## Reporting Bugs ##
We are using [GitHub Issues](https://github.com/penpot/penpot/issues)
for our public bugs. We keep a close eye on them and try to make it
for our public bugs. We keep a close eye on this and try to make it
clear when we have an internal fix in progress. Before filing a new
task, try to make sure your problem doesn't already exist.
If you found a bug, please report it, as far as possible, with:
If you found a bug, please report it, as far as possible with:
- a detailed explanation of steps to reproduce the error
- the browser and browser version used
- a dev tools console exception stack trace (if available)
- a browser and the browser version used
- a dev tools console exception stack trace (if it is available)
If you found a bug which you think is better to discuss in private (for
example, security bugs), consider first sending an email to
If you found a bug that you consider better discuss in private (for
example: security bugs), consider first send an email to
`support@penpot.app`.
**We don't have a formal bug bounty program for security reports; this
is an open source application, and your contribution will be recognized
**We don't have formal bug bounty program for security reports; this
is an open source application and your contribution will be recognized
in the changelog.**
## Pull Requests ##
## Pull requests ##
If you want to propose a change or bug fix via a pull request (PR),
you should first carefully read the section **Developer's Certificate of
Origin**. You must also format your code and commits according to the
instructions below.
If you want propose a change or bug fix with the Pull-Request system
firstly you should carefully read the **DCO** section and format your
commits accordingly.
If you intend to fix a bug, it's fine to submit a pull request right
away, but we still recommend filing an issue detailing what you're
If you intend to fix a bug it's fine to submit a pull request right
away but we still recommend to file an issue detailing what you're
fixing. This is helpful in case we don't accept that specific fix but
want to keep track of the issue.
If you want to implement or start working on a new feature, please
open a **question** / **discussion** issue for it. No PR
will be accepted without a prior discussion about the changes,
whether it is a new feature, an already planned one, or a quick win.
If you want to implement or start working in a new feature, please
open a **question** / **discussion** issue for it. No pull-request
will be accepted without previous chat about the changes,
independently if it is a new feature, already planned feature or small
quick win.
If it is your first PR, you can learn how to proceed from
[this free video
series](https://egghead.io/courses/how-to-contribute-to-an-open-source-project-on-github)
If is going to be your first pull request, You can learn how from this
free video series:
https://egghead.io/courses/how-to-contribute-to-an-open-source-project-on-github
We will use the `easy fix` mark for tag for indicate issues that are
easy for beginners.
We use the `easy fix` tag to indicate issues that are appropriate for beginners.
## Commit Guidelines ##
We have very precise rules on how our git commit messages must be formatted.
We have very precise rules over how our git commit messages can be formatted.
The commit message format is:
@@ -68,37 +71,33 @@ The commit message format is:
Where type is:
- :bug: `:bug:` a commit that fixes a bug
- :sparkles: `:sparkles:` a commit that adds an improvement
- :tada: `:tada:` a commit with a new feature
- :sparkles: `:sparkles:` a commit that an improvement
- :tada: `:tada:` a commit with new feature
- :recycle: `:recycle:` a commit that introduces a refactor
- :lipstick: `:lipstick:` a commit with cosmetic changes
- :ambulance: `:ambulance:` a commit that fixes a critical bug
- :ambulance: `:ambulance:` a commit that fixes critical bug
- :books: `:books:` a commit that improves or adds documentation
- :construction: `:construction:` a WIP commit
- :construction: `:construction:`: a wip commit
- :boom: `:boom:` a commit with breaking changes
- :wrench: `:wrench:` a commit for config updates
- :zap: `:zap:` a commit with performance improvements
- :whale: `:whale:` a commit for Docker-related stuff
- :paperclip: `:paperclip:` a commit with other non-relevant changes
- :arrow_up: `:arrow_up:` a commit with dependency updates
- :arrow_down: `:arrow_down:` a commit with dependency downgrades
- :fire: `:fire:` a commit that removes files or code
- :globe_with_meridians: `:globe_with_meridians:` a commit that adds or updates
translations
- :whale: `:whale:` a commit for docker related stuff
- :rewind: `:rewind:` a commit that reverts changes
- :paperclip: `:paperclip:` a commit with other not relevant changes
- :arrow_up: `:arrow_up:` a commit with dependencies updates
More info:
- https://gist.github.com/parmentf/035de27d6ed1dce0b36a
- https://gist.github.com/rxaviers/7360908
Each commit should have:
- A concise subject using the imperative mood.
- The subject should capitalize the first letter, omit the period
at the end, and be no longer than 65 characters.
- A concise subject using imperative mood.
- The subject should have capitalized the first letter, without period
at the end and no larger than 65 characters.
- A blank line between the subject line and the body.
- An entry in the CHANGES.md file if applicable, referencing the
GitHub or Taiga issue/user story using these same rules.
- An entry on the CHANGES.md file if applicable, referencing the
github or taiga issue/user-story using the these same rules.
Examples of good commit messages:
@@ -111,30 +110,8 @@ Examples of good commit messages:
- `:ambulance: Fix critical bug on user registration process`
- `:tada: Add new approach for user registration`
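
Putting these rules together, a commit created from the command line might look like this (the message text is invented; `-s` adds the Signed-off-by line required by the DCO section below):

```bash
# Type emoji, imperative mood, capitalized subject, under 65 characters, no trailing period.
git commit -s -m ':sparkles: Improve canvas rendering performance'
```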
## Formatting and Linting ##
You will want to make sure your code is formatted and linted before submitting
a PR. We use [cljfmt](https://github.com/weavejester/cljfmt) and
[clj-kondo](https://github.com/clj-kondo/clj-kondo) for this. After installing
them on your system, you can run them with:
```bash
# Check formatting
yarn fmt:clj:check
# Check and fix formatting
yarn fmt:clj
# Run the linter
yarn lint:clj
```
There are more choices in `package.json`.
Ideally, you should run these commands as git pre-commit hooks. A convenient way
of defining them is to use [Husky](https://typicode.github.io/husky/#/).
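
For instance, a pre-commit hook wired up through Husky could run the same two checks; the `.husky/pre-commit` path and contents below are an assumption, not part of this repository:

```bash
# .husky/pre-commit (hypothetical): abort the commit if formatting or linting fails.
yarn fmt:clj:check
yarn lint:clj
```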
## Code of Conduct ##
## Code of conduct ##
As contributors and maintainers of this project, we pledge to respect
all people who contribute through reporting issues, posting feature
@@ -154,11 +131,11 @@ unprofessional conduct.
Project maintainers have the right and responsibility to remove, edit,
or reject comments, commits, code, wiki edits, issues, and other
contributions that are not aligned with this Code of Conduct. Project
contributions that are not aligned to this Code of Conduct. Project
maintainers who do not follow the Code of Conduct may be removed from
the project team.
This Code of Conduct applies both within project spaces and in public
This code of conduct applies both within project spaces and in public
spaces when an individual is representing the project or its
community.
@@ -167,11 +144,12 @@ may be reported by opening an issue or contacting one or more of the
project maintainers.
This Code of Conduct is adapted from the Contributor Covenant, version
1.1.0, available from [http://contributor-covenant.org/version/1/1/0/](http://contributor-covenant.org/version/1/1/0/)
1.1.0, available from http://contributor-covenant.org/version/1/1/0/
## Developer's Certificate of Origin (DCO)
By submitting code you agree to and can certify the following:
## Developer's Certificate of Origin (DCO) ##
By submitting code you are agree and can certify the below:
Developer's Certificate of Origin 1.1
@@ -199,15 +177,13 @@ By submitting code you agree to and can certify the following:
maintained indefinitely and may be redistributed consistent with
this project or the open source license(s) involved.
Then, all your code patches (**documentation is excluded**) should
Then, all your code patches (**documentation are excluded**) should
contain a sign-off at the end of the patch/commit description body. It
can be automatically added by adding the `-s` parameter to `git commit`.
can be automatically added on adding `-s` parameter to `git commit`.
This is an example of what the line should look like:
This is an example of the aspect of the line:
```
Signed-off-by: Andrey Antukh <niwi@niwi.nz>
```
Signed-off-by: Andrey Antukh <niwi@niwi.nz>
Please, use your real name (sorry, no pseudonyms or anonymous
contributions are allowed).


@@ -8,46 +8,42 @@
<img alt="penpot header image" src="https://penpot.app/images/readme/github-light-mode.png">
</picture>
<p align="center">
<a href="https://www.mozilla.org/en-US/MPL/2.0" rel="nofollow"><img alt="License: MPL-2.0" src="https://img.shields.io/badge/MPL-2.0-blue.svg" style="max-width:100%;"></a>
<a href="https://community.penpot.app" rel="nofollow"><img alt="Penpot Community" src="https://img.shields.io/discourse/posts?server=https%3A%2F%2Fcommunity.penpot.app" style="max-width:100%;"></a>
<a href="https://tree.taiga.io/project/penpot/" title="Managed with Taiga.io" rel="nofollow"><img alt="Managed with Taiga.io" src="https://img.shields.io/badge/managed%20with-TAIGA.io-709f14.svg" style="max-width:100%;"></a>
<a href="https://gitpod.io/#https://github.com/penpot/penpot" rel="nofollow"><img alt="Gitpod ready-to-code" src="https://img.shields.io/badge/Gitpod-ready--to--code-blue?logo=gitpod" style="max-width:100%;"></a>
</p>
<p align="center"><a href="https://www.mozilla.org/en-US/MPL/2.0" rel="nofollow"><img src="https://camo.githubusercontent.com/3fcf3d6b678ea15fde3cf7d6af0e242160366282d62a7c182d83a50bfee3f45e/68747470733a2f2f696d672e736869656c64732e696f2f62616467652f4d504c2d322e302d626c75652e737667" alt="License: MPL-2.0" data-canonical-src="https://img.shields.io/badge/MPL-2.0-blue.svg" style="max-width:100%;"></a>
<a href="https://gitter.im/penpot/community" rel="nofollow"><img src="https://camo.githubusercontent.com/5b0aecb33434f82a7b158eab7247544235ada0cf7eeb9ce8e52562dd67f614b7/68747470733a2f2f6261646765732e6769747465722e696d2f736572656e6f2d78797a2f636f6d6d756e6974792e737667" alt="Gitter" data-canonical-src="https://badges.gitter.im/sereno-xyz/community.svg" style="max-width:100%;"></a>
<a href="https://tree.taiga.io/project/penpot/" title="Managed with Taiga.io" rel="nofollow"><img src="https://camo.githubusercontent.com/4a1d1112f0272e3393b1e8da312ff4435418e9e2eb4c0964881e3680f90a653c/68747470733a2f2f696d672e736869656c64732e696f2f62616467652f6d616e61676564253230776974682d54414947412e696f2d3730396631342e737667" alt="Managed with Taiga.io" data-canonical-src="https://img.shields.io/badge/managed%20with-TAIGA.io-709f14.svg" style="max-width:100%;"></a>
<a href="https://gitpod.io/#https://github.com/penpot/penpot" rel="nofollow"><img src="https://camo.githubusercontent.com/daadb4894128d1e19b72d80236f5959f1f2b47f9fe081373f3246131f0189f6c/68747470733a2f2f696d672e736869656c64732e696f2f62616467652f476974706f642d72656164792d2d746f2d2d636f64652d626c75653f6c6f676f3d676974706f64" alt="Gitpod ready-to-code" data-canonical-src="https://img.shields.io/badge/Gitpod-ready--to--code-blue?logo=gitpod" style="max-width:100%;"></a></p>
<p align="center">
<a href="https://penpot.app/"><b>Website</b></a> •
<a href="https://help.penpot.app/user-guide/"><b>User Guide</b></a> •
<a href="https://penpot.app/learning-center"><b>Learning Center</b></a> •
<a href="https://penpot.app/"><b>Website</b></a> •
<a href="https://help.penpot.app/technical-guide/getting-started/"><b>Getting Started</b></a> •
<a href="https://help.penpot.app/user-guide/"><b>User Guide</b></a> •
<a href="https://help.penpot.app/user-guide/introduction/info/"><b>Tutorials & Info</b></a> •
<a href="https://community.penpot.app/"><b>Community</b></a>
</p>
<p align="center">
<a href="https://www.youtube.com/@Penpot"><b>Youtube</b></a> •
<a href="https://peertube.kaleidos.net/a/penpot_app/video-channels"><b>Peertube</b></a> •
<a href="https://www.linkedin.com/company/penpot/"><b>Linkedin</b></a> •
<a href="https://instagram.com/penpot.app"><b>Instagram</b></a> •
<a href="https://fosstodon.org/@penpot/"><b>Mastodon</b></a> •
<a href="https://bsky.app/profile/penpot.app"><b>Bluesky</b></a> •
<a href="https://www.youtube.com/@Penpot"><b>Youtube</b></a> •
<a href="https://peertube.kaleidos.net/a/penpot_app/video-channels"><b>Peertube</b></a> •
<a href="https://www.linkedin.com/company/penpot/"><b>Linkedin</b></a> •
<a href="https://instagram.com/penpot.app"><b>Instagram</b></a> •
<a href="https://fosstodon.org/@penpot/"><b>Mastodon</b></a> •
<a href="https://twitter.com/penpotapp"><b>X</b></a>
</p>
<br />
[Penpot video](https://github.com/user-attachments/assets/7c67fd7c-04d3-4c9b-88ec-b6f5e23f8332)
[Penpot video](https://github.com/penpot/penpot/assets/5446186/b8ad0764-585e-4ddc-b098-9b4090d337cc)
<br />
Penpot is the first **open-source** design tool for design and code collaboration. Designers can create stunning designs, interactive prototypes, design systems at scale, while developers enjoy ready-to-use code and make their workflow easy and fast. And all of this with no handoff drama.
Available on browser or self-hosted, Penpot works with open standards like SVG, CSS, HTML and JSON, and it's free!
Penpot is available on browser and [self host](https://penpot.app/self-host). It's web-based and works with open standards (SVG, CSS and HTML). And last but not least, it's free!
The latest updates take Penpot even further. It's the first design tool to integrate native [design tokens](https://penpot.dev/collaboration/design-tokens)—a single source of truth to improve efficiency and collaboration between product design and development.
With the [huge 2.0 release](https://penpot.app/dev-diaries), Penpot took the platform to a whole new level. This update introduces the ground-breaking [CSS Grid Layout feature](https://penpot.app/penpot-2.0), a complete UI redesign, a new Components system, and much more.
For organizations that need extra service for its teams, [get in touch](https://cal.com/team/penpot/talk-to-us)
Penpot's latest [huge release 2.0](https://penpot.app/dev-diaries), takes the platform to a whole new level. This update introduces the ground-breaking [CSS Grid Layout feature](https://penpot.app/penpot-2.0), a complete UI redesign, a new Components system, and much more. Plus, it's faster and more accessible.
🎇 Design, code, and Open Source meet at [Penpot Fest](https://penpot.app/penpotfest)! Be part of the 2025 edition in Madrid, Spain, on October 9-10.
🎇 **Penpot Fest** is our design, code & Open Source event. Check out the highlights from [Penpot Fest 2023 edition](https://www.youtube.com/watch?v=sOpLZaK5mDc)!
## Table of contents ##
@@ -62,9 +58,6 @@ For organizations that need extra service for its teams, [get in touch](https://
Penpot expresses designs as code. Designers can do their best work and see it will be beautifully implemented by developers in a two-way collaboration.
### Plugin system ###
[Penpot plugins](https://penpot.app/penpothub/plugins) let you expand the platform's capabilities, give you the flexibility to integrate it with other apps, and design custom solutions.
### Designed for developers ###
Penpot was built to serve both designers and developers and create a fluid design-code process. You have the choice to enjoy real-time collaboration or play "solo".
@@ -77,23 +70,23 @@ Provide your team or organization with a completely owned collaborative design t
### Integrations ###
Penpot offers integration into the development toolchain, thanks to its support for webhooks and an API accessible through access tokens.
### Building Design Systems: design tokens, components and variants ###
Penpot brings design systems to code-minded teams: a single source of truth with native Design Tokens, Components, and Variants for scalable, reusable, and consistent UI across projects and platforms.
### What's great for design ###
With Penpot you can design libraries to share and reuse; turn design elements into components and tokens to allow reusability and scalability; and build realistic user flows and interactions.
<br />
<p align="center">
<img src="https://github.com/user-attachments/assets/cce75ad6-f783-473f-8803-da9eb8255fef">
<img src="https://img.plasmic.app/img-optimizer/v1/img?src=https%3A%2F%2Fimg.plasmic.app%2Fimg-optimizer%2Fv1%2Fimg%2F9dd677c36afb477e9666ccd1d3f009ad.png" alt="Open Source" style="width: 65%;">
</p>
<br />
## Getting started ##
Penpot is the only design & prototype platform that is deployment agnostic. You can use it in our [SAAS](https://design.penpot.app) or deploy it anywhere.
### Install with Elestio ###
Penpot is the only design & prototype platform that is deployment agnostic. You can use it or deploy it anywhere.
Learn how to install it with Docker, Kubernetes, Elestio or other options on [our website](https://penpot.app/self-host).
Learn how to install it with Elestio and Docker, or other options on [our website](https://penpot.app/self-host).
<br />
<p align="center">
@@ -125,21 +118,15 @@ You will find the following categories:
</p>
<br />
### Code of Conduct ###
Anyone who contributes to Penpot, whether through code, in the community, or at an event, must adhere to the
[code of conduct](https://help.penpot.app/contributing-guide/coc/) and foster a positive and safe environment.
## Contributing ##
Any contribution will make a difference to improve Penpot. How can you get involved?
Any contribution will make a difference to improve Penpot. How can you get involved?
Choose your way:
Choose your way:
- Create and [share Libraries & Templates](https://penpot.app/libraries-templates.html) that will be helpful for the community
- Invite your [team to join](https://design.penpot.app/#/auth/register)
- Give this repo a star and follow us on Social Media: [Mastodon](https://fosstodon.org/@penpot/), [Youtube](https://www.youtube.com/c/Penpot), [Instagram](https://instagram.com/penpot.app), [Linkedin](https://www.linkedin.com/company/penpotdesign), [Peertube](https://peertube.kaleidos.net/a/penpot_app), [X](https://twitter.com/penpotapp) and [BlueSky](https://bsky.app/profile/penpot.app)
- Star this repo and follow us on Social Media: [Mastodon](https://fosstodon.org/@penpot/), [Youtube](https://www.youtube.com/c/Penpot), [Instagram](https://instagram.com/penpot.app), [Linkedin](https://www.linkedin.com/company/penpotdesign), [Peertube](https://peertube.kaleidos.net/a/penpot_app) and [X](https://twitter.com/penpotapp).
- Participate in the [Community](https://community.penpot.app/) space by asking and answering questions; reacting to others articles; opening your own conversations and following along on decisions affecting the project.
- Report bugs with our easy [guide for bugs hunting](https://help.penpot.app/contributing-guide/reporting-bugs/) or [GitHub issues](https://github.com/penpot/penpot/issues)
- Become a [translator](https://help.penpot.app/contributing-guide/translations)


@@ -3,10 +3,10 @@
:deps
{penpot/common {:local/root "../common"}
org.clojure/clojure {:mvn/version "1.12.2"}
org.clojure/clojure {:mvn/version "1.12.0-alpha12"}
org.clojure/tools.namespace {:mvn/version "1.5.0"}
com.github.luben/zstd-jni {:mvn/version "1.5.7-4"}
com.github.luben/zstd-jni {:mvn/version "1.5.6-3"}
io.prometheus/simpleclient {:mvn/version "0.16.0"}
io.prometheus/simpleclient_hotspot {:mvn/version "0.16.0"}
@@ -17,41 +17,33 @@
io.prometheus/simpleclient_httpserver {:mvn/version "0.16.0"}
io.lettuce/lettuce-core {:mvn/version "6.8.1.RELEASE"}
;; Minimal dependencies required by lettuce, we need to include them
;; explicitly because clojure dependency management does not support
;; yet the BOM format.
io.micrometer/micrometer-core {:mvn/version "1.14.2"}
io.micrometer/micrometer-observation {:mvn/version "1.14.2"}
io.lettuce/lettuce-core {:mvn/version "6.3.2.RELEASE"}
java-http-clj/java-http-clj {:mvn/version "0.4.3"}
com.google.guava/guava {:mvn/version "33.4.8-jre"}
funcool/yetti
{:git/tag "v11.8"
:git/sha "1d1b33f"
{:git/tag "v10.0"
:git/sha "520613f"
:git/url "https://github.com/funcool/yetti.git"
:exclusions [org.slf4j/slf4j-api]}
com.github.seancorfield/next.jdbc
{:mvn/version "1.3.1070"}
com.github.seancorfield/next.jdbc {:mvn/version "1.3.939"}
metosin/reitit-core {:mvn/version "0.7.0"}
nrepl/nrepl {:mvn/version "1.1.2"}
cider/cider-nrepl {:mvn/version "0.48.0"}
metosin/reitit-core {:mvn/version "0.9.1"}
nrepl/nrepl {:mvn/version "1.4.0"}
org.postgresql/postgresql {:mvn/version "42.7.3"}
org.xerial/sqlite-jdbc {:mvn/version "3.46.0.0"}
org.postgresql/postgresql {:mvn/version "42.7.7"}
org.xerial/sqlite-jdbc {:mvn/version "3.50.3.0"}
com.zaxxer/HikariCP {:mvn/version "7.0.2"}
com.zaxxer/HikariCP {:mvn/version "5.1.0"}
io.whitfin/siphash {:mvn/version "2.0.0"}
buddy/buddy-hashers {:mvn/version "2.0.167"}
buddy/buddy-sign {:mvn/version "3.6.1-359"}
buddy/buddy-sign {:mvn/version "3.5.351"}
com.github.ben-manes.caffeine/caffeine {:mvn/version "3.2.2"}
com.github.ben-manes.caffeine/caffeine {:mvn/version "3.1.8"}
org.jsoup/jsoup {:mvn/version "1.21.2"}
org.jsoup/jsoup {:mvn/version "1.17.2"}
org.im4java/im4java
{:git/tag "1.4.0-penpot-2"
:git/sha "e2b3e16"
@@ -61,12 +53,13 @@
org.clojars.pntblnk/clj-ldap {:mvn/version "0.0.17"}
dawran6/emoji {:mvn/version "0.2.0"}
markdown-clj/markdown-clj {:mvn/version "1.12.4"}
dawran6/emoji {:mvn/version "0.1.5"}
markdown-clj/markdown-clj {:mvn/version "1.12.1"}
;; Pretty Print specs
pretty-spec/pretty-spec {:mvn/version "0.1.4"}
software.amazon.awssdk/s3 {:mvn/version "2.33.10"}}
software.amazon.awssdk/s3 {:mvn/version "2.25.63"}
}
:paths ["src" "resources" "target/classes"]
:aliases
@@ -81,14 +74,12 @@
:build
{:extra-deps
{io.github.clojure/tools.build {:mvn/version "0.10.10"}}
{io.github.clojure/tools.build {:git/tag "v0.10.3" :git/sha "15ead66"}}
:ns-default build}
:test
{:main-opts ["-m" "kaocha.runner"]
:jvm-opts ["-Dlog4j2.configurationFile=log4j2-devenv-repl.xml"
"--sun-misc-unsafe-memory-access=allow"
"--enable-native-access=ALL-UNNAMED"]
:jvm-opts ["-Dlog4j2.configurationFile=log4j2-devenv-repl.xml"]
:extra-deps {lambdaisland/kaocha {:mvn/version "1.91.1392"}}}
:outdated
@@ -97,8 +88,8 @@
:jmx-remote
{:jvm-opts ["-Dcom.sun.management.jmxremote"
"-Dcom.sun.management.jmxremote.port=9090"
"-Dcom.sun.management.jmxremote.rmi.port=9090"
"-Dcom.sun.management.jmxremote.port=9091"
"-Dcom.sun.management.jmxremote.rmi.port=9091"
"-Dcom.sun.management.jmxremote.local.only=false"
"-Dcom.sun.management.jmxremote.authenticate=false"
"-Dcom.sun.management.jmxremote.ssl=false"

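
With the `:jmx-remote` alias shown in the deps.edn diff above, a JMX client can attach to the configured port. A minimal sketch, assuming a JDK with `jconsole` on the PATH and the backend started with that alias (the port is 9090 on one side of the diff and 9091 on the other):

```bash
# Attach JConsole to the unauthenticated, non-SSL JMX endpoint defined by the :jmx-remote alias.
jconsole localhost:9090
```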

@@ -6,14 +6,12 @@
(ns user
(:require
[app.binfile.common :as bfc]
[app.common.data :as d]
[app.common.debug :as debug]
[app.common.exceptions :as ex]
[app.common.files.helpers :as cfh]
[app.common.fressian :as fres]
[app.common.geom.matrix :as gmt]
[app.common.json :as json]
[app.common.logging :as l]
[app.common.perf :as perf]
[app.common.pprint :as pp]
@@ -21,22 +19,20 @@
[app.common.schema.desc-js-like :as smdj]
[app.common.schema.desc-native :as smdn]
[app.common.schema.generators :as sg]
[app.common.schema.openapi :as oapi]
[app.common.spec :as us]
[app.common.time :as ct]
[app.common.json :as json]
[app.common.transit :as t]
[app.common.types.file :as ctf]
[app.common.uuid :as uuid]
[app.common.uri :as u]
[app.config :as cf]
[app.db :as db]
[app.main :as main]
[app.srepl.helpers :as h]
[app.srepl.main :refer :all]
[app.srepl.helpers :as srepl.helpers]
[app.srepl.main :as srepl]
[app.util.blob :as blob]
[app.util.time :as dt]
[clj-async-profiler.core :as prof]
[clojure.contrib.humanize :as hum]
[clojure.datafy :refer [datafy]]
[clojure.java.io :as io]
[clojure.pprint :refer [pprint print-table]]
[clojure.repl :refer :all]
@@ -141,6 +137,7 @@
;; :v6 v6
;; }])))
(defn calculate-frames
[{:keys [data]}]
(->> (vals (:pages-index data))


@@ -4,7 +4,7 @@
"license": "MPL-2.0",
"author": "Kaleidos INC",
"private": true,
"packageManager": "yarn@4.9.2+sha512.1fc009bc09d13cfd0e19efa44cbfc2b9cf6ca61482725eb35bbc5e257e093ebf4130db6dfe15d604ff4b79efd8e1e8e99b25fa7d0a6197c9f9826358d4d65c3c",
"packageManager": "yarn@4.3.1",
"repository": {
"type": "git",
"url": "https://github.com/penpot/penpot"

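The `packageManager` field in the package.json diff above pins the exact Yarn release for the repo; with Corepack enabled, Node resolves `yarn` to that version. A quick local verification (these commands are not part of the diff):

```bash
corepack enable    # let Node shim yarn based on package.json's packageManager field
yarn --version     # inside the repo, should report the pinned 4.x release
```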

@@ -193,7 +193,7 @@
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
<div
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
Click the link below to confirm the change.</div>
Click to the link below to confirm the change:</div>
</td>
</tr>
<tr>
@@ -217,7 +217,8 @@
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
<div
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
If you did not request this change, consider changing your password for security reasons.</div>
If you received this email by mistake, please consider changing your password for security
reasons.</div>
</td>
</tr>
<tr>


@@ -2,11 +2,12 @@ Hello {{name|abbreviate:25}}!
We received a request to change your current email to {{ pending-email }}.
Click the link below to confirm the change.
Click to the link below to confirm the change:
{{ public-uri }}/#/auth/verify-token?token={{token}}
If you did not request this change, consider changing your password for security reasons.
If you received this email by mistake, please consider changing your password
for security reasons.
Enjoy!
The Penpot team.


@@ -1,244 +0,0 @@
<!doctype html>
<html xmlns="http://www.w3.org/1999/xhtml" xmlns:v="urn:schemas-microsoft-com:vml"
xmlns:o="urn:schemas-microsoft-com:office:office">
<head>
<title>
</title>
<!--[if !mso]><!-- -->
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<!--<![endif]-->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<style type="text/css">
#outlook a {
padding: 0;
}
body {
margin: 0;
padding: 0;
-webkit-text-size-adjust: 100%;
-ms-text-size-adjust: 100%;
}
table,
td {
border-collapse: collapse;
mso-table-lspace: 0pt;
mso-table-rspace: 0pt;
}
img {
border: 0;
height: auto;
line-height: 100%;
outline: none;
text-decoration: none;
-ms-interpolation-mode: bicubic;
}
p {
display: block;
margin: 13px 0;
}
</style>
<!--[if mso]>
<xml>
<o:OfficeDocumentSettings>
<o:AllowPNG/>
<o:PixelsPerInch>96</o:PixelsPerInch>
</o:OfficeDocumentSettings>
</xml>
<![endif]-->
<!--[if lte mso 11]>
<style type="text/css">
.mj-outlook-group-fix { width:100% !important; }
</style>
<![endif]-->
<!--[if !mso]><!-->
<link href="https://fonts.googleapis.com/css?family=Source%20Sans%20Pro" rel="stylesheet" type="text/css">
<style type="text/css">
@import url(https://fonts.googleapis.com/css?family=Source%20Sans%20Pro);
</style>
<!--<![endif]-->
<style type="text/css">
@media only screen and (min-width:480px) {
.mj-column-per-100 {
width: 100% !important;
max-width: 100%;
}
.mj-column-px-425 {
width: 425px !important;
max-width: 425px;
}
}
</style>
<style type="text/css">
@media only screen and (max-width:480px) {
table.mj-full-width-mobile {
width: 100% !important;
}
td.mj-full-width-mobile {
width: auto !important;
}
}
</style>
</head>
<body style="background-color:#E5E5E5;">
<div style="background-color:#E5E5E5;">
<!--[if mso | IE]>
<table
align="center" border="0" cellpadding="0" cellspacing="0" class="" style="width:600px;" width="600"
>
<tr>
<td style="line-height:0px;font-size:0px;mso-line-height-rule:exactly;">
<![endif]-->
<div style="margin:0px auto;max-width:600px;">
<table align="center" border="0" cellpadding="0" cellspacing="0" role="presentation" style="width:100%;">
<tbody>
<tr>
<td style="direction:ltr;font-size:0px;padding:0;text-align:center;">
<!--[if mso | IE]>
<table role="presentation" border="0" cellpadding="0" cellspacing="0">
<tr>
<td
class="" style="vertical-align:top;width:600px;"
>
<![endif]-->
<div class="mj-column-per-100 mj-outlook-group-fix"
style="font-size:0px;text-align:left;direction:ltr;display:inline-block;vertical-align:top;width:100%;">
<table border="0" cellpadding="0" cellspacing="0" role="presentation" style="vertical-align:top;"
width="100%">
<tr>
<td align="left" style="font-size:0px;padding:16px;word-break:break-word;">
<table border="0" cellpadding="0" cellspacing="0" role="presentation"
style="border-collapse:collapse;border-spacing:0px;">
<tbody>
<tr>
<td style="width:97px;">
<img height="32" src="{{ public-uri }}/images/email/uxbox-title.png"
style="border:0;display:block;outline:none;text-decoration:none;height:32px;width:100%;font-size:13px;"
width="97" />
</td>
</tr>
</tbody>
</table>
</td>
</tr>
</table>
</div>
<!--[if mso | IE]>
</td>
</tr>
</table>
<![endif]-->
</td>
</tr>
</tbody>
</table>
</div>
<!--[if mso | IE]>
</td>
</tr>
</table>
<table
align="center" border="0" cellpadding="0" cellspacing="0" class="" style="width:600px;" width="600"
>
<tr>
<td style="line-height:0px;font-size:0px;mso-line-height-rule:exactly;">
<![endif]-->
<div style="background:#FFFFFF;background-color:#FFFFFF;margin:0px auto;max-width:600px;">
<table align="center" border="0" cellpadding="0" cellspacing="0" role="presentation"
style="background:#FFFFFF;background-color:#FFFFFF;width:100%;">
<tbody>
<tr>
<td style="direction:ltr;font-size:0px;padding:20px 0;text-align:center;">
<!--[if mso | IE]>
<table role="presentation" border="0" cellpadding="0" cellspacing="0">
<tr>
<td
class="" style="vertical-align:top;width:600px;"
>
<![endif]-->
<div class="mj-column-per-100 mj-outlook-group-fix"
style="font-size:0px;text-align:left;direction:ltr;display:inline-block;vertical-align:top;width:100%;">
<table border="0" cellpadding="0" cellspacing="0" role="presentation" style="vertical-align:top;"
width="100%">
<tr>
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
<div
style="font-family:Source Sans Pro, sans-serif;font-size:24px;font-weight:600;line-height:150%;text-align:left;color:#000000;">
Hello {{name|abbreviate:25}}!</div>
</td>
</tr>
<tr>
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
<div
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
<span style="font-weight:bold;">{{ source-user }}</span> has mentioned you on a comment at "{{ comment-reference }}".</div>
</td>
</tr>
<tr>
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
<div style="font-family:Source Sans Pro, sans-serif;font-size:16px;font-style:italic;line-height:150%;text-align:left;color:#212426;
border-top: 1px solid #bfbfbf; border-bottom: 1px solid #bfbfbf; padding: 32px 0px;">
{{ comment-content }}
</div>
</td>
</tr>
<tr>
<td align="center" vertical-align="middle"
style="font-size:0px;padding:10px 25px;word-break:break-word;">
<table border="0" cellpadding="0" cellspacing="0" role="presentation"
style="border-collapse:separate;line-height:100%;">
<tr>
<td align="center" bgcolor="#6911d4" role="presentation"
style="border:none;border-radius:8px;cursor:auto;mso-padding-alt:10px 25px;background:#6911d4;"
valign="middle">
<a href="{{ comment-url }}"
style="display:inline-block;background:#6911d4;color:#FFFFFF;font-family:Source Sans Pro, sans-serif;font-size:16px;font-weight:normal;line-height:120%;margin:0;text-decoration:none;text-transform:none;padding:10px 25px;mso-padding-alt:0px;border-radius:8px;"
target="_blank"> GO TO THE COMMENT </a>
</td>
</tr>
</table>
</td>
</tr>
<tr>
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
<div
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
The Penpot team.</div>
</td>
</tr>
</table>
</div>
<!--[if mso | IE]>
</td>
</tr>
</table>
<![endif]-->
</td>
</tr>
</tbody>
</table>
</div>
{% include "app/email/includes/footer.html" %}
</div>
</body>
</html>


@@ -1 +0,0 @@
Mentioned in comment


@@ -1,13 +0,0 @@
Hello {{name|abbreviate:25}}!
{{ source-user }} has mentioned you on a comment at "{{ comment-reference }}".
--
{{ comment-content }}
--
{{ comment-url }}
The Penpot team.


@@ -1,244 +0,0 @@
<!doctype html>
<html xmlns="http://www.w3.org/1999/xhtml" xmlns:v="urn:schemas-microsoft-com:vml"
xmlns:o="urn:schemas-microsoft-com:office:office">
<head>
<title>
</title>
<!--[if !mso]><!-- -->
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<!--<![endif]-->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<style type="text/css">
#outlook a {
padding: 0;
}
body {
margin: 0;
padding: 0;
-webkit-text-size-adjust: 100%;
-ms-text-size-adjust: 100%;
}
table,
td {
border-collapse: collapse;
mso-table-lspace: 0pt;
mso-table-rspace: 0pt;
}
img {
border: 0;
height: auto;
line-height: 100%;
outline: none;
text-decoration: none;
-ms-interpolation-mode: bicubic;
}
p {
display: block;
margin: 13px 0;
}
</style>
<!--[if mso]>
<xml>
<o:OfficeDocumentSettings>
<o:AllowPNG/>
<o:PixelsPerInch>96</o:PixelsPerInch>
</o:OfficeDocumentSettings>
</xml>
<![endif]-->
<!--[if lte mso 11]>
<style type="text/css">
.mj-outlook-group-fix { width:100% !important; }
</style>
<![endif]-->
<!--[if !mso]><!-->
<link href="https://fonts.googleapis.com/css?family=Source%20Sans%20Pro" rel="stylesheet" type="text/css">
<style type="text/css">
@import url(https://fonts.googleapis.com/css?family=Source%20Sans%20Pro);
</style>
<!--<![endif]-->
<style type="text/css">
@media only screen and (min-width:480px) {
.mj-column-per-100 {
width: 100% !important;
max-width: 100%;
}
.mj-column-px-425 {
width: 425px !important;
max-width: 425px;
}
}
</style>
<style type="text/css">
@media only screen and (max-width:480px) {
table.mj-full-width-mobile {
width: 100% !important;
}
td.mj-full-width-mobile {
width: auto !important;
}
}
</style>
</head>
<body style="background-color:#E5E5E5;">
<div style="background-color:#E5E5E5;">
<!--[if mso | IE]>
<table
align="center" border="0" cellpadding="0" cellspacing="0" class="" style="width:600px;" width="600"
>
<tr>
<td style="line-height:0px;font-size:0px;mso-line-height-rule:exactly;">
<![endif]-->
<div style="margin:0px auto;max-width:600px;">
<table align="center" border="0" cellpadding="0" cellspacing="0" role="presentation" style="width:100%;">
<tbody>
<tr>
<td style="direction:ltr;font-size:0px;padding:0;text-align:center;">
<!--[if mso | IE]>
<table role="presentation" border="0" cellpadding="0" cellspacing="0">
<tr>
<td
class="" style="vertical-align:top;width:600px;"
>
<![endif]-->
<div class="mj-column-per-100 mj-outlook-group-fix"
style="font-size:0px;text-align:left;direction:ltr;display:inline-block;vertical-align:top;width:100%;">
<table border="0" cellpadding="0" cellspacing="0" role="presentation" style="vertical-align:top;"
width="100%">
<tr>
<td align="left" style="font-size:0px;padding:16px;word-break:break-word;">
<table border="0" cellpadding="0" cellspacing="0" role="presentation"
style="border-collapse:collapse;border-spacing:0px;">
<tbody>
<tr>
<td style="width:97px;">
<img height="32" src="{{ public-uri }}/images/email/uxbox-title.png"
style="border:0;display:block;outline:none;text-decoration:none;height:32px;width:100%;font-size:13px;"
width="97" />
</td>
</tr>
</tbody>
</table>
</td>
</tr>
</table>
</div>
<!--[if mso | IE]>
</td>
</tr>
</table>
<![endif]-->
</td>
</tr>
</tbody>
</table>
</div>
<!--[if mso | IE]>
</td>
</tr>
</table>
<table
align="center" border="0" cellpadding="0" cellspacing="0" class="" style="width:600px;" width="600"
>
<tr>
<td style="line-height:0px;font-size:0px;mso-line-height-rule:exactly;">
<![endif]-->
<div style="background:#FFFFFF;background-color:#FFFFFF;margin:0px auto;max-width:600px;">
<table align="center" border="0" cellpadding="0" cellspacing="0" role="presentation"
style="background:#FFFFFF;background-color:#FFFFFF;width:100%;">
<tbody>
<tr>
<td style="direction:ltr;font-size:0px;padding:20px 0;text-align:center;">
<!--[if mso | IE]>
<table role="presentation" border="0" cellpadding="0" cellspacing="0">
<tr>
<td
class="" style="vertical-align:top;width:600px;"
>
<![endif]-->
<div class="mj-column-per-100 mj-outlook-group-fix"
style="font-size:0px;text-align:left;direction:ltr;display:inline-block;vertical-align:top;width:100%;">
<table border="0" cellpadding="0" cellspacing="0" role="presentation" style="vertical-align:top;"
width="100%">
<tr>
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
<div
style="font-family:Source Sans Pro, sans-serif;font-size:24px;font-weight:600;line-height:150%;text-align:left;color:#000000;">
Hello {{name|abbreviate:25}}!</div>
</td>
</tr>
<tr>
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
<div
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
<span style="font-weight:bold;">{{ source-user }}</span> has commented at "{{ comment-reference }}".</div>
</td>
</tr>
<tr>
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
<div style="font-family:Source Sans Pro, sans-serif;font-size:16px;font-style:italic;line-height:150%;text-align:left;color:#212426;
border-top: 1px solid #bfbfbf; border-bottom: 1px solid #bfbfbf; padding: 32px 0px;">
{{ comment-content }}
</div>
</td>
</tr>
<tr>
<td align="center" vertical-align="middle"
style="font-size:0px;padding:10px 25px;word-break:break-word;">
<table border="0" cellpadding="0" cellspacing="0" role="presentation"
style="border-collapse:separate;line-height:100%;">
<tr>
<td align="center" bgcolor="#6911d4#31EFB8" role="presentation"
style="border:none;border-radius:8px;cursor:auto;mso-padding-alt:10px 25px;background:#6911d4;"
valign="middle">
<a href="{{ comment-url }}"
style="display:inline-block;background:#6911d4;color:#FFFFFF;font-family:Source Sans Pro, sans-serif;font-size:16px;font-weight:normal;line-height:120%;margin:0;text-decoration:none;text-transform:none;padding:10px 25px;mso-padding-alt:0px;border-radius:8px;"
target="_blank"> GO TO THE COMMENT </a>
</td>
</tr>
</table>
</td>
</tr>
<tr>
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
<div
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
The Penpot team.</div>
</td>
</tr>
</table>
</div>
<!--[if mso | IE]>
</td>
</tr>
</table>
<![endif]-->
</td>
</tr>
</tbody>
</table>
</div>
{% include "app/email/includes/footer.html" %}
</div>
</body>
</html>


@@ -1 +0,0 @@
New comment


@@ -1,13 +0,0 @@
Hello {{name|abbreviate:25}}!
{{ source-user }} has commented at "{{ comment-reference }}".
--
{{ comment-content }}
--
{{ comment-url }}
The Penpot team.


@@ -1,244 +0,0 @@
<!doctype html>
<html xmlns="http://www.w3.org/1999/xhtml" xmlns:v="urn:schemas-microsoft-com:vml"
xmlns:o="urn:schemas-microsoft-com:office:office">
<head>
<title>
</title>
<!--[if !mso]><!-- -->
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<!--<![endif]-->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<style type="text/css">
#outlook a {
padding: 0;
}
body {
margin: 0;
padding: 0;
-webkit-text-size-adjust: 100%;
-ms-text-size-adjust: 100%;
}
table,
td {
border-collapse: collapse;
mso-table-lspace: 0pt;
mso-table-rspace: 0pt;
}
img {
border: 0;
height: auto;
line-height: 100%;
outline: none;
text-decoration: none;
-ms-interpolation-mode: bicubic;
}
p {
display: block;
margin: 13px 0;
}
</style>
<!--[if mso]>
<xml>
<o:OfficeDocumentSettings>
<o:AllowPNG/>
<o:PixelsPerInch>96</o:PixelsPerInch>
</o:OfficeDocumentSettings>
</xml>
<![endif]-->
<!--[if lte mso 11]>
<style type="text/css">
.mj-outlook-group-fix { width:100% !important; }
</style>
<![endif]-->
<!--[if !mso]><!-->
<link href="https://fonts.googleapis.com/css?family=Source%20Sans%20Pro" rel="stylesheet" type="text/css">
<style type="text/css">
@import url(https://fonts.googleapis.com/css?family=Source%20Sans%20Pro);
</style>
<!--<![endif]-->
<style type="text/css">
@media only screen and (min-width:480px) {
.mj-column-per-100 {
width: 100% !important;
max-width: 100%;
}
.mj-column-px-425 {
width: 425px !important;
max-width: 425px;
}
}
</style>
<style type="text/css">
@media only screen and (max-width:480px) {
table.mj-full-width-mobile {
width: 100% !important;
}
td.mj-full-width-mobile {
width: auto !important;
}
}
</style>
</head>
<body style="background-color:#E5E5E5;">
<div style="background-color:#E5E5E5;">
<!--[if mso | IE]>
<table
align="center" border="0" cellpadding="0" cellspacing="0" class="" style="width:600px;" width="600"
>
<tr>
<td style="line-height:0px;font-size:0px;mso-line-height-rule:exactly;">
<![endif]-->
<div style="margin:0px auto;max-width:600px;">
<table align="center" border="0" cellpadding="0" cellspacing="0" role="presentation" style="width:100%;">
<tbody>
<tr>
<td style="direction:ltr;font-size:0px;padding:0;text-align:center;">
<!--[if mso | IE]>
<table role="presentation" border="0" cellpadding="0" cellspacing="0">
<tr>
<td
class="" style="vertical-align:top;width:600px;"
>
<![endif]-->
<div class="mj-column-per-100 mj-outlook-group-fix"
style="font-size:0px;text-align:left;direction:ltr;display:inline-block;vertical-align:top;width:100%;">
<table border="0" cellpadding="0" cellspacing="0" role="presentation" style="vertical-align:top;"
width="100%">
<tr>
<td align="left" style="font-size:0px;padding:16px;word-break:break-word;">
<table border="0" cellpadding="0" cellspacing="0" role="presentation"
style="border-collapse:collapse;border-spacing:0px;">
<tbody>
<tr>
<td style="width:97px;">
<img height="32" src="{{ public-uri }}/images/email/uxbox-title.png"
style="border:0;display:block;outline:none;text-decoration:none;height:32px;width:100%;font-size:13px;"
width="97" />
</td>
</tr>
</tbody>
</table>
</td>
</tr>
</table>
</div>
<!--[if mso | IE]>
</td>
</tr>
</table>
<![endif]-->
</td>
</tr>
</tbody>
</table>
</div>
<!--[if mso | IE]>
</td>
</tr>
</table>
<table
align="center" border="0" cellpadding="0" cellspacing="0" class="" style="width:600px;" width="600"
>
<tr>
<td style="line-height:0px;font-size:0px;mso-line-height-rule:exactly;">
<![endif]-->
<div style="background:#FFFFFF;background-color:#FFFFFF;margin:0px auto;max-width:600px;">
<table align="center" border="0" cellpadding="0" cellspacing="0" role="presentation"
style="background:#FFFFFF;background-color:#FFFFFF;width:100%;">
<tbody>
<tr>
<td style="direction:ltr;font-size:0px;padding:20px 0;text-align:center;">
<!--[if mso | IE]>
<table role="presentation" border="0" cellpadding="0" cellspacing="0">
<tr>
<td
class="" style="vertical-align:top;width:600px;"
>
<![endif]-->
<div class="mj-column-per-100 mj-outlook-group-fix"
style="font-size:0px;text-align:left;direction:ltr;display:inline-block;vertical-align:top;width:100%;">
<table border="0" cellpadding="0" cellspacing="0" role="presentation" style="vertical-align:top;"
width="100%">
<tr>
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
<div
style="font-family:Source Sans Pro, sans-serif;font-size:24px;font-weight:600;line-height:150%;text-align:left;color:#000000;">
Hello {{name|abbreviate:25}}!</div>
</td>
</tr>
<tr>
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
<div
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
<span style="font-weight:bold;">{{ source-user }}</span> has created a comment in a thread you've been mentioned at "{{ comment-reference }}".</div>
</td>
</tr>
<tr>
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
<div style="font-family:Source Sans Pro, sans-serif;font-size:16px;font-style:italic;line-height:150%;text-align:left;color:#212426;
border-top: 1px solid #bfbfbf; border-bottom: 1px solid #bfbfbf; padding: 32px 0px;">
{{ comment-content }}
</div>
</td>
</tr>
<tr>
<td align="center" vertical-align="middle"
style="font-size:0px;padding:10px 25px;word-break:break-word;">
<table border="0" cellpadding="0" cellspacing="0" role="presentation"
style="border-collapse:separate;line-height:100%;">
<tr>
<td align="center" bgcolor="#6911d4#31EFB8" role="presentation"
style="border:none;border-radius:8px;cursor:auto;mso-padding-alt:10px 25px;background:#6911d4;"
valign="middle">
<a href="{{ comment-url }}"
style="display:inline-block;background:#6911d4;color:#FFFFFF;font-family:Source Sans Pro, sans-serif;font-size:16px;font-weight:normal;line-height:120%;margin:0;text-decoration:none;text-transform:none;padding:10px 25px;mso-padding-alt:0px;border-radius:8px;"
target="_blank"> GO TO THE COMMENT </a>
</td>
</tr>
</table>
</td>
</tr>
<tr>
<td align="left" style="font-size:0px;padding:10px 25px;word-break:break-word;">
<div
style="font-family:Source Sans Pro, sans-serif;font-size:16px;line-height:150%;text-align:left;color:#000000;">
The Penpot team.</div>
</td>
</tr>
</table>
</div>
<!--[if mso | IE]>
</td>
</tr>
</table>
<![endif]-->
</td>
</tr>
</tbody>
</table>
</div>
{% include "app/email/includes/footer.html" %}
</div>
</body>
</html>


@@ -1 +0,0 @@
New response in comment


@@ -1,13 +0,0 @@
Hello {{name|abbreviate:25}}!
{{ source-user }} has created a comment in a thread you've been mentioned at "{{ comment-reference }}".
--
{{ comment-content }}
--
{{ comment-url }}
The Penpot team.


@@ -8,41 +8,38 @@
<body>
<p>
<strong>Feedback from:</strong><br />
<span>
<span>Name: </span>
<span><code>{{profile.fullname|abbreviate:25}}</code></span>
</span>
<br />
<span>
<span>Email: </span>
<span>{{profile.email}}</span>
</span>
<br />
<span>
<span>ID: </span>
<span><code>{{profile.id}}</code></span>
</span>
{% if profile %}
<span>
<span>Name: </span>
<span><code>{{profile.fullname|abbreviate:25}}</code></span>
</span>
<br />
<span>
<span>Email: </span>
<span>{{profile.email}}</span>
</span>
<br />
<span>
<span>ID: </span>
<span><code>{{profile.id}}</code></span>
</span>
{% else %}
<span>
<span>Email: </span>
<span>{{profile.email}}</span>
</span>
{% endif %}
</p>
<p>
<strong>Subject:</strong><br />
<span>{{feedback-subject|abbreviate:300}}</span>
<span>{{subject|abbreviate:300}}</span>
</p>
<p>
<strong>Type:</strong><br />
<span>{{feedback-type|abbreviate:300}}</span>
</p>
{% if feedback-error-href %}
<p>
<strong>Error HREF:</strong><br />
<span>{{feedback-error-href|abbreviate:500}}</span>
</p>
{% endif %}
<p>
<strong>Message:</strong><br />
{{feedback-content|linebreaks-br}}
{{content|linebreaks-br|safe}}
</p>
</body>
</html>


@@ -1 +1 @@
[PENPOT FEEDBACK]: {{feedback-subject}}
[PENPOT FEEDBACK]: {{subject}}


@@ -1,11 +1,9 @@
From: {{profile.fullname}} <{{profile.email}}> / {{profile.id}}
Subject: {{feedback-subject}}
Type: {{feedback-type}}
{% if profile %}
Feedback profile: {{profile.fullname}} <{{profile.email}}> / {{profile.id}}
{% else %}
Feedback from: {{email}}
{% endif %}
{% if feedback-error-href %}
HREF: {{feedback-error-href}}
{% endif -%}
Subject: {{subject}}
Message:
{{feedback-content}}
{{content}}


@@ -195,12 +195,12 @@
<table border="0" cellpadding="0" cellspacing="0" role="presentation"
style="border-collapse:separate;line-height:100%;">
<tr>
<td align="center" bgcolor="#6911d4" role="presentation"
style="border:none;border-radius:8px;cursor:auto;mso-padding-alt:10px 25px;background:#6911d4;"
<td align="center" bgcolor="#31EFB8" role="presentation"
style="border:none;border-radius:3px;cursor:auto;mso-padding-alt:10px 25px;background:#31EFB8;"
valign="middle">
<a href="{{ public-uri }}/#/auth/verify-token?token={{token}}"
style="display:inline-block;background:#6911d4;color:#FFFFFF;font-family:Source Sans Pro, sans-serif;font-size:16px;font-weight:normal;line-height:120%;margin:0;text-decoration:none;text-transform:none;padding:10px 25px;mso-padding-alt:0px;border-radius:8px;"
target="_blank"> ACCEPT INVITE </a>
style="display:inline-block;background:#31EFB8;color:#1F1F1F;font-family:Source Sans Pro, sans-serif;font-size:16px;font-weight:normal;line-height:120%;margin:0;text-decoration:none;text-transform:none;padding:10px 25px;mso-padding-alt:0px;border-radius:3px;"
target="_blank"> Accept invite </a>
</td>
</tr>
</table>
@@ -240,4 +240,4 @@
</div>
</body>
</html>
</html>


@@ -1 +1 @@
{{invited-by|abbreviate:25}} has invited you to join the team “{{ team|abbreviate:25 }}
Invitation to join {{team}}


@@ -196,12 +196,12 @@
<table border="0" cellpadding="0" cellspacing="0" role="presentation"
style="border-collapse:separate;line-height:100%;">
<tr>
<td align="center" bgcolor="#6911d4" role="presentation"
style="border:none;border-radius:8px;cursor:auto;mso-padding-alt:10px 25px;background:#6911d4;"
<td align="center" bgcolor="#31EFB8" role="presentation"
style="border:none;border-radius:3px;cursor:auto;mso-padding-alt:10px 25px;background:#31EFB8;"
valign="middle">
<a href="{{ public-uri }}/#/dashboard/team/{{team-id}}/projects"
style="display:inline-block;background:#6911d4;color:#FFFFFF;font-family:Source Sans Pro, sans-serif;font-size:16px;font-weight:normal;line-height:120%;margin:0;text-decoration:none;text-transform:none;padding:10px 25px;mso-padding-alt:0px;border-radius:8px;"
target="_blank"> GO TO THE TEAM </a>
style="display:inline-block;background:#31EFB8;color:#1F1F1F;font-family:Source Sans Pro, sans-serif;font-size:16px;font-weight:normal;line-height:120%;margin:0;text-decoration:none;text-transform:none;padding:10px 25px;mso-padding-alt:0px;border-radius:3px;"
target="_blank"> Go to the Team </a>
</td>
</tr>
</table>


@@ -196,12 +196,12 @@
<table border="0" cellpadding="0" cellspacing="0" role="presentation"
style="border-collapse:separate;line-height:100%;">
<tr>
<td align="center" bgcolor="#6911d4" role="presentation"
style="border:none;border-radius:8px;cursor:auto;mso-padding-alt:10px 25px;background:#6911d4;"
<td align="center" bgcolor="#31EFB8" role="presentation"
style="border:none;border-radius:3px;cursor:auto;mso-padding-alt:10px 25px;background:#31EFB8;"
valign="middle">
<a href="{{ public-uri }}/#/auth/recovery?token={{token}}"
style="display:inline-block;background:#6911d4;color:#FFFFFF;font-family:Source Sans Pro, sans-serif;font-size:16px;font-weight:normal;line-height:120%;margin:0;text-decoration:none;text-transform:none;padding:10px 25px;mso-padding-alt:0px;border-radius:8px;"
target="_blank"> RESET PASSWORD </a>
style="display:inline-block;background:#31EFB8;color:#1F1F1F;font-family:Source Sans Pro, sans-serif;font-size:16px;font-weight:normal;line-height:120%;margin:0;text-decoration:none;text-transform:none;padding:10px 25px;mso-padding-alt:0px;border-radius:3px;"
target="_blank"> Reset password </a>
</td>
</tr>
</table>


@@ -196,12 +196,12 @@
<table border="0" cellpadding="0" cellspacing="0" role="presentation"
style="border-collapse:separate;line-height:100%;">
<tr>
<td align="center" bgcolor="#6911d4" role="presentation"
style="border:none;border-radius:8px;cursor:auto;mso-padding-alt:10px 25px;background:#6911d4;"
<td align="center" bgcolor="#31EFB8" role="presentation"
style="border:none;border-radius:3px;cursor:auto;mso-padding-alt:10px 25px;background:#31EFB8;"
valign="middle">
<a href="{{ public-uri }}/#/auth/verify-token?token={{token}}"
style="display:inline-block;background:#6911d4;color:#FFFFFF;font-family:Source Sans Pro, sans-serif;font-size:16px;font-weight:normal;line-height:120%;margin:0;text-decoration:none;text-transform:none;padding:10px 25px;mso-padding-alt:0px;border-radius:8px;"
target="_blank"> VERIFY EMAIL </a>
style="display:inline-block;background:#31EFB8;color:#1F1F1F;font-family:Source Sans Pro, sans-serif;font-size:16px;font-weight:normal;line-height:120%;margin:0;text-decoration:none;text-transform:none;padding:10px 25px;mso-padding-alt:0px;border-radius:3px;"
target="_blank"> Verify email </a>
</td>
</tr>
</table>


@@ -204,12 +204,12 @@
<table border="0" cellpadding="0" cellspacing="0" role="presentation"
style="border-collapse:separate;line-height:100%">
<tr>
<td align="center" bgcolor="#6911d4" role="presentation"
style="border:none;border-radius:8px;cursor:auto;mso-padding-alt:10px 25px;background:#6911d4;"
<td align="center" bgcolor="#31EFB8" role="presentation"
style="border:none;border-radius:3px;cursor:auto;mso-padding-alt:10px 25px;background:#31EFB8;"
valign="middle">
<a href="{{ public-uri }}/#/view?file-id={{file-id}}&page-id={{page-id}}&section=interactions&index=0&share=true"
style="display:inline-block;background:#6911d4;color:#FFFFFF;font-family:Source Sans Pro, sans-serif;font-size:16px;font-weight:normal;line-height:120%;margin:0;text-decoration:none;text-transform:none;padding:10px 25px;mso-padding-alt:0px;border-radius:8px;"
target="_blank"> SEND A VIEW-ONLY LINK </a>
<a href="{{ public-uri }}/#/view/{{file-id}}?page-id={{page-id}}&section=interactions&index=0&share=true"
style="display:inline-block;background:#31EFB8;color:#1F1F1F;font-family:Source Sans Pro, sans-serif;font-size:16px;font-weight:normal;line-height:120%;margin:0;text-decoration:none;text-transform:none;padding:10px 25px;mso-padding-alt:0px;border-radius:3px;"
target="_blank"> Send a View-Only link </a>
</td>
</tr>
</table>
@@ -251,4 +251,4 @@
</div>
</body>
</html>
</html>


@@ -6,7 +6,7 @@ Since this file is in your Penpot team, you can provide access by sending a view
To proceed, please click the link below to generate and send the view-only link:
{{ public-uri }}/#/view?file-id={{file-id}}&page-id={{page-id}}&section=interactions&index=0&share=true
{{ public-uri }}/#/view/{{file-id}}?page-id={{page-id}}&section=interactions&index=0&share=true
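The hunk above (and several templates that follow) changes the shape of the view-only link: the file id moves from a `file-id` query parameter into a path segment after `/#/view/`. A tiny Python sketch of the two shapes, with placeholder host and ids (not taken from the diff):

```python
# Both URL shapes as they appear in the templates; host and ids are placeholders.
PUBLIC_URI = "https://penpot.example.com"
FILE_ID = "FILE-ID"
PAGE_ID = "PAGE-ID"

query_param_form = (f"{PUBLIC_URI}/#/view?file-id={FILE_ID}&page-id={PAGE_ID}"
                    "&section=interactions&index=0&share=true")
path_segment_form = (f"{PUBLIC_URI}/#/view/{FILE_ID}?page-id={PAGE_ID}"
                     "&section=interactions&index=0&share=true")

print(query_param_form)
print(path_segment_form)
```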


@@ -227,12 +227,12 @@
<table border="0" cellpadding="0" cellspacing="0" role="presentation"
style="border-collapse:separate;line-height:100%">
<tr>
<td align="center" bgcolor="#6911d4" role="presentation"
style="border:none;border-radius:8px;cursor:auto;mso-padding-alt:10px 25px;background:#6911d4;"
<td align="center" bgcolor="#31EFB8" role="presentation"
style="border:none;border-radius:3px;cursor:auto;mso-padding-alt:10px 25px;background:#31EFB8;"
valign="middle">
<a href="{{ public-uri }}/#/view?file-id={{file-id}}&page-id={{page-id}}&section=interactions&index=0&share=true"
style="display:inline-block;background:#6911d4;color:#FFFFFF;font-family:Source Sans Pro, sans-serif;font-size:16px;font-weight:normal;line-height:120%;margin:0;text-decoration:none;text-transform:none;padding:10px 25px;mso-padding-alt:0px;border-radius:8px;"
target="_blank"> SEND A VIEW-ONLY LINK </a>
<a href="{{ public-uri }}/#/view/{{file-id}}?page-id={{page-id}}&section=interactions&index=0&share=true"
style="display:inline-block;background:#31EFB8;color:#1F1F1F;font-family:Source Sans Pro, sans-serif;font-size:16px;font-weight:normal;line-height:120%;margin:0;text-decoration:none;text-transform:none;padding:10px 25px;mso-padding-alt:0px;border-radius:3px;"
target="_blank"> Send a View-Only link </a>
</td>
</tr>
</table>
@@ -274,4 +274,4 @@
</div>
</body>
</html>
</html>


@@ -19,7 +19,7 @@ Alternatively, you can create and share a view-only link to the file. This will
Click the link below to generate and send the link:
{{ public-uri }}/#/view?file-id={{file-id}}&page-id={{page-id}}&section=interactions&index=0&share=true
{{ public-uri }}/#/view/{{file-id}}?page-id={{page-id}}&section=interactions&index=0&share=true


@@ -211,12 +211,12 @@
<table border="0" cellpadding="0" cellspacing="0" role="presentation"
style="border-collapse:separate;line-height:100%">
<tr>
<td align="center" bgcolor="#6911d4" role="presentation"
style="border:none;border-radius:8px;cursor:auto;mso-padding-alt:10px 25px;background:#6911d4;"
<td align="center" bgcolor="#31EFB8" role="presentation"
style="border:none;border-radius:3px;cursor:auto;mso-padding-alt:10px 25px;background:#31EFB8;"
valign="middle">
<a href="{{ public-uri }}/#/dashboard/members?team-id={{team-id}}&invite-email={{requested-by-email|urlescape }}"
style="display:inline-block;background:#6911d4;color:#FFFFFF;font-family:Source Sans Pro, sans-serif;font-size:16px;font-weight:normal;line-height:120%;margin:0;text-decoration:none;text-transform:none;padding:10px 25px;mso-padding-alt:0px;border-radius:8px;"
target="_blank"> GIVE ACCESS TO “{{team-name|abbreviate:25}}” TEAM </a>
<a href="{{ public-uri }}/#/dashboard/team/{{team-id}}/members?invite-email={{requested-by-email|urlescape }}"
style="display:inline-block;background:#31EFB8;color:#1F1F1F;font-family:Source Sans Pro, sans-serif;font-size:16px;font-weight:normal;line-height:120%;margin:0;text-decoration:none;text-transform:none;padding:10px 25px;mso-padding-alt:0px;border-radius:3px;"
target="_blank"> Give access to “{{team-name|abbreviate:25}}” Team </a>
</td>
</tr>
</table>
@@ -244,12 +244,12 @@
<table border="0" cellpadding="0" cellspacing="0" role="presentation"
style="border-collapse:separate;line-height:100%">
<tr>
<td align="center" bgcolor="#6911d4" role="presentation"
style="border:none;border-radius:8px;cursor:auto;mso-padding-alt:10px 25px;background:#6911d4;"
<td align="center" bgcolor="#31EFB8" role="presentation"
style="border:none;border-radius:3px;cursor:auto;mso-padding-alt:10px 25px;background:#31EFB8;"
valign="middle">
<a href="{{ public-uri }}/#/view?file-id={{file-id}}&page-id={{page-id}}&section=interactions&index=0&share=true"
style="display:inline-block;background:#6911d4;color:#FFFFFF;font-family:Source Sans Pro, sans-serif;font-size:16px;font-weight:normal;line-height:120%;margin:0;text-decoration:none;text-transform:none;padding:10px 25px;mso-padding-alt:0px;border-radius:8px;"
target="_blank"> SEND A VIEW-ONLY LINK </a>
<a href="{{ public-uri }}/#/view/{{file-id}}?page-id={{page-id}}&section=interactions&index=0&share=true"
style="display:inline-block;background:#31EFB8;color:#1F1F1F;font-family:Source Sans Pro, sans-serif;font-size:16px;font-weight:normal;line-height:120%;margin:0;text-decoration:none;text-transform:none;padding:10px 25px;mso-padding-alt:0px;border-radius:3px;"
target="_blank"> Send a View-Only link </a>
</td>
</tr>
</table>
@@ -292,4 +292,4 @@
</div>
</body>
</html>
</html>


@@ -13,7 +13,7 @@ This will automatically include {{requested-by|abbreviate:25}} in the team, so t
Click the link below to provide team access:
{{ public-uri }}/#/dashboard/members?team-id={{team-id}}&invite-email={{requested-by-email|urlescape}}
{{ public-uri }}/#/dashboard/team/{{team-id}}/members?invite-email={{requested-by-email|urlescape}}
@@ -23,7 +23,8 @@ Alternatively, you can create and share a view-only link to the file. This will
Click the link below to generate and send the link:
{{ public-uri }}/#/view?file-id={{file-id}}&page-id={{page-id}}&section=interactions&index=0&share=true
{{ public-uri }}/#/view/{{file-id}}?page-id={{page-id}}&section=interactions&index=0&share=true
If you do not wish to grant access at this time, you can simply disregard this email.


@@ -202,12 +202,12 @@
<table border="0" cellpadding="0" cellspacing="0" role="presentation"
style="border-collapse:separate;line-height:100%;">
<tr>
<td align="center" bgcolor="#6911d4" role="presentation"
style="border:none;border-radius:8px;cursor:auto;mso-padding-alt:10px 25px;background:#6911d4;"
<td align="center" bgcolor="#31EFB8" role="presentation"
style="border:none;border-radius:3px;cursor:auto;mso-padding-alt:10px 25px;background:#31EFB8;"
valign="middle">
<a href="{{ public-uri }}/#/dashboard/members?team-id={{team-id}}&invite-email={{requested-by-email|urlescape}}"
style="display:inline-block;background:#6911d4;color:#FFFFFF;font-family:Source Sans Pro, sans-serif;font-size:16px;font-weight:normal;line-height:120%;margin:0;text-decoration:none;text-transform:none;padding:10px 25px;mso-padding-alt:0px;border-radius:8px;"
target="_blank"> GIVE ACCESS TO “{{team-name|abbreviate:25}}” TEAM </a>
<a href="{{ public-uri }}/#/dashboard/team/{{team-id}}/members?invite-email={{requested-by-email|urlescape}}"
style="display:inline-block;background:#31EFB8;color:#1F1F1F;font-family:Source Sans Pro, sans-serif;font-size:16px;font-weight:normal;line-height:120%;margin:0;text-decoration:none;text-transform:none;padding:10px 25px;mso-padding-alt:0px;border-radius:3px;"
target="_blank"> Give access to “{{team-name|abbreviate:25}}” </a>
</td>
</tr>
</table>
@@ -249,4 +249,4 @@
</div>
</body>
</html>
</html>

View File

@@ -4,7 +4,7 @@ Hello!
To provide access, please click the link below:
{{ public-uri }}/#/dashboard/members?team-id={{team-id}}&invite-email={{requested-by-email|urlescape}}
{{ public-uri }}/#/dashboard/team/{{team-id}}/members?invite-email={{requested-by-email|urlescape}}
If you do not wish to grant access at this time, you can simply disregard this email.


@@ -1,18 +1,15 @@
[{:id "tokens-starter-kit"
:name "Design tokens starter kit"
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Tokens%20starter%20kit.penpot"}
{:id "penpot-design-system"
:name "Penpot Design System | Pencil"
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Pencil-Penpot-Design-System.penpot"}
{:id "wireframing-kit"
[{:id "wireframing-kit"
:name "Wireframe library"
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Wireframing%20kit%20v1.1.penpot"}
:file-uri "https://github.com/penpot/penpot-files/raw/main/wireframing-kit.penpot"}
{:id "prototype-examples"
:name "Prototype template"
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Prototype%20examples%20v1.1.penpot"}
:file-uri "https://github.com/penpot/penpot-files/raw/main/prototype-examples.penpot"}
{:id "plants-app"
:name "UI mockup example"
:file-uri "https://github.com/penpot/penpot-files/raw/main/Plants-app.penpot"}
{:id "penpot-design-system"
:name "Design system example"
:file-uri "https://github.com/penpot/penpot-files/raw/main/Penpot-Design-system.penpot"}
{:id "tutorial-for-beginners"
:name "Tutorial for beginners"
:file-uri "https://github.com/penpot/penpot-files/raw/main/tutorial-for-beginners.penpot"}
@@ -39,7 +36,7 @@
:file-uri "https://github.com/penpot/penpot-files/raw/main/Open%20Color%20Scheme%20(v1.9.1).penpot"}
{:id "flex-layout-playground"
:name "Flex Layout Playground"
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Flex%20Layout%20Playground%20v2.0.penpot"}
:file-uri "https://github.com/penpot/penpot-files/raw/main/Flex%20Layout%20Playground.penpot"}
{:id "welcome"
:name "Welcome"
:file-uri "https://github.com/penpot/penpot-files/raw/main/welcome.penpot"}]


@@ -4,7 +4,7 @@
<meta charset="utf-8" />
<meta name="robots" content="noindex,nofollow">
<meta http-equiv="x-ua-compatible" content="ie=edge" />
<title>{{label|upper}} API Documentation</title>
<title>Builtin API Documentation - Penpot</title>
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
@@ -19,7 +19,7 @@
<body>
<main>
<header>
<h1>{{label|upper}}: API Documentation (v{{version}})</h1>
<h1>Penpot API Documentation (v{{version}})</h1>
<small class="menu">
[
<nav>
@@ -31,10 +31,9 @@
</header>
<section class="doc-content">
<h2>INTRODUCTION</h2>
<p>This documentation is intended to be a general overview of
the {{label}} API. If you prefer, you can
use <a href="{{openapi}}">Swagger/OpenAPI</a> as
alternative.</p>
<p>This documentation is intended to be a general overview of the penpot RPC API.
If you prefer, you can use <a href="/api/openapi.json">OpenAPI</a>
and/or <a href="/api/openapi">SwaggerUI</a> as alternative.</p>
<h2>GENERAL NOTES</h2>
@@ -44,7 +43,7 @@
that starts with <b>get-</b> in the name, can use GET HTTP
method which in many cases benefits from the HTTP cache.</p>
{% block auth-section %}
<h3>Authentication</h3>
<p>The penpot backend right now offers two way for authenticate the request:
<b>cookies</b> (the same mechanism that we use ourselves on accessing the API from the
@@ -57,10 +56,9 @@
<p>The access token can be obtained on the appropriate section on profile settings
and it should be provided using <b>`Authorization`</b> header with <b>`Token
&lt;token-string&gt;`</b> value.</p>
{% endblock %}
<h3>Content Negotiation</h3>
<p>This API operates indistinctly with: <b>`application/json`</b>
<p>The penpot API by default operates indistinctly with: <b>`application/json`</b>
and <b>`application/transit+json`</b> content types. You should specify the
desired content-type on the <b>`Accept`</b> header, the transit encoding is used
by default.</p>
@@ -77,16 +75,13 @@
standard <a href="https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API">Fetch
API</a></p>
{% block limits-section %}
<h3>Limits</h3>
<p>The rate limit work per user basis (this means that different api keys share
the same rate limit). For now the limits are not documented because we are
studying and analyzing the data. As a general rule, it should not be abused, if an
abusive use is detected, we will proceed to block the user's access to the
API.</p>
{% endblock %}
{% block webhooks-section %}
<h3>Webhooks</h3>
<p>All methods that emit webhook events are marked with flag <b>WEBHOOK</b>, the
data structure defined on each method represents the <i>payload</i> of the
@@ -102,11 +97,9 @@
"profileId": "db601c95-045f-808b-8002-361312e63531"
}
</pre>
{% endblock %}
</section>
<section class="rpc-doc-content">
<h2>METHODS REFERENCE:</h2>
<h2>RPC METHODS REFERENCE:</h2>
<ul class="rpc-items">
{% for item in methods %}
{% include "app/templates/api-doc-entry.tmpl" with item=item %}


@@ -7,7 +7,7 @@ Debug Main Page
{% block content %}
<nav>
<div class="title">
<h1>ADMIN DEBUG INTERFACE (VERSION: {{version}})</h1>
<h1>ADMIN DEBUG INTERFACE</h1>
</div>
</nav>
<main class="dashboard">
@@ -17,6 +17,38 @@ Debug Main Page
<desc><a href="/dbg/error">CLICK HERE TO SEE THE ERROR REPORTS</a> </desc>
</fieldset>
<fieldset>
<legend>Download file data:</legend>
<desc>Given an FILE-ID, downloads the file data as file. The file data is encoded using transit.</desc>
<form method="get" action="/dbg/file/data">
<div class="row">
<input type="text" style="width:300px" name="file-id" placeholder="file-id" />
</div>
<div class="row">
<input type="submit" name="download" value="Download" />
<input type="submit" name="clone" value="Clone" />
</div>
</form>
</fieldset>
<fieldset>
<legend>Upload File Data:</legend>
<desc>Create a new file on your draft projects using the file downloaded from the previous section.</desc>
<form method="post" enctype="multipart/form-data" action="/dbg/file/data">
<div class="row">
<input type="file" name="file" value="" />
</div>
<div class="row">
<label>Import with same id?</label>
<input type="checkbox" name="reuseid" />
</div>
<div class="row">
<input type="submit" value="Upload" />
</div>
</form>
</fieldset>
<fieldset>
<legend>Profile Management</legend>
<form method="post" action="/dbg/actions/resend-email-verification">
@@ -45,88 +77,10 @@ Debug Main Page
</form>
</fieldset>
<fieldset>
<legend>VIRTUAL CLOCK</legend>
<desc>
<p>
CURRENT CLOCK: <b>{{current-clock}}</b>
<br />
CURRENT OFFSET: <b>{{current-offset}}</b>
<br />
CURRENT TIME: <b>{{current-time}}</b>
</p>
<p>Examples: 3h, -7h, 24h (allowed suffixes: h, s)</p>
</desc>
<form method="post" action="/dbg/actions/set-virtual-clock">
<div class="row">
<input type="text" name="offset" placeholder="3h" value="" />
</div>
<div class="row">
<label for="force-verify">Are you sure?</label>
<input id="force-verify" type="checkbox" name="force" />
<br />
<small>
This is a just a security double check for prevent non intentional submits.
</small>
</div>
<div class="row">
<input type="submit" name="submit" value="Submit" />
<input type="submit" name="reset" value="Reset" />
</div>
</form>
</fieldset>
</section>
<section class="widget">
<fieldset>
<legend>Download RAW file data:</legend>
<desc>Given an FILE-ID, downloads the file AS-IS (no validation
checks, just exports the file data and related objects in raw)
<br/>
<br/>
<b>WARNING: this operation does not performs any checks</b>
</desc>
<form method="get" action="/dbg/actions/file-raw-export-import">
<div class="row">
<input type="text" style="width:300px" name="file-id" placeholder="file-id" />
</div>
<div class="row">
<input type="submit" name="download" value="Download" />
<input type="submit" name="clone" value="Clone" />
</div>
</form>
</fieldset>
<fieldset>
<legend>Upload File Data:</legend>
<desc>Create a new file on your draft projects using the file downloaded from the previous section.
<br/>
<br/>
<b>WARNING: this operation does not performs any checks</b>
</desc>
<form method="post" enctype="multipart/form-data" action="/dbg/actions/file-raw-export-import">
<div class="row">
<input type="file" name="file" value="" />
</div>
<div class="row">
<label>Import with same id?</label>
<input type="checkbox" name="reuseid" />
</div>
<div class="row">
<input type="submit" value="Upload" />
</div>
</form>
</fieldset>
</section>
<section class="widget">
<fieldset>
<legend>Export binfile:</legend>
@@ -134,7 +88,7 @@ Debug Main Page
the related libraries in a single custom formatted binary
file.</desc>
<form method="get" action="/dbg/actions/file-export">
<form method="get" action="/dbg/file/export">
<div class="row set-of-inputs">
<input type="text" style="width:300px" name="file-ids" placeholder="file-id" />
<input type="text" style="width:300px" name="file-ids" placeholder="file-id" />
@@ -160,13 +114,37 @@ Debug Main Page
</fieldset>
<fieldset>
<legend>Import binfile:</legend>
<desc>Import penpot file in binary format.</desc>
<desc>Import penpot file in binary
format. If <strong>overwrite</strong> is checked, all files will
be overwritten using the same ids found in the file instead of
generating a new ones.</desc>
<form method="post" enctype="multipart/form-data" action="/dbg/actions/file-import">
<form method="post" enctype="multipart/form-data" action="/dbg/file/import">
<div class="row">
<input type="file" name="file" value="" />
</div>
<div class="row">
<label>Overwrite?</label>
<input type="checkbox" name="overwrite" />
<br />
<small>
Instead of creating a new file with all relations remapped,
reuses all ids and updates/overwrites the objects that are
already exists on the database.
<strong>Warning, this operation should be used with caution.</strong>
</small>
</div>
<div class="row">
<label>Migrate?</label>
<input type="checkbox" name="migrate" />
<br />
<small>
Applies the file migrations on the importation process.
</small>
</div>
<div class="row">
<input type="submit" name="upload" value="Upload" />
</div>
@@ -176,36 +154,15 @@ Debug Main Page
<section class="widget">
<fieldset>
<legend>Feature Flags for Team</legend>
<desc>Add a feature flag to a team</desc>
<form method="post" action="/dbg/actions/handle-team-features">
<div class="row">
<input type="text" style="width:300px" name="team-id" placeholder="team-id" />
</div>
<div class="row">
<select type="text" style="width:100px" name="feature">
{% for feature in supported-features %}
<option value="{{feature}}">{{feature}}</option>
{% endfor %}
</select>
</div>
<legend>Reset file version</legend>
<desc>Allows reset file data version to a specific number/</desc>
<form method="post" action="/dbg/actions/reset-file-version">
<div class="row">
<select style="width:100px" name="action">
<option value="">Action...</option>
<option value="show">Show</option>
<option value="enable">Enable</option>
<option value="disable">Disable</option>
</select>
<input type="text" style="width:300px" name="file-id" placeholder="file-id" />
</div>
<div class="row">
<label for="check-feature">Skip feature check</label>
<input id="check-feature" type="checkbox" name="skip-check" />
<br />
<small>
Do not check if the feature is supported
</small>
<input type="number" style="width:100px" name="version" placeholder="version" value="32" />
</div>
<div class="row">
@@ -217,6 +174,7 @@ Debug Main Page
</small>
</div>
<div class="row">
<input type="submit" value="Submit" />
</div>
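The forms added in this hunk target the new `/dbg/file/data`, `/dbg/file/export` and `/dbg/file/import` endpoints. A rough Python sketch of driving the first pair from a script, assuming an already authenticated admin session; the host, cookie name and ids are placeholders, not taken from the template:

```python
# Download and re-upload file data through the debug endpoints shown above.
# BASE and COOKIES are placeholders; authentication details are assumed.
import requests

BASE = "https://penpot.example.com"
COOKIES = {"auth-token": "REPLACE-WITH-A-SESSION-COOKIE"}

def download_file_data(file_id, dest):
    # Mirrors the "Download file data" form: GET /dbg/file/data?file-id=...
    resp = requests.get(f"{BASE}/dbg/file/data",
                        params={"file-id": file_id, "download": "Download"},
                        cookies=COOKIES, timeout=60)
    resp.raise_for_status()
    with open(dest, "wb") as out:
        out.write(resp.content)

def upload_file_data(path, reuse_id=False):
    # Mirrors the "Upload File Data" form: multipart POST /dbg/file/data
    data = {"reuseid": "on"} if reuse_id else {}
    with open(path, "rb") as fh:
        resp = requests.post(f"{BASE}/dbg/file/data",
                             files={"file": fh}, data=data,
                             cookies=COOKIES, timeout=120)
    resp.raise_for_status()
```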


@@ -7,9 +7,7 @@ penpot - error list
{% block content %}
<nav>
<div class="title">
<h1>Error reports (last 200)
<a href="/dbg">[GO BACK]</a>
</h1>
<h1>Error reports (last 200)</h1>
</div>
</nav>
<main class="horizontal-list">


@@ -1 +0,0 @@
{% extends "app/templates/api-doc.tmpl" %}


@@ -1,10 +0,0 @@
{% extends "app/templates/api-doc.tmpl" %}
{% block auth-section %}
{% endblock %}
{% block limits-section %}
{% endblock %}
{% block webhooks-section %}
{% endblock %}


@@ -7,7 +7,7 @@
name="description"
content="SwaggerUI"
/>
<title>{{label|upper}} API</title>
<title>PENPOT Swagger UI</title>
<style>{{swagger-css|safe}}</style>
</head>
<body>
@@ -16,7 +16,7 @@
<script>
window.onload = () => {
window.ui = SwaggerUIBundle({
url: '{{uri}}',
url: '{{public-uri}}/api/openapi.json',
dom_id: '#swagger-ui',
presets: [
SwaggerUIBundle.presets.apis,


@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<Configuration status="fatal" monitorInterval="30">
<Configuration status="info" monitorInterval="30">
<Appenders>
<Console name="console" target="SYSTEM_OUT">
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
@@ -25,7 +25,8 @@
<Logger name="app.storage.tmp" level="info" />
<Logger name="app.worker" level="trace" />
<Logger name="app.msgbus" level="info" />
<Logger name="app.http" level="info" />
<Logger name="app.http.websocket" level="info" />
<Logger name="app.http.sse" level="info" />
<Logger name="app.util.websocket" level="info" />
<Logger name="app.redis" level="info" />
<Logger name="app.rpc.rlimit" level="info" />


@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<Configuration status="fatal" monitorInterval="30">
<Configuration status="info" monitorInterval="30">
<Appenders>
<Console name="console" target="SYSTEM_OUT">
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
@@ -25,7 +25,8 @@
<Logger name="app.storage.tmp" level="info" />
<Logger name="app.worker" level="trace" />
<Logger name="app.msgbus" level="info" />
<Logger name="app.http" level="info" />
<Logger name="app.http.websocket" level="info" />
<Logger name="app.http.sse" level="info" />
<Logger name="app.util.websocket" level="info" />
<Logger name="app.redis" level="info" />
<Logger name="app.rpc.rlimit" level="info" />


@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<Configuration status="fatal" monitorInterval="30">
<Configuration status="info" monitorInterval="30">
<Appenders>
<Console name="console" target="SYSTEM_OUT">
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"


@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<Configuration status="fatal" monitorInterval="60">
<Configuration status="info" monitorInterval="60">
<Appenders>
<Console name="console" target="SYSTEM_OUT">
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"


@@ -1,84 +0,0 @@
#!/usr/bin/env bash
export PENPOT_MANAGEMENT_API_KEY=super-secret-management-api-key
export PENPOT_SECRET_KEY=super-secret-devenv-key
export PENPOT_HOST=devenv
export PENPOT_PUBLIC_URI=https://localhost:3449
export PENPOT_FLAGS="\
$PENPOT_FLAGS \
enable-login-with-password
disable-login-with-ldap \
disable-login-with-oidc \
disable-login-with-google \
disable-login-with-github \
disable-login-with-gitlab \
enable-backend-worker \
enable-backend-asserts \
disable-feature-fdata-pointer-map \
enable-feature-fdata-objects-map \
enable-audit-log \
enable-transit-readable-response \
enable-demo-users \
enable-user-feedback \
disable-secure-session-cookies \
enable-smtp \
enable-prepl-server \
enable-urepl-server \
enable-rpc-climit \
enable-rpc-rlimit \
enable-quotes \
enable-soft-rpc-rlimit \
enable-auto-file-snapshot \
enable-webhooks \
enable-access-tokens \
disable-tiered-file-data-storage \
enable-file-validation \
enable-file-schema-validation \
enable-redis-cache \
enable-subscriptions";
# Default deletion delay for devenv
export PENPOT_DELETION_DELAY="24h"
# Setup default upload media file size to 100MiB
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
# Setup default multipart upload size to 300MiB
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
export PENPOT_USER_FEEDBACK_DESTINATION="support@example.com"
export AWS_ACCESS_KEY_ID=penpot-devenv
export AWS_SECRET_ACCESS_KEY=penpot-devenv
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=http://minio:9000
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
export JAVA_OPTS="\
-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
-Djdk.attach.allowAttachSelf \
-Dlog4j2.configurationFile=log4j2-devenv.xml \
-Djdk.tracePinnedThreads=full \
-Dim4java.useV7=true \
-XX:+UnlockExperimentalVMOptions \
-XX:+UseShenandoahGC \
-XX:+UseCompactObjectHeaders \
-XX:ShenandoahGCMode=generational \
-XX:-OmitStackTraceInFastThrow \
--sun-misc-unsafe-memory-access=allow \
--enable-preview \
--enable-native-access=ALL-UNNAMED";
function setup_minio() {
# Initialize MINIO config
mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin -q
mc admin user add penpot-s3 penpot-devenv penpot-devenv -q
mc admin user info penpot-s3 penpot-devenv |grep -F -q "readwrite"
if [ "$?" = "1" ]; then
mc admin policy attach penpot-s3 readwrite --user=penpot-devenv -q
fi
mc mb penpot-s3/penpot -p -q
}


@@ -7,8 +7,6 @@ set -ex
rm -rf target;
mkdir -p target/classes;
mkdir -p target/dist;
mkdir -p target/dist/scripts;
echo "$CURRENT_VERSION" > target/classes/version.txt;
cp ../CHANGES.md target/classes/changelog.md;
@@ -17,7 +15,6 @@ mv target/penpot.jar target/dist/penpot.jar
cp resources/log4j2.xml target/dist/log4j2.xml
cp scripts/run.template.sh target/dist/run.sh;
cp scripts/manage.py target/dist/manage.py
cp scripts/svgo-cli.js target/dist/scripts/;
chmod +x target/dist/run.sh;
chmod +x target/dist/manage.py


@@ -35,35 +35,40 @@ def get_prepl_conninfo():
return host, port
def send(data):
def send_eval(expr):
host, port = get_prepl_conninfo()
with socket.create_connection((host, port)) as s:
f = s.makefile(mode="rw")
json.dump(data, f)
f.write("\n")
f.flush()
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.connect((host, port))
s.send(expr.encode("utf-8"))
s.send(b":repl/quit\n\n")
while True:
line = f.readline()
result = json.loads(line)
tag = result.get("tag", None)
with s.makefile() as f:
while True:
line = f.readline()
result = json.loads(line)
tag = result.get("tag", None)
if tag == "ret":
return result.get("val", None), result.get("exception", None)
elif tag == "out":
print(result.get("val"), end="")
else:
raise RuntimeError("unexpected response from PREPL")
if tag == "ret":
return result.get("val", None), result.get("err", None)
elif tag == "out":
print(result.get("val"), end="")
else:
raise RuntimeError("unexpected response from PREPL")
def encode(val):
return json.dumps(json.dumps(val))
def print_error(error):
print("ERR:", error["hint"])
def print_error(res):
for error in res["via"]:
print("ERR:", error["message"])
break
def run_cmd(params):
try:
res, err = send(params)
if err:
print_error(err)
expr = "(app.srepl.cli/exec {})".format(encode(params))
res, failed = send_eval(expr)
if failed:
print_error(res)
sys.exit(-1)
return res
@@ -71,27 +76,19 @@ def run_cmd(params):
print("EXC:", str(cause))
sys.exit(-2)
def create_profile(fullname, email, password, skip_tutorial=False, skip_walkthrough=False):
props = {}
if skip_tutorial:
props["viewed-tutorial?"] = True
if skip_walkthrough:
props["viewed-walkthrough?"] = True
def create_profile(fullname, email, password):
params = {
"cmd": "create-profile",
"params": {
"fullname": fullname,
"email": email,
"password": password,
**props
"password": password
}
}
res = run_cmd(params)
print(f"Created: {res['email']} / {res['id']}")
def update_profile(email, fullname, password, is_active):
params = {
"cmd": "update-profile",
@@ -99,7 +96,7 @@ def update_profile(email, fullname, password, is_active):
"email": email,
"fullname": fullname,
"password": password,
"isActive": is_active
"is_active": is_active
}
}
@@ -141,7 +138,7 @@ def derive_password(password):
params = {
"cmd": "derive-password",
"params": {
"password": password
"password": password,
}
}
@@ -178,8 +175,6 @@ parser.add_argument("-n", "--fullname", help="fullname", action="store")
parser.add_argument("-e", "--email", help="email", action="store")
parser.add_argument("-p", "--password", help="password", action="store")
parser.add_argument("-c", "--connect", help="connect to PREPL", action="store", default="tcp://localhost:6063")
parser.add_argument("--skip-tutorial", help="mark tutorial as viewed", action="store_true")
parser.add_argument("--skip-walkthrough", help="mark walkthrough as viewed", action="store_true")
args = parser.parse_args()
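Because this hunk interleaves the previous raw-text PREPL client with its replacement, the added lines are easier to follow consolidated: the new `send(data)` writes one JSON document plus a newline to the PREPL socket, then reads tagged JSON documents back, printing `out` output as it arrives and returning on `ret`. A self-contained sketch of that exchange (host, port and payload are illustrative; 6063 is the default from the `--connect` option above):

```python
# Consolidated sketch of the JSON-over-PREPL exchange from the added lines.
import json
import socket

def prepl_send(host, port, payload):
    with socket.create_connection((host, port)) as s:
        f = s.makefile(mode="rw")
        json.dump(payload, f)   # one request document...
        f.write("\n")           # ...terminated by a newline
        f.flush()
        while True:
            result = json.loads(f.readline())
            tag = result.get("tag")
            if tag == "ret":    # final answer: value plus optional exception flag
                return result.get("val"), result.get("exception")
            elif tag == "out":  # interleaved server stdout
                print(result.get("val"), end="")
            else:
                raise RuntimeError("unexpected response from PREPL")

# e.g. prepl_send("localhost", 6063,
#                 {"cmd": "derive-password", "params": {"password": "secret"}})
```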


@@ -1,13 +1,110 @@
#!/usr/bin/env bash
SCRIPT_DIR=$(dirname $0);
source $SCRIPT_DIR/_env;
export PENPOT_HOST=devenv
export PENPOT_TENANT=dev
export PENPOT_FLAGS="\
$PENPOT_FLAGS \
enable-login-with-ldap \
enable-login-with-password
enable-login-with-oidc \
enable-login-with-google \
enable-login-with-github \
enable-login-with-gitlab \
enable-backend-worker \
enable-backend-asserts \
enable-feature-fdata-pointer-map \
enable-feature-fdata-objects-map \
enable-audit-log \
enable-transit-readable-response \
enable-demo-users \
disable-secure-session-cookies \
enable-smtp \
enable-prepl-server \
enable-urepl-server \
enable-rpc-climit \
enable-rpc-rlimit \
enable-soft-rpc-rlimit \
enable-auto-file-snapshot \
enable-webhooks \
enable-access-tokens \
enable-tiered-file-data-storage \
enable-file-validation \
enable-file-schema-validation";
# Default deletion delay for devenv
export PENPOT_DELETION_DELAY="24h"
# Setup default upload media file size to 100MiB
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
# Setup default multipart upload size to 300MiB
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
# export PENPOT_DATABASE_URI="postgresql://172.17.0.1:5432/penpot"
# export PENPOT_DATABASE_USERNAME="penpot"
# export PENPOT_DATABASE_PASSWORD="penpot"
# export PENPOT_DATABASE_READONLY=true
# export PENPOT_DATABASE_URI="postgresql://172.17.0.1:5432/penpot_pre"
# export PENPOT_DATABASE_USERNAME="penpot_pre"
# export PENPOT_DATABASE_PASSWORD="penpot_pre"
# export PENPOT_LOGGERS_LOKI_URI="http://172.17.0.1:3100/loki/api/v1/push"
# export PENPOT_AUDIT_LOG_ARCHIVE_URI="http://localhost:6070/api/audit"
# Initialize MINIO config
setup_minio;
mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin -q
mc admin user add penpot-s3 penpot-devenv penpot-devenv -q
mc admin user info penpot-s3 penpot-devenv |grep -F -q "readwrite"
if [ "$?" = "1" ]; then
mc admin policy attach penpot-s3 readwrite --user=penpot-devenv -q
fi
mc mb penpot-s3/penpot -p -q
export AWS_ACCESS_KEY_ID=penpot-devenv
export AWS_SECRET_ACCESS_KEY=penpot-devenv
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=http://minio:9000
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
export OPTIONS="
-A:jmx-remote -A:dev \
-J-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
-J-Djdk.attach.allowAttachSelf \
-J-Dpolyglot.engine.WarnInterpreterOnly=false \
-J-Dlog4j2.configurationFile=log4j2-devenv-repl.xml \
-J-XX:+EnableDynamicAgentLoading \
-J-XX:-OmitStackTraceInFastThrow \
-J-XX:+UnlockDiagnosticVMOptions \
-J-XX:+DebugNonSafepoints \
-J-Djdk.tracePinnedThreads=full"
# Enable preview
export OPTIONS="$OPTIONS -J--enable-preview"
# Setup HEAP
# export OPTIONS="$OPTIONS -J-Xms50m -J-Xmx1024m"
# export OPTIONS="$OPTIONS -J-Xms1100m -J-Xmx1100m -J-XX:+AlwaysPreTouch"
# Increase virtual thread pool size
# export OPTIONS="$OPTIONS -J-Djdk.virtualThreadScheduler.parallelism=16"
# Disable C2 Compiler
# export OPTIONS="$OPTIONS -J-XX:TieredStopAtLevel=1"
# Disable all compilers
# export OPTIONS="$OPTIONS -J-Xint"
# Setup GC
# export OPTIONS="$OPTIONS -J-XX:+UseG1GC"
# Setup GC
# export OPTIONS="$OPTIONS -J-XX:+UseZGC"
# Enable ImageMagick v7.x support
# export OPTIONS="-J-Dim4java.useV7=true $OPTIONS";
export JAVA_OPTS="$JAVA_OPTS -Dlog4j2.configurationFile=log4j2-devenv-repl.xml"
export OPTIONS="-A:jmx-remote -A:dev"
export OPTIONS_EVAL="nil"
# export OPTIONS_EVAL="(set! *warn-on-reflection* true)"
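For reference, the enable-*/disable-* words collected in PENPOT_FLAGS above are folded by the backend into a set of flag keywords at startup. The snippet below is a purely illustrative Clojure sketch of that convention; it is not the actual app.common.flags parser, whose behavior may differ.

(require '[clojure.string :as str])

;; Illustrative only: fold enable-*/disable-* words into a set of enabled
;; flag keywords, starting from a default set.
(defn parse-flag-words
  [defaults words]
  (reduce (fn [flags word]
            (cond
              (str/starts-with? word "enable-")
              (conj flags (keyword (subs word (count "enable-"))))

              (str/starts-with? word "disable-")
              (disj flags (keyword (subs word (count "disable-"))))

              :else flags))
          defaults
          (str/split (str/trim words) #"\s+")))

(parse-flag-words #{:secure-session-cookies}
                  "enable-smtp disable-secure-session-cookies enable-prepl-server")
;; => #{:smtp :prepl-server}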

backend/scripts/repl-test (new executable file, 48 lines)

@@ -0,0 +1,48 @@
#!/usr/bin/env bash
source /home/penpot/environ
export PENPOT_FLAGS="$PENPOT_FLAGS disable-backend-worker"
export OPTIONS="
-A:jmx-remote -A:dev \
-J-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
-J-Djdk.attach.allowAttachSelf \
-J-Dlog4j2.configurationFile=log4j2-experiments.xml \
-J-XX:-OmitStackTraceInFastThrow \
-J-XX:+UnlockDiagnosticVMOptions \
-J-XX:+DebugNonSafepoints \
-J-Djdk.tracePinnedThreads=full \
-J-XX:+UseTransparentHugePages \
-J-XX:ReservedCodeCacheSize=1g \
-J-Dpolyglot.engine.WarnInterpreterOnly=false \
-J--enable-preview";
# Setup HEAP
export OPTIONS="$OPTIONS -J-Xms320g -J-Xmx320g -J-XX:+AlwaysPreTouch"
export PENPOT_HTTP_SERVER_IO_THREADS=2
export PENPOT_HTTP_SERVER_WORKER_THREADS=2
# Increase virtual thread pool size
# export OPTIONS="$OPTIONS -J-Djdk.virtualThreadScheduler.parallelism=16"
# Disable C2 Compiler
# export OPTIONS="$OPTIONS -J-XX:TieredStopAtLevel=1"
# Disable all compilers
# export OPTIONS="$OPTIONS -J-Xint"
# Setup GC
export OPTIONS="$OPTIONS -J-XX:+UseG1GC -J-Xlog:gc:logs/gc.log"
# Setup GC
#export OPTIONS="$OPTIONS -J-XX:+UseZGC -J-XX:+ZGenerational -J-Xlog:gc:logs/gc.log"
# Enable ImageMagick v7.x support
# export OPTIONS="-J-Dim4java.useV7=true $OPTIONS";
export OPTIONS_EVAL="nil"
# export OPTIONS_EVAL="(set! *warn-on-reflection* true)"
set -ex
exec clojure $OPTIONS -M -e "$OPTIONS_EVAL" -m rebel-readline.main


@@ -1,13 +0,0 @@
#!/usr/bin/env bash
SCRIPT_DIR=$(dirname $0);
source $SCRIPT_DIR/_env;
export OPTIONS="-A:dev"
entrypoint=${1:-app.main};
shift 1;
set -ex
exec clojure $OPTIONS -A:dev -M -m $entrypoint "$@";


@@ -18,9 +18,9 @@ if [ -f ./environ ]; then
source ./environ
fi
export JAVA_OPTS="-Dim4java.useV7=true -Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager -Dlog4j2.configurationFile=log4j2.xml -XX:-OmitStackTraceInFastThrow --sun-misc-unsafe-memory-access=allow --enable-native-access=ALL-UNNAMED --enable-preview $JVM_OPTS $JAVA_OPTS"
export JVM_OPTS="-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager -Dlog4j2.configurationFile=log4j2.xml -XX:-OmitStackTraceInFastThrow -Dpolyglot.engine.WarnInterpreterOnly=false --enable-preview $JVM_OPTS"
ENTRYPOINT=${1:-app.main};
set -ex
exec $JAVA_CMD $JAVA_OPTS -jar penpot.jar -m $ENTRYPOINT
exec $JAVA_CMD $JVM_OPTS -jar penpot.jar -m $ENTRYPOINT


@@ -1,11 +1,68 @@
#!/usr/bin/env bash
SCRIPT_DIR=$(dirname $0);
source $SCRIPT_DIR/_env;
export PENPOT_HOST=devenv
export PENPOT_TENANT=dev
export PENPOT_FLAGS="\
$PENPOT_FLAGS \
enable-prepl-server \
enable-urepl-server \
enable-nrepl-server \
enable-webhooks \
enable-backend-asserts \
enable-audit-log \
enable-transit-readable-response \
enable-demo-users \
enable-feature-fdata-pointer-map \
enable-feature-fdata-objects-map \
disable-secure-session-cookies \
enable-rpc-climit \
enable-smtp \
enable-file-snapshot \
enable-access-tokens \
enable-tiered-file-data-storage \
enable-file-validation \
enable-file-schema-validation";
export OPTIONS="
-A:jmx-remote -A:dev \
-J-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
-J-Djdk.attach.allowAttachSelf \
-J-Dpolyglot.engine.WarnInterpreterOnly=false \
-J-Dlog4j2.configurationFile=log4j2-devenv.xml \
-J-XX:+EnableDynamicAgentLoading \
-J-XX:-OmitStackTraceInFastThrow \
-J-XX:+UnlockDiagnosticVMOptions \
-J-XX:+DebugNonSafepoints"
# Default deletion delay for devenv
export PENPOT_DELETION_DELAY="24h"
# Setup default upload media file size to 100MiB
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
# Setup default multipart upload size to 300MiB
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
# Enable ImageMagick v7.x support
# export OPTIONS="-J-Dim4java.useV7=true $OPTIONS";
# Initialize MINIO config
setup_minio;
mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin -q
mc admin user add penpot-s3 penpot-devenv penpot-devenv -q
mc admin user info penpot-s3 penpot-devenv |grep -F -q "readwrite"
if [ "$?" = "1" ]; then
mc admin policy attach penpot-s3 readwrite --user=penpot-devenv -q
fi
mc mb penpot-s3/penpot -p -q
export AWS_ACCESS_KEY_ID=penpot-devenv
export AWS_SECRET_ACCESS_KEY=penpot-devenv
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=http://minio:9000
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
entrypoint=${1:-app.main};
shift 1;
set -ex
exec clojure -A:jmx-remote -A:dev -M -m app.main "$@";
clojure $OPTIONS -A:dev -M -m $entrypoint;


File diff suppressed because one or more lines are too long


@@ -8,7 +8,7 @@
(:require
[buddy.hashers :as hashers]))
(def ^:private default-options
(def default-params
{:alg :argon2id
:memory 32768 ;; 32 MiB
:iterations 3
@@ -16,12 +16,12 @@
(defn derive-password
[password]
(hashers/derive password default-options))
(hashers/derive password default-params))
(defn verify-password
[attempt password]
(try
(hashers/verify attempt password default-options)
(hashers/verify attempt password)
(catch Throwable _
{:update false
:valid false})))
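As a quick illustration of how the buddy-hashers calls above behave, here is a REPL-style sketch; the return values are paraphrased and the exact encoded hash format depends on the library version.

(require '[buddy.hashers :as hashers])

;; Derive a hash with the argon2id parameters shown above.
(def hashed
  (hashers/derive "s3cr3t" {:alg :argon2id
                            :memory 32768   ;; 32 MiB
                            :iterations 3}))
;; => an encoded string that embeds the algorithm and its parameters

;; Verification returns a map; :update signals that the stored hash was
;; derived with outdated parameters and should be re-derived.
(hashers/verify "s3cr3t" hashed)         ;; => {:valid true, :update false}
(hashers/verify "wrong-password" hashed) ;; => {:valid false, :update false}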


@@ -8,8 +8,9 @@
(:require
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.spec :as us]
[clj-ldap.client :as ldap]
[clojure.spec.alpha :as s]
[clojure.string]
[integrant.core :as ig]))
@@ -57,26 +58,21 @@
:email email
:backend "ldap"})))
(def ^:private schema:info-data
[:map
[:fullname ::sm/text]
[:email ::sm/email]
[:backend ::sm/text]])
(s/def ::fullname ::us/not-empty-string)
(s/def ::email ::us/email)
(s/def ::backend ::us/not-empty-string)
(def ^:private valid-info-data?
(sm/lazy-validator schema:info-data))
(def ^:private explain-info-data
(sm/lazy-explainer schema:info-data))
(s/def ::info-data
(s/keys :req-un [::fullname ::email ::backend]))
(defn authenticate
[cfg params]
(with-open [conn (connect cfg)]
(when-let [user (-> (assoc cfg ::conn conn)
(retrieve-user params))]
(when-not (valid-info-data? user)
(let [explain (explain-info-data user)]
(l/warn :hint "invalid response from ldap, looks like ldap is not configured correctly" :data user)
(when-not (s/valid? ::info-data user)
(let [explain (s/explain-str ::info-data user)]
(l/warn ::l/raw (str "invalid response from ldap, looks like ldap is not configured correctly\n" explain))
(ex/raise :type :restriction
:code :wrong-ldap-response
:explain explain)))
@@ -106,31 +102,38 @@
:host (:host cfg) :port (:port cfg) :cause cause)
nil))))
(def ^:private schema:params
[:map
[:host {:optional true} :string]
[:port {:optional true} ::sm/int]
[:bind-dn {:optional true} :string]
[:bind-password {:optional true} :string]
[:query {:optional true} :string]
[:base-dn {:optional true} :string]
[:attrs-email {:optional true} :string]
[:attrs-username {:optional true} :string]
[:attrs-fullname {:optional true} :string]
[:ssl {:optional true} ::sm/boolean]
[:tls {:optional true} ::sm/boolean]])
(s/def ::enabled? ::us/boolean)
(s/def ::host ::us/string)
(s/def ::port ::us/integer)
(s/def ::ssl ::us/boolean)
(s/def ::tls ::us/boolean)
(s/def ::query ::us/string)
(s/def ::base-dn ::us/string)
(s/def ::bind-dn ::us/string)
(s/def ::bind-password ::us/string)
(s/def ::attrs-email ::us/string)
(s/def ::attrs-fullname ::us/string)
(s/def ::attrs-username ::us/string)
(def ^:private check-params
(sm/check-fn schema:params :hint "Invalid LDAP provider parameters"))
(s/def ::provider-params
(s/keys :opt-un [::host ::port
::ssl ::tls
::enabled?
::bind-dn
::bind-password
::query
::attrs-email
::attrs-username
::attrs-fullname]))
(defmethod ig/assert-key ::provider
[_ params]
(when (:enabled params)
(some->> params check-params)))
(s/def ::provider
(s/nilable ::provider-params))
(defmethod ig/pre-init-spec ::provider
[_]
(s/spec ::provider))
(defmethod ig/init-key ::provider
[_ cfg]
(when (:enabled cfg)
(when (:enabled? cfg)
(try-connectivity cfg)))
(sm/register! ::provider schema:params)
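As a rough illustration of the schema-based validation introduced above: app.common.schema is Penpot-specific, so the exact return values here are assumptions based on how valid-info-data? and explain-info-data are used in authenticate.

;; Sketch, assuming the lazy validator acts as a predicate and the lazy
;; explainer returns a data structure describing the failing entries.
(def user-ok  {:fullname "Ada Lovelace" :email "ada@example.com" :backend "ldap"})
(def user-bad {:fullname "Ada Lovelace" :email nil :backend "ldap"})

(valid-info-data? user-ok)   ;; => true
(valid-info-data? user-bad)  ;; => false

;; On failure, the explainer output is what ends up in the :explain key of
;; the :wrong-ldap-response exception raised above.
(explain-info-data user-bad) ;; => explanation data pointing at :email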


File diff suppressed because it is too large


@@ -1,131 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.binfile.cleaner
"A collection of helpers for performing cleanup of artifacts, mainly
for recently imported shapes."
(:require
[app.common.data :as d]
[app.common.types.shape :as cts]
[app.common.uuid :as uuid]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; PRE DECODE
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- pre-clean-bool-content
[shape]
(if-let [content (get shape :bool-content)]
(-> shape
(assoc :content content)
(dissoc :bool-content))
shape))
(defn- pre-clean-shadow-color
[shape]
(d/update-when shape :shadow
(fn [shadows]
(mapv (fn [shadow]
(update shadow :color
(fn [color]
(let [ref-id (get color :id)
ref-file (get color :file-id)]
(-> (d/without-qualified color)
(select-keys [:opacity :color :gradient :image :ref-id :ref-file])
(cond-> ref-id
(assoc :ref-id ref-id))
(cond-> ref-file
(assoc :ref-file ref-file)))))))
shadows))))
(defn clean-shape-pre-decode
"Applies a pre-decode phase migration to the shape"
[shape]
(cond-> shape
(= "bool" (:type shape))
(pre-clean-bool-content)
(contains? shape :shadow)
(pre-clean-shadow-color)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; POST DECODE
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- fix-shape-shadow-color
"Some shapes can come with an invalid `id` property on shadow colors,
caused by an incorrect uuid parsing bug that should already be fixed;
this function removes the invalid id from the data structure."
[shape]
(let [fix-color
(fn [{:keys [id] :as color}]
(if (uuid? id)
color
(if (and (string? id)
(re-matches uuid/regex id))
(assoc color :id (uuid/uuid id))
(dissoc color :id))))
fix-shadow
(fn [shadow]
(d/update-when shadow :color fix-color))
xform
(map fix-shadow)]
(d/update-when shape :shadow
(fn [shadows]
(into [] xform shadows)))))
(defn- fix-root-shape
"Ensure all root objects are well formed shapes"
[shape]
(if (= (:id shape) uuid/zero)
(-> shape
(assoc :parent-id uuid/zero)
(assoc :frame-id uuid/zero)
;; We explicitly dissoc them and let the shape-setup
;; to regenerate it with valid values.
(dissoc :selrect)
(dissoc :points)
(cts/setup-shape))
shape))
(defn- fix-legacy-flex-dir
"This operation is only relevant to old data and it is fixed just
for convenience."
[shape]
(d/update-when shape :layout-flex-dir
(fn [dir]
(case dir
:reverse-row :row-reverse
:reverse-column :column-reverse
dir))))
(defn clean-shape-post-decode
"A shape processor that is expected to be executed after the schema
decoding process but before validation."
[shape]
(-> shape
(fix-shape-shadow-color)
(fix-root-shape)
(fix-legacy-flex-dir)))
(defn- fix-container
[container]
(-> container
;; Remove possible `nil` keys on objects
(d/update-when :objects dissoc nil)
(d/update-when :objects d/update-vals clean-shape-post-decode)))
(defn clean-file
[file & {:as _opts}]
(update file :data
(fn [data]
(-> data
(d/update-when :pages-index d/update-vals fix-container)
(d/update-when :components d/update-vals fix-container)
(d/without-nils)))))
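To make the post-decode fixes above concrete, here is a small illustrative example (shape contents and uuid values are made up) of what clean-shape-post-decode does with a legacy shape:

;; A shadow color id stored as a string instead of a uuid, plus a legacy
;; :layout-flex-dir value.
(def legacy-shape
  {:id #uuid "2d1c44a0-0000-0000-0000-000000000001"
   :type :rect
   :layout-flex-dir :reverse-row
   :shadow [{:color {:id "9c8f3a2e-0000-0000-0000-000000000002"
                     :color "#000000"}}]})

(clean-shape-post-decode legacy-shape)
;; => the shadow color :id is parsed into a real uuid and
;;    :layout-flex-dir becomes :row-reverse; shapes that match none of the
;;    fixers are returned untouched.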


@@ -9,73 +9,33 @@
binfile format implementations and management rpc methods."
(:require
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.features :as cfeat]
[app.common.files.helpers :as cfh]
[app.common.files.migrations :as fmg]
[app.common.files.validate :as fval]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.types.file :as ctf]
[app.common.uuid :as uuid]
[app.common.weak :as weak]
[app.config :as cf]
[app.db :as db]
[app.db.sql :as sql]
[app.features.fdata :as fdata]
[app.features.file-migrations :as fmigr]
[app.features.components-v2 :as feat.compv2]
[app.features.fdata :as feat.fdata]
[app.loggers.audit :as-alias audit]
[app.loggers.webhooks :as-alias webhooks]
[app.storage :as sto]
[app.util.blob :as blob]
[app.util.pointer-map :as pmap]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[clojure.set :as set]
[cuerdas.core :as str]
[datoteka.fs :as fs]
[datoteka.io :as io]))
[clojure.walk :as walk]
[cuerdas.core :as str]))
(set! *warn-on-reflection* true)
(def ^:dynamic *state* nil)
(def ^:dynamic *options* nil)
(def ^:dynamic *reference-file* nil)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; DEFAULTS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Threshold in bytes (2 MiB) above which we switch from in-memory
;; byte arrays to temporary files.
(def temp-file-threshold
(* 1024 1024 2))
;; A maximum (storage) object size allowed: 100MiB
(def ^:const max-object-size
(* 1024 1024 100))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(declare get-resolved-file-libraries)
(declare update-file!)
(def file-attrs
(sm/keys ctf/schema:file))
(defn parse-file-format
[template]
(assert (fs/path? template) "expected a path for `template`")
(with-open [^java.lang.AutoCloseable input (io/input-stream template)]
(let [buffer (byte-array 4)]
(io/read-to-buffer input buffer)
(if (and (= (aget buffer 0) 80)
(= (aget buffer 1) 75)
(= (aget buffer 2) 3)
(= (aget buffer 3) 4))
:binfile-v3
:binfile-v1))))
(def xf-map-id
(map :id))
@@ -96,13 +56,6 @@
(def conj-vec
(fnil conj []))
(defn initial-state
[]
{:storage-objects #{}
:files #{}
:teams #{}
:projects #{}})
(defn collect-storage-objects
[state items]
(update state :storage-objects into xf-map-media-id items))
@@ -134,296 +87,37 @@
attrs))
(defn update-index
([coll]
(update-index {} coll identity))
([index coll]
(update-index index coll identity))
([index coll attr]
(reduce #(index-object %1 %2 attr) index coll)))
(defn- decode-row-features
[{:keys [features] :as row}]
(when row
(cond-> row
(db/pgarray? features) (assoc :features (db/decode-pgarray features #{})))))
(def sql:get-minimal-file
"SELECT f.id,
f.revn,
f.modified_at,
f.deleted_at
FROM file AS f
WHERE f.id = ?")
(defn get-minimal-file
[cfg id & {:as opts}]
(db/get-with-sql cfg [sql:get-minimal-file id] opts))
(def sql:files-with-data
"SELECT f.id,
f.project_id,
f.created_at,
f.modified_at,
f.deleted_at,
f.name,
f.is_shared,
f.has_media_trimmed,
f.revn,
f.data AS legacy_data,
f.ignore_sync_until,
f.comment_thread_seqn,
f.features,
f.version,
f.vern,
p.team_id,
coalesce(fd.backend, 'legacy-db') AS backend,
fd.metadata AS metadata,
fd.data AS data
FROM file AS f
LEFT JOIN file_data AS fd ON (fd.file_id = f.id AND fd.id = f.id)
INNER JOIN project AS p ON (p.id = f.project_id)")
(def sql:get-file
(str sql:files-with-data " WHERE f.id = ?"))
(def sql:get-file-without-data
(str "WITH files AS (" sql:files-with-data ")"
"SELECT f.id,
f.project_id,
f.created_at,
f.modified_at,
f.deleted_at,
f.name,
f.is_shared,
f.has_media_trimmed,
f.revn,
f.ignore_sync_until,
f.comment_thread_seqn,
f.features,
f.version,
f.vern,
f.team_id
FROM files AS f
WHERE f.id = ?"))
(defn- migrate-file
[{:keys [::db/conn] :as cfg} {:keys [read-only?]} {:keys [id] :as file}]
(binding [pmap/*load-fn* (partial fdata/load-pointer cfg id)
pmap/*tracked* (pmap/create-tracked)]
(let [libs (delay (get-resolved-file-libraries cfg file))
;; To avoid the unnecessary overhead of creating multiple
;; pointers and handling the objects map internally in the
;; worst case (when probably all shapes and all pointers
;; will be read anyway), we just realize/resolve them
;; before applying the migration to the file.
file (-> (fdata/realize cfg file)
(fmg/migrate-file libs))]
(if (or read-only? (db/read-only? conn))
file
(do ;; When the file is migrated, we break the rule of not
;; performing mutations on get operations and update the
;; file with all migrations applied
(update-file! cfg file)
(fmigr/resolve-applied-migrations cfg file))))))
(defn- get-file*
[{:keys [::db/conn] :as cfg} id
{:keys [migrate?
realize?
decode?
skip-locked?
include-deleted?
load-data?
throw-if-not-exists?
lock-for-update?
lock-for-share?]
:or {lock-for-update? false
lock-for-share? false
load-data? true
migrate? true
decode? true
include-deleted? false
throw-if-not-exists? true
realize? false}
:as options}]
(assert (db/connection? conn) "expected cfg with valid connection")
(when (and (not load-data?)
(or lock-for-update? lock-for-share? skip-locked?))
(throw (IllegalArgumentException. "locking is incompatible when `load-data?` is false")))
(let [sql
(if load-data?
sql:get-file
sql:get-file-without-data)
sql
(cond
lock-for-update?
(str sql " FOR UPDATE of f")
lock-for-share?
(str sql " FOR SHARE of f")
:else
sql)
sql
(if skip-locked?
(str sql " SKIP LOCKED")
sql)
file
(db/get-with-sql conn [sql id]
{::db/throw-if-not-exists false
::db/remove-deleted (not include-deleted?)})
file
(-> file
(d/update-when :features db/decode-pgarray #{})
(d/update-when :metadata fdata/decode-metadata))]
(if file
(if load-data?
(let [file
(->> file
(fmigr/resolve-applied-migrations cfg)
(fdata/resolve-file-data cfg))
will-migrate?
(and migrate? (fmg/need-migration? file))]
(if decode?
(cond->> (fdata/decode-file-data cfg file)
(and realize? (not will-migrate?))
(fdata/realize cfg)
will-migrate?
(migrate-file cfg options))
file))
file)
(when-not (or skip-locked? (not throw-if-not-exists?))
(ex/raise :type :not-found
:code :object-not-found
:hint "database object not found"
:table :file
:file-id id)))))
(defn decode-row
"A generic decode row helper"
[{:keys [data features] :as row}]
(cond-> row
features (assoc :features (db/decode-pgarray features #{}))
data (assoc :data (blob/decode data))))
(defn get-file
"Get a file, resolve all features and apply migrations.
Useful when you plan to apply massive or non-surgical
operations on a file, because it removes the overhead of lazy
fetching and decoding."
[cfg file-id & {:as opts}]
(db/run! cfg get-file* file-id opts))
(defn clean-file-features
[file]
(update file :features (fn [features]
(if (set? features)
(-> features
(cfeat/migrate-legacy-features)
(set/difference cfeat/frontend-only-features)
(set/difference cfeat/backend-only-features))
#{}))))
(defn check-file-exists
[cfg id & {:keys [include-deleted?]
:or {include-deleted? false}
:as options}]
(db/get-with-sql cfg [sql:get-minimal-file id]
{:db/remove-deleted (not include-deleted?)}))
(def ^:private sql:file-permissions
"select fpr.is_owner,
fpr.is_admin,
fpr.can_edit
from file_profile_rel as fpr
inner join file as f on (f.id = fpr.file_id)
where fpr.file_id = ?
and fpr.profile_id = ?
union all
select tpr.is_owner,
tpr.is_admin,
tpr.can_edit
from team_profile_rel as tpr
inner join project as p on (p.team_id = tpr.team_id)
inner join file as f on (p.id = f.project_id)
where f.id = ?
and tpr.profile_id = ?
union all
select ppr.is_owner,
ppr.is_admin,
ppr.can_edit
from project_profile_rel as ppr
inner join file as f on (f.project_id = ppr.project_id)
where f.id = ?
and ppr.profile_id = ?")
(defn- get-file-permissions*
[conn profile-id file-id]
(when (and profile-id file-id)
(db/exec! conn [sql:file-permissions
file-id profile-id
file-id profile-id
file-id profile-id])))
(defn get-file-permissions
([conn profile-id file-id]
(let [rows (get-file-permissions* conn profile-id file-id)
is-owner (boolean (some :is-owner rows))
is-admin (boolean (some :is-admin rows))
can-edit (boolean (some :can-edit rows))]
(when (seq rows)
{:type :membership
:is-owner is-owner
:is-admin (or is-owner is-admin)
:can-edit (or is-owner is-admin can-edit)
:can-read true
:is-logged (some? profile-id)})))
([conn profile-id file-id share-id]
(let [perms (get-file-permissions conn profile-id file-id)
ldata (some-> (db/get* conn :share-link {:id share-id :file-id file-id})
(dissoc :flags)
(update :pages db/decode-pgarray #{}))]
;; NOTE: in the future, when share-link becomes more powerful and
;; allows us to specify which parts of the app are available, we
;; will probably need to tweak this function in order to expose
;; these flags to the frontend.
(cond
(some? perms) perms
(some? ldata) {:type :share-link
:can-read true
:pages (:pages ldata)
:is-logged (some? profile-id)
:who-comment (:who-comment ldata)
:who-inspect (:who-inspect ldata)}))))
[cfg file-id]
(db/run! cfg (fn [{:keys [::db/conn] :as cfg}]
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)]
(when-let [file (db/get* conn :file {:id file-id}
{::db/remove-deleted false})]
(-> file
(decode-row)
(update :data feat.fdata/process-pointers deref)
(update :data feat.fdata/process-objects (partial into {}))))))))
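A worked example of how the permission rows returned by the SQL union in get-file-permissions above collapse into a single permissions map (row values are illustrative):

;; Two rows for the same profile, e.g. one via the team and one via the
;; project. `some` ors each boolean column, and admin rights cascade into
;; :can-edit (ownership would cascade into both).
(def rows
  [{:is-owner false :is-admin true  :can-edit false}
   {:is-owner false :is-admin false :can-edit true}])

(let [is-owner (boolean (some :is-owner rows))
      is-admin (boolean (some :is-admin rows))
      can-edit (boolean (some :can-edit rows))]
  {:type :membership
   :is-owner is-owner
   :is-admin (or is-owner is-admin)
   :can-edit (or is-owner is-admin can-edit)
   :can-read true})
;; => {:type :membership, :is-owner false, :is-admin true,
;;     :can-edit true, :can-read true}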
(defn get-project
[cfg project-id]
(db/get cfg :project {:id project-id}))
(def ^:private sql:get-teams
"SELECT t.* FROM team AS t WHERE t.id = ANY(?)")
(defn get-teams
[cfg ids]
(let [conn (db/get-connection cfg)
ids (db/create-array conn "uuid" ids)]
(->> (db/exec! conn [sql:get-teams ids])
(map decode-row-features))))
(defn get-team
[cfg team-id]
(-> (db/get cfg :team {:id team-id})
(decode-row-features)))
(decode-row)))
(defn get-fonts
[cfg team-id]
@@ -435,8 +129,10 @@
"Given a set of file-id's, return all matching relations with the libraries"
[cfg ids]
(assert (set? ids) "expected a set of uuids")
(assert (every? uuid? ids) "expected a set of uuids")
(dm/assert!
"expected a set of uuids"
(and (set? ids)
(every? uuid? ids)))
(db/run! cfg (fn [{:keys [::db/conn]}]
(let [ids (db/create-array conn "uuid" ids)
@@ -471,10 +167,9 @@
(defn get-file-object-thumbnails
"Return all file object thumbnails for a given file."
[cfg file-id]
(->> (db/query cfg :file-tagged-object-thumbnail
{:file-id file-id
:deleted-at nil})
(not-empty)))
(db/query cfg :file-tagged-object-thumbnail
{:file-id file-id
:deleted-at nil}))
(defn get-file-thumbnail
"Return the thumbnail for the specified file-id"
@@ -485,93 +180,70 @@
:data nil}
{::sql/columns [:media-id :file-id :revn]}))
(def ^:private sql:get-missing-media-references
"SELECT fmo.*
FROM file_media_object AS fmo
WHERE fmo.id = ANY(?::uuid[])
AND file_id != ?")
(defn update-media-references!
"Given a file and a coll of media-refs, check if all provided
references are correct or fix them in-place"
[{:keys [::db/conn] :as cfg} {file-id :id :as file} media-refs]
(let [missing-index
(reduce (fn [result {:keys [id] :as fmo}]
(assoc result id
(-> fmo
(assoc :id (uuid/next))
(assoc :file-id file-id)
(dissoc :created-at)
(dissoc :deleted-at))))
{}
(db/exec! conn [sql:get-missing-media-references
(->> (into #{} xf-map-id media-refs)
(db/create-array conn "uuid"))
file-id]))
(def ^:private
xform:collect-media-id
(comp
(map :objects)
(mapcat vals)
(mapcat (fn [obj]
;; NOTE: because of some bug, we ended up with
;; many shape types having the ability to
;; have a fill-image attribute (which was initially
;; designed for :path shapes).
(sequence
(keep :id)
(concat [(:fill-image obj)
(:metadata obj)]
(map :fill-image (:fills obj))
(map :stroke-image (:strokes obj))
(->> (:content obj)
(tree-seq map? :children)
(mapcat :fills)
(map :fill-image))))))))
lookup-index
(fn [id]
(if-let [mobj (get missing-index id)]
(do
(l/trc :hint "lookup index"
:file-id (str file-id)
:id (str id)
:result (str (get mobj :id)))
(get mobj :id))
id))
update-shapes
(fn [data {:keys [page-id shape-id]}]
(d/update-in-when data [:pages-index page-id :objects shape-id] cfh/relink-refs lookup-index))
file
(update file :data #(reduce update-shapes % media-refs))]
(doseq [[old-id item] missing-index]
(l/dbg :hint "create missing references"
:file-id (str file-id)
:old-id (str old-id)
:id (str (:id item)))
(db/insert! conn :file-media-object item
{::db/return-keys false}))
file))
(def sql:get-file-media
"SELECT * FROM file_media_object WHERE id = ANY(?)")
(defn get-file-media*
[{:keys [::db/conn] :as cfg} {:keys [data id] :as file}]
(let [used (cfh/collect-used-media data)
used (db/create-array conn "uuid" used)]
(->> (db/exec! conn [sql:get-file-media used])
(mapv (fn [row] (assoc row :file-id id))))))
(defn collect-used-media
"Given a fdata (file data), returns all media references."
[data]
(-> #{}
(into xform:collect-media-id (vals (:pages-index data)))
(into xform:collect-media-id (vals (:components data)))
(into (keys (:media data)))))
(defn get-file-media
[cfg file]
(db/run! cfg get-file-media* file))
[cfg {:keys [data id] :as file}]
(db/run! cfg (fn [{:keys [::db/conn]}]
(let [ids (collect-used-media data)
ids (db/create-array conn "uuid" ids)
sql (str "SELECT * FROM file_media_object WHERE id = ANY(?)")]
(def ^:private sql:get-team-files-ids
;; We assoc the file-id again to the file-media-object row
;; because there are cases where used objects refer to other
;; files and we need to ensure in the exportation process that
;; all ids match
(->> (db/exec! conn [sql ids])
(mapv #(assoc % :file-id id)))))))
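To illustrate what collect-used-media gathers, here is a minimal, made-up fdata with one image shape, one stroke image and one entry in the :media section (uuids are placeholders):

(def fdata
  {:pages-index
   {#uuid "00000000-0000-0000-0000-000000000010"
    {:objects
     {#uuid "00000000-0000-0000-0000-000000000011"
      {:type :image
       :metadata {:id #uuid "00000000-0000-0000-0000-0000000000a1"}}
      #uuid "00000000-0000-0000-0000-000000000012"
      {:type :rect
       :strokes [{:stroke-image {:id #uuid "00000000-0000-0000-0000-0000000000a2"}}]}}}}
   :components {}
   :media {#uuid "00000000-0000-0000-0000-0000000000a3" {:name "logo.png"}}})

(collect-used-media fdata)
;; => a set with the three placeholder uuids above, i.e. the ids referenced
;;    from shape metadata, fills/strokes and the :media section itself,
;;    which is exactly what gets handed to the media query.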
(def ^:private sql:get-team-files
"SELECT f.id FROM file AS f
JOIN project AS p ON (p.id = f.project_id)
WHERE p.team_id = ?")
(defn get-team-files-ids
(defn get-team-files
"Get a set of file ids for the specified team-id"
[{:keys [::db/conn]} team-id]
(->> (db/exec! conn [sql:get-team-files-ids team-id])
(->> (db/exec! conn [sql:get-team-files team-id])
(into #{} xf-map-id)))
(def ^:private sql:get-team-projects
"SELECT p.* FROM project AS p
"SELECT p.id FROM project AS p
WHERE p.team_id = ?
AND p.deleted_at IS NULL")
(defn get-team-projects
"Get a set of project ids for the team"
[cfg team-id]
(->> (db/exec! cfg [sql:get-team-projects team-id])
[{:keys [::db/conn]} team-id]
(->> (db/exec! conn [sql:get-team-projects team-id])
(into #{} xf-map-id)))
(def ^:private sql:get-project-files
@@ -585,16 +257,53 @@
(->> (db/exec! conn [sql:get-project-files project-id])
(into #{} xf-map-id)))
(defn remap-thumbnail-object-id
[object-id file-id]
(str/replace-first object-id #"^(.*?)/" (str file-id "/")))
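A quick worked example of the object-id remapping above (ids are shortened strings for readability; the real values are uuid-prefixed):

;; The object-id is prefixed with the owning file id; replacing everything
;; up to the first "/" rebinds the thumbnail to the new file id.
(remap-thumbnail-object-id "old-file-id/page-1/frame-7" "new-file-id")
;; => "new-file-id/page-1/frame-7"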
(defn- relink-shapes
"A function responsible for analyzing all file data and
replacing the old :component-file references with the new
ones, using the provided file index."
[data]
(cfh/relink-refs data lookup-index))
(letfn [(process-map-form [form]
(cond-> form
;; Relink image shapes
(and (map? (:metadata form))
(= :image (:type form)))
(update-in [:metadata :id] lookup-index)
;; Relink paths with fill image
(map? (:fill-image form))
(update-in [:fill-image :id] lookup-index)
;; This covers old shapes and the new :fills.
(uuid? (:fill-color-ref-file form))
(update :fill-color-ref-file lookup-index)
;; This covers the old shapes and the new :strokes
(uuid? (:stroke-color-ref-file form))
(update :stroke-color-ref-file lookup-index)
;; This covers all text shapes that have typography referenced
(uuid? (:typography-ref-file form))
(update :typography-ref-file lookup-index)
;; This covers the component instance links
(uuid? (:component-file form))
(update :component-file lookup-index)
;; This covers the shadows and grids (they have directly
;; the :file-id prop)
(uuid? (:file-id form))
(update :file-id lookup-index)))
(process-form [form]
(if (map? form)
(try
(process-map-form form)
(catch Throwable cause
(l/warn :hint "failed form" :form (pr-str form) ::l/sync? true)
(throw cause)))
form))]
(walk/postwalk process-form data)))
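The postwalk-based variant of relink-shapes above visits every nested form once; a stripped-down sketch of the same idea, using a made-up toy index, looks like this:

(require '[clojure.walk :as walk])

;; Any known old id is rewritten, unknown ids pass through unchanged.
(def toy-index {"old-lib-id" "new-lib-id"})

(defn relink [data]
  (walk/postwalk
   (fn [form]
     (if (and (map? form) (contains? form :component-file))
       (update form :component-file #(get toy-index % %))
       form))
   data))

(relink {:objects {1 {:component-file "old-lib-id"}
                   2 {:component-file "unrelated-id"}}})
;; => {:objects {1 {:component-file "new-lib-id"}
;;               2 {:component-file "unrelated-id"}}}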
(defn- relink-media
"A function responsible for processing the :media attr of file data and
@@ -625,230 +334,118 @@
[cfg data file-id]
(let [library-ids (get-libraries cfg [file-id])]
(reduce (fn [data library-id]
(if-let [library (get-file cfg library-id :include-deleted? true)]
(ctf/absorb-assets data (:data library))
data))
(let [library (get-file cfg library-id)]
(ctf/absorb-assets data (:data library))))
data
library-ids)))
(defn disable-database-timeouts!
[cfg]
(let [conn (db/get-connection cfg)]
(db/exec-one! conn ["SET LOCAL idle_in_transaction_session_timeout = 0"])
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])))
(defn invalidate-thumbnails
[cfg file-id]
(let [storage (sto/resolve cfg)
sql-1
(str "update file_tagged_object_thumbnail "
" set deleted_at = now() "
" where file_id=? returning media_id")
sql-2
(str "update file_thumbnail "
" set deleted_at = now() "
" where file_id=? returning media_id")]
(run! #(sto/touch-object! storage %)
(sequence
(keep :media-id)
(concat
(db/exec! cfg [sql-1 file-id])
(db/exec! cfg [sql-2 file-id]))))))
(defn- fix-version
[file]
(let [file (fmg/fix-version file)]
;; FIXME: We're temporarily activating all migrations because a
;; problem in the environments messed up the version numbers.
;; When this problem is fixed, delete the following line.
(if (> (:version file) 22)
(assoc file :version 22)
file)))
(defn process-file
[cfg {:keys [id] :as file}]
(let [libs (delay (get-resolved-file-libraries cfg file))]
(-> file
(update :data (fn [fdata]
(-> fdata
(assoc :id id)
(dissoc :recent-colors))))
(update :data (fn [fdata]
(-> fdata
(update :pages-index relink-shapes)
(update :components relink-shapes)
(update :media relink-media)
(update :colors relink-colors)
(d/without-nils))))
(fmg/migrate-file libs)
[{:keys [id] :as file}]
(-> file
(fix-version)
(update :data (fn [fdata]
(-> fdata
(assoc :id id)
(dissoc :recent-colors))))
(fmg/migrate-file)
(update :data (fn [fdata]
(-> fdata
(update :pages-index relink-shapes)
(update :components relink-shapes)
(update :media relink-media)
(update :colors relink-colors)
(d/without-nils))))))
;; NOTE: this is necessary because when we are creating a new
;; file from an imported artifact or a cloned file there are no
;; migrations registered in the database, so we need to persist
;; all of them, not only the applied ones
(vary-meta dissoc ::fmg/migrated))))
(defn- upsert-file!
[conn file]
(let [sql (str "INSERT INTO file (id, project_id, name, revn, version, is_shared, data, created_at, modified_at) "
"VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) "
"ON CONFLICT (id) DO UPDATE SET data=?, version=?")]
(db/exec-one! conn [sql
(:id file)
(:project-id file)
(:name file)
(:revn file)
(:version file)
(:is-shared file)
(:data file)
(:created-at file)
(:modified-at file)
(:data file)
(:version file)])))
(defn- encode-file
[cfg {:keys [id features] :as file}]
(let [file (if (and (contains? features "fdata/objects-map")
(:data file))
(fdata/enable-objects-map file)
file)
(defn persist-file!
"Applies all the final validations and persists the file."
[{:keys [::db/conn ::timestamp] :as cfg} {:keys [id] :as file}]
file (if (and (contains? features "fdata/pointer-map")
(:data file))
(dm/assert!
"expected valid timestamp"
(dt/instant? timestamp))
(binding [pmap/*tracked* (pmap/create-tracked :inherit true)]
(let [file (fdata/enable-pointer-map file)]
(fdata/persist-pointers! cfg id)
file))
file)]
(let [file (-> file
(assoc :created-at timestamp)
(assoc :modified-at timestamp)
(assoc :ignore-sync-until (dt/plus timestamp (dt/duration {:seconds 5})))
(update :features
(fn [features]
(let [features (cfeat/check-supported-features! features)]
(-> (::features cfg #{})
(set/difference cfeat/frontend-only-features)
(set/union features))))))
(-> file
(d/update-when :features into-array)
(d/update-when :data blob/encode))))
_ (when (contains? cf/flags :file-schema-validation)
(fval/validate-file-schema! file))
(defn- file->params
[file]
(-> (select-keys file file-attrs)
(assoc :data nil)
(dissoc :team-id)
(dissoc :migrations)))
_ (when (contains? cf/flags :soft-file-schema-validation)
(let [result (ex/try! (fval/validate-file-schema! file))]
(when (ex/exception? result)
(l/error :hint "file schema validation error" :cause result))))
(defn- file->file-data-params
[{:keys [id] :as file} & {:as opts}]
(let [created-at (or (:created-at file) (ct/now))
modified-at (or (:modified-at file) created-at)]
(d/without-nils
{:id id
:type "main"
:file-id id
:data (:data file)
:metadata (:metadata file)
:created-at created-at
:modified-at modified-at})))
file (if (contains? (:features file) "fdata/objects-map")
(feat.fdata/enable-objects-map file)
file)
(defn insert-file!
"Insert a new file into the database table. Expects a non-encoded file.
Returns nil."
[{:keys [::db/conn] :as cfg} file & {:as opts}]
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
file (if (contains? (:features file) "fdata/pointer-map")
(binding [pmap/*tracked* (pmap/create-tracked)]
(let [file (feat.fdata/enable-pointer-map file)]
(feat.fdata/persist-pointers! cfg id)
file))
file)
(when (:migrations file)
(fmigr/upsert-migrations! conn file))
(let [file (encode-file cfg file)]
(db/insert! conn :file
(file->params file)
(assoc opts ::db/return-keys false))
(->> (file->file-data-params file)
(fdata/upsert! cfg))
nil))
(defn update-file!
"Update an existing file on the database. Expects a non-encoded file."
[{:keys [::db/conn] :as cfg} {:keys [id] :as file} & {:as opts}]
(if (::reset-migrations? opts false)
(fmigr/reset-migrations! conn file)
(fmigr/upsert-migrations! conn file))
(let [file
(encode-file cfg file)
file-params
(file->params (dissoc file :id))
file-data-params
(file->file-data-params file)]
(db/update! conn :file file-params
{:id id}
{::db/return-keys false})
(fdata/upsert! cfg file-data-params)
nil))
(defn save-file!
"Applies all the final validations and persists the file. Binfile
specific; should not be used outside of the binfile domain.
Returns nil"
[{:keys [::timestamp] :as cfg} file & {:as opts}]
(assert (ct/inst? timestamp) "expected valid timestamp")
(let [file (-> file
(assoc :created-at timestamp)
(assoc :modified-at timestamp)
(cond-> (not (::overwrite cfg))
(assoc :ignore-sync-until (ct/plus timestamp (ct/duration {:seconds 5}))))
(update :features
(fn [features]
(-> (::features cfg #{})
(set/union features)
;; We never want to store
;; frontend-only features on file
(set/difference cfeat/frontend-only-features)))))]
(when (contains? cf/flags :file-schema-validation)
(fval/validate-file-schema! file))
(when (contains? cf/flags :soft-file-schema-validation)
(let [result (ex/try! (fval/validate-file-schema! file))]
(when (ex/exception? result)
(l/error :hint "file schema validation error" :cause result))))
params (-> file
(update :features db/encode-pgarray conn "text")
(update :data blob/encode))]
(if (::overwrite cfg)
(update-file! cfg file (assoc opts ::reset-migrations? true))
(insert-file! cfg file opts))))
(upsert-file! conn params)
(db/insert! conn :file params ::db/return-keys false))
(def ^:private sql:get-file-libraries
"WITH RECURSIVE libs AS (
SELECT fl.*, flr.synced_at
FROM file AS fl
JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
WHERE flr.file_id = ?::uuid
UNION
SELECT fl.*, flr.synced_at
FROM file AS fl
JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
JOIN libs AS l ON (flr.file_id = l.id)
)
SELECT l.id,
l.features,
l.project_id,
p.team_id,
l.created_at,
l.modified_at,
l.deleted_at,
l.name,
l.revn,
l.vern,
l.synced_at,
l.is_shared,
l.version
FROM libs AS l
INNER JOIN project AS p ON (p.id = l.project_id)
WHERE l.deleted_at IS NULL;")
file))
(defn get-file-libraries
[conn file-id]
(into []
(comp
;; FIXME: setting :is-indirect to false on all rows looks
;; completely useless
(map #(assoc % :is-indirect false))
(map decode-row-features))
(db/exec! conn [sql:get-file-libraries file-id])))
(defn apply-pending-migrations!
"Apply already registered pending migrations to files"
[cfg]
(doseq [[feature file-id] (-> *state* deref :pending-to-migrate)]
(case feature
"components/v2"
(feat.compv2/migrate-file! cfg file-id
:validate? (::validate cfg true)
:skip-on-graphic-error? true)
(defn get-resolved-file-libraries
"Get all the file libraries, including the file itself. Returns an
instance of LoadableWeakValueMap that avoids keeping strong references
to the loaded libraries and reduces possible memory pressure from
having all these libraries loaded at the same time while processing
file validation or file migration.
"fdata/shape-data-type"
nil
This still requires at least one library at a time to be loaded while
it is being accessed, but it is a considerable improvement over having
to load all the libraries at the same time."
[{:keys [::db/conn] :as cfg} {:keys [id] :as file}]
(let [library-ids (->> (get-file-libraries conn (:id file))
(map :id)
(cons (:id file)))
load-fn #(get-file cfg % :migrate? false)]
(weak/loadable-weak-value-map library-ids load-fn {id file})))
(ex/raise :type :internal
:code :no-migration-defined
:hint (str/ffmt "no migration for feature '%' on file importation" feature)
:feature feature))))


@@ -1,42 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.binfile.migrations
"Binfile-related migrations handling"
(:require
[app.binfile.common :as bfc]
[app.common.exceptions :as ex]
[app.common.features :as cfeat]
[clojure.set :as set]
[cuerdas.core :as str]))
(defn register-pending-migrations!
"All features that are enabled and require an explicit migration are
added to the state for a later migration step."
[cfg {:keys [id features] :as file}]
(doseq [feature (-> (::features cfg)
(set/difference cfeat/no-migration-features)
(set/difference cfeat/backend-only-features)
(set/difference features))]
(vswap! bfc/*state* update :pending-to-migrate (fnil conj []) [feature id]))
file)
(defn apply-pending-migrations!
"Apply already registered pending migrations to files"
[_cfg]
(doseq [[feature _file-id] (-> bfc/*state* deref :pending-to-migrate)]
(case feature
"components/v2"
nil
"fdata/shape-data-type"
nil
(ex/raise :type :internal
:code :no-migration-defined
:hint (str/ffmt "no migration for feature '%' on file importation" feature)
:feature feature))))
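For reference, the pending-to-migrate entries consumed above are accumulated by register-pending-migrations! as [feature file-id] pairs inside the shared binfile state; an illustrative (made-up) snapshot of that state could look like:

;; Hypothetical (deref bfc/*state*) after registering two imported files
;; that still need the components/v2 migration.
{:files #{#uuid "00000000-0000-0000-0000-000000000001"
          #uuid "00000000-0000-0000-0000-000000000002"}
 :pending-to-migrate [["components/v2" #uuid "00000000-0000-0000-0000-000000000001"]
                      ["components/v2" #uuid "00000000-0000-0000-0000-000000000002"]]}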


@@ -9,7 +9,6 @@
(:refer-clojure :exclude [assert])
(:require
[app.binfile.common :as bfc]
[app.binfile.migrations :as bfm]
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
@@ -17,7 +16,6 @@
[app.common.fressian :as fres]
[app.common.logging :as l]
[app.common.spec :as us]
[app.common.time :as ct]
[app.common.types.file :as ctf]
[app.common.uuid :as uuid]
[app.config :as cf]
@@ -31,6 +29,7 @@
[app.storage.tmp :as tmp]
[app.tasks.file-gc]
[app.util.events :as events]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[clojure.java.io :as jio]
[clojure.set :as set]
@@ -50,6 +49,15 @@
(set! *warn-on-reflection* true)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; DEFAULTS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Threshold in bytes (2 MiB) above which we switch from in-memory
;; byte arrays to temporary files.
(def temp-file-threshold
(* 1024 1024 2))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; LOW LEVEL STREAM IO API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -57,6 +65,11 @@
(def ^:const buffer-size (:xnio/buffer-size yt/defaults))
(def ^:const penpot-magic-number 800099563638710213)
;; A maximum (storage) object size allowed: 100MiB
(def ^:const max-object-size
(* 1024 1024 100))
(def ^:dynamic *position* nil)
(defn get-mark
@@ -223,7 +236,7 @@
(defn copy-stream!
[^OutputStream output ^InputStream input ^long size]
(let [written (io/copy input output :size size)]
(let [written (io/copy! input output :size size)]
(l/trace :fn "copy-stream!" :position @*position* :size size :written written ::l/sync? true)
(swap! *position* + written)
written))
@@ -245,18 +258,18 @@
p (tmp/tempfile :prefix "penpot.binfile.")]
(assert-mark m :stream)
(when (> s bfc/max-object-size)
(when (> s max-object-size)
(ex/raise :type :validation
:code :max-file-size-reached
:hint (str/ffmt "unable to import storage object with size % bytes" s)))
(if (> s bfc/temp-file-threshold)
(if (> s temp-file-threshold)
(with-open [^OutputStream output (io/output-stream p)]
(let [readed (io/copy input output :offset 0 :size s)]
(let [readed (io/copy! input output :offset 0 :size s)]
(l/trace :fn "read-stream*!" :expected s :readed readed :position @*position* ::l/sync? true)
(swap! *position* + readed)
[s p]))
[s (io/read input :size s)])))
[s (io/read-as-bytes input :size s)])))
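To summarize the size handling above, here is a simplified sketch of the decision only (not the real stream plumbing):

(def temp-file-threshold (* 1024 1024 2))   ;; 2 MiB
(def max-object-size     (* 1024 1024 100)) ;; 100 MiB

(defn stream-strategy [size]
  (cond
    (> size max-object-size)     :reject             ;; :max-file-size-reached
    (> size temp-file-threshold) :spill-to-tempfile  ;; write to a tmp file
    :else                        :read-into-memory)) ;; read-as-bytes

(stream-strategy (* 1024 512))      ;; => :read-into-memory (512 KiB)
(stream-strategy (* 1024 1024 10))  ;; => :spill-to-tempfile (10 MiB)
(stream-strategy (* 1024 1024 200)) ;; => :reject (200 MiB)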
(defmacro assert-read-label!
[input expected-label]
@@ -299,7 +312,7 @@
(defmulti write-section ::section)
(defn write-export!
[{:keys [::bfc/include-libraries ::bfc/embed-assets] :as cfg}]
[{:keys [::include-libraries ::embed-assets] :as cfg}]
(when (and include-libraries embed-assets)
(throw (IllegalArgumentException.
"the `include-libraries` and `embed-assets` options are mutually exclusive")))
@@ -324,7 +337,7 @@
[:v1/metadata :v1/files :v1/rels :v1/sobjects]))))
(defmethod write-section :v1/metadata
[{:keys [::output ::bfc/ids ::bfc/include-libraries] :as cfg}]
[{:keys [::output ::ids ::include-libraries] :as cfg}]
(if-let [fids (get-files cfg ids)]
(let [lids (when include-libraries
(bfc/get-libraries cfg ids))
@@ -336,7 +349,7 @@
:hint "unable to retrieve files for export")))
(defmethod write-section :v1/files
[{:keys [::output ::bfc/embed-assets ::bfc/include-libraries] :as cfg}]
[{:keys [::output ::embed-assets ::include-libraries] :as cfg}]
;; Initialize SIDS with empty vector
(vswap! bfc/*state* assoc :sids [])
@@ -346,7 +359,7 @@
thumbnails (->> (bfc/get-file-object-thumbnails cfg file-id)
(mapv #(dissoc % :file-id)))
file (cond-> (bfc/get-file cfg file-id :realize? true)
file (cond-> (bfc/get-file cfg file-id)
detach?
(-> (ctf/detach-external-references file-id)
(dissoc :libraries))
@@ -368,12 +381,10 @@
::l/sync? true)
(doseq [item media]
(l/dbg :hint "write penpot file media object"
:id (:id item) ::l/sync? true))
(l/dbg :hint "write penpot file media object" :id (:id item) ::l/sync? true))
(doseq [item thumbnails]
(l/dbg :hint "write penpot file object thumbnail"
:media-id (str (:media-id item)) ::l/sync? true))
(l/dbg :hint "write penpot file object thumbnail" :media-id (str (:media-id item)) ::l/sync? true))
(doto output
(write-obj! file)
@@ -383,7 +394,7 @@
(vswap! bfc/*state* update :sids into bfc/xf-map-media-id thumbnails))))
(defmethod write-section :v1/rels
[{:keys [::output ::bfc/include-libraries] :as cfg}]
[{:keys [::output ::include-libraries] :as cfg}]
(let [ids (-> bfc/*state* deref :files set)
rels (when include-libraries
(bfc/get-files-rels cfg ids))]
@@ -422,19 +433,25 @@
(defmulti read-import ::version)
(defmulti read-section ::section)
(s/def ::bfc/profile-id ::us/uuid)
(s/def ::bfc/project-id ::us/uuid)
(s/def ::bfc/input io/input-stream?)
(s/def ::profile-id ::us/uuid)
(s/def ::project-id ::us/uuid)
(s/def ::input io/input-stream?)
(s/def ::overwrite? (s/nilable ::us/boolean))
(s/def ::ignore-index-errors? (s/nilable ::us/boolean))
;; FIXME: replace with schema
(s/def ::read-import-options
(s/keys :req [::db/pool ::sto/storage ::bfc/project-id ::bfc/profile-id ::bfc/input]
:opt [::ignore-index-errors?]))
(s/keys :req [::db/pool ::sto/storage ::project-id ::profile-id ::input]
:opt [::overwrite? ::ignore-index-errors?]))
(defn read-import!
"Do the importation of the specified resource in penpot custom binary
format."
[{:keys [::bfc/input ::bfc/timestamp] :or {timestamp (ct/now)} :as options}]
format. There are some options to customize the importation
behavior:
`::bfc/overwrite`: if true, instead of creating new files and remapping id references,
it reuses all ids and updates existing objects; defaults to `false`."
[{:keys [::input ::bfc/timestamp] :or {timestamp (dt/now)} :as options}]
(dm/assert!
"expected input stream"
@@ -442,15 +459,15 @@
(dm/assert!
"expected valid instant"
(ct/inst? timestamp))
(dt/instant? timestamp))
(let [version (read-header! input)]
(read-import (assoc options ::version version ::bfc/timestamp timestamp))))
(defn- read-import-v1
[{:keys [::db/conn ::bfc/project-id ::bfc/profile-id ::bfc/input] :as cfg}]
(bfc/disable-database-timeouts! cfg)
[{:keys [::db/conn ::project-id ::profile-id ::input] :as cfg}]
(db/exec-one! conn ["SET LOCAL idle_in_transaction_session_timeout = 0"])
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
(pu/with-open [input (zstd-input-stream input)
input (io/data-input-stream input)]
@@ -468,13 +485,13 @@
(let [options (-> cfg
(assoc ::bfc/features features)
(assoc ::section section)
(assoc ::bfc/input input))]
(assoc ::input input))]
(binding [bfc/*options* options]
(events/tap :progress {:op :import :section section})
(read-section options))))
[:v1/metadata :v1/files :v1/rels :v1/sobjects])
(bfm/apply-pending-migrations! cfg)
(bfc/apply-pending-migrations! cfg)
;; Knowing that the ids of the created files are in index,
;; just lookup them and return it as a set
@@ -486,7 +503,7 @@
(db/tx-run! options read-import-v1))
(defmethod read-section :v1/metadata
[{:keys [::bfc/input]}]
[{:keys [::input]}]
(let [{:keys [version files]} (read-obj! input)]
(l/dbg :hint "metadata read"
:version (:full version)
@@ -503,8 +520,18 @@
(update :object-id #(str/replace-first % #"^(.*?)/" (str file-id "/")))))
thumbnails))
(defn- clean-features
[file]
(update file :features (fn [features]
(if (set? features)
(-> features
(cfeat/migrate-legacy-features)
(set/difference cfeat/backend-only-features))
#{}))))
(defmethod read-section :v1/files
[{:keys [::bfc/input ::bfc/project-id ::bfc/name] :as system}]
[{:keys [::db/conn ::input ::project-id ::bfc/overwrite ::name] :as system}]
(doseq [[idx expected-file-id] (d/enumerate (-> bfc/*state* deref :files))]
(let [file (read-obj! input)
media (read-obj! input)
@@ -512,7 +539,7 @@
file-id (:id file)
file-id' (bfc/lookup-index file-id)
file (bfc/clean-file-features file)
file (clean-features file)
thumbnails (:thumbnails file)]
(when (not= file-id expected-file-id)
@@ -532,9 +559,7 @@
(when (seq thumbnails)
(let [thumbnails (remap-thumbnails thumbnails file-id')]
(l/dbg :hint "updated index with thumbnails"
:total (count thumbnails)
::l/sync? true)
(l/dbg :hint "updated index with thumbnails" :total (count thumbnails) ::l/sync? true)
(vswap! bfc/*state* update :thumbnails bfc/into-vec thumbnails)))
(when (seq media)
@@ -551,8 +576,8 @@
(cond-> (and (= idx 0) (some? name))
(assoc :name name))
(assoc :project-id project-id)
(dissoc :thumbnails))
file (bfc/process-file system file)]
(dissoc :thumbnails)
(bfc/process-file))]
;; All features that are enabled and require an explicit migration are
;; added to the state for a later migration step.
@@ -562,12 +587,15 @@
(vswap! bfc/*state* update :pending-to-migrate (fnil conj []) [feature file-id']))
(l/dbg :hint "create file" :id (str file-id') ::l/sync? true)
(bfc/save-file! system file ::db/return-keys false)
(bfc/persist-file! system file)
(when overwrite
(db/delete! conn :file-thumbnail {:file-id file-id'}))
file-id'))))
(defmethod read-section :v1/rels
[{:keys [::db/conn ::bfc/input ::bfc/timestamp]}]
[{:keys [::db/conn ::input ::bfc/timestamp]}]
(let [rels (read-obj! input)
ids (into #{} (-> bfc/*state* deref :files))]
;; Insert all file relations
@@ -591,7 +619,7 @@
::l/sync? true))))))
(defmethod read-section :v1/sobjects
[{:keys [::db/conn ::bfc/input ::bfc/timestamp] :as cfg}]
[{:keys [::db/conn ::input ::bfc/overwrite ::bfc/timestamp] :as cfg}]
(let [storage (sto/resolve cfg)
ids (read-obj! input)
thumb? (into #{} (map :media-id) (:thumbnails @bfc/*state*))]
@@ -644,7 +672,8 @@
(-> item
(assoc :file-id file-id)
(d/update-when :media-id bfc/lookup-index)
(d/update-when :thumbnail-id bfc/lookup-index))))))
(d/update-when :thumbnail-id bfc/lookup-index))
{::db/on-conflict-do-nothing? overwrite}))))
(doseq [item (:thumbnails @bfc/*state*)]
(let [item (update item :media-id bfc/lookup-index)]
@@ -653,7 +682,8 @@
:media-id (str (:media-id item))
:object-id (:object-id item)
::l/sync? true)
(db/insert! conn :file-tagged-object-thumbnail item)))))
(db/insert! conn :file-tagged-object-thumbnail item
{::db/on-conflict-do-nothing? overwrite})))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; HIGH LEVEL API
@@ -663,26 +693,26 @@
"Do the exportation of a specified file in custom penpot binary
format. There are some options available to customize the output:
`::bfc/include-libraries`: in addition to the specified file, all the
`::include-libraries`: in addition to the specified file, all the
linked libraries will also be included (including transitive
dependencies).
`::bfc/embed-assets`: instead of including the libraries, embed in the
`::embed-assets`: instead of including the libraries, embed in the
same file library all assets used from external libraries."
[{:keys [::bfc/ids] :as cfg} output]
[{:keys [::ids] :as cfg} output]
(dm/assert!
"expected a set of uuid's for `::bfc/ids` parameter"
"expected a set of uuid's for `::ids` parameter"
(and (set? ids)
(every? uuid? ids)))
(dm/assert!
"expected instance of jio/IOFactory for `input`"
(io/coercible? output))
(satisfies? jio/IOFactory output))
(let [id (uuid/next)
tp (ct/tpoint)
tp (dt/tpoint)
ab (volatile! false)
cs (volatile! nil)]
(try
@@ -708,26 +738,26 @@
:cause @cs)))))
(defn import-files!
[{:keys [::bfc/input] :as cfg}]
[cfg input]
(dm/assert!
"expected valid profile-id and project-id on `cfg`"
(and (uuid? (::bfc/profile-id cfg))
(uuid? (::bfc/project-id cfg))))
(and (uuid? (::profile-id cfg))
(uuid? (::project-id cfg))))
(dm/assert!
"expected instance of jio/IOFactory for `input`"
(satisfies? jio/IOFactory input))
(let [id (uuid/next)
tp (ct/tpoint)
tp (dt/tpoint)
cs (volatile! nil)]
(l/info :hint "import: started" :id (str id))
(try
(binding [*position* (atom 0)]
(pu/with-open [input (io/input-stream input)]
(read-import! (assoc cfg ::bfc/input input))))
(read-import! (assoc cfg ::input input))))
(catch ZstdIOException cause
(ex/raise :type :validation
@@ -742,6 +772,6 @@
(finally
(l/info :hint "import: terminated"
:id (str id)
:elapsed (ct/format-duration (tp))
:elapsed (dt/format-duration (tp))
:error? (some? @cs))))))


@@ -13,7 +13,6 @@
[app.common.data :as d]
[app.common.features :as cfeat]
[app.common.logging :as l]
[app.common.time :as ct]
[app.common.transit :as t]
[app.common.uuid :as uuid]
[app.config :as cf]
@@ -24,6 +23,7 @@
[app.storage :as sto]
[app.storage.tmp :as tmp]
[app.util.events :as events]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[clojure.set :as set]
[cuerdas.core :as str]
@@ -141,19 +141,20 @@
(write! cfg :team-font-variant id font))))
(defn- write-project!
[cfg project]
(events/tap :progress
{:op :export
:section :write-project
:id (:id project)
:name (:name project)})
(l/trc :hint "write" :obj "project" :id (str (:id project)))
(write! cfg :project (str (:id project)) project)
(vswap! bfc/*state* update :projects conj (:id project)))
[cfg project-id]
(let [project (bfc/get-project cfg project-id)]
(events/tap :progress
{:op :export
:section :write-project
:id project-id
:name (:name project)})
(l/trc :hint "write" :obj "project" :id (str project-id))
(write! cfg :project (str project-id) project)
(vswap! bfc/*state* update :projects conj project-id)))
(defn- write-file!
[cfg file-id]
(let [file (bfc/get-file cfg file-id :realize? true)
(let [file (bfc/get-file cfg file-id)
thumbs (bfc/get-file-object-thumbnails cfg file-id)
media (bfc/get-file-media cfg file)
rels (bfc/get-files-rels cfg #{file-id})]
@@ -190,7 +191,7 @@
[{:keys [::sto/storage] :as cfg} id]
(let [sobj (sto/get-object storage id)
data (with-open [input (sto/get-object-data storage sobj)]
(io/read input))]
(io/read-as-bytes input))]
(l/trc :hint "write" :obj "storage-object" :id (str id) :size (:size sobj))
(write! cfg :storage-object id (meta sobj) data)))
@@ -281,8 +282,8 @@
(let [file (-> (read-obj cfg :file file-id)
(update :id bfc/lookup-index)
(update :project-id bfc/lookup-index))
file (bfc/process-file cfg file)]
(update :project-id bfc/lookup-index)
(bfc/process-file))]
(events/tap :progress
{:op :import
@@ -297,7 +298,7 @@
(set/difference (:features file)))]
(vswap! bfc/*state* update :pending-to-migrate (fnil conj []) [feature (:id file)]))
(bfc/save-file! cfg file ::db/return-keys false))
(bfc/persist-file! cfg file))
(doseq [thumbnail (read-seq cfg :file-object-thumbnail file-id)]
(let [thumbnail (-> thumbnail
@@ -344,7 +345,7 @@
(defn export-team!
[cfg team-id]
(let [id (uuid/next)
tp (ct/tpoint)
tp (dt/tpoint)
cfg (create-database cfg)]
(l/inf :hint "start"
@@ -362,7 +363,7 @@
(bfc/get-team-projects cfg team-id))
(run! (partial write-file! cfg)
(bfc/get-team-files-ids cfg team-id))
(bfc/get-team-files cfg team-id))
(run! (partial write-storage-object! cfg)
(-> bfc/*state* deref :storage-objects))
@@ -378,15 +379,15 @@
(l/inf :hint "end"
:operation "export"
:id (str id)
:elapsed (ct/format-duration elapsed)))))))
:elapsed (dt/format-duration elapsed)))))))
(defn import-team!
[cfg path]
(let [id (uuid/next)
tp (ct/tpoint)
tp (dt/tpoint)
cfg (-> (create-database cfg path)
(assoc ::bfc/timestamp (ct/now)))]
(assoc ::bfc/timestamp (dt/now)))]
(l/inf :hint "start"
:operation "import"
@@ -434,4 +435,4 @@
(l/inf :hint "end"
:operation "import"
:id (str id)
:elapsed (ct/format-duration elapsed)))))))
:elapsed (dt/format-duration elapsed)))))))


File diff suppressed because it is too large


@@ -5,16 +5,16 @@
;; Copyright (c) KALEIDOS INC
(ns app.config
"Configuration management."
(:refer-clojure :exclude [get])
(:require
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.flags :as flags]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uri :as u]
[app.common.version :as v]
[app.util.overrides]
[app.util.time :as dt]
[clojure.core :as c]
[clojure.java.io :as io]
[cuerdas.core :as str]
@@ -26,11 +26,11 @@
[_ data]
(d/without-nils data))
(defmethod ig/expand-key :default
[k v]
{k (if (map? v)
(d/without-nils v)
v)})
(defmethod ig/prep-key :default
[_ data]
(if (map? data)
(d/without-nils data)
data))
(def default
{:database-uri "postgresql://postgres/penpot"
@@ -42,31 +42,27 @@
:rpc-rlimit-config "resources/rlimit.edn"
:rpc-climit-config "resources/climit.edn"
:auto-file-snapshot-total 10
:auto-file-snapshot-every 5
:auto-file-snapshot-timeout "3h"
:public-uri "http://localhost:3449"
:host "localhost"
:tenant "default"
:redis-uri "redis://redis/0"
:file-data-backend "legacy-db"
:objects-storage-backend "fs"
:objects-storage-fs-directory "assets"
:auth-token-cookie-name "auth-token"
:assets-path "/internal/assets/"
:smtp-default-reply-to "Penpot <no-reply@example.com>"
:smtp-default-from "Penpot <no-reply@example.com>"
:profile-complaint-max-age (ct/duration {:days 7})
:profile-complaint-max-age (dt/duration {:days 7})
:profile-complaint-threshold 2
:profile-bounce-max-age (ct/duration {:days 7})
:profile-bounce-max-age (dt/duration {:days 7})
:profile-bounce-threshold 10
:telemetry-uri "https://telemetry.penpot.app/"
@@ -92,7 +88,7 @@
[:secret-key {:optional true} :string]
[:tenant {:optional false} :string]
[:public-uri {:optional false} ::sm/uri]
[:public-uri {:optional false} :string]
[:host {:optional false} :string]
[:http-server-port {:optional true} ::sm/int]
@@ -100,19 +96,17 @@
[:http-server-max-body-size {:optional true} ::sm/int]
[:http-server-max-multipart-body-size {:optional true} ::sm/int]
[:http-server-io-threads {:optional true} ::sm/int]
[:http-server-max-worker-threads {:optional true} ::sm/int]
[:management-api-key {:optional true} :string]
[:http-server-worker-threads {:optional true} ::sm/int]
[:telemetry-uri {:optional true} :string]
[:telemetry-with-taiga {:optional true} ::sm/boolean] ;; DELETE
[:auto-file-snapshot-total {:optional true} ::sm/int]
[:auto-file-snapshot-every {:optional true} ::sm/int]
[:auto-file-snapshot-timeout {:optional true} ::ct/duration]
[:auto-file-snapshot-timeout {:optional true} ::dt/duration]
[:media-max-file-size {:optional true} ::sm/int]
[:deletion-delay {:optional true} ::ct/duration]
[:file-clean-delay {:optional true} ::ct/duration]
[:deletion-delay {:optional true} ::dt/duration] ;; REVIEW
[:telemetry-enabled {:optional true} ::sm/boolean]
[:default-blob-version {:optional true} ::sm/int]
[:allow-demo-users {:optional true} ::sm/boolean]
@@ -132,7 +126,7 @@
[:worker-webhook-parallelism {:optional true} ::sm/int]
[:database-password {:optional true} [:maybe :string]]
[:database-uri {:optional true} ::sm/uri]
[:database-uri {:optional true} :string]
[:database-username {:optional true} [:maybe :string]]
[:database-readonly {:optional true} ::sm/boolean]
[:database-min-pool-size {:optional true} ::sm/int]
@@ -148,16 +142,13 @@
[:quotes-font-variants-per-team {:optional true} ::sm/int]
[:quotes-comment-threads-per-file {:optional true} ::sm/int]
[:quotes-comments-per-file {:optional true} ::sm/int]
[:quotes-snapshots-per-file {:optional true} ::sm/int]
[:quotes-snapshots-per-team {:optional true} ::sm/int]
[:quotes-team-access-requests-per-team {:optional true} ::sm/int]
[:quotes-team-access-requests-per-requester {:optional true} ::sm/int]
[:auth-data-cookie-domain {:optional true} :string]
[:auth-token-cookie-name {:optional true} :string]
[:auth-token-cookie-max-age {:optional true} ::ct/duration]
[:auth-token-cookie-max-age {:optional true} ::dt/duration]
[:registration-domain-whitelist {:optional true} [::sm/set :string]]
[:email-verify-threshold {:optional true} ::ct/duration]
[:email-verify-threshold {:optional true} ::dt/duration]
[:github-client-id {:optional true} :string]
[:github-client-secret {:optional true} :string]
@@ -167,7 +158,7 @@
[:google-client-id {:optional true} :string]
[:google-client-secret {:optional true} :string]
[:oidc-client-id {:optional true} :string]
[:oidc-user-info-source {:optional true} [:enum "auto" "userinfo" "token"]]
[:oidc-user-info-source {:optional true} :keyword]
[:oidc-client-secret {:optional true} :string]
[:oidc-base-uri {:optional true} :string]
[:oidc-token-uri {:optional true} :string]
@@ -192,12 +183,12 @@
[:ldap-starttls {:optional true} ::sm/boolean]
[:ldap-user-query {:optional true} :string]
[:profile-bounce-max-age {:optional true} ::ct/duration]
[:profile-bounce-max-age {:optional true} ::dt/duration]
[:profile-bounce-threshold {:optional true} ::sm/int]
[:profile-complaint-max-age {:optional true} ::ct/duration]
[:profile-complaint-max-age {:optional true} ::dt/duration]
[:profile-complaint-threshold {:optional true} ::sm/int]
[:redis-uri {:optional true} ::sm/uri]
[:redis-uri {:optional true} :string]
[:email-domain-blacklist {:optional true} ::fs/path]
[:email-domain-whitelist {:optional true} ::fs/path]
@@ -216,38 +207,38 @@
[:prepl-host {:optional true} :string]
[:prepl-port {:optional true} ::sm/int]
[:file-data-backend {:optional true} [:enum "db" "legacy-db" "storage"]]
[:media-directory {:optional true} :string] ;; REVIEW
[:media-uri {:optional true} :string]
[:assets-path {:optional true} :string]
[:netty-io-threads {:optional true} ::sm/int]
[:executor-threads {:optional true} ::sm/int]
;; DEPRECATED
;; Legacy, will be removed in 2.5
[:assets-storage-backend {:optional true} :keyword]
[:storage-assets-fs-directory {:optional true} :string]
[:storage-assets-s3-bucket {:optional true} :string]
[:storage-assets-s3-region {:optional true} :keyword]
[:storage-assets-s3-endpoint {:optional true} ::sm/uri]
[:storage-assets-s3-endpoint {:optional true} :string]
[:storage-assets-s3-io-threads {:optional true} ::sm/int]
[:objects-storage-backend {:optional true} :keyword]
[:objects-storage-fs-directory {:optional true} :string]
[:objects-storage-s3-bucket {:optional true} :string]
[:objects-storage-s3-region {:optional true} :keyword]
[:objects-storage-s3-endpoint {:optional true} ::sm/uri]]))
[:objects-storage-s3-endpoint {:optional true} :string]
[:objects-storage-s3-io-threads {:optional true} ::sm/int]]))
(def default-flags
[:enable-backend-api-doc
:enable-backend-openapi-doc
:enable-backend-worker
:enable-secure-session-cookies
:enable-email-verification
:enable-v2-migration])
(defn- parse-flags
[config]
(let [public-uri (c/get config :public-uri)
public-uri (some-> public-uri (u/uri))
extra-flags (if (and public-uri
(= (:scheme public-uri) "http")
(not= (:host public-uri) "localhost"))
#{:disable-secure-session-cookies}
#{})]
(flags/parse flags/default extra-flags (:flags config))))
(flags/parse flags/default
default-flags
(:flags config)))
(defn read-env
[prefix]
@@ -307,12 +298,7 @@
(defn get-deletion-delay
[]
(or (c/get config :deletion-delay)
(ct/duration {:days 7})))
(defn get-file-clean-delay
[]
(or (c/get config :file-clean-delay)
(ct/duration {:days 2})))
(dt/duration {:days 7})))
(defn get
"A configuration getter. Helps code be more testable."
@@ -321,9 +307,5 @@
([key default]
(c/get config key default)))
(defn logging-context
[]
{:version/backend (:full version)})
;; Set value for all new threads bindings.
(alter-var-root #'*assert* (constantly (contains? flags :backend-asserts)))
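As a rough illustration (not part of the diff) of how a duration-typed option is read, in the same style as get-deletion-delay above; the getter name is hypothetical and dt stands in for whichever time-namespace alias is in use:
;; Sketch: read a duration option with a code-level fallback.
(defn get-snapshot-timeout
  []
  (or (c/get config :auto-file-snapshot-timeout)
      (dt/duration {:hours 3})))   ; mirrors the "3h" default above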


@@ -10,21 +10,20 @@
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.geom.point :as gpt]
[app.common.json :as json]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.spec :as us]
[app.common.transit :as t]
[app.common.uuid :as uuid]
[app.db.sql :as sql]
[app.metrics :as mtx]
[app.util.json :as json]
[app.util.time :as dt]
[clojure.java.io :as io]
[clojure.set :as set]
[clojure.spec.alpha :as s]
[integrant.core :as ig]
[next.jdbc :as jdbc]
[next.jdbc.date-time :as jdbc-dt]
[next.jdbc.prepare :as jdbc.prepare]
[next.jdbc.transaction])
[next.jdbc.date-time :as jdbc-dt])
(:import
com.zaxxer.hikari.HikariConfig
com.zaxxer.hikari.HikariDataSource
@@ -34,7 +33,6 @@
java.io.InputStream
java.io.OutputStream
java.sql.Connection
java.sql.PreparedStatement
java.sql.Savepoint
org.postgresql.PGConnection
org.postgresql.geometric.PGpoint
@@ -44,8 +42,6 @@
org.postgresql.util.PGInterval
org.postgresql.util.PGobject))
(def ^:dynamic *conn* nil)
(declare open)
(declare create-pool)
@@ -53,17 +49,27 @@
;; Initialization
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def ^:private schema:pool-options
[:map {:title "pool-options"}
[::connect-timeout {:optional true} ::sm/int]
[::max-size {:optional true} ::sm/int]
[::min-size {:optional true} ::sm/int]
[::name {:optional true} :keyword]
[::uri {:optional true} ::sm/uri]
[::password {:optional true} :string]
[::username {:optional true} :string]
[::validation-timeout {:optional true} ::sm/int]
[::read-only {:optional true} ::sm/boolean]])
(s/def ::connection-timeout ::us/integer)
(s/def ::max-size ::us/integer)
(s/def ::min-size ::us/integer)
(s/def ::name keyword?)
(s/def ::password ::us/string)
(s/def ::uri ::us/not-empty-string)
(s/def ::username ::us/string)
(s/def ::validation-timeout ::us/integer)
(s/def ::read-only? ::us/boolean)
(s/def ::pool-options
(s/keys :opt [::uri
::name
::min-size
::max-size
::connection-timeout
::validation-timeout
::username
::password
::mtx/metrics
::read-only?]))
(def defaults
{::name :main
@@ -73,26 +79,27 @@
::validation-timeout 10000
::idle-timeout 120000 ; 2min
::max-lifetime 1800000 ; 30m
::read-only false})
::read-only? false})
(defmethod ig/assert-key ::pool
[_ options]
(assert (sm/check schema:pool-options options)))
(defmethod ig/prep-key ::pool
[_ cfg]
(merge defaults (d/without-nils cfg)))
;; Don't validate here, just validate that a map is received.
(defmethod ig/pre-init-spec ::pool [_] ::pool-options)
(defmethod ig/init-key ::pool
[_ cfg]
(let [{:keys [::uri ::read-only] :as cfg}
(merge defaults cfg)]
(when uri
(l/info :hint "initialize connection pool"
:name (d/name (::name cfg))
:uri (str uri)
:read-only read-only
:credentials (and (contains? cfg ::username)
(contains? cfg ::password))
:min-size (::min-size cfg)
:max-size (::max-size cfg))
(create-pool cfg))))
[_ {:keys [::uri ::read-only?] :as cfg}]
(when uri
(l/info :hint "initialize connection pool"
:name (d/name (::name cfg))
:uri uri
:read-only read-only?
:with-credentials (and (contains? cfg ::username)
(contains? cfg ::password))
:min-size (::min-size cfg)
:max-size (::max-size cfg))
(create-pool cfg)))
(defmethod ig/halt-key! ::pool
[_ pool]
@@ -108,15 +115,13 @@
"SET idle_in_transaction_session_timeout = 300000;"))
(defn- create-datasource-config
[{:keys [::uri] :as cfg}]
;; (app.common.pprint/pprint cfg)
[{:keys [::mtx/metrics ::uri] :as cfg}]
(let [config (HikariConfig.)]
(doto config
(.setJdbcUrl (str "jdbc:" uri))
(.setPoolName (d/name (::name cfg)))
(.setAutoCommit true)
(.setReadOnly (::read-only cfg))
(.setReadOnly (::read-only? cfg))
(.setConnectionTimeout (::connection-timeout cfg))
(.setValidationTimeout (::validation-timeout cfg))
(.setIdleTimeout (::idle-timeout cfg))
@@ -127,8 +132,8 @@
(.setInitializationFailTimeout -1))
;; When metrics namespace is provided
(when-let [instance (::mtx/metrics cfg)]
(->> (mtx/get-registry instance)
(when metrics
(->> (::mtx/registry metrics)
(PrometheusMetricsTrackerFactory.)
(.setMetricsTrackerFactory config)))
@@ -145,22 +150,10 @@
[conn]
(instance? Connection conn))
(defn connectable?
[o]
(or (connection? o)
(pool? o)))
(sm/register!
{:type ::conn
:pred connection?})
(sm/register!
{:type ::connectable
:pred connectable?})
(sm/register!
{:type ::pool
:pred pool?})
(s/def ::conn some?)
(s/def ::nilable-pool (s/nilable ::pool))
(s/def ::pool pool?)
(s/def ::connectable some?)
(defn closed?
[pool]
@@ -228,6 +221,16 @@
(let [^OutputStream os (.getOutputStream ^LargeObject lobj)]
(io/make-output-stream os opts))))
(defmacro with-atomic
[& args]
(if (symbol? (first args))
(let [cfgs (first args)
body (rest args)]
`(jdbc/with-transaction [conn# (::pool ~cfgs)]
(let [~cfgs (assoc ~cfgs ::conn conn#)]
~@body)))
`(jdbc/with-transaction ~@args)))
(defn open
[system-or-pool]
(if (pool? system-or-pool)
@@ -265,17 +268,19 @@
:else (throw (IllegalArgumentException. "unable to resolve connectable"))))
(def ^:private params-mapping
{::return-keys :return-keys})
{::return-keys? :return-keys
::return-keys :return-keys})
(defn rename-opts
[opts]
(set/rename-keys opts params-mapping))
(def ^:private default-insert-opts
(assoc sql/default-opts :return-keys true))
{:builder-fn sql/as-kebab-maps
:return-keys true})
(def ^:private default-opts
sql/default-opts)
{:builder-fn sql/as-kebab-maps})
(defn exec!
([ds sv] (exec! ds sv nil))
@@ -298,7 +303,7 @@
(defn insert!
"A helper that builds an insert sql statement and executes it. By
default returns the inserted row with all the fields; you can restrict
the returned columns with the `::sql/columns` option."
the returned columns with the `::columns` option."
[ds table params & {:as opts}]
(let [conn (get-connectable ds)
sql (sql/insert table params opts)
@@ -326,7 +331,7 @@
(defn update!
"A helper that build an UPDATE SQL statement and executes it.
Given a connectable object, a table name, a hash map of columns and
Given a connectable object, a table name, a hash map of columns and
values to set, and either a hash map of columns and values to search
on or a vector of a SQL where clause and parameters, perform an
update on the table.
@@ -379,7 +384,9 @@
(defn is-row-deleted?
[{:keys [deleted-at]}]
(some? deleted-at))
(and (dt/instant? deleted-at)
(< (inst-ms deleted-at)
(inst-ms (dt/now)))))
(defn get*
"Retrieve a single row from database that matches a simple filters. Do
@@ -404,37 +411,10 @@
:hint "database object not found"))
row))
(defn get-with-sql
[ds sql & {:as opts}]
(let [rows
(cond->> (exec! ds sql opts)
(::remove-deleted opts true)
(remove is-row-deleted?)
:always
(not-empty))]
(when (and (not rows) (::throw-if-not-exists opts true))
(ex/raise :type :not-found
:code :object-not-found
:hint "database object not found"))
(first rows)))
(def ^:private default-plan-opts
(-> default-opts
(assoc :fetch-size 1000)
(assoc :concurrency :read-only)
(assoc :cursors :close)
(assoc :result-type :forward-only)))
(defn plan
([ds sql]
(-> (get-connectable ds)
(jdbc/plan sql default-plan-opts)))
([ds sql opts]
(-> (get-connectable ds)
(jdbc/plan sql (merge default-plan-opts opts)))))
[ds sql]
(-> (get-connectable ds)
(jdbc/plan sql sql/default-opts)))
(defn cursor
"Return a lazy seq of rows using server side cursors"
@@ -545,40 +525,52 @@
(l/trc :hint "explicit rollback requested (savepoint)")
(.rollback conn sp))))
(defn transact!
"A lower-level function for executing function in a transaction"
([transactable f] (transact! transactable f {}))
([transactable f opts]
(binding [next.jdbc.transaction/*nested-tx* :ignore]
(jdbc/transact transactable f opts))))
(defn tx-run!
"Run a function in a transaction."
[system f & params]
(if (connection? system)
(cond
(connection? system)
(tx-run! {::conn system} f)
(if (pool? system)
(tx-run! {::pool system} f)
(if-let [conn (or (::conn system)
(::pool system))]
(transact! conn
(fn [conn]
(let [system' (-> system
(dissoc ::rollback)
(assoc ::conn conn))]
(apply f system' params)))
{:rollback-only (::rollback system)
:read-only (::read-only system)})
(throw (IllegalArgumentException. "invalid system/cfg provided"))))))
(pool? system)
(tx-run! {::pool system} f)
(::conn system)
(let [conn (::conn system)
sp (savepoint conn)]
(try
(let [system' (-> system
(assoc ::savepoint sp)
(dissoc ::rollback))
result (apply f system' params)]
(if (::rollback system)
(rollback! conn sp)
(release! conn sp))
result)
(catch Throwable cause
(.rollback ^Connection conn ^Savepoint sp)
(throw cause))))
(::pool system)
(with-atomic [conn (::pool system)]
(let [system' (-> system
(assoc ::conn conn)
(dissoc ::rollback))
result (apply f system' params)]
(when (::rollback system)
(rollback! conn))
result))
:else
(throw (IllegalArgumentException. "invalid system/cfg provided"))))
(defn run!
[system f & params]
(cond
(connection? system)
(apply run! {::conn system} f params)
(run! {::conn system} f)
(pool? system)
(apply run! {::pool system} f params)
(run! {::pool system} f)
(::conn system)
(apply f system params)
@@ -602,7 +594,7 @@
(string? o)
(pginterval o)
(ct/duration? o)
(dt/duration? o)
(interval (inst-ms o))
:else
@@ -616,7 +608,7 @@
val (.getValue o)]
(if (or (= typ "json")
(= typ "jsonb"))
(json/decode val :key-fn keyword)
(json/decode val)
val))))
(defn decode-transit-pgobject
@@ -657,7 +649,7 @@
(when data
(doto (org.postgresql.util.PGobject.)
(.setType "jsonb")
(.setValue (json/encode data)))))
(.setValue (json/encode-str data)))))
;; --- Locks
@@ -703,14 +695,3 @@
[cause]
(and (sql-exception? cause)
(= "40001" (.getSQLState ^java.sql.SQLException cause))))
(defn duplicate-key-error?
[cause]
(and (sql-exception? cause)
(= "23505" (.getSQLState ^java.sql.SQLException cause))))
(extend-protocol jdbc.prepare/SettableParameter
clojure.lang.Keyword
(set-parameter [^clojure.lang.Keyword v ^PreparedStatement s ^long i]
(.setObject s i ^String (d/name v))))
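A hedged usage sketch for the transaction helpers above; create-demo-row! and the :demo-object table are illustrative, while tx-run!, insert! and the ::conn/::pool keys come from the code shown:
;; tx-run! picks ::pool (or ::conn) from the system map and rebinds
;; ::conn to the transaction connection for the duration of the call.
(defn create-demo-row!
  [system name]
  (db/tx-run! system
              (fn [{:keys [::db/conn]}]
                (db/insert! conn :demo-object {:id (uuid/next)
                                               :name name}))))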


@@ -15,15 +15,14 @@
(defn kebab-case [s] (str/replace s #"_" "-"))
(defn snake-case [s] (str/replace s #"-" "_"))
(def default-opts
{:table-fn snake-case
:column-fn snake-case})
(defn as-kebab-maps
[rs opts]
(jdbc-opt/as-unqualified-modified-maps rs (assoc opts :label-fn kebab-case)))
(def default-opts
{:table-fn snake-case
:column-fn snake-case
:builder-fn as-kebab-maps})
(defn insert
([table key-map]
(insert table key-map nil))
@@ -39,10 +38,7 @@
(defn insert-many
[table cols rows opts]
(let [opts (merge default-opts opts)
opts (cond-> opts
(::on-conflict-do-nothing opts)
(assoc :suffix "ON CONFLICT DO NOTHING"))]
(let [opts (merge default-opts opts)]
(sql/for-insert-multi table cols rows opts)))
(defn select
@@ -53,15 +49,8 @@
opts (cond-> opts
(::order-by opts) (assoc :order-by (::order-by opts))
(::columns opts) (assoc :columns (::columns opts))
(or (::db/for-update opts)
(::for-update opts))
(assoc :suffix "FOR UPDATE")
(or (::db/for-share opts)
(::for-share opts))
(assoc :suffix "FOR SHARE"))]
(::for-update opts) (assoc :suffix "FOR UPDATE")
(::for-share opts) (assoc :suffix "FOR SHARE"))]
(sql/for-query table where-params opts))))
(defn update
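A small illustrative sketch of the helpers above (table and column names are made up); the kebab-case/snake-case converters in default-opts mean a keyword like :profile-id maps to the profile_id column:
;; Build parameterized statement vectors (SQL string followed by params).
(defn demo-statements
  [team-id profile-id]
  [(sql/insert :team-member {:team-id team-id :profile-id profile-id})
   (sql/insert-many :team-member [:team-id :profile-id]
                    [[team-id profile-id]]
                    nil)])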


@@ -7,18 +7,23 @@
(ns app.email
"Main api for send emails."
(:require
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.pprint :as pp]
[app.common.schema :as sm]
[app.common.spec :as us]
[app.config :as cf]
[app.db :as db]
[app.db.sql :as sql]
[app.email.invite-to-team :as-alias email.invite-to-team]
[app.email.join-team :as-alias email.join-team]
[app.email.request-team-access :as-alias email.request-team-access]
[app.metrics :as mtx]
[app.util.template :as tmpl]
[app.worker :as wrk]
[clojure.java.io :as io]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[integrant.core :as ig])
(:import
@@ -94,44 +99,36 @@
headers)))
(defn- assign-body
[^MimeMessage mmsg {:keys [body charset attachments] :or {charset "utf-8"}}]
(let [mixed-mpart (MimeMultipart. "mixed")]
[^MimeMessage mmsg {:keys [body charset] :or {charset "utf-8"}}]
(let [mpart (MimeMultipart. "mixed")]
(cond
(string? body)
(let [text-part (MimeBodyPart.)]
(.setText text-part ^String body ^String charset)
(.addBodyPart mixed-mpart text-part))
(let [bpart (MimeBodyPart.)]
(.setContent bpart ^String body (str "text/plain; charset=" charset))
(.addBodyPart mpart bpart))
(vector? body)
(let [mmp (MimeMultipart. "alternative")
mbp (MimeBodyPart.)]
(.addBodyPart mpart mbp)
(.setContent mbp mmp)
(doseq [item body]
(let [mbp (MimeBodyPart.)]
(.setContent mbp
^String (:content item)
^String (str (:type item "text/plain") "; charset=" charset))
(.addBodyPart mmp mbp))))
(map? body)
(let [content-part (MimeBodyPart.)
alternative-mpart (MimeMultipart. "alternative")]
(when-let [content (get body "text/plain")]
(let [text-part (MimeBodyPart.)]
(.setText text-part ^String content ^String charset)
(.addBodyPart alternative-mpart text-part)))
(when-let [content (get body "text/html")]
(let [html-part (MimeBodyPart.)]
(.setContent html-part ^String content
(str "text/html; charset=" charset))
(.addBodyPart alternative-mpart html-part)))
(.setContent content-part alternative-mpart)
(.addBodyPart mixed-mpart content-part))
(let [bpart (MimeBodyPart.)]
(.setContent bpart
^String (:content body)
^String (str (:type body "text/plain") "; charset=" charset))
(.addBodyPart mpart bpart))
:else
(throw (IllegalArgumentException. "invalid email body provided")))
(doseq [[name content] attachments]
(prn "attachment" name)
(let [attachment-part (MimeBodyPart.)]
(.setFileName attachment-part ^String name)
(.setContent attachment-part ^String content (str "text/plain; charset=" charset))
(.addBodyPart mixed-mpart attachment-part)))
(.setContent mmsg mixed-mpart)
(throw (ex-info "Unsupported type" {:body body})))
(.setContent mmsg mpart)
mmsg))
(defn- opts->props
@@ -219,77 +216,74 @@
(ex/raise :type :internal
:code :missing-email-templates))
{:subject subj
:body (d/without-nils
{"text/plain" text
"text/html" html})}))
:body (into
[{:type "text/plain"
:content text}]
(when html
[{:type "text/html"
:content html}]))}))
(def ^:private schema:params
[:map {:title "Email Params"}
[:to [:or ::sm/email [::sm/vec ::sm/email]]]
[:reply-to {:optional true} ::sm/email]
[:from {:optional true} ::sm/email]
[:lang {:optional true} ::sm/text]
[:subject {:optional true} ::sm/text]
[:priority {:optional true} [:enum :high :low]]
[:extra-data {:optional true} ::sm/text]
[:body {:optional true}
[:or :string [:map-of :string :string]]]
[:attachments {:optional true}
[:map-of :string :string]]])
(s/def ::priority #{:high :low})
(s/def ::to (s/or :single ::us/email
:multi (s/coll-of ::us/email)))
(s/def ::from ::us/email)
(s/def ::reply-to ::us/email)
(s/def ::lang string?)
(s/def ::extra-data ::us/string)
(def ^:private check-params
(sm/check-fn schema:params))
(s/def ::context
(s/keys :req-un [::to]
:opt-un [::reply-to ::from ::lang ::priority ::extra-data]))
(defn template-factory
[& {:keys [id schema]}]
(assert (keyword? id) "id should be provided and it should be a keyword")
(let [check-fn (if schema
(sm/check-fn schema)
(constantly nil))]
(fn [params]
(let [params (-> params check-params check-fn)
email (build-email-template id params)]
(when-not email
(ex/raise :type :internal
:code :email-template-does-not-exists
:hint "seems like the template is wrong or does not exists."
:template-id id))
([id] (template-factory id {}))
([id extra-context]
(s/assert keyword? id)
(fn [context]
(us/verify ::context context)
(when-let [spec (s/get-spec id)]
(s/assert spec context))
(cond-> (assoc email :id (name id))
(:extra-data params)
(assoc :extra-data (:extra-data params))
(let [context (merge (if (fn? extra-context)
(extra-context)
extra-context)
context)
email (build-email-template id context)]
(when-not email
(ex/raise :type :internal
:code :email-template-does-not-exists
:hint "seems like the template is wrong or does not exists."
:context {:id id}))
(cond-> (assoc email :id (name id))
(:extra-data context)
(assoc :extra-data (:extra-data context))
(seq (:attachments params))
(assoc :attachments (:attachments params))
(:from context)
(assoc :from (:from context))
(:from params)
(assoc :from (:from params))
(:reply-to context)
(assoc :reply-to (:reply-to context))
(:reply-to params)
(assoc :reply-to (:reply-to params))
(:to context)
(assoc :to (:to context)))))))
(:to params)
(assoc :to (:to params)))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; PUBLIC HIGH-LEVEL API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn render
[email-factory params]
(email-factory params))
[email-factory context]
(email-factory context))
(defn send!
"Schedule an already defined email to be sent using asynchronously
using worker task."
[{:keys [::conn ::factory] :as params}]
(assert (db/connectable? conn) "expected a valid database connection or pool")
[{:keys [::conn ::factory] :as context}]
(us/verify some? conn)
(let [email (if factory
(factory params)
(-> params
(dissoc params)
(check-params)))]
(factory context)
(dissoc context ::conn))]
(wrk/submit! {::wrk/task :sendmail
::wrk/delay 0
::wrk/max-retries 4
@@ -303,6 +297,8 @@
(declare send-to-logger!)
(s/def ::sendmail fn?)
(defmethod ig/init-key ::sendmail
[_ cfg]
(fn [params]
@@ -319,18 +315,19 @@
(l/dbg :hint "sendmail"
:id (:id params)
:to (:to params)
:subject (str/trim (:subject params)))
:subject (str/trim (:subject params))
:body (str/join "," (map :type (:body params))))
(.sendMessage ^Transport transport
^MimeMessage message
(.getAllRecipients message))))))
(when (contains? cf/flags :log-emails)
(when (or (contains? cf/flags :log-emails)
(not (contains? cf/flags :smtp)))
(send-to-logger! cfg params))))
(defmethod ig/assert-key ::handler
[_ params]
(assert (fn? (::sendmail params)) "expected valid sendmail handler"))
(defmethod ig/pre-init-spec ::handler [_]
(s/keys :req [::sendmail ::mtx/metrics]))
(defmethod ig/init-key ::handler
[_ {:keys [::sendmail]}]
@@ -357,154 +354,125 @@
;; EMAIL FACTORIES
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def ^:private schema:feedback
[:map
[:feedback-subject ::sm/text]
[:feedback-type ::sm/text]
[:feedback-content ::sm/text]
[:profile :map]])
(s/def ::subject ::us/string)
(s/def ::content ::us/string)
(def user-feedback
(s/def ::feedback
(s/keys :req-un [::subject ::content]))
(def feedback
"A profile feedback email."
(template-factory
:id ::feedback
:schema schema:feedback))
(template-factory ::feedback))
(def ^:private schema:register
[:map [:name ::sm/text]])
(s/def ::name ::us/string)
(s/def ::register
(s/keys :req-un [::name]))
(def register
"A new profile registration welcome email."
(template-factory
:id ::register
:schema schema:register))
(template-factory ::register))
(def ^:private schema:password-recovery
[:map
[:name ::sm/text]
[:token ::sm/text]])
(s/def ::token ::us/string)
(s/def ::password-recovery
(s/keys :req-un [::name ::token]))
(def password-recovery
"A password recovery notification email."
(template-factory
:id ::password-recovery
:schema schema:password-recovery))
(template-factory ::password-recovery))
(def ^:private schema:change-email
[:map
[:name ::sm/text]
[:pending-email ::sm/email]
[:token ::sm/text]])
(s/def ::pending-email ::us/email)
(s/def ::change-email
(s/keys :req-un [::name ::pending-email ::token]))
(def change-email
"Password change confirmation email"
(template-factory
:id ::change-email
:schema schema:change-email))
(template-factory ::change-email))
(def ^:private schema:invite-to-team
[:map
[:invited-by ::sm/text]
[:team ::sm/text]
[:token ::sm/text]])
(s/def ::email.invite-to-team/invited-by ::us/string)
(s/def ::email.invite-to-team/team ::us/string)
(s/def ::email.invite-to-team/token ::us/string)
(s/def ::invite-to-team
(s/keys :req-un [::email.invite-to-team/invited-by
::email.invite-to-team/token
::email.invite-to-team/team]))
(def invite-to-team
"Teams member invitation email."
(template-factory
:id ::invite-to-team
:schema schema:invite-to-team))
(template-factory ::invite-to-team))
(def ^:private schema:join-team
[:map
[:invited-by ::sm/text]
[:team ::sm/text]
[:team-id ::sm/uuid]])
(s/def ::email.join-team/invited-by ::us/string)
(s/def ::email.join-team/team ::us/string)
(s/def ::email.join-team/team-id ::us/uuid)
(s/def ::join-team
(s/keys :req-un [::email.join-team/invited-by
::email.join-team/team-id
::email.join-team/team]))
(def join-team
"Teams member joined after request email."
(template-factory
:id ::join-team
:schema schema:join-team))
(template-factory ::join-team))
(def ^:private schema:request-file-access
[:map
[:requested-by ::sm/text]
[:requested-by-email ::sm/text]
[:team-name ::sm/text]
[:team-id ::sm/uuid]
[:file-name ::sm/text]
[:file-id ::sm/uuid]
[:page-id ::sm/uuid]])
(s/def ::email.request-team-access/requested-by ::us/string)
(s/def ::email.request-team-access/requested-by-email ::us/string)
(s/def ::email.request-team-access/team-name ::us/string)
(s/def ::email.request-team-access/team-id ::us/uuid)
(s/def ::email.request-team-access/file-name ::us/string)
(s/def ::email.request-team-access/file-id ::us/uuid)
(s/def ::email.request-team-access/page-id ::us/uuid)
(s/def ::request-file-access
(s/keys :req-un [::email.request-team-access/requested-by
::email.request-team-access/requested-by-email
::email.request-team-access/team-name
::email.request-team-access/team-id
::email.request-team-access/file-name
::email.request-team-access/file-id
::email.request-team-access/page-id]))
(def request-file-access
"File access request email."
(template-factory
:id ::request-file-access
:schema schema:request-file-access))
(template-factory ::request-file-access))
(s/def ::request-file-access-yourpenpot
(s/keys :req-un [::email.request-team-access/requested-by
::email.request-team-access/requested-by-email
::email.request-team-access/team-name
::email.request-team-access/team-id
::email.request-team-access/file-name
::email.request-team-access/file-id
::email.request-team-access/page-id]))
(def request-file-access-yourpenpot
"File access on Your Penpot request email."
(template-factory
:id ::request-file-access-yourpenpot
:schema schema:request-file-access))
(template-factory ::request-file-access-yourpenpot))
(s/def ::request-file-access-yourpenpot-view
(s/keys :req-un [::email.request-team-access/requested-by
::email.request-team-access/requested-by-email
::email.request-team-access/team-name
::email.request-team-access/team-id
::email.request-team-access/file-name
::email.request-team-access/file-id
::email.request-team-access/page-id]))
(def request-file-access-yourpenpot-view
"File access on Your Penpot view mode request email."
(template-factory
:id ::request-file-access-yourpenpot-view
:schema schema:request-file-access))
(template-factory ::request-file-access-yourpenpot-view))
(def ^:private schema:request-team-access
[:map
[:requested-by ::sm/text]
[:requested-by-email ::sm/text]
[:team-name ::sm/text]
[:team-id ::sm/uuid]])
(s/def ::request-team-access
(s/keys :req-un [::email.request-team-access/requested-by
::email.request-team-access/requested-by-email
::email.request-team-access/team-name
::email.request-team-access/team-id]))
(def request-team-access
"Team access request email."
(template-factory
:id ::request-team-access
:schema schema:request-team-access))
(template-factory ::request-team-access))
(def ^:private schema:comment-mention
[:map
[:name ::sm/text]
[:source-user ::sm/text]
[:comment-reference ::sm/text]
[:comment-content ::sm/text]
[:comment-url ::sm/text]])
(def comment-mention
(template-factory
:id ::comment-mention
:schema schema:comment-mention))
(def ^:private schema:comment-thread
[:map
[:name ::sm/text]
[:source-user ::sm/text]
[:comment-reference ::sm/text]
[:comment-content ::sm/text]
[:comment-url ::sm/text]])
(def comment-thread
(template-factory
:id ::comment-thread
:schema schema:comment-thread))
(def ^:private schema:comment-notification
[:map
[:name ::sm/text]
[:source-user ::sm/text]
[:comment-reference ::sm/text]
[:comment-content ::sm/text]
[:comment-url ::sm/text]])
(def comment-notification
(template-factory
:id ::comment-notification
:schema schema:comment-notification))
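A hedged sketch of how one of the factories above might be invoked from calling code, assuming email aliases this namespace and conn is an open database connection; the profile keys are illustrative:
;; Render the password-recovery template and enqueue it via the worker.
(defn send-recovery-email!
  [conn profile token]
  (email/send! {::email/conn conn
                ::email/factory email/password-recovery
                :to (:email profile)
                :name (:fullname profile)
                :token token}))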
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; BOUNCE/COMPLAINS HELPERS


File diff suppressed because it is too large


@@ -10,21 +10,42 @@
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.types.objects-map :as omap]
[app.config :as cf]
[app.db :as db]
[app.db.sql :as-alias sql]
[app.storage :as sto]
[app.util.blob :as blob]
[app.util.objects-map :as omap.legacy]
[app.util.objects-map :as omap]
[app.util.pointer-map :as pmap]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; OFFLOAD
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn offloaded?
[file]
(= "objects-storage" (:data-backend file)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; OBJECTS-MAP
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn enable-objects-map
[file]
(let [update-page
(fn [page]
(if (and (pmap/pointer-map? page)
(not (pmap/loaded? page)))
page
(update page :objects omap/wrap)))
update-data
(fn [fdata]
(update fdata :pages-index d/update-vals update-page))]
(-> file
(update :data update-data)
(update :features conj "fdata/objects-map"))))
(defn process-objects
"Apply a function to all objects-map on the file. Usualy used for convert
the objects-map instances to plain maps"
@@ -34,237 +55,35 @@
(fn [page]
(update page :objects
(fn [objects]
(if (or (omap/objects-map? objects)
(omap.legacy/objects-map? objects))
(if (omap/objects-map? objects)
(update-fn objects)
objects)))))
fdata))
(defn realize-objects
"Process a file and remove all instances of objects map realizing them
to a plain data. Used in operation where is more efficient have the
whole file loaded in memory or we going to persist it in an
alterantive storage."
[_cfg file]
(update file :data process-objects (partial into {})))
(defn enable-objects-map
[file & _opts]
(let [update-page
(fn [page]
(update page :objects omap/wrap))
update-data
(fn [fdata]
(update fdata :pages-index d/update-vals update-page))]
(-> file
(update :data update-data)
(update :features conj "fdata/objects-map"))))
(defn disable-objects-map
[file & _opts]
(let [update-page
(fn [page]
(update page :objects #(into {} %)))
update-data
(fn [fdata]
(update fdata :pages-index d/update-vals update-page))]
(-> file
(update :data update-data)
(update :features disj "fdata/objects-map"))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; STORAGE
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defmulti resolve-file-data
(fn [_cfg file] (get file :backend "legacy-db")))
(defmethod resolve-file-data "legacy-db"
[_cfg {:keys [legacy-data] :as file}]
(-> file
(assoc :data legacy-data)
(dissoc :legacy-data)))
(defmethod resolve-file-data "db"
[_cfg file]
(dissoc file :legacy-data))
(defmethod resolve-file-data "storage"
[cfg {:keys [metadata] :as file}]
(let [storage (sto/resolve cfg ::db/reuse-conn true)
ref-id (:storage-ref-id metadata)
data (->> (sto/get-object storage ref-id)
(sto/get-object-bytes storage))]
(-> file
(assoc :data data)
(dissoc :legacy-data))))
(defn decode-file-data
[_cfg {:keys [data] :as file}]
(cond-> file
(bytes? data)
(assoc :data (blob/decode data))))
(def ^:private sql:insert-file-data
"INSERT INTO file_data (file_id, id, created_at, modified_at, deleted_at,
type, backend, metadata, data)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)")
(def ^:private sql:upsert-file-data
(str sql:insert-file-data
" ON CONFLICT (file_id, id)
DO UPDATE SET modified_at=?,
deleted_at=?,
backend=?,
metadata=?,
data=?"))
(defn- upsert-in-database
[cfg {:keys [id file-id created-at modified-at deleted-at type backend data metadata]}]
(let [created-at (or created-at (ct/now))
metadata (some-> metadata db/json)
modified-at (or modified-at created-at)]
(db/exec-one! cfg [sql:upsert-file-data
file-id id
created-at
modified-at
deleted-at
type
backend
metadata
data
modified-at
deleted-at
backend
metadata
data])))
(defn- handle-persistence
[cfg {:keys [type backend id file-id data] :as params}]
(cond
(= backend "storage")
(let [storage (sto/resolve cfg)
content (sto/content data)
sobject (sto/put-object! storage
{::sto/content content
::sto/touch true
:bucket "file-data"
:content-type "application/octet-stream"
:file-id file-id
:id id})
metadata {:storage-ref-id (:id sobject)}
params (-> params
(assoc :metadata metadata)
(assoc :data nil))]
(upsert-in-database cfg params))
(= backend "db")
(->> (dissoc params :metadata)
(upsert-in-database cfg))
(= backend "legacy-db")
(cond
(= type "main")
(do
(db/delete! cfg :file-data
{:id id :file-id file-id :type "main"}
{::db/return-keys false})
(db/update! cfg :file
{:data data}
{:id file-id}
{::db/return-keys false}))
(= type "snapshot")
(do
(db/delete! cfg :file-data
{:id id :file-id file-id :type "snapshot"}
{::db/return-keys false})
(db/update! cfg :file-change
{:data data}
{:file-id file-id :id id}
{::db/return-keys false}))
(= type "fragment")
(upsert-in-database cfg
(-> (dissoc params :metadata)
(assoc :backend "db")))
:else
(throw (RuntimeException. "not implemented")))
:else
(throw (IllegalArgumentException.
(str "backend '" backend "' not supported")))))
(defn process-metadata
[cfg metadata]
(when-let [storage-id (:storage-ref-id metadata)]
(let [storage (sto/resolve cfg ::db/reuse-conn true)]
(sto/touch-object! storage storage-id))))
(defn- default-backend
[backend]
(or backend (cf/get :file-data-backend)))
(def ^:private schema:metadata
[:map {:title "Metadata"}
[:storage-ref-id {:optional true} ::sm/uuid]])
(def decode-metadata-with-schema
(sm/decoder schema:metadata sm/json-transformer))
(defn decode-metadata
[metadata]
(some-> metadata
(db/decode-json-pgobject)
(decode-metadata-with-schema)))
(def ^:private schema:update-params
[:map {:closed true}
[:id ::sm/uuid]
[:type [:enum "main" "snapshot" "fragment"]]
[:file-id ::sm/uuid]
[:backend {:optional true} [:enum "db" "legacy-db" "storage"]]
[:metadata {:optional true} [:maybe schema:metadata]]
[:data {:optional true} bytes?]
[:created-at {:optional true} ::ct/inst]
[:modified-at {:optional true} [:maybe ::ct/inst]]
[:deleted-at {:optional true} [:maybe ::ct/inst]]])
(def ^:private check-update-params
(sm/check-fn schema:update-params :hint "invalid params received for update"))
(defn upsert!
"Create or update file data"
[cfg params & {:as opts}]
(let [params (-> (check-update-params params)
(update :backend default-backend))]
(some->> (:metadata params)
(process-metadata cfg))
(-> (handle-persistence cfg params)
(db/get-update-count)
(pos?))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; POINTER-MAP
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn get-file-data
"Get file data given a file instance."
[system file]
(if (offloaded? file)
(let [storage (sto/resolve system ::db/reuse-conn true)]
(->> (sto/get-object storage (:data-ref-id file))
(sto/get-object-bytes storage)))
(:data file)))
(defn resolve-file-data
[system file]
(let [data (get-file-data system file)]
(assoc file :data data)))
(defn load-pointer
"A database loader pointer helper"
[cfg file-id id]
(let [fragment (some-> (db/get* cfg :file-data
{:id id :file-id file-id :type "fragment"}
{::sql/columns [:data :backend :id :metadata]})
(update :metadata decode-metadata))]
[system file-id id]
(let [fragment (db/get* system :file-data-fragment
{:id id :file-id file-id}
{::sql/columns [:data :data-backend :data-ref-id :id]})]
(l/trc :hint "load pointer"
:file-id (str file-id)
@@ -278,21 +97,22 @@
:file-id file-id
:fragment-id id))
(-> (resolve-file-data cfg fragment)
(get :data)
(blob/decode))))
(let [data (get-file-data system fragment)]
;; FIXME: conditional thread scheduling for decoding big objects
(blob/decode data))))
(defn persist-pointers!
"Persist all currently tracked pointer objects"
[cfg file-id]
(doseq [[id item] @pmap/*tracked*]
(when (pmap/modified? item)
(l/trc :hint "persist pointer" :file-id (str file-id) :id (str id))
(let [content (-> item deref blob/encode)]
(upsert! cfg {:id id
:file-id file-id
:type "fragment"
:data content})))))
[system file-id]
(let [conn (db/get-connection system)]
(doseq [[id item] @pmap/*tracked*]
(when (pmap/modified? item)
(l/trc :hint "persist pointer" :file-id (str file-id) :id (str id))
(let [content (-> item deref blob/encode)]
(db/insert! conn :file-data-fragment
{:id id
:file-id file-id
:data content}))))))
(defn process-pointers
"Apply a function to all pointers on the file. Usuly used for
@@ -306,14 +126,6 @@
(d/update-vals update-fn')
(update :pages-index d/update-vals update-fn'))))
(defn realize-pointers
"Process a file and remove all instances of pointers realizing them to
a plain data. Used in operation where is more efficient have the
whole file loaded in memory."
[cfg {:keys [id] :as file}]
(binding [pmap/*load-fn* (partial load-pointer cfg id)]
(update file :data process-pointers deref)))
(defn get-used-pointer-ids
"Given a file, return all pointer ids used in the data."
[fdata]
@@ -324,21 +136,10 @@
(defn enable-pointer-map
"Enable the fdata/pointer-map feature on the file."
[file & _opts]
[file]
(-> file
(update :data (fn [fdata]
(-> fdata
(update :pages-index d/update-vals pmap/wrap)
(d/update-when :components pmap/wrap))))
(update :features conj "fdata/pointer-map")))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; GENERAL PURPOSE HELPERS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn realize
"A helper that combines realize-pointers and realize-objects"
[cfg file]
(->> file
(realize-pointers cfg)
(realize-objects cfg)))
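A sketch of the lower-level pointer-realization pattern that realize-pointers above encapsulates, shown standalone for clarity; apart from the alias names (fdata, pmap), nothing new is introduced:
;; Bind the pointer loader so dereferencing a pointer-map fetches its
;; fragment, then realize every pointer in the file data.
(defn realize-file-pointers
  [cfg {:keys [id] :as file}]
  (binding [pmap/*load-fn* (partial fdata/load-pointer cfg id)]
    (update file :data fdata/process-pointers deref)))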


@@ -1,53 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.features.file-migrations
"Backend specific code for file migrations. Implemented as permanent feature of files."
(:require
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.files.migrations :as fmg :refer [xf:map-name]]
[app.db :as db]
[app.db.sql :as-alias sql]))
(def ^:private sql:get-file-migrations
"SELECT name FROM file_migration WHERE file_id = ? ORDER BY created_at ASC")
(defn resolve-applied-migrations
[cfg {:keys [id] :as file}]
(let [conn (db/get-connection cfg)]
(assoc file :migrations
(->> (db/plan conn [sql:get-file-migrations id])
(transduce xf:map-name conj (d/ordered-set))
(not-empty)))))
(defn upsert-migrations!
"Persist or update file migrations. Return the updated/inserted number
of rows"
[cfg {:keys [id] :as file}]
(let [conn (db/get-connection cfg)
migrations (or (-> file meta ::fmg/migrated)
(-> file :migrations))
columns [:file-id :name]
rows (->> migrations
(mapv (fn [name] [id name]))
(not-empty))]
(when-not rows
(ex/raise :type :internal
:code :missing-migrations
:hint "no migrations available on file"))
(-> (db/insert-many! conn :file-migration columns rows
{::db/return-keys false
::sql/on-conflict-do-nothing true})
(db/get-update-count))))
(defn reset-migrations!
"Replace file migrations"
[cfg {:keys [id] :as file}]
(db/delete! cfg :file-migration {:file-id id})
(upsert-migrations! cfg file))
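For reference, a hedged sketch of calling one of the helpers removed in this diff; the wrapper function and migration name are hypothetical:
;; Check whether a given migration name has already been applied to the
;; file, using the ordered set attached by resolve-applied-migrations.
(defn file-migrated?
  [cfg file migration-name]
  (let [file (resolve-applied-migrations cfg file)]
    (contains? (:migrations file) migration-name)))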


@@ -1,446 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.features.file-snapshots
(:require
[app.binfile.common :as bfc]
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.features :as-alias cfeat]
[app.common.files.migrations :as fmg]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.db.sql :as-alias sql]
[app.features.fdata :as fdata]
[app.storage :as sto]
[app.util.blob :as blob]
[app.worker :as wrk]
[cuerdas.core :as str]))
(def sql:snapshots
"SELECT c.id,
c.label,
c.created_at,
c.updated_at AS modified_at,
c.deleted_at,
c.profile_id,
c.created_by,
c.locked_by,
c.revn,
c.features,
c.migrations,
c.version,
c.file_id,
c.data AS legacy_data,
fd.data AS data,
coalesce(fd.backend, 'legacy-db') AS backend,
fd.metadata AS metadata
FROM file_change AS c
LEFT JOIN file_data AS fd ON (fd.file_id = c.file_id
AND fd.id = c.id
AND fd.type = 'snapshot')
WHERE c.label IS NOT NULL")
(defn- decode-snapshot
[snapshot]
(some-> snapshot
(-> (d/update-when :metadata fdata/decode-metadata)
(d/update-when :migrations db/decode-pgarray [])
(d/update-when :features db/decode-pgarray #{}))))
(def ^:private sql:get-minimal-file
"SELECT f.id,
f.revn,
f.modified_at,
f.deleted_at,
fd.backend AS backend,
fd.metadata AS metadata
FROM file AS f
LEFT JOIN file_data AS fd ON (fd.file_id = f.id AND fd.id = f.id)
WHERE f.id = ?")
(defn- get-minimal-file
[cfg id & {:as opts}]
(-> (db/get-with-sql cfg [sql:get-minimal-file id] opts)
(d/update-when :metadata fdata/decode-metadata)))
(def ^:private sql:get-snapshot-without-data
(str "WITH snapshots AS (" sql:snapshots ")"
"SELECT c.id,
c.label,
c.revn,
c.created_at,
c.modified_at,
c.deleted_at,
c.profile_id,
c.created_by,
c.locked_by,
c.features,
c.metadata,
c.migrations,
c.version,
c.file_id
FROM snapshots AS c
WHERE c.id = ?
AND CASE WHEN c.created_by = 'user'
THEN c.deleted_at IS NULL
WHEN c.created_by = 'system'
THEN c.deleted_at IS NULL OR c.deleted_at >= ?::timestamptz
END"))
(defn get-minimal-snapshot
[cfg snapshot-id]
(let [now (ct/now)]
(-> (db/get-with-sql cfg [sql:get-snapshot-without-data snapshot-id now]
{::db/remove-deleted false})
(decode-snapshot))))
(def ^:private sql:get-snapshot
(str sql:snapshots
" AND c.file_id = ?
AND c.id = ?
AND CASE WHEN c.created_by = 'user'
THEN (c.deleted_at IS NULL)
WHEN c.created_by = 'system'
THEN (c.deleted_at IS NULL OR c.deleted_at >= ?::timestamptz)
END"))
(defn- get-snapshot
"Get snapshot with decoded data"
[cfg file-id snapshot-id]
(let [now (ct/now)]
(->> (db/get-with-sql cfg [sql:get-snapshot file-id snapshot-id now]
{::db/remove-deleted false})
(decode-snapshot)
(fdata/resolve-file-data cfg)
(fdata/decode-file-data cfg))))
(def ^:private sql:get-visible-snapshots
(str "WITH "
"snapshots1 AS ( " sql:snapshots "),"
"snapshots2 AS (
SELECT c.id,
c.label,
c.revn,
c.version,
c.created_at,
c.modified_at,
c.created_by,
c.locked_by,
c.profile_id,
c.deleted_at
FROM snapshots1 AS c
WHERE c.file_id = ?
), snapshots3 AS (
(SELECT * FROM snapshots2
WHERE created_by = 'system'
AND (deleted_at IS NULL OR
deleted_at >= ?::timestamptz)
LIMIT 500)
UNION ALL
(SELECT * FROM snapshots2
WHERE created_by = 'user'
AND deleted_at IS NULL
LIMIT 500)
)
SELECT * FROM snapshots3
ORDER BY created_at DESC"))
(defn get-visible-snapshots
"Return a list of snapshots fecheable from the API, it has a limited
set of fields and applies big but safe limits over all available
snapshots. It return a ordered vector by the snapshot date of
creation."
[cfg file-id]
(let [now (ct/now)]
(->> (db/exec! cfg [sql:get-visible-snapshots file-id now])
(mapv decode-snapshot))))
(def ^:private schema:decoded-file
[:map {:title "DecodedFile"}
[:id ::sm/uuid]
[:revn :int]
[:vern :int]
[:data :map]
[:version :int]
[:features ::cfeat/features]
[:migrations [::sm/set :string]]])
(def ^:private schema:snapshot
[:map {:title "Snapshot"}
[:id ::sm/uuid]
[:revn [::sm/int {:min 0}]]
[:version [::sm/int {:min 0}]]
[:features ::cfeat/features]
[:migrations [::sm/set ::sm/text]]
[:profile-id {:optional true} ::sm/uuid]
[:label ::sm/text]
[:file-id ::sm/uuid]
[:created-by [:enum "system" "user" "admin"]]
[:deleted-at {:optional true} ::ct/inst]
[:modified-at ::ct/inst]
[:created-at ::ct/inst]])
(def ^:private check-snapshot
(sm/check-fn schema:snapshot))
(def ^:private check-decoded-file
(sm/check-fn schema:decoded-file))
(defn- generate-snapshot-label
[]
(let [ts (-> (ct/now)
(ct/format-inst)
(str/replace #"[T:\.]" "-")
(str/rtrim "Z"))]
(str "snapshot-" ts)))
(def ^:private schema:create-params
[:map {:title "SnapshotCreateParams"}
[:profile-id ::sm/uuid]
[:created-by {:optional true} [:enum "user" "system"]]
[:label {:optional true} ::sm/text]
[:session-id {:optional true} ::sm/uuid]
[:modified-at {:optional true} ::ct/inst]
[:deleted-at {:optional true} ::ct/inst]])
(def ^:private check-create-params
(sm/check-fn schema:create-params))
(defn create!
"Create a file snapshot; expects a non-encoded file"
[cfg file & {:as params}]
(let [{:keys [label created-by deleted-at profile-id session-id]}
(check-create-params params)
file
(check-decoded-file file)
created-by
(or created-by "system")
snapshot-id
(uuid/next)
created-at
(ct/now)
deleted-at
(or deleted-at
(if (= created-by "system")
(ct/in-future (cf/get-deletion-delay))
nil))
label
(or label (generate-snapshot-label))
snapshot
(cond-> {:id snapshot-id
:revn (:revn file)
:version (:version file)
:file-id (:id file)
:features (:features file)
:migrations (:migrations file)
:label label
:created-at created-at
:modified-at created-at
:created-by created-by}
deleted-at
(assoc :deleted-at deleted-at)
:always
(check-snapshot))]
(db/insert! cfg :file-change
(-> snapshot
(update :features into-array)
(update :migrations into-array)
(assoc :updated-at created-at)
(assoc :profile-id profile-id)
(assoc :session-id session-id)
(dissoc :modified-at))
{::db/return-keys false})
(fdata/upsert! cfg
{:id snapshot-id
:file-id (:id file)
:type "snapshot"
:data (blob/encode (:data file))
:created-at created-at
:deleted-at deleted-at})
snapshot))
(def ^:private schema:update-params
[:map {:title "SnapshotUpdateParams"}
[:id ::sm/uuid]
[:file-id ::sm/uuid]
[:label ::sm/text]
[:modified-at {:optional true} ::ct/inst]])
(def ^:private check-update-params
(sm/check-fn schema:update-params))
(defn update!
[cfg params]
(let [{:keys [id file-id label modified-at]}
(check-update-params params)
modified-at
(or modified-at (ct/now))]
(db/update! cfg :file-data
{:deleted-at nil
:modified-at modified-at}
{:file-id file-id
:id id
:type "snapshot"}
{::db/return-keys false})
(-> (db/update! cfg :file-change
{:label label
:created-by "user"
:updated-at modified-at
:deleted-at nil}
{:file-id file-id
:id id}
{::db/return-keys false})
(db/get-update-count)
(pos?))))
(defn restore!
[{:keys [::db/conn] :as cfg} file-id snapshot-id]
(let [file (get-minimal-file conn file-id {::db/for-update true})
vern (rand-int Integer/MAX_VALUE)
storage
(sto/resolve cfg {::db/reuse-conn true})
snapshot
(get-snapshot cfg file-id snapshot-id)]
(when-not snapshot
(ex/raise :type :not-found
:code :snapshot-not-found
:hint "unable to find snapshot with the provided label"
:snapshot-id snapshot-id
:file-id file-id))
(when-not (:data snapshot)
(ex/raise :type :internal
:code :snapshot-without-data
:hint "snapshot has no data"
:label (:label snapshot)
:file-id file-id))
(let [;; If the snapshot has applied migrations stored, we reuse
;; them; if not, we take the safest set of migrations as a
;; starting point. This is because, at the time of
;; implementing snapshots, migrations were not taken into
;; account, so we need to make this backward compatible in
;; some way.
migrations
(or (:migrations snapshot)
(fmg/generate-migrations-from-version 67))
file
(-> file
(update :revn inc)
(assoc :migrations migrations)
(assoc :data (:data snapshot))
(assoc :vern vern)
(assoc :version (:version snapshot))
(assoc :has-media-trimmed false)
(assoc :modified-at (:modified-at snapshot))
(assoc :features (:features snapshot)))]
(l/dbg :hint "restoring snapshot"
:file-id (str file-id)
:label (:label snapshot)
:snapshot-id (str (:id snapshot)))
;; In the same way, when resetting the file data, we need to restore
;; the migrations that were applied at the moment of taking the snapshot
(bfc/update-file! cfg file ::bfc/reset-migrations? true)
;; FIXME: this should be split into separate functions; we should not
;; have inline SQL here.
;; clean object thumbnails
(let [sql (str "update file_tagged_object_thumbnail "
" set deleted_at = now() "
" where file_id=? returning media_id")
res (db/exec! conn [sql file-id])]
(doseq [media-id (into #{} (keep :media-id) res)]
(sto/touch-object! storage media-id)))
;; clean file thumbnails
(let [sql (str "update file_thumbnail "
" set deleted_at = now() "
" where file_id=? returning media_id")
res (db/exec! conn [sql file-id])]
(doseq [media-id (into #{} (keep :media-id) res)]
(sto/touch-object! storage media-id)))
vern)))
(defn delete!
[cfg & {:keys [id file-id deleted-at]}]
(assert (uuid? id) "missing id")
(assert (uuid? file-id) "missing file-id")
(assert (ct/inst? deleted-at) "missing deleted-at")
(wrk/submit! {::db/conn (db/get-connection cfg)
::wrk/task :delete-object
::wrk/params {:object :snapshot
:deleted-at deleted-at
:file-id file-id
:id id}})
(db/update! cfg :file-change
{:deleted-at deleted-at}
{:id id :file-id file-id}
{::db/return-keys false})
true)
(def ^:private sql:get-snapshots
(str sql:snapshots " AND c.file_id = ?"))
(defn lock-by!
[conn id profile-id]
(-> (db/update! conn :file-change
{:locked-by profile-id}
{:id id}
{::db/return-keys false})
(db/get-update-count)
(pos?)))
(defn unlock!
[conn id]
(-> (db/update! conn :file-change
{:locked-by nil}
{:id id}
{::db/return-keys false})
(db/get-update-count)
(pos?)))
(defn reduce-snapshots
"Process the file snapshots using efficient reduction; the file
reduction comes with all snapshots, including maked as deleted"
[cfg file-id xform f init]
(let [conn (db/get-connection cfg)
xform (comp
(map (partial fdata/resolve-file-data cfg))
(map (partial fdata/decode-file-data cfg))
xform)]
(->> (db/plan conn [sql:get-snapshots file-id] {:fetch-size 1})
(transduce xform f init))))
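A hedged usage sketch for the snapshot API removed in this diff; the label is a placeholder and the file is assumed to be already decoded, as create! requires:
;; Take a user-labelled snapshot of a decoded file before a risky change.
(defn snapshot-before-change!
  [cfg file profile-id]
  (create! cfg file
           :profile-id profile-id
           :created-by "user"
           :label "before-change"))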


@@ -1,32 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.features.logical-deletion
"A code related to handle logical deletion mechanism"
(:require
[app.common.time :as ct]
[app.config :as cf]))
(def ^:private canceled-status
#{"canceled" "unpaid"})
(defn get-deletion-delay
"Calculate the next deleted-at for a resource (file, team, etc) in function
of team settings"
[team]
(if-let [{:keys [type status]} (get team :subscription)]
(cond
(and (= "unlimited" type) (not (contains? canceled-status status)))
(ct/duration {:days 30})
(and (= "enterprise" type) (not (contains? canceled-status status)))
(ct/duration {:days 90})
:else
(cf/get-deletion-delay))
(cf/get-deletion-delay)))
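A worked example of the rule above (results written as the expressions they evaluate to):
;; Active enterprise subscription -> 90 day grace period.
(get-deletion-delay {:subscription {:type "enterprise" :status "active"}})
;; => (ct/duration {:days 90})

;; Canceled subscription (or none at all) -> the configured default.
(get-deletion-delay {:subscription {:type "enterprise" :status "canceled"}})
;; => (cf/get-deletion-delay)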


@@ -9,7 +9,6 @@
[app.auth.oidc :as-alias oidc]
[app.common.data :as d]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.transit :as t]
[app.db :as-alias db]
[app.http.access-token :as actoken]
@@ -17,21 +16,22 @@
[app.http.awsns :as-alias awsns]
[app.http.debug :as-alias debug]
[app.http.errors :as errors]
[app.http.management :as mgmt]
[app.http.middleware :as mw]
[app.http.security :as sec]
[app.http.session :as session]
[app.http.websocket :as-alias ws]
[app.main :as-alias main]
[app.metrics :as mtx]
[app.rpc :as-alias rpc]
[app.rpc.doc :as-alias rpc.doc]
[app.setup :as-alias setup]
[clojure.spec.alpha :as s]
[integrant.core :as ig]
[promesa.exec :as px]
[reitit.core :as r]
[reitit.middleware :as rr]
[yetti.adapter :as yt]
[yetti.request :as yreq]
[yetti.response :as-alias yres]))
[ring.request :as rreq]
[ring.response :as-alias rres]
[yetti.adapter :as yt]))
(declare router-handler)
@@ -39,67 +39,57 @@
;; HTTP SERVER
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def default-params
{::port 6060
::host "0.0.0.0"
::max-body-size 31457280 ; default 30 MiB
::max-multipart-body-size 367001600}) ; default 350 MiB
(s/def ::handler fn?)
(s/def ::router some?)
(s/def ::port integer?)
(s/def ::host string?)
(s/def ::name string?)
(defmethod ig/expand-key ::server
[k v]
{k (merge default-params (d/without-nils v))})
(s/def ::max-body-size integer?)
(s/def ::max-multipart-body-size integer?)
(s/def ::io-threads integer?)
(def ^:private schema:server-params
[:map
[::port ::sm/int]
[::host ::sm/text]
[::io-threads {:optional true} ::sm/int]
[::max-worker-threads {:optional true} ::sm/int]
[::max-body-size {:optional true} ::sm/int]
[::max-multipart-body-size {:optional true} ::sm/int]
[::router {:optional true} [:fn r/router?]]
[::handler {:optional true} ::sm/fn]])
(defmethod ig/prep-key ::server
[_ cfg]
(merge {::port 6060
::host "0.0.0.0"
::max-body-size (* 1024 1024 30) ; default 30 MiB
::max-multipart-body-size (* 1024 1024 120)} ; default 120 MiB
(d/without-nils cfg)))
(defmethod ig/assert-key ::server
[_ params]
(assert (sm/check schema:server-params params)))
(defmethod ig/pre-init-spec ::server [_]
(s/keys :req [::port ::host]
:opt [::max-body-size
::max-multipart-body-size
::router
::handler
::io-threads]))
(defmethod ig/init-key ::server
[_ {:keys [::handler ::router ::host ::port ::mtx/metrics] :as cfg}]
[_ {:keys [::handler ::router ::host ::port] :as cfg}]
(l/info :hint "starting http server" :port port :host host)
(let [on-dispatch
(fn [_ start-at-ns]
(let [timing (- (System/nanoTime) start-at-ns)
timing (int (/ timing 1000000))]
(mtx/run! metrics
:id :http-server-dispatch-timing
:val timing)))
(let [options {:http/port port
:http/host host
:http/max-body-size (::max-body-size cfg)
:http/max-multipart-body-size (::max-multipart-body-size cfg)
:xnio/io-threads (or (::io-threads cfg)
(max 3 (px/get-available-processors)))
:xnio/dispatch :virtual
:ring/compat :ring2
:socket/backlog 4069}
options
{:http/port port
:http/host host
:http/max-body-size (::max-body-size cfg)
:http/max-multipart-body-size (::max-multipart-body-size cfg)
:xnio/direct-buffers false
:xnio/io-threads (::io-threads cfg)
:xnio/max-worker-threads (::max-worker-threads cfg)
:ring/compat :ring2
:events/on-dispatch on-dispatch
:socket/backlog 4069}
handler (cond
(some? router)
(router-handler router)
handler
(cond
(some? router)
(router-handler router)
(some? handler)
handler
(some? handler)
handler
:else
(throw (UnsupportedOperationException. "handler or router are required")))
:else
(throw (UnsupportedOperationException. "handler or router are required")))
server
(yt/server handler (d/without-nils options))]
options (d/without-nils options)
server (yt/server handler options)]
(assoc cfg ::server (yt/start! server))))
@@ -110,12 +100,12 @@
(defn- not-found-handler
[_]
{::yres/status 404})
{::rres/status 404})
(defn- router-handler
[router]
(letfn [(resolve-handler [request]
(if-let [match (r/match-by-path router (yreq/path request))]
(if-let [match (r/match-by-path router (rreq/path request))]
(let [params (:path-params match)
result (:result match)
handler (or (:handler result) not-found-handler)
@@ -124,11 +114,11 @@
(partial not-found-handler request)))
(on-error [cause request]
(let [{:keys [::yres/body] :as response} (errors/handle cause request)]
(let [{:keys [::rres/body] :as response} (errors/handle cause request)]
(cond-> response
(map? body)
(-> (update ::yres/headers assoc "content-type" "application/transit+json")
(assoc ::yres/body (t/encode-str body {:type :json-verbose}))))))]
(-> (update ::rres/headers assoc "content-type" "application/transit+json")
(assoc ::rres/body (t/encode-str body {:type :json-verbose}))))))]
(fn [request]
(let [handler (resolve-handler request)]
@@ -141,39 +131,29 @@
;; HTTP ROUTER
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def ^:private schema:routes
[:vector :any])
(def ^:private schema:router-params
[:map
[::ws/routes schema:routes]
[::rpc/routes schema:routes]
[::oidc/routes schema:routes]
[::assets/routes schema:routes]
[::debug/routes schema:routes]
[::mtx/routes schema:routes]
[::awsns/routes schema:routes]
[::mgmt/routes schema:routes]
::session/manager
::setup/props
::db/pool])
(defmethod ig/assert-key ::router
[_ params]
(assert (sm/check schema:router-params params)))
(defmethod ig/pre-init-spec ::router [_]
(s/keys :req [::session/manager
::ws/routes
::rpc/routes
::rpc.doc/routes
::oidc/routes
::setup/props
::assets/routes
::debug/routes
::db/pool
::mtx/routes
::awsns/routes]))
(defmethod ig/init-key ::router
[_ cfg]
(rr/router
[["" {:middleware [[mw/server-timing]
[sec/sec-fetch-metadata]
[mw/params]
[mw/format-response]
[mw/auth {:bearer (partial session/decode-token cfg)
:cookie (partial session/decode-token cfg)
:token (partial actoken/decode-token cfg)}]
[mw/parse-request]
[mw/errors errors/handle]
[session/soft-auth cfg]
[actoken/soft-auth cfg]
[mw/restrict-methods]]}
(::mtx/routes cfg)
@@ -183,9 +163,9 @@
["/webhooks"
(::awsns/routes cfg)]
["/management"
(::mgmt/routes cfg)]
(::ws/routes cfg)
(::oidc/routes cfg)
(::rpc/routes cfg)]]))
["/api" {:middleware [[mw/cors]]}
(::oidc/routes cfg)
(::rpc.doc/routes cfg)
(::rpc/routes cfg)]]]))
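;; A hedged reitit sketch (hypothetical routes, not Penpot's route table)
;; showing the resolution step router-handler performs above: match the path,
;; then pull the handler out of the match. Here the handler is read from :data
;; for simplicity; the code above reads it from the compiled :result.
(require '[reitit.core :as r])

(def example-router
  (r/router [["/ping" {:handler (fn [_request] {:status 200 :body "pong"})}]]))

(let [match   (r/match-by-path example-router "/ping")
      handler (get-in match [:data :handler])]
  (handler {:path-params (:path-params match)}))
;; => {:status 200, :body "pong"}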


@@ -9,19 +9,23 @@
[app.common.logging :as l]
[app.config :as cf]
[app.db :as db]
[app.http :as-alias http]
[app.main :as-alias main]
[app.setup :as-alias setup]
[app.tokens :as tokens]))
[app.tokens :as tokens]
[ring.request :as rreq]))
(defn decode-token
[cfg token]
(try
(tokens/verify cfg {:token token :iss "access-token"})
(catch Throwable cause
(l/trc :hint "exception on decoding token"
:token token
:cause cause))))
(def header-re #"^Token\s+(.*)")
(defn- get-token
[request]
(some->> (rreq/get-header request "authorization")
(re-matches header-re)
(second)))
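;; A self-contained sketch of the header parsing above: a "Token <value>"
;; Authorization header is matched with header-re and the capture group is the
;; access token itself. The values below are illustrative.
(defn- parse-token-header
  [authorization]
  (some->> authorization
           (re-matches #"^Token\s+(.*)")
           (second)))

;; (parse-token-header "Token 8f2c...")  => "8f2c..."
;; (parse-token-header "Bearer 8f2c...") => nil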
(defn- decode-token
[props token]
(when token
(tokens/verify props {:token token :iss "access-token"})))
(def sql:get-token-data
"SELECT perms, profile_id, expires_at
@@ -31,28 +35,47 @@
OR (expires_at > now()));")
(defn- get-token-data
[pool claims]
[pool token-id]
(when-not (db/read-only? pool)
(when-let [token-id (get claims :tid)]
(some-> (db/exec-one! pool [sql:get-token-data token-id])
(update :perms db/decode-pgarray #{})))))
(some-> (db/exec-one! pool [sql:get-token-data token-id])
(update :perms db/decode-pgarray #{}))))
(defn- wrap-soft-auth
"Soft Authentication, will be executed synchronously on the undertow
worker thread."
[handler {:keys [::setup/props]}]
(letfn [(handle-request [request]
(try
(let [token (get-token request)
claims (decode-token props token)]
(cond-> request
(map? claims)
(assoc ::id (:tid claims))))
(catch Throwable cause
(l/trace :hint "exception on decoding malformed token" :cause cause)
request)))]
(fn [request]
(handler (handle-request request)))))
(defn- wrap-authz
"Authorization middleware, will be executed synchronously on vthread."
[handler {:keys [::db/pool]}]
(fn [request]
(let [{:keys [type claims]} (get request ::http/auth-data)]
(if (= :token type)
(let [{:keys [perms profile-id expires-at]} (some->> claims (get-token-data pool))]
;; FIXME: revisit this, this data looks unused
(handler (cond-> request
(some? perms)
(assoc ::perms perms)
(some? profile-id)
(assoc ::profile-id profile-id)
(some? expires-at)
(assoc ::expires-at expires-at))))
(let [{:keys [perms profile-id expires-at]} (some->> (::id request) (get-token-data pool))]
(handler (cond-> request
(some? perms)
(assoc ::perms perms)
(some? profile-id)
(assoc ::profile-id profile-id)
(some? expires-at)
(assoc ::expires-at expires-at))))))
(handler request)))))
(def soft-auth
{:name ::soft-auth
:compile (fn [& _]
(when (contains? cf/flags :access-tokens)
wrap-soft-auth))})
(def authz
{:name ::authz


@@ -9,18 +9,20 @@
(:require
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.time :as ct]
[app.common.spec :as us]
[app.common.uri :as u]
[app.db :as db]
[app.storage :as sto]
[app.util.time :as dt]
[clojure.spec.alpha :as s]
[integrant.core :as ig]
[yetti.response :as-alias yres]))
[ring.response :as-alias rres]))
(def ^:private cache-max-age
(ct/duration {:hours 24}))
(dt/duration {:hours 24}))
(def ^:private signature-max-age
(ct/duration {:hours 24 :minutes 15}))
(dt/duration {:hours 24 :minutes 15}))
(defn get-id
[{:keys [path-params]}]
@@ -30,13 +32,13 @@
(defn- get-file-media-object
[pool id]
(db/get pool :file-media-object {:id id} {::db/remove-deleted false}))
(db/get pool :file-media-object {:id id}))
(defn- serve-object-from-s3
[{:keys [::sto/storage] :as cfg} obj]
(let [{:keys [host port] :as url} (sto/get-object-url storage obj {:max-age signature-max-age})]
{::yres/status 307
::yres/headers {"location" (str url)
{::rres/status 307
::rres/headers {"location" (str url)
"x-host" (cond-> host port (str ":" port))
"x-mtype" (-> obj meta :content-type)
"cache-control" (str "max-age=" (inst-ms cache-max-age))}}))
@@ -49,8 +51,8 @@
headers {"x-accel-redirect" (:path purl)
"content-type" (:content-type mdata)
"cache-control" (str "max-age=" (inst-ms cache-max-age))}]
{::yres/status 204
::yres/headers headers}))
{::rres/status 204
::rres/headers headers}))
(defn- serve-object
"Helper function that returns the appropriate response depending on
@@ -67,7 +69,7 @@
obj (sto/get-object storage id)]
(if obj
(serve-object cfg obj)
{::yres/status 404})))
{::rres/status 404})))
(defn- generic-handler
"A generic handler helper/common code for file-media based handlers."
@@ -78,7 +80,7 @@
sobj (sto/get-object storage (kf mobj))]
(if sobj
(serve-object cfg sobj)
{::yres/status 404})))
{::rres/status 404})))
(defn file-objects-handler
"Handler that serves storage objects by file media id."
@@ -93,10 +95,11 @@
;; --- Initialization
(defmethod ig/assert-key ::routes
[_ params]
(assert (sto/valid-storage? (::sto/storage params)) "expected valid storage instance")
(assert (string? (::path params))))
(s/def ::path ::us/string)
(s/def ::routes vector?)
(defmethod ig/pre-init-spec ::routes [_]
(s/keys :req [::sto/storage ::path]))
(defmethod ig/init-key ::routes
[_ cfg]


@@ -10,36 +10,37 @@
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.pprint :as pp]
[app.common.schema :as sm]
[app.db :as db]
[app.db.sql :as sql]
[app.http.client :as http]
[app.main :as-alias main]
[app.setup :as-alias setup]
[app.tokens :as tokens]
[app.worker :as-alias wrk]
[clojure.data.json :as j]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[integrant.core :as ig]
[yetti.request :as yreq]
[yetti.response :as-alias yres]))
[promesa.exec :as px]
[ring.request :as rreq]
[ring.response :as-alias rres]))
(declare parse-json)
(declare handle-request)
(declare parse-notification)
(declare process-report)
(defmethod ig/assert-key ::routes
[_ params]
(assert (http/client? (::http/client params)) "expect a valid http client")
(assert (sm/valid? ::setup/props (::setup/props params)) "expected valid setup props")
(assert (db/pool? (::db/pool params)) "expect valid database pool"))
(defmethod ig/pre-init-spec ::routes [_]
(s/keys :req [::http/client
::setup/props
::db/pool]))
(defmethod ig/init-key ::routes
[_ cfg]
(letfn [(handler [request]
(let [data (-> request yreq/body slurp)]
(handle-request cfg data)
{::yres/status 200}))]
(let [data (-> request rreq/body slurp)]
(px/run! :vthread (partial handle-request cfg data)))
{::rres/status 200})]
["/sns" {:handler handler
:allowed-methods #{:post}}]))
@@ -107,7 +108,7 @@
[cfg headers]
(let [tdata (get headers "x-penpot-data")]
(when-not (str/empty? tdata)
(let [result (tokens/verify cfg {:token tdata :iss :profile-identity})]
(let [result (tokens/verify (::setup/props cfg) {:token tdata :iss :profile-identity})]
(:profile-id result)))))
(defn- parse-notification


@@ -7,19 +7,20 @@
(ns app.http.client
"Http client abstraction layer."
(:require
[app.common.schema :as sm]
[app.common.spec :as us]
[clojure.spec.alpha :as s]
[integrant.core :as ig]
[java-http-clj.core :as http])
[java-http-clj.core :as http]
[promesa.core :as p])
(:import
java.net.http.HttpClient))
(defn client?
[o]
(instance? HttpClient o))
(s/def ::client #(instance? HttpClient %))
(s/def ::client-holder
(s/keys :req [::client]))
(sm/register!
{:type ::client
:pred client?})
(defmethod ig/pre-init-spec ::client [_]
(s/keys :req []))
(defmethod ig/init-key ::client
[_ _]
@@ -28,9 +29,14 @@
(defn send!
([client req] (send! client req {}))
([client req {:keys [response-type] :or {response-type :string}}]
(assert (client? client) "expected valid http client")
(http/send req {:client client :as response-type})))
([client req {:keys [response-type sync?] :or {response-type :string sync? false}}]
(us/assert! ::client client)
(if sync?
(http/send req {:client client :as response-type})
(try
(http/send-async req {:client client :as response-type})
(catch Throwable cause
(p/rejected cause))))))
(defn- resolve-client
[params]
@@ -50,8 +56,8 @@
([cfg-or-client request]
(let [client (resolve-client cfg-or-client)
request (update request :uri str)]
(send! client request {})))
(send! client request {:sync? true})))
([cfg-or-client request options]
(let [client (resolve-client cfg-or-client)
request (update request :uri str)]
(send! client request options))))
(send! client request (merge {:sync? true} options)))))
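;; A usage sketch of java-http-clj, the library wrapped above. build-client and
;; the {:client ... :as ...} options follow its documented API; the URL is a
;; placeholder, so the actual call is left inside a comment form.
(require '[java-http-clj.core :as http])

(def example-client (http/build-client {:connect-timeout 5000}))

;; Synchronous request, body decoded as a string:
(comment
  (let [response (http/send {:uri "https://example.com" :method :get}
                            {:client example-client :as :string})]
    (:status response)))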


@@ -7,40 +7,34 @@
(ns app.http.debug
(:refer-clojure :exclude [error-handler])
(:require
[app.binfile.common :as bfc]
[app.binfile.v1 :as bf.v1]
[app.binfile.v3 :as bf.v3]
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.features :as cfeat]
[app.common.logging :as l]
[app.common.pprint :as pp]
[app.common.time :as ct]
[app.common.transit :as t]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.features.file-migrations :as feat.fmig]
[app.http.session :as session]
[app.rpc.commands.auth :as auth]
[app.rpc.commands.files-create :refer [create-file]]
[app.rpc.commands.profile :as profile]
[app.rpc.commands.teams :as teams]
[app.setup :as-alias setup]
[app.setup.clock :as clock]
[app.srepl.main :as srepl]
[app.srepl.helpers :as srepl]
[app.storage :as-alias sto]
[app.storage.tmp :as tmp]
[app.util.blob :as blob]
[app.util.template :as tmpl]
[app.util.time :as dt]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[datoteka.io :as io]
[emoji.core :as emj]
[integrant.core :as ig]
[markdown.core :as md]
[markdown.transformers :as mdt]
[yetti.request :as yreq]
[yetti.response :as yres]))
[ring.request :as rreq]
[ring.response :as rres]))
;; (selmer.parser/cache-off!)
@@ -50,35 +44,29 @@
(defn index-handler
[_cfg _request]
(let [{:keys [clock offset]} @clock/current]
{::yres/status 200
::yres/headers {"content-type" "text/html"}
::yres/body (-> (io/resource "app/templates/debug.tmpl")
(tmpl/render {:version (:full cf/version)
:current-clock (str clock)
:current-offset (if offset
(ct/format-duration offset)
"NO OFFSET")
:current-time (ct/format-inst (ct/now) :http)
:supported-features cfeat/supported-features}))}))
{::rres/status 200
::rres/headers {"content-type" "text/html"}
::rres/body (-> (io/resource "app/templates/debug.tmpl")
(tmpl/render {}))})
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; FILE CHANGES
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- get-resolved-file
[cfg file-id]
(some-> (bfc/get-file cfg file-id :migrate? false)
(update :data blob/encode)))
(defn prepare-response
[body]
(let [headers {"content-type" "application/transit+json"}]
{::rres/status 200
::rres/body body
::rres/headers headers}))
(defn prepare-download
[file filename]
{::yres/status 200
::yres/headers
{"content-disposition" (str "attachment; filename=" filename ".json")
"content-type" "application/octet-stream"}
::yres/body
(t/encode file {:type :json-verbose})})
(defn prepare-download-response
[body filename]
(let [headers {"content-disposition" (str "attachment; filename=" filename)
"content-type" "application/octet-stream"}]
{::rres/status 200
::rres/body body
::rres/headers headers}))
(def sql:retrieve-range-of-changes
"select revn, changes from file_change where file_id=? and revn >= ? and revn <= ? order by revn")
@@ -86,51 +74,45 @@
(def sql:retrieve-single-change
"select revn, changes, data from file_change where file_id=? and revn = ?")
(defn- download-file-data
[cfg {:keys [params ::session/profile-id] :as request}]
(defn- retrieve-file-data
[{:keys [::db/pool]} {:keys [params ::session/profile-id] :as request}]
(let [file-id (some-> params :file-id parse-uuid)
revn (some-> params :revn parse-long)
filename (str file-id)]
(when-not file-id
(ex/raise :type :validation
:code :missing-arguments))
(if-let [file (get-resolved-file cfg file-id)]
(let [data (if (integer? revn)
(some-> (db/exec-one! pool [sql:retrieve-single-change file-id revn]) :data)
(some-> (db/get-by-id pool :file file-id) :data))]
(when-not data
(ex/raise :type :not-found
:code :empty-data
:hint "empty response"))
(cond
(contains? params :download)
(prepare-download file filename)
(prepare-download-response data filename)
(contains? params :clone)
(db/tx-run! cfg
(fn [{:keys [::db/conn] :as cfg}]
(let [profile (profile/get-profile conn profile-id)
project-id (:default-project-id profile)
file (-> (create-file cfg {:id (uuid/next)
:name (str "Cloned: " (:name file))
:features (:features file)
:project-id project-id
:profile-id profile-id})
(assoc :data (:data file))
(assoc :migrations (:migrations file)))]
(let [profile (profile/get-profile pool profile-id)
project-id (:default-project-id profile)]
(feat.fmig/reset-migrations! conn file)
(db/update! conn :file
{:data (:data file)}
{:id (:id file)}
{::db/return-keys false})
{::yres/status 201
::yres/body "OK CLONED"})))
(db/run! pool (fn [{:keys [::db/conn] :as cfg}]
(create-file cfg {:id file-id
:name (str "Cloned file: " filename)
:project-id project-id
:profile-id profile-id})
(db/update! conn :file
{:data data}
{:id file-id})
{::rres/status 201
::rres/body "OK CREATED"})))
:else
(ex/raise :type :validation
:code :invalid-params
:hint "invalid button"))
(ex/raise :type :not-found
:code :empty-data
:hint "empty response"))))
(prepare-response (blob/decode data))))))
(defn- is-file-exists?
[pool id]
@@ -138,61 +120,81 @@
(-> (db/exec-one! pool [sql id]) :exists)))
(defn- upload-file-data
[{:keys [::db/pool] :as cfg} {:keys [::session/profile-id params] :as request}]
[{:keys [::db/pool]} {:keys [::session/profile-id params] :as request}]
(let [profile (profile/get-profile pool profile-id)
project-id (:default-project-id profile)
file (some-> params :file :path io/read* t/decode)]
data (some-> params :file :path io/read-as-bytes)]
(if (and file project-id)
(let [fname (str "Imported: " (:name file) "(" (ct/now) ")")
(if (and data project-id)
(let [fname (str "Imported file *: " (dt/now))
reuse-id? (contains? params :reuseid)
file-id (or (and reuse-id? (ex/ignoring (-> params :file :filename parse-uuid)))
(uuid/next))]
(if (and reuse-id? file-id
(is-file-exists? pool file-id))
(db/tx-run! cfg
(fn [{:keys [::db/conn] :as cfg}]
(db/update! conn :file
{:data (:data file)
:features (into-array (:features file))
:deleted-at nil}
{:id file-id}
{::db/return-keys false})
(feat.fmig/reset-migrations! conn file)
{::yres/status 200
::yres/body "OK UPDATED"}))
(db/tx-run! cfg
(fn [{:keys [::db/conn] :as cfg}]
(let [file (-> (create-file cfg {:id file-id
:name fname
:features (:features file)
:project-id project-id
:profile-id profile-id})
(assoc :data (:data file))
(assoc :migrations (:migrations file)))]
(do
(db/update! pool :file
{:data data
:deleted-at nil}
{:id file-id})
{::rres/status 200
::rres/body "OK UPDATED"})
(db/run! pool (fn [{:keys [::db/conn] :as cfg}]
(create-file cfg {:id file-id
:name fname
:project-id project-id
:profile-id profile-id})
(db/update! conn :file
{:data (:data file)}
{:id file-id}
{::db/return-keys false})
(feat.fmig/reset-migrations! conn file)
{::yres/status 201
::yres/body "OK CREATED"})))))
{:data data}
{:id file-id})
{::rres/status 201
::rres/body "OK CREATED"}))))
(ex/raise :type :validation
:code :invalid-params
:hint "invalid file uploaded"))))
{::rres/status 500
::rres/body "ERROR"})))
(defn raw-export-import-handler
(defn file-data-handler
[cfg request]
(case (yreq/method request)
:get (download-file-data cfg request)
(case (rreq/method request)
:get (retrieve-file-data cfg request)
:post (upload-file-data cfg request)
(ex/raise :type :http
:code :method-not-found)))
(defn file-changes-handler
[{:keys [::db/pool]} {:keys [params] :as request}]
(letfn [(retrieve-changes [file-id revn]
(if (str/includes? revn ":")
(let [[start end] (->> (str/split revn #":")
(map str/trim)
(map parse-long))]
(some->> (db/exec! pool [sql:retrieve-range-of-changes file-id start end])
(map :changes)
(map blob/decode)
(mapcat identity)
(vec)))
(if-let [revn (parse-long revn)]
(let [item (db/exec-one! pool [sql:retrieve-single-change file-id revn])]
(some-> item :changes blob/decode vec))
(ex/raise :type :validation :code :invalid-arguments))))]
(let [file-id (some-> params :id parse-uuid)
revn (or (some-> params :revn parse-long) "latest")
filename (str file-id)]
(when (or (not file-id) (not revn))
(ex/raise :type :validation
:code :invalid-arguments
:hint "missing arguments"))
(let [data (retrieve-changes file-id revn)]
(if (contains? params :download)
(prepare-download-response data filename)
(prepare-response data))))))
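;; A self-contained sketch of the revn parsing used above: "12:20" selects a
;; range of revisions, while a plain number selects a single revision.
(defn- parse-revn
  [revn]
  (if (clojure.string/includes? revn ":")
    (let [[start end] (map (comp parse-long clojure.string/trim)
                           (clojure.string/split revn #":"))]
      {:range [start end]})
    {:single (parse-long revn)}))

;; (parse-revn "12:20") => {:range [12 20]}
;; (parse-revn "7")     => {:single 7}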
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ERROR BROWSER
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -229,19 +231,19 @@
(-> (io/resource "app/templates/error-report.v3.tmpl")
(tmpl/render (-> content
(assoc :id id)
(assoc :created-at (ct/format-inst created-at :rfc1123))))))]
(assoc :created-at (dt/format-instant created-at :rfc1123))))))]
(if-let [report (get-report request)]
(let [result (case (:version report)
1 (render-template-v1 report)
2 (render-template-v2 report)
3 (render-template-v3 report))]
{::yres/status 200
::yres/body result
::yres/headers {"content-type" "text/html; charset=utf-8"
{::rres/status 200
::rres/body result
::rres/headers {"content-type" "text/html; charset=utf-8"
"x-robots-tag" "noindex"}})
{::yres/status 404
::yres/body "not found"})))
{::rres/status 404
::rres/body "not found"})))
(def sql:error-reports
"SELECT id, created_at,
@@ -253,11 +255,11 @@
(defn error-list-handler
[{:keys [::db/pool]} _request]
(let [items (->> (db/exec! pool [sql:error-reports])
(map #(update % :created-at ct/format-inst :rfc1123)))]
{::yres/status 200
::yres/body (-> (io/resource "app/templates/error-list.tmpl")
(map #(update % :created-at dt/format-instant :rfc1123)))]
{::rres/status 200
::rres/body (-> (io/resource "app/templates/error-list.tmpl")
(tmpl/render {:items items}))
::yres/headers {"content-type" "text/html; charset=utf-8"
::rres/headers {"content-type" "text/html; charset=utf-8"
"x-robots-tag" "noindex"}}))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -279,30 +281,29 @@
(ex/raise :type :validation
:code :missing-arguments))
(let [path (tmp/tempfile :prefix "penpot.export." :min-age "30m")]
(let [path (tmp/tempfile :prefix "penpot.export.")]
(with-open [output (io/output-stream path)]
(-> cfg
(assoc ::bfc/ids file-ids)
(assoc ::bfc/embed-assets embed?)
(assoc ::bfc/include-libraries libs?)
(bf.v3/export-files! output)))
(assoc ::bf.v1/ids file-ids)
(assoc ::bf.v1/embed-assets embed?)
(assoc ::bf.v1/include-libraries libs?)
(bf.v1/export-files! output)))
(if clone?
(let [profile (profile/get-profile pool profile-id)
project-id (:default-project-id profile)
cfg (assoc cfg
::bfc/overwrite false
::bfc/profile-id profile-id
::bfc/project-id project-id
::bfc/input path)]
(bf.v3/import-files! cfg)
{::yres/status 200
::yres/headers {"content-type" "text/plain"}
::yres/body "OK CLONED"})
::bf.v1/overwrite false
::bf.v1/profile-id profile-id
::bf.v1/project-id project-id)]
(bf.v1/import-files! cfg path)
{::rres/status 200
::rres/headers {"content-type" "text/plain"}
::rres/body "OK CLONED"})
{::yres/status 200
::yres/body (io/input-stream path)
::yres/headers {"content-type" "application/octet-stream"
{::rres/status 200
::rres/body (io/input-stream path)
::rres/headers {"content-type" "application/octet-stream"
"content-disposition" (str "attachmen; filename=" (first file-ids) ".penpot")}}))))
@@ -315,30 +316,24 @@
(let [profile (profile/get-profile pool profile-id)
project-id (:default-project-id profile)
team (teams/get-team pool
:profile-id profile-id
:project-id project-id)]
overwrite? (contains? params :overwrite)
migrate? (contains? params :migrate)]
(when-not project-id
(ex/raise :type :validation
:code :missing-project
:hint "project not found"))
(let [path (-> params :file :path)
format (bfc/parse-file-format path)
cfg (assoc cfg
::bfc/profile-id profile-id
::bfc/project-id project-id
::bfc/input path
::bfc/features (cfeat/get-team-enabled-features cf/flags team))]
(if (= format :binfile-v3)
(bf.v3/import-files! cfg)
(bf.v1/import-files! cfg))
{::yres/status 200
::yres/headers {"content-type" "text/plain"}
::yres/body "OK"})))
(let [path (-> params :file :path)
cfg (assoc cfg
::bf.v1/overwrite overwrite?
::bf.v1/migrate migrate?
::bf.v1/profile-id profile-id
::bf.v1/project-id project-id)]
(bf.v1/import-files! cfg path)
{::rres/status 200
::rres/headers {"content-type" "text/plain"}
::rres/body "OK"})))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ACTIONS
@@ -368,96 +363,62 @@
(db/update! conn :profile {:is-blocked true} {:id (:id profile)})
(db/delete! conn :http-session {:profile-id (:id profile)})
{::yres/status 200
::yres/headers {"content-type" "text/plain"}
::yres/body (str/ffmt "PROFILE '%' BLOCKED" (:email profile))})
{::rres/status 200
::rres/headers {"content-type" "text/plain"}
::rres/body (str/ffmt "PROFILE '%' BLOCKED" (:email profile))})
(contains? params :unblock)
(do
(db/update! conn :profile {:is-blocked false} {:id (:id profile)})
{::yres/status 200
::yres/headers {"content-type" "text/plain"}
::yres/body (str/ffmt "PROFILE '%' UNBLOCKED" (:email profile))})
{::rres/status 200
::rres/headers {"content-type" "text/plain"}
::rres/body (str/ffmt "PROFILE '%' UNBLOCKED" (:email profile))})
(contains? params :resend)
(if (:is-blocked profile)
{::yres/status 200
::yres/headers {"content-type" "text/plain"}
::yres/body "PROFILE ALREADY BLOCKED"}
{::rres/status 200
::rres/headers {"content-type" "text/plain"}
::rres/body "PROFILE ALREADY BLOCKED"}
(do
(#'auth/send-email-verification! cfg profile)
{::yres/status 200
::yres/headers {"content-type" "text/plain"}
::yres/body (str/ffmt "RESENDED FOR '%'" (:email profile))}))
{::rres/status 200
::rres/headers {"content-type" "text/plain"}
::rres/body (str/ffmt "RESENDED FOR '%'" (:email profile))}))
:else
(do
(db/update! conn :profile {:is-active true} {:id (:id profile)})
{::yres/status 200
::yres/headers {"content-type" "text/plain"}
::yres/body (str/ffmt "PROFILE '%' ACTIVATED" (:email profile))}))))))
{::rres/status 200
::rres/headers {"content-type" "text/plain"}
::rres/body (str/ffmt "PROFILE '%' ACTIVATED" (:email profile))}))))))
(defn- handle-team-features
(defn- reset-file-version
[cfg {:keys [params] :as request}]
(let [team-id (some-> params :team-id d/parse-uuid)
feature (some-> params :feature str)
action (some-> params :action)
skip-check (contains? params :skip-check)]
(let [file-id (some-> params :file-id d/parse-uuid)
version (some-> params :version d/parse-integer)]
(when (nil? team-id)
(when-not (contains? params :force)
(ex/raise :type :validation
:code :invalid-team-id
:hint "provided invalid team id"))
:code :missing-force
:hint "missing force checkbox"))
(if (= action "show")
(let [team (db/run! cfg teams/get-team-info {:id team-id})]
{::yres/status 200
::yres/headers {"content-type" "text/plain"}
::yres/body (apply str "Team features:\n"
(->> (:features team)
(map (fn [feature]
(str "- " feature "\n")))))})
(when (nil? file-id)
(ex/raise :type :validation
:code :invalid-file-id
:hint "provided invalid file id"))
(do
(when-not (contains? params :force)
(ex/raise :type :validation
:code :missing-force
:hint "missing force checkbox"))
(when (nil? version)
(ex/raise :type :validation
:code :invalid-version
:hint "provided invalid version"))
(cond
(= action "enable")
(srepl/enable-team-feature! team-id feature :skip-check skip-check)
(db/tx-run! cfg srepl/process-file! file-id #(assoc % :version version))
(= action "disable")
(srepl/disable-team-feature! team-id feature :skip-check skip-check)
{::rres/status 200
::rres/headers {"content-type" "text/plain"}
::rres/body "OK"}))
:else
(ex/raise :type :validation
:code :invalid-action
:hint (str "invalid action: " action)))
{::yres/status 200
::yres/headers {"content-type" "text/plain"}
::yres/body "OK"}))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; VIRTUAL CLOCK
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- set-virtual-clock
[_ {:keys [params] :as request}]
(let [offset (some-> params :offset str/trim not-empty ct/duration)
reset? (contains? params :reset)]
(if (= "production" (cf/get :tenant))
{::yres/status 501
::yres/body "OPERATION NOT ALLOWED"}
(do
(if (or reset? (zero? (inst-ms offset)))
(clock/set-offset! nil)
(clock/set-offset! offset))
{::yres/status 302
::yres/headers {"location" "/dbg"}}))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; OTHER SMALL VIEWS/HANDLERS
@@ -468,13 +429,13 @@
[{:keys [::db/pool]} _]
(try
(db/exec-one! pool ["select count(*) as count from server_prop;"])
{::yres/status 200
::yres/body "OK"}
{::rres/status 200
::rres/body "OK"}
(catch Throwable cause
(l/warn :hint "unable to execute query on health handler"
:cause cause)
{::yres/status 503
::yres/body "KO"})))
{::rres/status 503
::rres/body "KO"})))
(defn changelog-handler
[_ _]
@@ -483,11 +444,11 @@
(md->html [text]
(md/md-to-html-string text :replacement-transformers (into [transform-emoji] mdt/transformer-vector)))]
(if-let [clog (io/resource "changelog.md")]
{::yres/status 200
::yres/headers {"content-type" "text/html; charset=utf-8"}
::yres/body (-> clog slurp md->html)}
{::yres/status 404
::yres/body "NOT FOUND"})))
{::rres/status 200
::rres/headers {"content-type" "text/html; charset=utf-8"}
::rres/body (-> clog slurp md->html)}
{::rres/status 404
::rres/body "NOT FOUND"})))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; INIT
@@ -510,29 +471,8 @@
(ex/raise :type :authentication
:code :only-admins-allowed)))))})
(def errors
(letfn [(handle-error [cause]
(when-let [data (ex-data cause)]
(when (= :validation (:type data))
(str "Error: " (or (:hint data) (ex-message cause)) "\n"))))]
{:name ::errors
:compile
(fn [& _params]
(fn [handler]
(fn [request]
(try
(handler request)
(catch Throwable cause
(let [body (or (handle-error cause)
(ex/format-throwable cause))]
{::yres/status 400
::yres/headers {"content-type" "text/plain"}
::yres/body body}))))))}))
(defmethod ig/assert-key ::routes
[_ params]
(assert (db/pool? (::db/pool params)) "expected a valid database pool")
(assert (session/manager? (::session/manager params)) "expected a valid session manager"))
(defmethod ig/pre-init-spec ::routes [_]
(s/keys :req [::db/pool ::session/manager]))
(defmethod ig/init-key ::routes
[_ {:keys [::db/pool] :as cfg}]
@@ -544,14 +484,11 @@
["/changelog" {:handler (partial changelog-handler cfg)}]
["/error/:id" {:handler (partial error-handler cfg)}]
["/error" {:handler (partial error-list-handler cfg)}]
["/actions" {:middleware [[errors]]}
["/set-virtual-clock"
{:handler (partial set-virtual-clock cfg)}]
["/resend-email-verification"
{:handler (partial resend-email-notification cfg)}]
["/handle-team-features"
{:handler (partial handle-team-features cfg)}]
["/file-export" {:handler (partial export-handler cfg)}]
["/file-import" {:handler (partial import-handler cfg)}]
["/file-raw-export-import" {:handler (partial raw-export-import-handler cfg)}]]]])
["/actions/resend-email-verification"
{:handler (partial resend-email-notification cfg)}]
["/actions/reset-file-version"
{:handler (partial reset-file-version cfg)}]
["/file/export" {:handler (partial export-handler cfg)}]
["/file/import" {:handler (partial import-handler cfg)}]
["/file/data" {:handler (partial file-data-handler cfg)}]
["/file/changes" {:handler (partial file-changes-handler cfg)}]]])


@@ -13,26 +13,28 @@
[app.config :as cf]
[app.http :as-alias http]
[app.http.access-token :as-alias actoken]
[app.http.auth :as-alias auth]
[app.http.session :as-alias session]
[app.util.inet :as inet]
[clojure.spec.alpha :as s]
[yetti.request :as yreq]
[yetti.response :as yres]))
[ring.request :as rreq]
[ring.response :as rres]))
(defn request->context
"Extracts error report relevant context data from request."
[request]
(let [{:keys [claims] :as auth} (get request ::http/auth-data)]
(-> (cf/logging-context)
(assoc :request/path (:path request))
(assoc :request/method (:method request))
(assoc :request/params (:params request))
(assoc :request/user-agent (yreq/get-header request "user-agent"))
(assoc :request/ip-addr (inet/parse-request request))
(assoc :request/profile-id (get claims :uid))
(assoc :request/auth-data auth)
(assoc :version/frontend (or (yreq/get-header request "x-frontend-version") "unknown")))))
(let [claims (-> {}
(into (::session/token-claims request))
(into (::actoken/token-claims request)))]
{:request/path (:path request)
:request/method (:method request)
:request/params (:params request)
:request/user-agent (rreq/get-header request "user-agent")
:request/ip-addr (inet/parse-request request)
:request/profile-id (:uid claims)
:version/frontend (or (rreq/get-header request "x-frontend-version") "unknown")
:version/backend (:full cf/version)}))
(defmulti handle-error
(fn [cause _ _]
@@ -44,36 +46,34 @@
(defmethod handle-error :authentication
[err _ _]
{::yres/status 401
::yres/body (ex-data err)})
{::rres/status 401
::rres/body (ex-data err)})
(defmethod handle-error :authorization
[err _ _]
{::yres/status 403
::yres/body (ex-data err)})
{::rres/status 403
::rres/body (ex-data err)})
(defmethod handle-error :restriction
[err request _]
[err _ _]
(let [{:keys [code] :as data} (ex-data err)]
(if (= code :method-not-allowed)
{::yres/status 405
::yres/body data}
(binding [l/*context* (request->context request)]
{::yres/status 400
::yres/body data}))))
{::rres/status 405
::rres/body data}
{::rres/status 400
::rres/body data})))
(defmethod handle-error :rate-limit
[err _ _]
(let [headers (-> err ex-data ::http/headers)]
{::yres/status 429
::yres/headers headers}))
{::rres/status 429
::rres/headers headers}))
(defmethod handle-error :concurrency-limit
[err _ _]
(let [headers (-> err ex-data ::http/headers)]
{::yres/status 429
::yres/headers headers}))
{::rres/status 429
::rres/headers headers}))
(defmethod handle-error :validation
[err request parent-cause]
@@ -84,26 +84,22 @@
(= code :schema-validation)
(= code :data-validation))
(let [explain (ex/explain data)]
{::yres/status 400
::yres/body (-> data
{::rres/status 400
::rres/body (-> data
(dissoc ::s/problems ::s/value ::s/spec ::sm/explain)
(cond-> explain (assoc :explain explain)))})
(= code :vern-conflict)
{::yres/status 409 ;; 409 - Conflict
::yres/body data}
(= code :request-body-too-large)
{::yres/status 413 ::yres/body data}
{::rres/status 413 ::rres/body data}
(= code :invalid-image)
(binding [l/*context* (request->context request)]
(let [cause (or parent-cause err)]
(l/warn :hint "image process error" :cause cause)
{::yres/status 400 ::yres/body data}))
(l/warn :hint "unexpected error on processing image" :cause cause)
{::rres/status 400 ::rres/body data}))
:else
{::yres/status 400 ::yres/body data})))
{::rres/status 400 ::rres/body data})))
(defmethod handle-error :assertion
[error request parent-cause]
@@ -114,47 +110,46 @@
(= code :data-validation)
(let [explain (ex/explain data)]
(l/error :hint "data assertion error" :cause cause)
{::yres/status 500
::yres/body (-> data
(dissoc ::sm/explain)
(cond-> explain (assoc :explain explain))
(assoc :type :server-error)
(assoc :code :assertion))})
{::rres/status 500
::rres/body {:type :server-error
:code :assertion
:data (-> data
(dissoc ::sm/explain)
(cond-> explain (assoc :explain explain)))}})
(= code :spec-validation)
(let [explain (ex/explain data)]
(l/error :hint "spec assertion error" :cause cause)
{::yres/status 500
::yres/body (-> data
(dissoc ::s/problems ::s/value ::s/spec)
(cond-> explain (assoc :explain explain))
(assoc :type :server-error)
(assoc :code :assertion))})
{::rres/status 500
::rres/body {:type :server-error
:code :assertion
:data (-> data
(dissoc ::s/problems ::s/value ::s/spec)
(cond-> explain (assoc :explain explain)))}})
:else
(do
(l/error :hint "assertion error" :cause cause)
{::yres/status 500
::yres/body (-> data
(assoc :type :server-error)
(assoc :code :assertion))})))))
{::rres/status 500
::rres/body {:type :server-error
:code :assertion
:data data}})))))
(defmethod handle-error :not-found
[err _ _]
{::yres/status 404
::yres/body (ex-data err)})
{::rres/status 404
::rres/body (ex-data err)})
(defmethod handle-error :internal
[error request parent-cause]
(binding [l/*context* (request->context request)]
(let [cause (or parent-cause error)
data (ex-data error)]
(let [cause (or parent-cause error)]
(l/error :hint "internal error" :cause cause)
{::yres/status 500
::yres/body (-> data
(assoc :type :server-error)
(update :code #(or % :unhandled))
(assoc :hint (ex-message error)))})))
{::rres/status 500
::rres/body {:type :server-error
:code :unhandled
:hint (ex-message error)
:data (ex-data error)}})))
(defmethod handle-error :default
[error request parent-cause]
@@ -174,24 +169,24 @@
(let [state (.getSQLState ^java.sql.SQLException error)
cause (or parent-cause error)]
(binding [l/*context* (request->context request)]
(l/error :hint "postgresql error"
(l/error :hint "PSQL error"
:cause cause)
(cond
(= state "57014")
{::yres/status 504
::yres/body {:type :server-error
{::rres/status 504
::rres/body {:type :server-error
:code :statement-timeout
:hint (ex-message error)}}
(= state "25P03")
{::yres/status 504
::yres/body {:type :server-error
{::rres/status 504
::rres/body {:type :server-error
:code :idle-in-transaction-timeout
:hint (ex-message error)}}
:else
{::yres/status 500
::yres/body {:type :server-error
{::rres/status 500
::rres/body {:type :server-error
:code :unexpected
:hint (ex-message error)
:state state}}))))
@@ -205,25 +200,25 @@
(nil? edata)
(binding [l/*context* (request->context request)]
(l/error :hint "unexpected error" :cause cause)
{::yres/status 500
::yres/body {:type :server-error
{::rres/status 500
::rres/body {:type :server-error
:code :unexpected
:hint (ex-message error)}})
:else
(binding [l/*context* (request->context request)]
(l/error :hint "unhandled error" :cause cause)
{::yres/status 500
::yres/body (-> edata
(assoc :type :server-error)
(update :code #(or % :unhandled))
(assoc :hint (ex-message error)))}))))
{::rres/status 500
::rres/body {:type :server-error
:code :unhandled
:hint (ex-message error)
:data edata}}))))
(defmethod handle-exception java.io.IOException
[cause _ _]
(l/wrn :hint "io exception" :cause cause)
{::yres/status 500
::yres/body {:type :server-error
{::rres/status 500
::rres/body {:type :server-error
:code :io-exception
:hint (ex-message cause)}})
@@ -249,4 +244,4 @@
(defn handle'
[cause request]
(::yres/body (handle cause request)))
(::rres/body (handle cause request)))
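;; A self-contained sketch of the dispatch pattern used by handle-error above:
;; the multimethod keys off the :type of the exception's ex-data and falls back
;; to a generic 500. The status codes here are illustrative.
(defmulti explain-error (fn [cause] (:type (ex-data cause))))

(defmethod explain-error :validation
  [cause]
  {:status 400 :body (ex-data cause)})

(defmethod explain-error :default
  [cause]
  {:status 500 :body {:hint (ex-message cause)}})

;; (explain-error (ex-info "bad input" {:type :validation :code :missing-id}))
;; => {:status 400, :body {:type :validation, :code :missing-id}}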


@@ -1,240 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.http.management
"Internal mangement HTTP API"
(:require
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.schema.generators :as sg]
[app.common.time :as ct]
[app.config :as cf]
[app.db :as db]
[app.http.middleware :as mw]
[app.main :as-alias main]
[app.rpc.commands.profile :as cmd.profile]
[app.setup :as-alias setup]
[app.tokens :as tokens]
[app.worker :as-alias wrk]
[integrant.core :as ig]
[yetti.response :as-alias yres]))
;; ---- ROUTES
(declare ^:private authenticate)
(declare ^:private get-customer)
(declare ^:private update-customer)
(defmethod ig/assert-key ::routes
[_ params]
(assert (db/pool? (::db/pool params)) "expect valid database pool"))
(def ^:private default-system
{:name ::default-system
:compile
(fn [_ _]
(fn [handler cfg]
(fn [request]
(handler cfg request))))})
(def ^:private transaction
{:name ::transaction
:compile
(fn [data _]
(when (:transaction data)
(fn [handler]
(fn [cfg request]
(db/tx-run! cfg handler request)))))})
(defmethod ig/init-key ::routes
[_ {:keys [::setup/props] :as cfg}]
(let [management-key (or (cf/get :management-api-key)
(get props :management-key))]
["" {:middleware [[mw/shared-key-auth management-key]
[default-system cfg]
[transaction]]}
["/authenticate"
{:handler authenticate
:allowed-methods #{:post}}]
["/get-customer"
{:handler get-customer
:transaction true
:allowed-methods #{:post}}]
["/update-customer"
{:handler update-customer
:allowed-methods #{:post}
:transaction true}]]))
;; ---- HELPERS
(defn- coercer
[schema & {:as opts}]
(let [decode-fn (sm/decoder schema sm/json-transformer)
check-fn (sm/check-fn schema opts)]
(fn [data]
(-> data decode-fn check-fn))))
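;; A hedged sketch of what the coercer above presumably does with malli
;; directly: decode JSON-shaped input with the json transformer, then validate
;; it against the schema. Names and the uuid value are illustrative.
(require '[malli.core :as m]
         '[malli.transform :as mt])

(def example-schema [:map [:id :uuid]])

(defn coerce-params
  [data]
  (let [decoded (m/decode example-schema data (mt/json-transformer))]
    (if (m/validate example-schema decoded)
      decoded
      (throw (ex-info "invalid data" {:type :validation})))))

;; (coerce-params {:id "5b36c57e-0000-0000-0000-000000000000"})
;; => {:id #uuid "5b36c57e-0000-0000-0000-000000000000"}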
;; ---- API: AUTHENTICATE
(defn- authenticate
[cfg request]
(let [token (-> request :params :token)
result (tokens/verify cfg {:token token :iss "authentication"})]
{::yres/status 200
::yres/body result}))
;; ---- API: GET-CUSTOMER
(def ^:private schema:get-customer
[:map [:id ::sm/uuid]])
(def ^:private coerce-get-customer-params
(coercer schema:get-customer
:type :validation
:hint "invalid data provided for `get-customer` rpc call"))
(def ^:private sql:get-customer-slots
"WITH teams AS (
SELECT tpr.team_id AS id,
tpr.profile_id AS profile_id
FROM team_profile_rel AS tpr
WHERE tpr.is_owner IS true
AND tpr.profile_id = ?
), teams_with_slots AS (
SELECT tpr.team_id AS id,
count(*) AS total
FROM team_profile_rel AS tpr
WHERE tpr.team_id IN (SELECT id FROM teams)
AND tpr.can_edit IS true
GROUP BY 1
ORDER BY 2
)
SELECT max(total) AS total FROM teams_with_slots;")
(defn- get-customer-slots
[cfg profile-id]
(let [result (db/exec-one! cfg [sql:get-customer-slots profile-id])]
(:total result)))
(defn- get-customer
[cfg request]
(let [profile-id (-> request :params coerce-get-customer-params :id)
profile (cmd.profile/get-profile cfg profile-id)
result {:id (get profile :id)
:name (get profile :fullname)
:email (get profile :email)
:num-editors (get-customer-slots cfg profile-id)
:subscription (-> profile :props :subscription)}]
{::yres/status 200
::yres/body result}))
;; ---- API: UPDATE-CUSTOMER
(def ^:private schema:timestamp
(sm/type-schema
{:type ::timestamp
:pred ct/inst?
:type-properties
{:title "inst"
:description "The same as :app.common.time/inst but encodes to epoch"
:error/message "should be an instant"
:gen/gen (->> (sg/small-int)
(sg/fmap (fn [v] (ct/inst v))))
:decode/string ct/inst
:encode/string inst-ms
:decode/json ct/inst
:encode/json inst-ms}}))
(def ^:private schema:subscription
[:map {:title "Subscription"}
[:id ::sm/text]
[:customer-id ::sm/text]
[:type [:enum
"unlimited"
"professional"
"enterprise"]]
[:status [:enum
"active"
"canceled"
"incomplete"
"incomplete_expired"
"past_due"
"paused"
"trialing"
"unpaid"]]
[:billing-period [:enum
"month"
"day"
"week"
"year"]]
[:quantity :int]
[:description [:maybe ::sm/text]]
[:created-at schema:timestamp]
[:start-date [:maybe schema:timestamp]]
[:ended-at [:maybe schema:timestamp]]
[:trial-end [:maybe schema:timestamp]]
[:trial-start [:maybe schema:timestamp]]
[:cancel-at [:maybe schema:timestamp]]
[:canceled-at [:maybe schema:timestamp]]
[:current-period-end [:maybe schema:timestamp]]
[:current-period-start [:maybe schema:timestamp]]
[:cancel-at-period-end :boolean]
[:cancellation-details
[:map {:title "CancellationDetails"}
[:comment [:maybe ::sm/text]]
[:reason [:maybe ::sm/text]]
[:feedback [:maybe
[:enum
"customer_service"
"low_quality"
"missing_feature"
"other"
"switched_service"
"too_complex"
"too_expensive"
"unused"]]]]]])
(def ^:private schema:update-customer
[:map
[:id ::sm/uuid]
[:subscription [:maybe schema:subscription]]])
(def ^:private coerce-update-customer-params
(coercer schema:update-customer
:type :validation
:hint "invalid data provided for `update-customer` rpc call"))
(defn- update-customer
[cfg request]
(let [{:keys [id subscription]}
(-> request :params coerce-update-customer-params)
{:keys [props] :as profile}
(cmd.profile/get-profile cfg id ::db/for-update true)
props
(assoc props :subscription subscription)]
(l/dbg :hint "update customer"
:profile-id (str id)
:subscription-type (get subscription :type)
:subscription-status (get subscription :status)
:subscription-quantity (get subscription :quantity))
(db/update! cfg :profile
{:props (db/tjson props)}
{:id id}
{::db/return-keys false})
{::yres/status 201
::yres/body nil}))


@@ -12,16 +12,13 @@
[app.common.schema :as-alias sm]
[app.common.transit :as t]
[app.config :as cf]
[app.http :as-alias http]
[app.http.errors :as errors]
[app.tokens :as tokens]
[app.util.pointer-map :as pmap]
[buddy.core.codecs :as bc]
[cuerdas.core :as str]
[ring.request :as rreq]
[ring.response :as rres]
[yetti.adapter :as yt]
[yetti.middleware :as ymw]
[yetti.request :as yreq]
[yetti.response :as yres])
[yetti.middleware :as ymw])
(:import
io.undertow.server.RequestTooBigException
java.io.InputStream
@@ -40,17 +37,17 @@
(defn- get-reader
^java.io.BufferedReader
[request]
(let [^InputStream body (yreq/body request)]
(let [^InputStream body (rreq/body request)]
(java.io.BufferedReader.
(java.io.InputStreamReader. body))))
(defn wrap-parse-request
[handler]
(letfn [(process-request [request]
(let [header (yreq/get-header request "content-type")]
(let [header (rreq/get-header request "content-type")]
(cond
(str/starts-with? header "application/transit+json")
(with-open [^InputStream is (yreq/body request)]
(with-open [^InputStream is (rreq/body request)]
(let [params (t/read! (t/reader is))]
(-> request
(assoc :body-params params)
@@ -88,7 +85,7 @@
(errors/handle cause request)))]
(fn [request]
(if (= (yreq/method request) :post)
(if (= (rreq/method request) :post)
(try
(-> request process-request handler)
(catch Throwable cause
@@ -116,53 +113,57 @@
(defn wrap-format-response
[handler]
(letfn [(transit-streamable-body [data opts _ output-stream]
(try
(with-open [^OutputStream bos (buffered-output-stream output-stream buffer-size)]
(let [tw (t/writer bos opts)]
(t/write! tw data)))
(catch java.io.IOException _)
(catch Throwable cause
(binding [l/*context* {:value data}]
(l/error :hint "unexpected error on encoding response"
:cause cause)))
(finally
(.close ^OutputStream output-stream))))
(letfn [(transit-streamable-body [data opts]
(reify rres/StreamableResponseBody
(-write-body-to-stream [_ _ output-stream]
(try
(with-open [^OutputStream bos (buffered-output-stream output-stream buffer-size)]
(let [tw (t/writer bos opts)]
(t/write! tw data)))
(catch java.io.IOException _)
(catch Throwable cause
(binding [l/*context* {:value data}]
(l/error :hint "unexpected error on encoding response"
:cause cause)))
(finally
(.close ^OutputStream output-stream))))))
(json-streamable-body [data _ output-stream]
(try
(let [encode (or (-> data meta :encode/json) identity)
data (encode data)]
(with-open [^OutputStream bos (buffered-output-stream output-stream buffer-size)]
(with-open [^java.io.OutputStreamWriter writer (java.io.OutputStreamWriter. bos)]
(json/write writer data :key-fn json/write-camel-key :value-fn write-json-value))))
(catch java.io.IOException _)
(catch Throwable cause
(binding [l/*context* {:value data}]
(l/error :hint "unexpected error on encoding response"
:cause cause)))
(finally
(.close ^OutputStream output-stream))))
(json-streamable-body [data]
(reify rres/StreamableResponseBody
(-write-body-to-stream [_ _ output-stream]
(try
(let [encode (or (-> data meta :encode/json) identity)
data (encode data)]
(with-open [^OutputStream bos (buffered-output-stream output-stream buffer-size)]
(with-open [^java.io.OutputStreamWriter writer (java.io.OutputStreamWriter. bos)]
(json/write writer data :key-fn json/write-camel-key :value-fn write-json-value))))
(catch java.io.IOException _)
(catch Throwable cause
(binding [l/*context* {:value data}]
(l/error :hint "unexpected error on encoding response"
:cause cause)))
(finally
(.close ^OutputStream output-stream))))))
(format-response-with-json [response _]
(let [body (::yres/body response)]
(let [body (::rres/body response)]
(if (or (boolean? body) (coll? body))
(-> response
(update ::yres/headers assoc "content-type" "application/json")
(assoc ::yres/body (yres/stream-body (partial json-streamable-body body))))
(update ::rres/headers assoc "content-type" "application/json")
(assoc ::rres/body (json-streamable-body body)))
response)))
(format-response-with-transit [response request]
(let [body (::yres/body response)]
(let [body (::rres/body response)]
(if (or (boolean? body) (coll? body))
(let [qs (yreq/query request)
(let [qs (rreq/query request)
opts (if (or (contains? cf/flags :transit-readable-response)
(str/includes? qs "transit_verbose"))
{:type :json-verbose}
{:type :json})]
(-> response
(update ::yres/headers assoc "content-type" "application/transit+json")
(assoc ::yres/body (yres/stream-body (partial transit-streamable-body body opts)))))
(update ::rres/headers assoc "content-type" "application/transit+json")
(assoc ::rres/body (transit-streamable-body body opts))))
response)))
(format-from-params [{:keys [query-params] :as request}]
@@ -171,7 +172,7 @@
(format-response [response request]
(let [accept (or (format-from-params request)
(yreq/get-header request "accept"))]
(rreq/get-header request "accept"))]
(cond
(or (= accept "application/transit+json")
(str/includes? accept "application/transit+json"))
@@ -220,11 +221,11 @@
(defn wrap-cors
[handler]
(fn [request]
(let [response (if (= (yreq/method request) :options)
{::yres/status 200}
(let [response (if (= (rreq/method request) :options)
{::rres/status 200}
(handler request))
origin (yreq/get-header request "origin")]
(update response ::yres/headers with-cors-headers origin))))
origin (rreq/get-header request "origin")]
(update response ::rres/headers with-cors-headers origin))))
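;; A minimal ring-style sketch of the CORS behaviour above: OPTIONS preflight
;; requests short-circuit with a 200 and every response echoes the Origin
;; header back. The header set is illustrative, not Penpot's actual list.
(defn wrap-cors-example
  [handler]
  (fn [request]
    (let [origin   (get-in request [:headers "origin"])
          response (if (= :options (:request-method request))
                     {:status 200}
                     (handler request))]
      (update response :headers merge
              {"access-control-allow-origin" origin
               "access-control-allow-credentials" "true"}))))

;; ((wrap-cors-example (constantly {:status 200 :headers {}}))
;;  {:request-method :get :headers {"origin" "https://example.com"}})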
(def cors
{:name ::cors
@@ -239,81 +240,7 @@
(when-let [allowed (:allowed-methods data)]
(fn [handler]
(fn [request]
(let [method (yreq/method request)]
(let [method (rreq/method request)]
(if (contains? allowed method)
(handler request)
{::yres/status 405}))))))})
(defn- wrap-auth
[handler decoders]
(let [token-re
#"(?i)^(Token|Bearer)\s+(.*)"
get-token-from-authorization
(fn [request]
(when-let [[_ token-type token] (some->> (yreq/get-header request "authorization")
(re-matches token-re))]
(if (= "token" (str/lower token-type))
{:type :token
:token token}
{:type :bearer
:token token})))
get-token-from-cookie
(fn [request]
(let [cname (cf/get :auth-token-cookie-name)
token (some-> (yreq/get-cookie request cname) :value)]
(when-not (str/empty? token)
{:type :cookie
:token token})))
get-token
(some-fn get-token-from-cookie get-token-from-authorization)
process-request
(fn [request]
(if-let [{:keys [type token] :as auth} (get-token request)]
(let [decode-fn (get decoders type)]
(if (or (= type :cookie) (= type :bearer))
(let [metadata (tokens/decode-header token)]
;; NOTE: we only proceed to decode claims on new
;; cookie tokens. The old cookies dont need to be
;; decoded because they use the token string as ID
(if (and (= (:kid metadata) 1)
(= (:ver metadata) 1)
(some? decode-fn))
(assoc request ::http/auth-data (assoc auth
:claims (decode-fn token)
:metadata metadata))
(assoc request ::http/auth-data (assoc auth :metadata {:ver 0}))))
(if decode-fn
(assoc request ::http/auth-data (assoc auth :claims (decode-fn token)))
(assoc request ::http/auth-data auth))))
request))]
(fn [request]
(-> request process-request handler))))
(def auth
{:name ::auth
:compile (constantly wrap-auth)})
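;; A self-contained sketch of the Authorization parsing done by wrap-auth
;; above: the case-insensitive regex distinguishes personal access tokens
;; ("Token ...") from bearer tokens ("Bearer ..."). Values are illustrative.
(defn- parse-authorization
  [header]
  (when-let [[_ kind token] (some->> header (re-matches #"(?i)^(Token|Bearer)\s+(.*)"))]
    {:type  (if (= "token" (clojure.string/lower-case kind)) :token :bearer)
     :token token}))

;; (parse-authorization "Bearer eyJhbGciOi...")
;; => {:type :bearer, :token "eyJhbGciOi..."}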
(defn- wrap-shared-key-auth
[handler shared-key]
(if shared-key
(let [shared-key (if (string? shared-key)
shared-key
(bc/bytes->b64-str shared-key true))]
(fn [request]
(let [key (yreq/get-header request "x-shared-key")]
(if (= key shared-key)
(handler (assoc request ::http/auth-with-shared-key true))
{::yres/status 403}))))
(fn [_ _]
{::yres/status 403})))
(def shared-key-auth
{:name ::shared-key-auth
:compile (constantly wrap-shared-key-auth)})
{::rres/status 405}))))))})


@@ -1,55 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.http.security
"Additional security layer middlewares"
(:require
[app.config :as cf]
[yetti.request :as yreq]
[yetti.response :as yres]))
(def ^:private safe-methods
#{:get :head :options})
(defn- wrap-sec-fetch-metadata
"Sec-Fetch metadata security layer middleware"
[handler]
(fn [request]
(let [site (yreq/get-header request "sec-fetch-site")]
(cond
(= site "same-origin")
(handler request)
(or (= site "same-site")
(= site "cross-site"))
(if (contains? safe-methods (yreq/method request))
(handler request)
{::yres/status 403})
:else
(handler request)))))
(def sec-fetch-metadata
{:name ::sec-fetch-metadata
:compile (fn [_ _]
(when (contains? cf/flags :sec-fetch-metadata-middleware)
wrap-sec-fetch-metadata))})
(defn- wrap-client-header-check
"Check for a penpot custom header to be present as additional CSRF
protection"
[handler]
(fn [request]
(let [client (yreq/get-header request "x-client")]
(if (some? client)
(handler request)
{::yres/status 403}))))
(def client-header-check
{:name ::client-header-check
:compile (fn [_ _]
(when (contains? cf/flags :client-header-check-middleware)
wrap-client-header-check))})
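;; A self-contained sketch of the Sec-Fetch-Site policy above: same-origin
;; requests always pass, same-site/cross-site requests only pass for safe
;; methods, and requests without the header are let through.
(def ^:private example-safe-methods #{:get :head :options})

(defn- allowed-by-sec-fetch?
  [{:keys [request-method headers]}]
  (let [site (get headers "sec-fetch-site")]
    (cond
      (= site "same-origin")             true
      (#{"same-site" "cross-site"} site) (contains? example-safe-methods request-method)
      :else                              true)))

;; (allowed-by-sec-fetch? {:request-method :post
;;                         :headers {"sec-fetch-site" "cross-site"}}) => false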


@@ -9,136 +9,116 @@
(:require
[app.common.data :as d]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.common.spec :as us]
[app.common.uri :as u]
[app.config :as cf]
[app.db :as db]
[app.db.sql :as sql]
[app.http :as-alias http]
[app.http.auth :as-alias http.auth]
[app.http.session.tasks :as-alias tasks]
[app.main :as-alias main]
[app.setup :as-alias setup]
[app.tokens :as tokens]
[app.util.time :as dt]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[integrant.core :as ig]
[yetti.request :as yreq]
[yetti.response :as yres]))
[ring.request :as rreq]
[yetti.request :as yrq]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; DEFAULTS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; A default cookie name for storing the session.
(def default-auth-token-cookie-name "auth-token")
;; A cookie that we can use to check from other sites of the same
;; domain if a user is authenticated.
(def default-auth-data-cookie-name "auth-data")
;; Default value for cookie max-age
(def default-cookie-max-age (ct/duration {:days 7}))
(def default-cookie-max-age (dt/duration {:days 7}))
;; Default age for automatic session renewal
(def default-renewal-max-age (ct/duration {:hours 6}))
(def default-renewal-max-age (dt/duration {:hours 6}))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; PROTOCOLS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defprotocol ISessionManager
(read-session [_ id])
(create-session [_ params])
(update-session [_ session])
(delete-session [_ id]))
(read [_ key])
(write! [_ key data])
(update! [_ data])
(delete! [_ key]))
(defn manager?
[o]
(satisfies? ISessionManager o))
(sm/register!
{:type ::manager
:pred manager?})
(s/def ::manager #(satisfies? ISessionManager %))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; STORAGE IMPL
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def ^:private schema:params
[:map {:title "SessionParams" :closed true}
[:profile-id ::sm/uuid]
[:user-agent {:optional true} ::sm/text]
[:sso-provider-id {:optional true} ::sm/uuid]
[:sso-session-id {:optional true} :string]])
(s/def ::session-params
(s/keys :req-un [::user-agent
::profile-id
::created-at]))
(def ^:private valid-params?
(sm/validator schema:params))
(defn- prepare-session-params
[key params]
(us/assert! ::us/not-empty-string key)
(us/assert! ::session-params params)
{:user-agent (:user-agent params)
:profile-id (:profile-id params)
:created-at (:created-at params)
:updated-at (:created-at params)
:id key})
(defn- database-manager
[pool]
(reify ISessionManager
(read-session [_ id]
(if (string? id)
;; Backward compatibility
(let [session (db/exec-one! pool (sql/select :http-session {:id id}))]
(-> session
(assoc :modified-at (:updated-at session))
(dissoc :updated-at)))
(db/exec-one! pool (sql/select :http-session-v2 {:id id}))))
(read [_ token]
(db/exec-one! pool (sql/select :http-session {:id token})))
(create-session [_ params]
(assert (valid-params? params) "expect valid session params")
(write! [_ key params]
(let [params (prepare-session-params key params)]
(db/insert! pool :http-session params)
params))
(let [now (ct/now)
params (-> params
(assoc :id (uuid/next))
(assoc :created-at now)
(assoc :modified-at now))]
(db/insert! pool :http-session-v2 params
{::db/return-keys true})))
(update! [_ params]
(let [updated-at (dt/now)]
(db/update! pool :http-session
{:updated-at updated-at}
{:id (:id params)})
(assoc params :updated-at updated-at)))
(update-session [_ session]
(let [modified-at (ct/now)]
(if (string? (:id session))
(db/insert! pool :http-session-v2
(-> session
(assoc :id (uuid/next))
(assoc :created-at modified-at)
(assoc :modified-at modified-at)))
(db/update! pool :http-session-v2
{:modified-at modified-at}
{:id (:id session)}
{::db/return-keys true}))))
(delete-session [_ id]
(if (string? id)
(db/delete! pool :http-session {:id id} {::db/return-keys false})
(db/delete! pool :http-session-v2 {:id id} {::db/return-keys false}))
(delete! [_ token]
(db/delete! pool :http-session {:id token})
nil)))
(defn inmemory-manager
[]
(let [cache (atom {})]
(reify ISessionManager
(read-session [_ id]
(get @cache id))
(read [_ token]
(get @cache token))
(create-session [_ params]
(assert (valid-params? params) "expect valid session params")
(write! [_ key params]
(let [params (prepare-session-params key params)]
(swap! cache assoc key params)
params))
(let [now (ct/now)
session (-> params
(assoc :id (uuid/next))
(assoc :created-at now)
(assoc :modified-at now))]
(swap! cache assoc (:id session) session)
session))
(update! [_ params]
(let [updated-at (dt/now)]
(swap! cache update (:id params) assoc :updated-at updated-at)
(assoc params :updated-at updated-at)))
(update-session [_ session]
(let [modified-at (ct/now)]
(swap! cache update (:id session) assoc :modified-at modified-at)
(assoc session :modified-at modified-at)))
(delete-session [_ id]
(swap! cache dissoc id)
(delete! [_ token]
(swap! cache dissoc token)
nil))))
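;; A tiny, self-contained sketch of the in-memory variant above: sessions live
;; in an atom keyed by id, which is enough for tests and REPL experiments.
;; Keys and values below are illustrative.
(defonce ^:private example-sessions (atom {}))

(defn- put-session!
  [id data]
  (swap! example-sessions assoc id (assoc data :id id))
  (get @example-sessions id))

(defn- touch-session!
  [id]
  (swap! example-sessions update id assoc :modified-at (java.time.Instant/now))
  (get @example-sessions id))

;; (put-session! "abc" {:profile-id :some-uuid :user-agent "curl"})
;; (touch-session! "abc")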
(defmethod ig/assert-key ::manager
[_ params]
(assert (db/pool? (::db/pool params)) "expect valid database pool"))
(defmethod ig/pre-init-spec ::manager [_]
(s/keys :req [::db/pool]))
(defmethod ig/init-key ::manager
[_ {:keys [::db/pool]}]
@@ -153,116 +133,109 @@
;; MANAGER IMPL
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(declare ^:private assign-session-cookie)
(declare ^:private clear-session-cookie)
(defn- assign-token
[cfg session]
(let [claims {:iss "authentication"
:aud "penpot"
:sid (:id session)
:iat (:modified-at session)
:uid (:profile-id session)
:sso-provider-id (:sso-provider-id session)
:sso-session-id (:sso-session-id session)}
header {:kid 1 :ver 1}
token (tokens/generate cfg claims header)]
(assoc session :token token)))
(declare ^:private assign-auth-token-cookie)
(declare ^:private assign-auth-data-cookie)
(declare ^:private clear-auth-token-cookie)
(declare ^:private clear-auth-data-cookie)
(declare ^:private gen-token)
(defn create-fn
  [{:keys [::manager] :as cfg} {profile-id :id :as profile}
   & {:keys [sso-provider-id sso-session-id]}]
  (assert (manager? manager) "expected valid session manager")
  (assert (uuid? profile-id) "expected valid uuid for profile-id")
  (fn [request response]
    (let [uagent (yreq/get-header request "user-agent")
          session (->> {:user-agent uagent
                        :profile-id profile-id
                        :sso-provider-id sso-provider-id
                        :sso-session-id sso-session-id}
                       (d/without-nils)
                       (create-session manager)
                       (assign-token cfg))]
      (l/trc :hint "create" :id (str (:id session)) :profile-id (str profile-id))
      (assign-session-cookie response session))))

(defn create-fn
  [{:keys [::manager ::setup/props]} profile-id]
  (us/assert! ::manager manager)
  (us/assert! ::us/uuid profile-id)
  (fn [request response]
    (let [uagent (rreq/get-header request "user-agent")
          params {:profile-id profile-id
                  :user-agent uagent
                  :created-at (dt/now)}
          token (gen-token props params)
          session (write! manager token params)]
      (l/trace :hint "create" :profile-id (str profile-id))
      (-> response
          (assign-auth-token-cookie session)
          (assign-auth-data-cookie session)))))
(defn delete-fn
  [{:keys [::manager]}]
  (assert (manager? manager) "expected valid session manager")
  (fn [request response]
    (some->> (get request ::id) (delete-session manager))
    (clear-session-cookie response)))

(defn delete-fn
  [{:keys [::manager]}]
  (us/assert! ::manager manager)
  (fn [request response]
    (let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
          cookie (yrq/get-cookie request cname)]
      (l/trace :hint "delete" :profile-id (:profile-id request))
      (some->> (:value cookie) (delete! manager))
      (-> response
          (assoc :status 204)
          (assoc :body nil)
          (clear-auth-token-cookie)
          (clear-auth-data-cookie)))))
(defn decode-token
[cfg token]
(try
(tokens/verify cfg {:token token :iss "authentication"})
(catch Throwable cause
(l/trc :hint "exception on decoding token"
:token token
:cause cause))))
(defn- gen-token
[props {:keys [profile-id created-at]}]
(tokens/generate props {:iss "authentication"
:iat created-at
:uid profile-id}))
(defn- decode-token
[props token]
(when token
(tokens/verify props {:token token :iss "authentication"})))
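;; A hypothetical sketch (not part of the original source) of how gen-token and
;; decode-token above pair up. `props` stands in for the ::setup/props value that
;; create-fn destructures; the claim names (:iss, :iat, :uid) come from gen-token.
(comment
  (let [params {:profile-id #uuid "00000000-0000-0000-0000-000000000000"
                :created-at (dt/now)}
        token  (gen-token props params)]
    ;; verification succeeds only when the issuer and signature match
    (decode-token props token)))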
(defn get-session
  [request]
  (get request ::session))

(defn- get-token
  [request]
  (let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
        cookie (some-> (yrq/get-cookie request cname) :value)]
    (when-not (str/empty? cookie)
      cookie)))
(defn invalidate-others
[cfg session]
(let [sql "delete from http_session_v2 where profile_id = ? and id != ?"]
(-> (db/exec-one! cfg [sql (:profile-id session) (:id session)])
(db/get-update-count))))
(defn- get-session
[manager token]
(some->> token (read manager)))
(defn- renew-session?
  [{:keys [id modified-at] :as session}]
  (or (string? id)
      (and (ct/inst? modified-at)
           (let [elapsed (ct/diff modified-at (ct/now))]
             (neg? (compare default-renewal-max-age elapsed))))))

(defn- renew-session?
  [{:keys [updated-at] :as session}]
  (and (dt/instant? updated-at)
       (let [elapsed (dt/diff updated-at (dt/now))]
         (neg? (compare default-renewal-max-age elapsed)))))
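;; A hypothetical sketch (not part of the original source): renew-session? compares
;; the time elapsed since the last update against default-renewal-max-age (defined
;; earlier in this file); (neg? (compare default-renewal-max-age elapsed)) is true
;; exactly when elapsed exceeds the window, so cookies are reissued at most once per window.
(comment
  (renew-session? {:updated-at (dt/now)}) ;; => false right after an update
  (renew-session? {}))                    ;; => false when no timestamp is present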
(defn- wrap-soft-auth
[handler {:keys [::manager ::setup/props]}]
(us/assert! ::manager manager)
(letfn [(handle-request [request]
(try
(let [token (get-token request)
claims (decode-token props token)]
(cond-> request
(map? claims)
(-> (assoc ::token-claims claims)
(assoc ::token token))))
(catch Throwable cause
(l/trace :hint "exception on decoding malformed token" :cause cause)
request)))]
(fn [request]
(handler (handle-request request)))))
(defn- wrap-authz
  [handler {:keys [::manager] :as cfg}]
  (assert (manager? manager) "expected valid session manager")
  (fn [request]
    (let [{:keys [type token claims metadata]} (get request ::http/auth-data)]
      (cond
        (= type :cookie)
        (let [session (case (:ver metadata)
                        ;; BACKWARD COMPATIBILITY WITH OLD TOKENS
                        0 (read-session manager token)
                        1 (some->> (:sid claims) (read-session manager))
                        nil)
              request (cond-> request
                        (some? session)
                        (-> (assoc ::profile-id (:profile-id session))
                            (assoc ::session session)))
              response (handler request)]
          (if (and session (renew-session? session))
            (let [session (->> session
                               (update-session manager)
                               (assign-token cfg))]
              (assign-session-cookie response session))
            response))

        (= type :bearer)
        (let [session (case (:ver metadata)
                        ;; BACKWARD COMPATIBILITY WITH OLD TOKENS
                        0 (read-session manager token)
                        1 (some->> (:sid claims) (read-session manager))
                        nil)
              request (cond-> request
                        (some? session)
                        (-> (assoc ::profile-id (:profile-id session))
                            (assoc ::session session)))]
          (handler request))

        :else
        (handler request)))))

(defn- wrap-authz
  [handler {:keys [::manager]}]
  (us/assert! ::manager manager)
  (fn [request]
    (let [session (get-session manager (::token request))
          request (cond-> request
                    (some? session)
                    (assoc ::profile-id (:profile-id session)
                           ::id (:id session)))
          response (handler request)]
      (if (renew-session? session)
        (let [session (update! manager session)]
          (-> response
              (assign-auth-token-cookie session)
              (assign-auth-data-cookie session)))
        response))))
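;; A hypothetical wiring sketch (not part of the original source): the middleware
;; maps defined next are meant to run in order — soft-auth first (cookie token ->
;; ::token/::token-claims), authz second (token -> session lookup and renewal).
;; The route data shown is illustrative only.
(comment
  {:middleware [soft-auth authz]})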
(def soft-auth
{:name ::soft-auth
:compile (constantly wrap-soft-auth)})
(def authz
{:name ::authz
@@ -270,17 +243,17 @@
;; --- IMPL
(defn- assign-session-cookie
[response {token :token modified-at :modified-at}]
(defn- assign-auth-token-cookie
[response {token :id updated-at :updated-at}]
(let [max-age (cf/get :auth-token-cookie-max-age default-cookie-max-age)
created-at modified-at
renewal (ct/plus created-at default-renewal-max-age)
expires (ct/plus created-at max-age)
created-at (or updated-at (dt/now))
renewal (dt/plus created-at default-renewal-max-age)
expires (dt/plus created-at max-age)
secure? (contains? cf/flags :secure-session-cookies)
strict? (contains? cf/flags :strict-session-cookies)
cors? (contains? cf/flags :cors)
name (cf/get :auth-token-cookie-name)
comment (str "Renewal at: " (ct/format-inst renewal :rfc1123))
name (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
comment (str "Renewal at: " (dt/format-instant renewal :rfc1123))
cookie {:path "/"
:http-only true
:expires expires
@@ -288,46 +261,81 @@
:comment comment
:same-site (if cors? :none (if strict? :strict :lax))
:secure secure?}]
(update response ::yres/cookies assoc name cookie)))
(update response :cookies assoc name cookie)))
(defn- clear-session-cookie
  [response]
  (let [cname (cf/get :auth-token-cookie-name)]
    (update response ::yres/cookies assoc cname {:path "/" :value "" :max-age 0})))
(defn- assign-auth-data-cookie
[response {profile-id :profile-id updated-at :updated-at}]
(let [max-age (cf/get :auth-token-cookie-max-age default-cookie-max-age)
domain (cf/get :auth-data-cookie-domain)
cname default-auth-data-cookie-name
created-at (or updated-at (dt/now))
renewal (dt/plus created-at default-renewal-max-age)
expires (dt/plus created-at max-age)
comment (str "Renewal at: " (dt/format-instant renewal :rfc1123))
secure? (contains? cf/flags :secure-session-cookies)
strict? (contains? cf/flags :strict-session-cookies)
cors? (contains? cf/flags :cors)
cookie {:domain domain
:expires expires
:path "/"
:comment comment
:value (u/map->query-string {:profile-id profile-id})
:same-site (if cors? :none (if strict? :strict :lax))
:secure secure?}]
(cond-> response
(string? domain)
(update :cookies assoc cname cookie))))
(defn- clear-auth-token-cookie
  [response]
  (let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name)]
    (update response :cookies assoc cname {:path "/" :value "" :max-age 0})))
(defn- clear-auth-data-cookie
[response]
(let [cname default-auth-data-cookie-name
domain (cf/get :auth-data-cookie-domain)]
(cond-> response
(string? domain)
(update :cookies assoc cname {:domain domain :path "/" :value "" :max-age 0}))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; TASK: SESSION GC
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defmethod ig/assert-key ::tasks/gc
[_ params]
(assert (db/pool? (::db/pool params)) "expected valid database pool")
(assert (ct/duration? (::tasks/max-age params))))
(s/def ::tasks/max-age ::dt/duration)
(defmethod ig/expand-key ::tasks/gc
  [k v]
  (let [max-age (cf/get :auth-token-cookie-max-age default-cookie-max-age)]
    {k (merge {::tasks/max-age max-age} (d/without-nils v))}))

(defmethod ig/pre-init-spec ::tasks/gc [_]
  (s/keys :req [::db/pool]
          :opt [::tasks/max-age]))

(defmethod ig/prep-key ::tasks/gc
  [_ cfg]
  (let [max-age (cf/get :auth-token-cookie-max-age default-cookie-max-age)]
    (merge {::tasks/max-age max-age} (d/without-nils cfg))))
(def ^:private
  sql:delete-expired
  "DELETE FROM http_session
    WHERE updated_at < ?::timestamptz
       or (updated_at is null and
           created_at < ?::timestamptz)")

(defn- collect-expired-tasks
  [{:keys [::db/conn ::tasks/max-age]}]
  (let [threshold (ct/minus (ct/now) max-age)
        result (-> (db/exec-one! conn [sql:delete-expired threshold threshold])
                   (db/get-update-count))]
    (l/dbg :task "gc"
           :hint "clean http sessions"
           :deleted result)
    result))

(def ^:private
  sql:delete-expired
  "delete from http_session
    where updated_at < now() - ?::interval
       or (updated_at is null and
           created_at < now() - ?::interval)")
(defmethod ig/init-key ::tasks/gc
  [_ {:keys [::tasks/max-age] :as cfg}]
  (l/dbg :hint "initializing session gc task" :max-age max-age)
  (fn [_]
    (db/tx-run! cfg collect-expired-tasks)))

(defmethod ig/init-key ::tasks/gc
  [_ {:keys [::db/pool ::tasks/max-age] :as cfg}]
  (l/debug :hint "initializing session gc task" :max-age max-age)
  (fn [_]
    (db/with-atomic [conn pool]
      (let [interval (db/interval max-age)
            result (db/exec-one! conn [sql:delete-expired interval interval])
            result (:next.jdbc/update-count result)]
        (l/debug :task "gc"
                 :hint "clean http sessions"
                 :deleted result)
        result))))
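;; A hypothetical sketch (not part of the original source): invoking the task
;; function produced by init-key. `cfg` stands for a prepared system map holding
;; ::db/pool and ::tasks/max-age; both variants return the number of deleted rows.
(comment
  (let [run-gc (ig/init-key ::tasks/gc cfg)]
    (run-gc nil)))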
