Compare commits


14 Commits

Author SHA1 Message Date
Andrey Antukh a124812f21 WIP 2025-08-13 11:47:11 +02:00
Andrey Antukh f75b7ea284 WIP 2025-08-13 11:38:12 +02:00
Andrey Antukh 3f99b1b626 WIP 2025-08-13 11:33:50 +02:00
Andrey Antukh b2abd308ca WIP 2025-08-13 11:32:30 +02:00
Andrey Antukh a4833f95b5 WIP 2025-08-13 11:26:41 +02:00
Andrey Antukh 7e9c8e8f01 WIP 2025-08-13 11:24:06 +02:00
Andrey Antukh 38066c73ee WIP 2025-08-13 09:49:06 +02:00
Andrey Antukh 2f0045e835 WIP 2025-08-13 09:49:06 +02:00
Andrey Antukh 3a0870690b WIP 2025-08-13 09:49:06 +02:00
Andrey Antukh 872b8fec85 WIP 2025-08-13 09:49:06 +02:00
Andrey Antukh cb0d409ebf WIP 2025-08-13 09:49:06 +02:00
Andrey Antukh a774387011 WIP 2025-08-13 09:49:06 +02:00
Andrey Antukh 07af88f33b WIP 2025-08-13 09:49:06 +02:00
Andrey Antukh cc02a4732e 🚧 Refactor file storage (Make it more scalable and easily extensible) 2025-08-13 09:49:06 +02:00
1257 changed files with 68138 additions and 127420 deletions


@@ -226,29 +226,14 @@ jobs:
keys:
- v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}
# Build frontend
- run:
name: "frontend build"
name: "integration tests"
working_directory: "./frontend"
command: |
yarn install
yarn run build:app:assets
yarn run build:app
yarn run build:app:libs
# Build the wasm bundle
- run:
name: "wasm build"
working_directory: "./render-wasm"
command: |
EMSDK_QUIET=1 . /opt/emsdk/emsdk_env.sh
./build release
# Run integration tests
- run:
name: "integration tests"
working_directory: "./frontend"
command: |
yarn run playwright install chromium
yarn run test:e2e -x --workers=4
@@ -326,16 +311,24 @@ jobs:
workflows:
penpot:
jobs:
- lint
- test-frontend:
requires:
- lint: success
- test-library:
requires:
- test-frontend: success
- lint: success
- test-components:
requires:
- test-frontend: success
- lint: success
- test-integration:
requires:
- test-frontend: success
- lint: success
- test-backend:
@@ -346,6 +339,4 @@ workflows:
requires:
- lint: success
- lint
- test-integration
- test-render-wasm


@@ -13,7 +13,6 @@
- [ ] Add a detailed explanation of how to reproduce the issue and/or verify the fix, if applicable.
- [ ] Include screenshots or videos, if applicable.
- [ ] Add or modify existing integration tests in case of bugs or new features, if applicable.
- [ ] Refactor any modified SCSS files following the refactor guide.
- [ ] Check CI passes successfully.
- [ ] Update the `CHANGES.md` file, referencing the related GitHub issue, if applicable.


@@ -1,93 +0,0 @@
name: Bundles Builder
on:
# Create bundle from manual action
workflow_dispatch:
inputs:
gh_ref:
description: 'Name of the branch or ref'
type: string
required: true
default: 'develop'
build_wasm:
description: 'BUILD_WASM. Valid values: yes, no'
type: string
required: false
default: 'yes'
build_storybook:
description: 'BUILD_STORYBOOK. Valid values: yes, no'
type: string
required: false
default: 'yes'
workflow_call:
inputs:
gh_ref:
description: 'Name of the branch or ref'
type: string
required: true
default: 'develop'
build_wasm:
description: 'BUILD_WASM. Valid values: yes, no'
type: string
required: false
default: 'yes'
build_storybook:
description: 'BUILD_STORYBOOK. Valid values: yes, no'
type: string
required: false
default: 'yes'
jobs:
build-bundle:
name: Build and Upload Penpot Bundle
runs-on: ubuntu-24.04
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0
ref: ${{ inputs.gh_ref }}
- name: Extract some useful variables
id: vars
run: |
echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT
echo "bundle_version=$(git describe --tags --always)" >> $GITHUB_OUTPUT
- name: Build bundle
env:
BUILD_WASM: ${{ inputs.build_wasm }}
BUILD_STORYBOOK: ${{ inputs.build_storybook }}
run: ./manage.sh build-bundle
- name: Prepare directories for zipping
run: |
mkdir zips
mv bundles penpot
- name: Create zip bundle
run: |
echo "📦 Packaging Penpot bundle..."
zip -r zips/penpot.zip penpot
- name: Upload Penpot bundle to S3
run: |
aws s3 cp zips/penpot.zip s3://${{ secrets.S3_BUCKET }}/penpot-${{ steps.vars.outputs.gh_ref }}.zip --metadata bundle-version=${{ steps.vars.outputs.bundle_version }}
- name: Notify Mattermost
if: failure()
uses: mattermost/action-mattermost-notify@master
with:
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
MATTERMOST_CHANNEL: bot-alerts-cicd
TEXT: |
❌ 📦 *[PENPOT] Error building penpot bundles.*
📄 Triggered from ref: `${{ steps.vars.outputs.gh_ref }}`
Bundle version: `${{ steps.vars.outputs.bundle_version }}`
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
@infra

.github/workflows/build-bundles.yml (new file)

@@ -0,0 +1,64 @@
name: Build and Upload Penpot Bundles
on:
# Create bundle from manual action
workflow_dispatch:
workflow_call:
inputs:
gh_ref:
description: 'Name of the branch'
type: string
required: true
jobs:
build-bundles:
name: Build and Upload Penpot Bundles
runs-on: ubuntu-24.04
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0
ref: ${{ inputs.gh_ref }}
- name: Extract some useful variables
id: vars
run: |
echo "commit_hash=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
echo "gh_branch=${{ github.base_ref || github.ref_name }}" >> $GITHUB_OUTPUT
- name: Set up Docker Buildx for multi-arch build
uses: docker/setup-buildx-action@v3
- name: Run manage.sh build-bundle from host
run: ./manage.sh build-bundle
- name: Prepare directories for zipping
run: |
mkdir zips
mv bundles penpot
- name: Create zip bundles
run: |
echo "📦 Packaging Penpot bundles..."
zip -r zips/penpot.zip penpot
- name: Upload Penpot bundle to S3
run: |
aws s3 cp zips/penpot.zip s3://${{ secrets.S3_BUCKET }}/penpot-${{ steps.vars.outputs.gh_branch}}-latest.zip
aws s3 cp zips/penpot.zip s3://${{ secrets.S3_BUCKET }}/penpot-${{ steps.vars.outputs.commit_hash }}.zip
- name: Notify Mattermost
if: failure()
uses: mattermost/action-mattermost-notify@master
with:
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
TEXT: |
❌ *[PENPOT] Error during the execution of the job*
📄 Triggered from ref: `${{ steps.vars.outputs.gh_branch}}`
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}


@@ -1,21 +1,12 @@
name: _DEVELOP
name: Build and Upload Penpot DEVELOP Bundles
on:
schedule:
- cron: '16 5-20 * * 1-5'
jobs:
build-bundle:
uses: ./.github/workflows/build-bundle.yml
secrets: inherit
with:
gh_ref: "develop"
build_wasm: "yes"
build_storybook: "yes"
build-docker:
needs: build-bundle
uses: ./.github/workflows/build-docker.yml
build-develop-bundle:
uses: ./.github/workflows/build-bundles.yml
secrets: inherit
with:
gh_ref: "develop"


@@ -1,36 +0,0 @@
name: DevEnv Docker Image Builder
on:
workflow_dispatch:
jobs:
build-and-push:
name: Build and push DevEnv Docker image
environment: release-admins
runs-on: ubuntu-24.04
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Registry
uses: docker/login-action@v3
with:
username: ${{ secrets.PUB_DOCKER_USERNAME }}
password: ${{ secrets.PUB_DOCKER_PASSWORD }}
- name: Build and push DevEnv Docker image
uses: docker/build-push-action@v6
env:
DOCKER_IMAGE: 'penpotapp/devenv'
with:
context: ./docker/devenv/
file: ./docker/devenv/Dockerfile
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ env.DOCKER_IMAGE }}:latest
cache-from: type=registry,ref=${{ env.DOCKER_IMAGE }}:buildcache
cache-to: type=registry,ref=${{ env.DOCKER_IMAGE }}:buildcache,mode=max


@@ -1,152 +0,0 @@
name: Docker Images Builder
on:
workflow_dispatch:
inputs:
gh_ref:
description: 'Name of the branch or ref'
type: string
required: true
default: 'develop'
workflow_call:
inputs:
gh_ref:
description: 'Name of the branch or ref'
type: string
required: true
default: 'develop'
jobs:
build-and-push:
name: Build and Push Penpot Docker Images
runs-on: ubuntu-24.04-arm
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
ref: ${{ inputs.gh_ref }}
- name: Extract some useful variables
id: vars
run: |
echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT
- name: Download Penpot Bundles
id: bundles
env:
FILE_NAME: penpot-${{ steps.vars.outputs.gh_ref }}.zip
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }}
run: |
tmp=$(aws s3api head-object \
--bucket ${{ secrets.S3_BUCKET }} \
--key "$FILE_NAME" \
--query 'Metadata."bundle-version"' \
--output text)
echo "bundle_version=$tmp" >> $GITHUB_OUTPUT
pushd docker/images
aws s3 cp s3://${{ secrets.S3_BUCKET }}/$FILE_NAME .
unzip $FILE_NAME > /dev/null
mv penpot/backend bundle-backend
mv penpot/frontend bundle-frontend
mv penpot/exporter bundle-exporter
mv penpot/storybook bundle-storybook
popd
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Registry
uses: docker/login-action@v3
with:
registry: ${{ secrets.DOCKER_REGISTRY }}
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Extract metadata (tags, labels)
id: meta
uses: docker/metadata-action@v5
with:
images:
frontend
backend
exporter
storybook
labels: |
bundle_version=${{ steps.bundles.outputs.bundle_version }}
- name: Build and push Backend Docker image
uses: docker/build-push-action@v6
env:
DOCKER_IMAGE: 'backend'
BUNDLE_PATH: './bundle-backend'
with:
context: ./docker/images/
file: ./docker/images/Dockerfile.backend
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max
- name: Build and push Frontend Docker image
uses: docker/build-push-action@v6
env:
DOCKER_IMAGE: 'frontend'
BUNDLE_PATH: './bundle-frontend'
with:
context: ./docker/images/
file: ./docker/images/Dockerfile.frontend
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max
- name: Build and push Exporter Docker image
uses: docker/build-push-action@v6
env:
DOCKER_IMAGE: 'exporter'
BUNDLE_PATH: './bundle-exporter'
with:
context: ./docker/images/
file: ./docker/images/Dockerfile.exporter
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max
- name: Build and push Storybook Docker image
uses: docker/build-push-action@v6
env:
DOCKER_IMAGE: 'storybook'
BUNDLE_PATH: './bundle-storybook'
with:
context: ./docker/images/
file: ./docker/images/Dockerfile.storybook
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max
- name: Notify Mattermost
if: failure()
uses: mattermost/action-mattermost-notify@master
with:
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
MATTERMOST_CHANNEL: bot-alerts-cicd
TEXT: |
❌ 🐳 *[PENPOT] Error building penpot docker images.*
📄 Triggered from ref: `${{ steps.vars.outputs.gh_ref }}`
📦 Bundle: `${{ steps.bundles.outputs.bundle_version }}`
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
@infra
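
The two removed workflows above hand off the bundle version through S3 object metadata: the bundle builder writes it on upload, and the image builder reads it back with head-object. A minimal sketch of that round-trip, with illustrative bucket, key and version values:

aws s3 cp penpot.zip "s3://my-bucket/penpot-develop.zip" \
    --metadata bundle-version=v2.11.0-42-gabc1234
aws s3api head-object --bucket my-bucket --key penpot-develop.zip \
    --query 'Metadata."bundle-version"' --output text   # prints v2.11.0-42-gabc1234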


@@ -1,21 +1,12 @@
name: _STAGING
name: Build and Upload Penpot STAGING Bundles
on:
schedule:
- cron: '36 5-20 * * 1-5'
- cron: '0 5 * * 1-5'
jobs:
build-bundle:
uses: ./.github/workflows/build-bundle.yml
secrets: inherit
with:
gh_ref: "staging"
build_wasm: "yes"
build_storybook: "yes"
build-docker:
needs: build-bundle
uses: ./.github/workflows/build-docker.yml
build-staging-bundle:
uses: ./.github/workflows/build-bundles.yml
secrets: inherit
with:
gh_ref: "staging"


@@ -1,30 +0,0 @@
name: _TAG
on:
push:
tags:
- '*'
jobs:
build-bundle:
uses: ./.github/workflows/build-bundle.yml
secrets: inherit
with:
gh_ref: ${{ github.ref_name }}
build_wasm: "no"
build_storybook: "yes"
build-docker:
needs: build-bundle
uses: ./.github/workflows/build-docker.yml
secrets: inherit
with:
gh_ref: ${{ github.ref_name }}
publish-final-tag:
if: ${{ !contains(github.ref_name, '-RC') && !contains(github.ref_name, '-alpha') && !contains(github.ref_name, '-beta') && contains(github.ref_name, '.') }}
needs: build-docker
uses: ./.github/workflows/release.yml
secrets: inherit
with:
gh_ref: ${{ github.ref_name }}


@@ -26,7 +26,7 @@ jobs:
- name: Check Commit Type
uses: gsactions/commit-message-checker@v2
with:
pattern: '^(((:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle|rewind|construction_worker):)\s[A-Z].*[^.])|(Merge|Revert).+[^.])$'
pattern: '^(Merge|Revert|:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle):)\s[A-Z].*[^.]$'
flags: 'gm'
error: 'Commit should match CONTRIBUTING.md guideline'
checkAllCommitMessages: 'true' # optional: this checks all commits associated with a pull request
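
A quick local check of the new pattern, assuming GNU grep with PCRE support (the commit subjects below are illustrative, not from the repo):

PATTERN='^(Merge|Revert|:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle):)\s[A-Z].*[^.]$'
echo ':sparkles: Add board export' | grep -qP "$PATTERN" && echo accepted
echo 'fix: lowercase subject.' | grep -qP "$PATTERN" || echo rejected   # no emoji prefix, trailing period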


@@ -1,107 +0,0 @@
name: Release Publisher
on:
workflow_dispatch:
inputs:
gh_ref:
description: 'Tag to release'
type: string
required: true
workflow_call:
inputs:
gh_ref:
description: 'Tag to release'
type: string
required: true
permissions:
contents: write
jobs:
release:
environment: release-admins
runs-on: ubuntu-24.04
outputs:
version: ${{ steps.vars.outputs.gh_ref }}
release_notes: ${{ steps.extract_release_notes.outputs.release_notes }}
steps:
- name: Extract some useful variables
id: vars
run: |
echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
ref: ${{ steps.vars.outputs.gh_ref }}
# --- Publicly release the docker images ---
- name: Login to private registry
uses: docker/login-action@v3
with:
registry: ${{ secrets.DOCKER_REGISTRY }}
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to DockerHub
uses: docker/login-action@v3
with:
username: ${{ secrets.PUB_DOCKER_USERNAME }}
password: ${{ secrets.PUB_DOCKER_PASSWORD }}
- name: Publish docker images to DockerHub
env:
TAG: ${{ steps.vars.outputs.gh_ref }}
REGISTRY: ${{ secrets.DOCKER_REGISTRY }}
HUB: ${{ secrets.PUB_DOCKER_HUB }}
run: |
IMAGES=("frontend" "backend" "exporter")
EXTRA_TAGS=("main" "latest")
for image in "${IMAGES[@]}"; do
docker pull "$REGISTRY/$image:$TAG"
docker tag "$REGISTRY/$image:$TAG" "penpotapp/$image:$TAG"
docker push "penpotapp/$image:$TAG"
for tag in "${EXTRA_TAGS[@]}"; do
docker tag "$REGISTRY/$image:$TAG" "penpotapp/$image:$tag"
docker push "penpotapp/$image:$tag"
done
done
# --- Release notes extraction ---
- name: Extract release notes from CHANGES.md
id: extract_release_notes
env:
TAG: ${{ steps.vars.outputs.gh_ref }}
run: |
RELEASE_NOTES=$(awk "/^## $TAG$/{flag=1; next} /^## /{flag=0} flag" CHANGES.md | awk '{$1=$1};1')
if [ -z "$RELEASE_NOTES" ]; then
RELEASE_NOTES="No changes for $TAG according to CHANGES.md"
fi
echo "release_notes<<EOF" >> $GITHUB_OUTPUT
echo "$RELEASE_NOTES" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
# --- Create GitHub release ---
- name: Create GitHub release
uses: softprops/action-gh-release@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: ${{ steps.vars.outputs.gh_ref }}
name: ${{ steps.vars.outputs.gh_ref }}
body: ${{ steps.extract_release_notes.outputs.release_notes }}
- name: Notify Mattermost
if: failure()
uses: mattermost/action-mattermost-notify@master
with:
MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
MATTERMOST_CHANNEL: bot-alerts-cicd
TEXT: |
❌ 🚀 *[PENPOT] Error releasing penpot.*
📄 Triggered from ref: `${{ steps.vars.outputs.gh_ref }}`
🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
@infra
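
The awk pipeline in the removed release workflow above slices one version's section out of CHANGES.md; a minimal sketch of its behavior, with illustrative file contents:

cat > /tmp/CHANGES.md <<'EOF'
# CHANGELOG
## 2.9.1
- Fix a thing
## 2.9.0
- Older entry
EOF
TAG=2.9.1
awk "/^## $TAG$/{flag=1; next} /^## /{flag=0} flag" /tmp/CHANGES.md   # prints "- Fix a thing"

(The second awk pass in the workflow, '{$1=$1};1', just normalizes whitespace.)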

.gitignore

@@ -31,7 +31,6 @@
/.clj-kondo/.cache
/_dump
/notes
/playground/
/backend/*.md
/backend/*.sql
/backend/*.txt

.nvmrc

@@ -1 +1 @@
v22.19.0
v22.13.1


@@ -1,152 +1,16 @@
# CHANGELOG
## 2.12.0 (Unreleased)
### :boom: Breaking changes & Deprecations
## 2.10.0 (Unreleased)
### :rocket: Epics and highlights
### :heart: Community contributions (Thank you!)
### :sparkles: New features & Enhancements
- Select boards to export as PDF [Taiga #12320](https://tree.taiga.io/project/penpot/issue/12320)
### :bug: Bugs fixed
- Fix text line-height values are wrong [Taiga #12252](https://tree.taiga.io/project/penpot/issue/12252)
- Fix an error translation [Taiga #12402](https://tree.taiga.io/project/penpot/issue/12402)
- Fix pan cursor not disabling viewport guides [Github #6985](https://github.com/penpot/penpot/issues/6985)
- Fix viewport resize on locked shapes [Taiga #11974](https://tree.taiga.io/project/penpot/issue/11974)
- Fix nested variant in a component doesn't keep inherited overrides [Taiga #12299](https://tree.taiga.io/project/penpot/issue/12299)
- Fix on copy instance inside a components chain touched are missing [Taiga #12371](https://tree.taiga.io/project/penpot/issue/12371)
- Fix problem with multiple selection and shadows [Github #7437](https://github.com/penpot/penpot/issues/7437)
- Fix search shortcut [Taiga #10265](https://tree.taiga.io/project/penpot/issue/10265)
- Fix shortcut conflict in text editor (increase/decrease font size vs word selection)
- Fix problem with plugins generating code for pages different than current one [Taiga #12312](https://tree.taiga.io/project/penpot/issue/12312)
## 2.11.0 (Unreleased)
### :boom: Breaking changes & Deprecations
- Deprecated configuration variables with the prefix `PENPOT_ASSETS_*`; they will be
removed in future versions:
- The `PENPOT_ASSETS_STORAGE_BACKEND` becomes `PENPOT_OBJECTS_STORAGE_BACKEND` and its
values change from (`assets-fs` or `assets-s3`) to (`fs` or `s3`)
- The `PENPOT_STORAGE_ASSETS_FS_DIRECTORY` becomes `PENPOT_OBJECTS_STORAGE_FS_DIRECTORY`
- The `PENPOT_STORAGE_ASSETS_S3_BUCKET` becomes `PENPOT_OBJECTS_STORAGE_S3_BUCKET`
- The `PENPOT_STORAGE_ASSETS_S3_REGION` becomes `PENPOT_OBJECTS_STORAGE_S3_REGION`
- The `PENPOT_STORAGE_ASSETS_S3_ENDPOINT` becomes `PENPOT_OBJECTS_STORAGE_S3_ENDPOINT`
- The `PENPOT_STORAGE_ASSETS_S3_IO_THREADS` is replaced (see below)
- Add `PENPOT_NETTY_IO_THREADS` and `PENPOT_EXECUTOR_THREADS` variables to provide
control over the concurrency of the shared resources used by netty. Penpot uses the netty IO
threads for the AWS S3 SDK and Redis/Valkey communication, and the EXEC threads to perform
tasks outside the HTTP serving threads, such as cache invalidation, S3 response completion,
configuration reloading and many other auxiliary tasks. By default both executors use half
the number of available CPUs, with a minimum of 2. You should not touch these variables
unless you know what you are doing.
- Replace the `PENPOT_STORAGE_ASSETS_S3_IO_THREADS` with the more general
`PENPOT_NETTY_IO_THREADS`, used to configure the netty resources shared across the
different services that use netty internally (the Redis connection and the S3 SDK
client). This configuration is rarely customized, so no real impact is expected for most
users; a minimal sketch of the rename follows this list.
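
A minimal sketch of the rename, assuming an existing `assets-s3` deployment (values are illustrative):

# Before (deprecated)
export PENPOT_ASSETS_STORAGE_BACKEND=assets-s3
export PENPOT_STORAGE_ASSETS_S3_BUCKET=penpot
export PENPOT_STORAGE_ASSETS_S3_REGION=eu-west-1
# After
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
export PENPOT_OBJECTS_STORAGE_S3_REGION=eu-west-1
export PENPOT_NETTY_IO_THREADS=2   # optional; defaults to half the available CPUs, minimum 2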
### :rocket: Epics and highlights
### :heart: Community contributions (Thank you!)
### :sparkles: New features & Enhancements
- New composite token: Typography [Taiga #10200](https://tree.taiga.io/project/penpot/us/10200)
- Show current Penpot version [Taiga #11603](https://tree.taiga.io/project/penpot/us/11603)
- Switch several variant copies at the same time [Taiga #11411](https://tree.taiga.io/project/penpot/us/11411)
- Invitations management improvements [Taiga #3479](https://tree.taiga.io/project/penpot/us/3479)
- Alternative ways of creating variants - Button Viewport [Taiga #11931](https://tree.taiga.io/project/penpot/us/11931)
- Reorder properties for a component [Taiga #10225](https://tree.taiga.io/project/penpot/us/10225)
- File Data storage layout refactor [Github #7345](https://github.com/penpot/penpot/pull/7345)
- Make several query optimizations on comment threads [Github #7506](https://github.com/penpot/penpot/pull/7506)
### :bug: Bugs fixed
- Fix selection problems when devtools open [Taiga #11950](https://tree.taiga.io/project/penpot/issue/11950)
- Fix long font names overlap [Taiga #11844](https://tree.taiga.io/project/penpot/issue/11844)
- Fix paste behavior according to the selected element [Taiga #11979](https://tree.taiga.io/project/penpot/issue/11979)
- Fix problem with export size [Github #7160](https://github.com/penpot/penpot/issues/7160)
- Fix multi level library dependencies [Taiga #12155](https://tree.taiga.io/project/penpot/issue/12155)
- Fix component context menu options order in assets tab [Taiga #11941](https://tree.taiga.io/project/penpot/issue/11941)
- Fix error updating library [Taiga #12218](https://tree.taiga.io/project/penpot/issue/12218)
- Fix restoring a variant in another file makes it overlap the existing variant [Taiga #12049](https://tree.taiga.io/project/penpot/issue/12049)
- Fix auto-width changes to fixed when switching variants [Taiga #12172](https://tree.taiga.io/project/penpot/issue/12172)
- Fix component number has no singular translation string [Taiga #12106](https://tree.taiga.io/project/penpot/issue/12106)
- Fix adding/removing identical text fills [Taiga #12287](https://tree.taiga.io/project/penpot/issue/12287)
- Fix scroll on the inspect tab [Taiga #12293](https://tree.taiga.io/project/penpot/issue/12293)
- Fix lock proportion tooltip [Taiga #12326](https://tree.taiga.io/project/penpot/issue/12326)
- Fix internal Error when selecting a set by name in the token theme editor [Taiga #12310](https://tree.taiga.io/project/penpot/issue/12310)
- Fix drag & drop functionality is swapping instead or reordering [Taiga #12254](https://tree.taiga.io/project/penpot/issue/12254)
- Fix variants not syncronizing tokens on switch [Taiga #12290](https://tree.taiga.io/project/penpot/issue/12290)
- Fix incorrect behavior of Alt + Drag for variants [Taiga #12309](https://tree.taiga.io/project/penpot/issue/12309)
- Fix text override is lost after switch [Taiga #12269](https://tree.taiga.io/project/penpot/issue/12269)
- Fix exporting a board crashing the app [Taiga #12384](https://tree.taiga.io/project/penpot/issue/12384)
- Fix nested variant in a component doesn't keep inherited overrides [Taiga #12299](https://tree.taiga.io/project/penpot/issue/12299)
- Fix selected colors not showing colors from children shapes in multiple selection [Taiga #12384](https://tree.taiga.io/project/penpot/issue/12385)
- Fix scrollbar issue in design tab [Taiga #12367](https://tree.taiga.io/project/penpot/issue/12367)
- Fix library update notifications showing when they should not [Taiga #12397](https://tree.taiga.io/project/penpot/issue/12397)
- Fix remove flex button doesn't work within variant [Taiga #12314](https://tree.taiga.io/project/penpot/issue/12314)
- Fix an error translation [Taiga #12402](https://tree.taiga.io/project/penpot/issue/12402)
- Fix problem with certain text input in some editable labels (pages, components, tokens...) being in conflict with the drag/drop functionality [Taiga #12316](https://tree.taiga.io/project/penpot/issue/12316)
- Fix not controlled theme renaming [Taiga #12411](https://tree.taiga.io/project/penpot/issue/12411)
- Fix paste without selection sends the new element in the back [Taiga #12382](https://tree.taiga.io/project/penpot/issue/12382)
- Fix options button does not work for comments created in the lower part of the screen [Taiga #12422](https://tree.taiga.io/project/penpot/issue/12422)
- Fix problem when checking usage with removed teams [Taiga #12442](https://tree.taiga.io/project/penpot/issue/12442)
- Fix focus mode persisting across page/file navigation [Taiga #12469](https://tree.taiga.io/project/penpot/issue/12469)
## 2.10.1
### :sparkles: New features & Enhancements
- Improve workspace file loading [Github 7366](https://github.com/penpot/penpot/pull/7366)
### :bug: Bugs fixed
- Fix regression with text shapes creation with Plugins API [Taiga #12244](https://tree.taiga.io/project/penpot/issue/12244)
## 2.10.0
### :rocket: Epics and highlights
- Variants
### :boom: Breaking changes & Deprecations
### :heart: Community contributions (Thank you!)
### :sparkles: New features & Enhancements
- Add efficiency enhancements to right sidebar [Github #7182](https://github.com/penpot/penpot/pull/7182)
- Add defaults for artboard drawing [Taiga #494](https://tree.taiga.io/project/penpot/us/494?milestone=465047)
- Continuous display of distances between elements when moving a layer with the keyboard [Taiga #1780](https://tree.taiga.io/project/penpot/us/1780)
- New Number token - unitless values [Taiga #10936](https://tree.taiga.io/project/penpot/us/10936)
- New font-family token [Taiga #10937](https://tree.taiga.io/project/penpot/us/10937)
- New text case token [Taiga #10942](https://tree.taiga.io/project/penpot/us/10942)
- New text-decoration token [Taiga #10941](https://tree.taiga.io/project/penpot/us/10941)
- New letter spacing token [Taiga #10940](https://tree.taiga.io/project/penpot/us/10940)
- New font weight token [Taiga #10939](https://tree.taiga.io/project/penpot/us/10939)
- Upgrade Node to v22.18.0 [Github #7283](https://github.com/penpot/penpot/pull/7283)
- Upgrade the base docker image for penpot frontend to v1.29.1 [Github #7283](https://github.com/penpot/penpot/pull/7283)
- Create variant from an existing component [Taiga #2088](https://tree.taiga.io/project/penpot/us/2088)
- Create variant from an existing variant [Taiga #8282](https://tree.taiga.io/project/penpot/us/8282)
- Actions over a component with variants [Taiga #10503](https://tree.taiga.io/project/penpot/us/10503)
- Create a variant by dragging a component into a component with variants [Taiga #8134](https://tree.taiga.io/project/penpot/us/8134)
- Transform a variant into an individual component [Taiga #8141](https://tree.taiga.io/project/penpot/us/8141)
- Delete variant [Taiga #6890](https://tree.taiga.io/project/penpot/us/6890)
- Restore an orphaned copy of a variant [Taiga #10446](https://tree.taiga.io/project/penpot/us/10446)
- Add, Edit & Delete variant properties name and value [Taiga #6892](https://tree.taiga.io/project/penpot/us/6892)
- Retrieve variants [Taiga #6888](https://tree.taiga.io/project/penpot/us/6888)
- Retrieve variants with nested components [Taiga #10277](https://tree.taiga.io/project/penpot/us/10277)
- Create variants in bulk from existing components [Taiga #7926](https://tree.taiga.io/project/penpot/us/7926)
- Alternative ways of creating variants - Button Design Tab [Taiga #10316](https://tree.taiga.io/project/penpot/us/10316)
- Fix problem with component swapping panel [Taiga #12175](https://tree.taiga.io/project/penpot/issue/12175)
### :bug: Bugs fixed
@@ -159,25 +23,8 @@
- Fix font size/variant not updated when editing a text [Taiga #11552](https://tree.taiga.io/project/penpot/issue/11552)
- Fix issue where Alt + arrow keys shortcut interferes with letter-spacing when moving text layers [Taiga #11552](https://tree.taiga.io/project/penpot/issue/11771)
- Fix consistency issues on how font variants are visualized [Taiga #11499](https://tree.taiga.io/project/penpot/us/11499)
- Fix parsing rx and ry SVG values for rect radius [Taiga #11861](https://tree.taiga.io/project/penpot/issue/11861)
- Fix misleading affordance in saved versions [Taiga #11887](https://tree.taiga.io/project/penpot/issue/11887)
- Fix pasting RTF text crashes penpot [Taiga #11717](https://tree.taiga.io/project/penpot/issue/11717)
- Fix navigation arrows in Libraries & Templates carousel [Taiga #10609](https://tree.taiga.io/project/penpot/issue/10609)
- Fix applying tokens with zero value to size [Taiga #11618](https://tree.taiga.io/project/penpot/issue/11618)
- Fix typo [Taiga #11969](https://tree.taiga.io/project/penpot/issue/11969)
- Fix typo [Taiga #11970](https://tree.taiga.io/project/penpot/issue/11970)
- Fix typos [Taiga #11971](https://tree.taiga.io/project/penpot/issue/11971)
- Fix inconsistent naming for "Flatten" [Taiga #8371](https://tree.taiga.io/project/penpot/issue/8371)
- Layout item tokens should be unapplied when moving out of a layout [Taiga #11012](https://tree.taiga.io/project/penpot/issue/11012)
- Fix incorrect date displayed for support plan [Taiga #11986](https://tree.taiga.io/project/penpot/issue/11986)
- Fix can't import 'borderWidth' type token [#132](https://github.com/tokens-studio/penpot/issues/132)
- Fix moving elements up or down while pressing alt [Taiga Issue #11992](https://tree.taiga.io/project/penpot/issue/11992)
- Fix conflicting shortcuts (remove dec/inc line height and letter spacing) [Taiga #12102](https://tree.taiga.io/project/penpot/issue/12102)
- Fix conflicting shortcuts (remove text-align shortcuts) [Taiga #12047](https://tree.taiga.io/project/penpot/issue/12047)
- Fix export file with empty tokens library [Taiga #12137](https://tree.taiga.io/project/penpot/issue/12137)
- Fix context menu on spacing tokens [Taiga #12141](https://tree.taiga.io/project/penpot/issue/12141)
## 2.9.0
## 2.9.0 (Unreleased)
### :rocket: Epics and highlights
@@ -205,7 +52,6 @@
- Add the ability to show login dialog on profile settings [Github #6871](https://github.com/penpot/penpot/pull/6871)
- Improve the application of tokens with object specific tokens [Taiga #10209](https://tree.taiga.io/project/penpot/us/10209)
- Add info to apply-token event [Taiga #11710](https://tree.taiga.io/project/penpot/task/11710)
- Fix double click on set name input [Taiga #11747](https://tree.taiga.io/project/penpot/issue/11747)
### :bug: Bugs fixed
@@ -254,7 +100,7 @@
**Penpot Library**
The initial prototype is completely reworked to provide a more consistent API
The initial prototype is completely reworked for provide a more consistent API
and to have proper validation and params decoding. All the details can be found
on [its own changelog](library/CHANGES.md)


@@ -77,14 +77,17 @@ Provide your team or organization with a completely owned collaborative design t
### Integrations ###
Penpot offers integration into the development toolchain, thanks to its support for webhooks and an API accessible through access tokens.
### Building Design Systems: design tokens, components and variants ###
Penpot brings design systems to code-minded teams: a single source of truth with native Design Tokens, Components, and Variants for scalable, reusable, and consistent UI across projects and platforms.
### What's great for design ###
With Penpot you can design libraries to share and reuse; turn design elements into components and tokens to allow reusability and scalability; and build realistic user flows and interactions.
### Design Tokens ###
With Penpot's standardized [design tokens](https://penpot.dev/collaboration/design-tokens) format, you can easily reuse and sync tokens across different platforms, workflows, and disciplines.
<br />
<p align="center">
<img src="https://github.com/user-attachments/assets/cce75ad6-f783-473f-8803-da9eb8255fef">
<img src="https://img.plasmic.app/img-optimizer/v1/img?src=https%3A%2F%2Fimg.plasmic.app%2Fimg-optimizer%2Fv1%2Fimg%2F9dd677c36afb477e9666ccd1d3f009ad.png" alt="Open Source" style="width: 65%;">
</p>
<br />


@@ -3,10 +3,10 @@
:deps
{penpot/common {:local/root "../common"}
org.clojure/clojure {:mvn/version "1.12.2"}
org.clojure/clojure {:mvn/version "1.12.1"}
org.clojure/tools.namespace {:mvn/version "1.5.0"}
com.github.luben/zstd-jni {:mvn/version "1.5.7-4"}
com.github.luben/zstd-jni {:mvn/version "1.5.7-3"}
io.prometheus/simpleclient {:mvn/version "0.16.0"}
io.prometheus/simpleclient_hotspot {:mvn/version "0.16.0"}
@@ -17,7 +17,7 @@
io.prometheus/simpleclient_httpserver {:mvn/version "0.16.0"}
io.lettuce/lettuce-core {:mvn/version "6.8.1.RELEASE"}
io.lettuce/lettuce-core {:mvn/version "6.7.0.RELEASE"}
;; Minimal dependencies required by lettuce; we need to include them
;; explicitly because clojure dependency management does not yet
;; support the BOM format.
@@ -28,30 +28,29 @@
com.google.guava/guava {:mvn/version "33.4.8-jre"}
funcool/yetti
{:git/tag "v11.8"
:git/sha "1d1b33f"
{:git/tag "v11.4"
:git/sha "ce50d42"
:git/url "https://github.com/funcool/yetti.git"
:exclusions [org.slf4j/slf4j-api]}
com.github.seancorfield/next.jdbc
{:mvn/version "1.3.1070"}
{:mvn/version "1.3.1002"}
metosin/reitit-core {:mvn/version "0.9.1"}
nrepl/nrepl {:mvn/version "1.4.0"}
nrepl/nrepl {:mvn/version "1.3.1"}
org.postgresql/postgresql {:mvn/version "42.7.7"}
org.xerial/sqlite-jdbc {:mvn/version "3.50.3.0"}
org.postgresql/postgresql {:mvn/version "42.7.6"}
org.xerial/sqlite-jdbc {:mvn/version "3.49.1.0"}
com.zaxxer/HikariCP {:mvn/version "7.0.2"}
com.zaxxer/HikariCP {:mvn/version "6.3.0"}
io.whitfin/siphash {:mvn/version "2.0.0"}
buddy/buddy-hashers {:mvn/version "2.0.167"}
buddy/buddy-sign {:mvn/version "3.6.1-359"}
com.github.ben-manes.caffeine/caffeine {:mvn/version "3.2.2"}
com.github.ben-manes.caffeine/caffeine {:mvn/version "3.2.0"}
org.jsoup/jsoup {:mvn/version "1.21.2"}
org.jsoup/jsoup {:mvn/version "1.20.1"}
org.im4java/im4java
{:git/tag "1.4.0-penpot-2"
:git/sha "e2b3e16"
@@ -61,12 +60,12 @@
org.clojars.pntblnk/clj-ldap {:mvn/version "0.0.17"}
dawran6/emoji {:mvn/version "0.2.0"}
markdown-clj/markdown-clj {:mvn/version "1.12.4"}
dawran6/emoji {:mvn/version "0.1.5"}
markdown-clj/markdown-clj {:mvn/version "1.12.3"}
;; Pretty Print specs
pretty-spec/pretty-spec {:mvn/version "0.1.4"}
software.amazon.awssdk/s3 {:mvn/version "2.33.10"}}
software.amazon.awssdk/s3 {:mvn/version "2.31.55"}}
:paths ["src" "resources" "target/classes"]
:aliases
@@ -81,14 +80,12 @@
:build
{:extra-deps
{io.github.clojure/tools.build {:mvn/version "0.10.10"}}
{io.github.clojure/tools.build {:git/tag "v0.10.9" :git/sha "e405aac"}}
:ns-default build}
:test
{:main-opts ["-m" "kaocha.runner"]
:jvm-opts ["-Dlog4j2.configurationFile=log4j2-devenv-repl.xml"
"--sun-misc-unsafe-memory-access=allow"
"--enable-native-access=ALL-UNNAMED"]
:jvm-opts ["-Dlog4j2.configurationFile=log4j2-devenv-repl.xml"]
:extra-deps {lambdaisland/kaocha {:mvn/version "1.91.1392"}}}
:outdated


@@ -6,14 +6,12 @@
(ns user
(:require
[app.binfile.common :as bfc]
[app.common.data :as d]
[app.common.debug :as debug]
[app.common.exceptions :as ex]
[app.common.files.helpers :as cfh]
[app.common.fressian :as fres]
[app.common.geom.matrix :as gmt]
[app.common.json :as json]
[app.common.logging :as l]
[app.common.perf :as perf]
[app.common.pprint :as pp]
@@ -21,21 +19,20 @@
[app.common.schema.desc-js-like :as smdj]
[app.common.schema.desc-native :as smdn]
[app.common.schema.generators :as sg]
[app.common.schema.openapi :as oapi]
[app.common.spec :as us]
[app.common.time :as ct]
[app.common.json :as json]
[app.common.transit :as t]
[app.common.types.file :as ctf]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.main :as main]
[app.srepl.helpers :as h]
[app.srepl.main :refer :all]
[app.srepl.helpers :as srepl.helpers]
[app.srepl.main :as srepl]
[app.util.blob :as blob]
[app.common.time :as ct]
[clj-async-profiler.core :as prof]
[clojure.contrib.humanize :as hum]
[clojure.datafy :refer [datafy]]
[clojure.java.io :as io]
[clojure.pprint :refer [pprint print-table]]
[clojure.repl :refer :all]


@@ -8,41 +8,38 @@
<body>
<p>
<strong>Feedback from:</strong><br />
<span>
<span>Name: </span>
<span><code>{{profile.fullname|abbreviate:25}}</code></span>
</span>
<br />
<span>
<span>Email: </span>
<span>{{profile.email}}</span>
</span>
<br />
<span>
<span>ID: </span>
<span><code>{{profile.id}}</code></span>
</span>
{% if profile %}
<span>
<span>Name: </span>
<span><code>{{profile.fullname|abbreviate:25}}</code></span>
</span>
<br />
<span>
<span>Email: </span>
<span>{{profile.email}}</span>
</span>
<br />
<span>
<span>ID: </span>
<span><code>{{profile.id}}</code></span>
</span>
{% else %}
<span>
<span>Email: </span>
<span>{{profile.email}}</span>
</span>
{% endif %}
</p>
<p>
<strong>Subject:</strong><br />
<span>{{feedback-subject|abbreviate:300}}</span>
<span>{{subject|abbreviate:300}}</span>
</p>
<p>
<strong>Type:</strong><br />
<span>{{feedback-type|abbreviate:300}}</span>
</p>
{% if feedback-error-href %}
<p>
<strong>Error HREF:</strong><br />
<span>{{feedback-error-href|abbreviate:500}}</span>
</p>
{% endif %}
<p>
<strong>Message:</strong><br />
{{feedback-content|linebreaks-br}}
{{content|linebreaks-br|safe}}
</p>
</body>
</html>


@@ -1 +1 @@
[PENPOT FEEDBACK]: {{feedback-subject}}
[PENPOT FEEDBACK]: {{subject}}


@@ -1,10 +1,9 @@
From: {{profile.fullname}} <{{profile.email}}> / {{profile.id}}
Subject: {{feedback-subject}}
Type: {{feedback-type}}
{%- if feedback-error-href %}
HREF: {{feedback-error-href}}
{% endif -%}
{% if profile %}
Feedback profile: {{profile.fullname}} <{{profile.email}}> / {{profile.id}}
{% else %}
Feedback from: {{email}}
{% endif %}
Message:
Subject: {{subject}}
{{feedback-content}}
{{content}}


@@ -1 +1 @@
{{invited-by|abbreviate:25}} has invited you to join the team “{{ team|abbreviate:25 }}”
Invitation to join {{team}}


@@ -1,9 +1,6 @@
[{:id "tokens-starter-kit"
:name "Design tokens starter kit"
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Tokens%20starter%20kit.penpot"}
{:id "penpot-design-system"
:name "Penpot Design System | Pencil"
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/penpot-app.penpot"}
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Tokens%20starter%20kit.penpot"},
{:id "wireframing-kit"
:name "Wireframe library"
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Wireframing%20kit%20v1.1.penpot"}
@@ -13,6 +10,9 @@
{:id "plants-app"
:name "UI mockup example"
:file-uri "https://github.com/penpot/penpot-files/raw/main/Plants-app.penpot"}
{:id "penpot-design-system"
:name "Design system example"
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Penpot%20-%20Design%20System%20v2.1.penpot"}
{:id "tutorial-for-beginners"
:name "Tutorial for beginners"
:file-uri "https://github.com/penpot/penpot-files/raw/main/tutorial-for-beginners.penpot"}


@@ -45,41 +45,7 @@ Debug Main Page
</form>
</fieldset>
<fieldset>
<legend>VIRTUAL CLOCK</legend>
<desc>
<p>
CURRENT CLOCK: <b>{{current-clock}}</b>
<br />
CURRENT OFFSET: <b>{{current-offset}}</b>
<br />
CURRENT TIME: <b>{{current-time}}</b>
</p>
<p>Examples: 3h, -7h, 24h (allowed suffixes: h, s)</p>
</desc>
<form method="post" action="/dbg/actions/set-virtual-clock">
<div class="row">
<input type="text" name="offset" placeholder="3h" value="" />
</div>
<div class="row">
<label for="force-verify">Are you sure?</label>
<input id="force-verify" type="checkbox" name="force" />
<br />
<small>
This is just a security double check to prevent unintentional submits.
</small>
</div>
<div class="row">
<input type="submit" name="submit" value="Submit" />
<input type="submit" name="reset" value="Reset" />
</div>
</form>
</fieldset>
</section>


@@ -1,84 +0,0 @@
#!/usr/bin/env bash
export PENPOT_MANAGEMENT_API_SHARED_KEY=super-secret-management-api-key
export PENPOT_SECRET_KEY=super-secret-devenv-key
export PENPOT_HOST=devenv
export PENPOT_FLAGS="\
$PENPOT_FLAGS \
enable-login-with-ldap \
enable-login-with-password
enable-login-with-oidc \
enable-login-with-google \
enable-login-with-github \
enable-login-with-gitlab \
enable-backend-worker \
enable-backend-asserts \
disable-feature-fdata-pointer-map \
enable-feature-fdata-objects-map \
enable-audit-log \
enable-transit-readable-response \
enable-demo-users \
enable-user-feedback \
disable-secure-session-cookies \
enable-smtp \
enable-prepl-server \
enable-urepl-server \
enable-rpc-climit \
enable-rpc-rlimit \
enable-quotes \
enable-soft-rpc-rlimit \
enable-auto-file-snapshot \
enable-webhooks \
enable-access-tokens \
disable-tiered-file-data-storage \
enable-file-validation \
enable-file-schema-validation \
enable-redis-cache \
enable-subscriptions";
# Default deletion delay for devenv
export PENPOT_DELETION_DELAY="24h"
# Setup default upload media file size to 100MiB
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
# Setup default multipart upload size to 300MiB
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
export PENPOT_USER_FEEDBACK_DESTINATION="support@example.com"
export AWS_ACCESS_KEY_ID=penpot-devenv
export AWS_SECRET_ACCESS_KEY=penpot-devenv
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=http://minio:9000
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
export JAVA_OPTS="\
-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
-Djdk.attach.allowAttachSelf \
-Dlog4j2.configurationFile=log4j2-devenv.xml \
-Djdk.tracePinnedThreads=full \
-Dim4java.useV7=true \
-XX:+UnlockExperimentalVMOptions \
-XX:+UseShenandoahGC \
-XX:+UseCompactObjectHeaders \
-XX:ShenandoahGCMode=generational \
-XX:-OmitStackTraceInFastThrow \
--sun-misc-unsafe-memory-access=allow \
--enable-preview \
--enable-native-access=ALL-UNNAMED";
function setup_minio() {
# Initialize MINIO config
mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin -q
mc admin user add penpot-s3 penpot-devenv penpot-devenv -q
mc admin user info penpot-s3 penpot-devenv |grep -F -q "readwrite"
if [ "$?" = "1" ]; then
mc admin policy attach penpot-s3 readwrite --user=penpot-devenv -q
fi
mc mb penpot-s3/penpot -p -q
}


@@ -1,13 +1,112 @@
#!/usr/bin/env bash
SCRIPT_DIR=$(dirname $0);
source $SCRIPT_DIR/_env;
export PENPOT_SECRET_KEY=super-secret-devenv-key
export PENPOT_HOST=devenv
export PENPOT_FLAGS="\
$PENPOT_FLAGS \
enable-login-with-ldap \
enable-login-with-password
enable-login-with-oidc \
enable-login-with-google \
enable-login-with-github \
enable-login-with-gitlab \
enable-backend-worker \
enable-backend-asserts \
disable-feature-fdata-pointer-map \
enable-feature-fdata-objects-map \
enable-audit-log \
enable-transit-readable-response \
enable-demo-users \
disable-secure-session-cookies \
enable-smtp \
enable-prepl-server \
enable-urepl-server \
enable-rpc-climit \
enable-rpc-rlimit \
enable-quotes \
enable-soft-rpc-rlimit \
enable-auto-file-snapshot \
enable-webhooks \
enable-access-tokens \
disable-tiered-file-data-storage \
enable-file-validation \
enable-file-schema-validation \
enable-subscriptions \
disable-subscriptions-old";
# Default deletion delay for devenv
export PENPOT_DELETION_DELAY="24h"
# Setup default upload media file size to 100MiB
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
# Setup default multipart upload size to 300MiB
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
# export PENPOT_DATABASE_URI="postgresql://172.17.0.1:5432/penpot"
# export PENPOT_DATABASE_USERNAME="penpot"
# export PENPOT_DATABASE_PASSWORD="penpot"
# export PENPOT_DATABASE_READONLY=true
# export PENPOT_DATABASE_URI="postgresql://172.17.0.1:5432/penpot_pre"
# export PENPOT_DATABASE_USERNAME="penpot_pre"
# export PENPOT_DATABASE_PASSWORD="penpot_pre"
# export PENPOT_LOGGERS_LOKI_URI="http://172.17.0.1:3100/loki/api/v1/push"
# export PENPOT_AUDIT_LOG_ARCHIVE_URI="http://localhost:6070/api/audit"
# Initialize MINIO config
setup_minio;
mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin -q
mc admin user add penpot-s3 penpot-devenv penpot-devenv -q
mc admin user info penpot-s3 penpot-devenv |grep -F -q "readwrite"
if [ "$?" = "1" ]; then
mc admin policy attach penpot-s3 readwrite --user=penpot-devenv -q
fi
mc mb penpot-s3/penpot -p -q
export AWS_ACCESS_KEY_ID=penpot-devenv
export AWS_SECRET_ACCESS_KEY=penpot-devenv
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=http://minio:9000
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
export PENPOT_OBJECTS_STORAGE_FS_DIRECTORY="assets"
export JAVA_OPTS="\
-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
-Djdk.attach.allowAttachSelf \
-Dlog4j2.configurationFile=log4j2-devenv-repl.xml \
-Djdk.tracePinnedThreads=full \
-Dim4java.useV7=true \
-XX:+EnableDynamicAgentLoading \
-XX:-OmitStackTraceInFastThrow \
-XX:+UnlockDiagnosticVMOptions \
-XX:+DebugNonSafepoints \
--sun-misc-unsafe-memory-access=allow \
--enable-preview \
--enable-native-access=ALL-UNNAMED";
export JAVA_OPTS="$JAVA_OPTS -Dlog4j2.configurationFile=log4j2-devenv-repl.xml"
export OPTIONS="-A:jmx-remote -A:dev"
# Setup HEAP
# export OPTIONS="$OPTIONS -J-Xms50m -J-Xmx1024m"
# export OPTIONS="$OPTIONS -J-Xms1100m -J-Xmx1100m -J-XX:+AlwaysPreTouch"
# Increase virtual thread pool size
# export OPTIONS="$OPTIONS -J-Djdk.virtualThreadScheduler.parallelism=16"
# Disable C2 Compiler
# export OPTIONS="$OPTIONS -J-XX:TieredStopAtLevel=1"
# Disable all compilers
# export OPTIONS="$OPTIONS -J-Xint"
# Setup GC
# export OPTIONS="$OPTIONS -J-XX:+UseG1GC"
# Setup GC
# export OPTIONS="$OPTIONS -J-XX:+UseZGC"
export OPTIONS_EVAL="nil"
# export OPTIONS_EVAL="(set! *warn-on-reflection* true)"

backend/scripts/repl-test (new executable file)

@@ -0,0 +1,48 @@
#!/usr/bin/env bash
source /home/penpot/environ
export PENPOT_FLAGS="$PENPOT_FLAGS disable-backend-worker"
export OPTIONS="
-A:jmx-remote -A:dev \
-J-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
-J-Djdk.attach.allowAttachSelf \
-J-Dlog4j2.configurationFile=log4j2-experiments.xml \
-J-XX:-OmitStackTraceInFastThrow \
-J-XX:+UnlockDiagnosticVMOptions \
-J-XX:+DebugNonSafepoints \
-J-Djdk.tracePinnedThreads=full \
-J-XX:+UseTransparentHugePages \
-J-XX:ReservedCodeCacheSize=1g \
-J-Dpolyglot.engine.WarnInterpreterOnly=false \
-J--enable-preview";
# Setup HEAP
export OPTIONS="$OPTIONS -J-Xms320g -J-Xmx320g -J-XX:+AlwaysPreTouch"
export PENPOT_HTTP_SERVER_IO_THREADS=2
export PENPOT_HTTP_SERVER_WORKER_THREADS=2
# Increase virtual thread pool size
# export OPTIONS="$OPTIONS -J-Djdk.virtualThreadScheduler.parallelism=16"
# Disable C2 Compiler
# export OPTIONS="$OPTIONS -J-XX:TieredStopAtLevel=1"
# Disable all compilers
# export OPTIONS="$OPTIONS -J-Xint"
# Setup GC
export OPTIONS="$OPTIONS -J-XX:+UseG1GC -J-Xlog:gc:logs/gc.log"
# Setup GC
#export OPTIONS="$OPTIONS -J-XX:+UseZGC -J-XX:+ZGenerational -J-Xlog:gc:logs/gc.log"
# Enable ImageMagick v7.x support
# export OPTIONS="-J-Dim4java.useV7=true $OPTIONS";
export OPTIONS_EVAL="nil"
# export OPTIONS_EVAL="(set! *warn-on-reflection* true)"
set -ex
exec clojure $OPTIONS -M -e "$OPTIONS_EVAL" -m rebel-readline.main


@@ -1,13 +1,44 @@
#!/usr/bin/env bash
SCRIPT_DIR=$(dirname $0);
export PENPOT_SECRET_KEY=super-secret-devenv-key
export PENPOT_HOST=devenv
export PENPOT_FLAGS="\
$PENPOT_FLAGS \
enable-backend-asserts \
enable-feature-fdata-pointer-map \
enable-feature-fdata-objects-map \
enable-file-snapshot \
enable-tiered-file-data-storage";
source $SCRIPT_DIR/_env;
export OPTIONS="-A:dev"
export JAVA_OPTS="
-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
-Djdk.attach.allowAttachSelf \
-Dlog4j2.configurationFile=log4j2-devenv.xml \
-XX:+EnableDynamicAgentLoading \
-XX:-OmitStackTraceInFastThrow \
-XX:+UnlockDiagnosticVMOptions \
-XX:+DebugNonSafepoints";
export CLOJURE_OPTIONS="-A:dev"
# Default deletion delay for devenv
export PENPOT_DELETION_DELAY="24h"
# Setup default upload media file size to 100MiB
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
# Setup default multipart upload size to 300MiB
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
export AWS_ACCESS_KEY_ID=penpot-devenv
export AWS_SECRET_ACCESS_KEY=penpot-devenv
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=http://minio:9000
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
entrypoint=${1:-app.main};
shift 1;
set -ex
exec clojure $OPTIONS -A:dev -M -m $entrypoint "$@";
clojure $CLOJURE_OPTIONS -A:dev -M -m $entrypoint "$@";


@@ -1,11 +1,70 @@
#!/usr/bin/env bash
SCRIPT_DIR=$(dirname $0);
source $SCRIPT_DIR/_env;
export PENPOT_SECRET_KEY=super-secret-devenv-key
export PENPOT_HOST=devenv
export PENPOT_FLAGS="\
$PENPOT_FLAGS \
enable-prepl-server \
enable-urepl-server \
enable-nrepl-server \
enable-webhooks \
enable-backend-asserts \
enable-audit-log \
enable-login-with-ldap \
enable-transit-readable-response \
enable-demo-users \
disable-feature-fdata-pointer-map \
enable-feature-fdata-objects-map \
disable-secure-session-cookies \
enable-rpc-climit \
enable-smtp \
enable-quotes \
enable-file-snapshot \
enable-access-tokens \
disable-tiered-file-data-storage \
enable-file-validation \
enable-file-schema-validation \
enable-subscriptions \
disable-subscriptions-old";
# Default deletion delay for devenv
export PENPOT_DELETION_DELAY="24h"
# Setup default upload media file size to 100MiB
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
# Setup default multipart upload size to 300MiB
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
# Initialize MINIO config
setup_minio;
mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin -q
mc admin user add penpot-s3 penpot-devenv penpot-devenv -q
mc admin user info penpot-s3 penpot-devenv |grep -F -q "readwrite"
if [ "$?" = "1" ]; then
mc admin policy attach penpot-s3 readwrite --user=penpot-devenv -q
fi
mc mb penpot-s3/penpot -p -q
export AWS_ACCESS_KEY_ID=penpot-devenv
export AWS_SECRET_ACCESS_KEY=penpot-devenv
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=http://minio:9000
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
entrypoint=${1:-app.main};
export JAVA_OPTS="\
-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
-Djdk.attach.allowAttachSelf \
-Dlog4j2.configurationFile=log4j2-devenv.xml \
-Djdk.tracePinnedThreads=full \
-Dim4java.useV7=true \
-XX:-OmitStackTraceInFastThrow \
--sun-misc-unsafe-memory-access=allow \
--enable-preview \
--enable-native-access=ALL-UNNAMED";
export OPTIONS="-A:jmx-remote -A:dev"
shift 1;
set -ex
exec clojure -A:jmx-remote -A:dev -M -m app.main "$@";
clojure $OPTIONS -M -m $entrypoint;


@@ -434,10 +434,10 @@
(sm/validator schema:info))
(defn- get-info
[{:keys [::provider] :as cfg} {:keys [params] :as request}]
[{:keys [::provider ::setup/props] :as cfg} {:keys [params] :as request}]
(let [state (get params :state)
code (get params :code)
state (tokens/verify cfg {:token state :iss :oauth})
state (tokens/verify props {:token state :iss :oauth})
tdata (fetch-access-token cfg code)
info (case (cf/get :oidc-user-info-source)
:token (get-user-info cfg tdata)
@@ -516,7 +516,7 @@
:iss :prepared-register
:exp (ct/in-future {:hours 48}))
params {:token (tokens/generate cfg info)
params {:token (tokens/generate (::setup/props cfg) info)
:provider (:provider (:path-params request))
:fullname (:fullname info)}
params (d/without-nils params)]
@@ -569,7 +569,7 @@
:else
(let [sxf (session/create-fn cfg (:id profile))
token (or (:invitation-token info)
(tokens/generate cfg
(tokens/generate (::setup/props cfg)
{:iss :auth
:exp (ct/in-future "15m")
:profile-id (:id profile)}))
@@ -620,7 +620,8 @@
:external-session-id esid
:props props
:exp (ct/in-future "4h")}
state (tokens/generate cfg (d/without-nils params))
state (tokens/generate (::setup/props cfg)
(d/without-nils params))
uri (build-auth-uri cfg state)]
{::yres/status 200
::yres/body {:redirect-uri uri}}))


@@ -19,7 +19,6 @@
[app.common.time :as ct]
[app.common.types.file :as ctf]
[app.common.uuid :as uuid]
[app.common.weak :as weak]
[app.config :as cf]
[app.db :as db]
[app.db.sql :as sql]
@@ -34,7 +33,8 @@
[clojure.set :as set]
[cuerdas.core :as str]
[datoteka.fs :as fs]
[datoteka.io :as io]))
[datoteka.io :as io]
[promesa.exec :as px]))
(set! *warn-on-reflection* true)
@@ -159,7 +159,27 @@
[cfg id & {:as opts}]
(db/get-with-sql cfg [sql:get-minimal-file id] opts))
(def sql:files-with-data
;; DEPRECATED
(defn decode-file
"A general purpose file decoding function that resolves all external
pointers, run migrations and return plain vanilla file map"
[cfg {:keys [id] :as file} & {:keys [migrate?] :or {migrate? true}}]
(binding [pmap/*load-fn* (partial fdata/load-pointer cfg id)]
(let [file (->> file
(fmigr/resolve-applied-migrations cfg)
(fdata/resolve-file-data cfg)
(fdata/decode-file-data cfg))
libs (delay (get-resolved-file-libraries cfg file))]
(-> file
(update :features db/decode-pgarray #{})
(update :data blob/decode)
(update :data fdata/process-pointers deref)
(update :data fdata/process-objects (partial into {}))
(update :data assoc :id id)
(cond-> migrate? (fmg/migrate-file libs))))))
(def sql:get-file
"SELECT f.id,
f.project_id,
f.created_at,
@@ -176,35 +196,13 @@
f.version,
f.vern,
p.team_id,
coalesce(fd.backend, 'legacy-db') AS backend,
coalesce(fd.backend, 'db') AS backend,
fd.metadata AS metadata,
fd.data AS data
FROM file AS f
LEFT JOIN file_data AS fd ON (fd.file_id = f.id AND fd.id = f.id)
INNER JOIN project AS p ON (p.id = f.project_id)")
(def sql:get-file
(str sql:files-with-data " WHERE f.id = ?"))
(def sql:get-file-without-data
(str "WITH files AS (" sql:files-with-data ")"
"SELECT f.id,
f.project_id,
f.created_at,
f.modified_at,
f.deleted_at,
f.name,
f.is_shared,
f.has_media_trimmed,
f.revn,
f.ignore_sync_until,
f.comment_thread_seqn,
f.features,
f.version,
f.vern,
f.team_id
FROM files AS f
WHERE f.id = ?"))
INNER JOIN project AS p ON (p.id = f.project_id)
WHERE f.id = ?")
(defn- migrate-file
[{:keys [::db/conn] :as cfg} {:keys [read-only?]} {:keys [id] :as file}]
@@ -227,20 +225,18 @@
(update-file! cfg file)
(fmigr/resolve-applied-migrations cfg file))))))
;; FIXME: filter by project-id
(defn- get-file*
[{:keys [::db/conn] :as cfg} id
{:keys [migrate?
{:keys [#_project-id
migrate?
realize?
decode?
skip-locked?
include-deleted?
load-data?
throw-if-not-exists?
lock-for-update?
lock-for-share?]
lock-for-update?]
:or {lock-for-update? false
lock-for-share? false
load-data? true
migrate? true
decode? true
include-deleted? false
@@ -249,25 +245,11 @@
:as options}]
(assert (db/connection? conn) "expected cfg with valid connection")
(when (and (not load-data?)
(or lock-for-share? lock-for-share? skip-locked?))
(throw (IllegalArgumentException. "locking is incompatible when `load-data?` is false")))
(let [sql
(if load-data?
sql:get-file
sql:get-file-without-data)
sql
(cond
lock-for-update?
(str sql " FOR UPDATE of f")
lock-for-share?
(str sql " FOR SHARE of f")
:else
sql)
(if lock-for-update?
(str sql:get-file " FOR UPDATE of f")
sql:get-file)
sql
(if skip-locked?
@@ -285,25 +267,23 @@
(d/update-when :metadata fdata/decode-metadata))]
(if file
(if load-data?
(let [file
(->> file
(fmigr/resolve-applied-migrations cfg)
(fdata/resolve-file-data cfg))
(let [file
(->> file
(fmigr/resolve-applied-migrations cfg)
(fdata/resolve-file-data cfg))
will-migrate?
(and migrate? (fmg/need-migration? file))]
will-migrate?
(and migrate? (fmg/need-migration? file))]
(if decode?
(cond->> (fdata/decode-file-data cfg file)
(and realize? (not will-migrate?))
(fdata/realize cfg)
(if decode?
(cond->> (fdata/decode-file-data cfg file)
(and realize? (not will-migrate?))
(fdata/realize cfg)
will-migrate?
(migrate-file cfg options))
will-migrate?
(migrate-file cfg options))
file))
file)
file))
(when-not (or skip-locked? (not throw-if-not-exists?))
(ex/raise :type :not-found
@@ -440,6 +420,7 @@
(do
(l/trc :hint "lookup index"
:file-id (str file-id)
:snap-id (str (:snapshot-id file))
:id (str id)
:result (str (get mobj :id)))
(get mobj :id))
@@ -456,6 +437,7 @@
(doseq [[old-id item] missing-index]
(l/dbg :hint "create missing references"
:file-id (str file-id)
:snap-id (str (:snapshot-id file))
:old-id (str old-id)
:id (str (:id item)))
(db/insert! conn :file-media-object item
@@ -466,16 +448,12 @@
(def sql:get-file-media
"SELECT * FROM file_media_object WHERE id = ANY(?)")
(defn get-file-media*
[{:keys [::db/conn] :as cfg} {:keys [data id] :as file}]
(let [used (cfh/collect-used-media data)
used (db/create-array conn "uuid" used)]
(->> (db/exec! conn [sql:get-file-media used])
(mapv (fn [row] (assoc row :file-id id))))))
(defn get-file-media
[cfg file]
(db/run! cfg get-file-media* file))
[cfg {:keys [data] :as file}]
(db/run! cfg (fn [{:keys [::db/conn]}]
(let [used (cfh/collect-used-media data)
used (db/create-array conn "uuid" used)]
(db/exec! conn [sql:get-file-media used])))))
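A minimal usage sketch for the new version, which returns the raw file_media_object rows (the old variant assoc'ed :file-id onto each row; the new one does not):

(->> (get-file-media cfg file)
     (map :media-id))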
(def ^:private sql:get-team-files-ids
"SELECT f.id FROM file AS f
@@ -550,7 +528,7 @@
[cfg data file-id]
(let [library-ids (get-libraries cfg [file-id])]
(reduce (fn [data library-id]
(if-let [library (get-file cfg library-id :include-deleted? true)]
(if-let [library (get-file cfg library-id)]
(ctf/absorb-assets data (:data library))
data))
data
@@ -606,8 +584,8 @@
;; all of them, not only the applied
(vary-meta dissoc ::fmg/migrated))))
(defn- encode-file
[cfg {:keys [id features] :as file}]
(defn encode-file
[{:keys [::wrk/executor] :as cfg} {:keys [id features] :as file}]
(let [file (if (and (contains? features "fdata/objects-map")
(:data file))
(fdata/enable-objects-map file)
@@ -624,7 +602,7 @@
(-> file
(d/update-when :features into-array)
(d/update-when :data blob/encode))))
(d/update-when :data (fn [data] (px/invoke! executor #(blob/encode data)))))))
(defn- file->params
[file]
@@ -633,16 +611,21 @@
(dissoc :team-id)
(dissoc :migrations)))
(defn- file->file-data-params
[{:keys [id] :as file} & {:as opts}]
(defn file->file-data-params
[{:keys [id backend] :as file} & {:as opts}]
(let [created-at (or (:created-at file) (ct/now))
modified-at (or (:modified-at file) created-at)]
modified-at (or (:modified-at file) created-at)
backend (if (and (::overwrite-storage-backend opts) backend)
backend
(cf/get :file-storage-backend))]
(d/without-nils
{:id id
:type "main"
:file-id id
:data (:data file)
:metadata (:metadata file)
:backend backend
:created-at created-at
:modified-at modified-at})))
@@ -650,8 +633,6 @@
"Insert a new file into the database table. Expectes a not-encoded file.
Returns nil."
[{:keys [::db/conn] :as cfg} file & {:as opts}]
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
(when (:migrations file)
(fmigr/upsert-migrations! conn file))
@@ -661,7 +642,7 @@
(assoc opts ::db/return-keys false))
(->> (file->file-data-params file)
(fdata/upsert! cfg))
(fdata/update! cfg))
nil))
@@ -669,7 +650,7 @@
"Update an existing file on the database. Expects not encoded file."
[{:keys [::db/conn] :as cfg} {:keys [id] :as file} & {:as opts}]
(if (::reset-migrations? opts false)
(if (::reset-migrations opts false)
(fmigr/reset-migrations! conn file)
(fmigr/upsert-migrations! conn file))
@@ -686,7 +667,7 @@
{:id id}
{::db/return-keys false})
(fdata/upsert! cfg file-data-params)
(fdata/update! cfg file-data-params)
nil))
(defn save-file!
@@ -719,7 +700,7 @@
(l/error :hint "file schema validation error" :cause result))))
(if (::overwrite cfg)
(update-file! cfg file (assoc opts ::reset-migrations? true))
(update-file! cfg file (assoc opts ::reset-migrations true))
(insert-file! cfg file opts))))
(def ^:private sql:get-file-libraries
@@ -749,7 +730,7 @@
l.version
FROM libs AS l
INNER JOIN project AS p ON (p.id = l.project_id)
WHERE l.deleted_at IS NULL;")
WHERE l.deleted_at IS NULL OR l.deleted_at > now();")
(defn get-file-libraries
[conn file-id]
@@ -761,19 +742,11 @@
(map decode-row-features))
(db/exec! conn [sql:get-file-libraries file-id])))
;; FIXME: this will use a lot of memory if the file uses too many big
;; libraries; we should load the required libraries on demand
(defn get-resolved-file-libraries
"Get all file libraries including itself. Returns an instance of
LoadableWeakValueMap that allows do not have strong references to
the loaded libraries and reduce possible memory pressure on having
all this libraries loaded at same time on processing file validation
or file migration.
This still requires at least one library at time to be loaded while
access to it is performed, but it improves considerable not having
the need of loading all the libraries at the same time."
[{:keys [::db/conn] :as cfg} {:keys [id] :as file}]
(let [library-ids (->> (get-file-libraries conn (:id file))
(map :id)
(cons (:id file)))
load-fn #(get-file cfg % :migrate? false)]
(weak/loadable-weak-value-map library-ids load-fn {id file})))
"A helper for preload file libraries"
[{:keys [::db/conn] :as cfg} file]
(->> (get-file-libraries conn (:id file))
(into [file] (map #(get-file cfg (:id %))))
(d/index-by :id)))
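The rewritten helper loads every library eagerly and returns a plain map indexed by id, with the file itself included; a usage sketch:

(let [libs (get-resolved-file-libraries cfg file)]
  ;; the file and all its libraries are keyed by uuid
  (contains? libs (:id file))) ;; => true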

View File

@@ -36,6 +36,11 @@
"fdata/shape-data-type"
nil
;; There is no migration needed, but we don't want to allow
;; copy-paste or import of variant files into non-variant teams
"variants/v1"
nil
(ex/raise :type :internal
:code :no-migration-defined
:hint (str/ffmt "no migation for feature '%' on file importation" feature)

View File

@@ -27,7 +27,7 @@
[app.common.types.page :as ctp]
[app.common.types.plugins :as ctpg]
[app.common.types.shape :as cts]
[app.common.types.tokens-lib :as ctob]
[app.common.types.tokens-lib :as cto]
[app.common.types.typography :as cty]
[app.common.uuid :as uuid]
[app.config :as cf]
@@ -41,10 +41,8 @@
[datoteka.fs :as fs]
[datoteka.io :as io])
(:import
java.io.File
java.io.InputStream
java.io.OutputStreamWriter
java.lang.AutoCloseable
java.util.zip.ZipEntry
java.util.zip.ZipFile
java.util.zip.ZipOutputStream))
@@ -105,25 +103,25 @@
(sm/encoder ctp/schema:page sm/json-transformer))
(def encode-shape
(sm/encoder cts/schema:shape sm/json-transformer))
(sm/encoder ::cts/shape sm/json-transformer))
(def encode-media
(sm/encoder ctf/schema:media sm/json-transformer))
(sm/encoder ::ctf/media sm/json-transformer))
(def encode-component
(sm/encoder ctc/schema:component sm/json-transformer))
(sm/encoder ::ctc/component sm/json-transformer))
(def encode-color
(sm/encoder ctcl/schema:library-color sm/json-transformer))
(def encode-typography
(sm/encoder cty/schema:typography sm/json-transformer))
(sm/encoder ::cty/typography sm/json-transformer))
(def encode-tokens-lib
(sm/encoder ctob/schema:tokens-lib sm/json-transformer))
(sm/encoder ::cto/tokens-lib sm/json-transformer))
(def encode-plugin-data
(sm/encoder ctpg/schema:plugin-data sm/json-transformer))
(sm/encoder ::ctpg/plugin-data sm/json-transformer))
(def encode-storage-object
(sm/encoder schema:storage-object sm/json-transformer))
@@ -140,7 +138,7 @@
(sm/decoder ctf/schema:media sm/json-transformer))
(def decode-component
(sm/decoder ctc/schema:component sm/json-transformer))
(sm/decoder ::ctc/component sm/json-transformer))
(def decode-color
(sm/decoder ctcl/schema:library-color sm/json-transformer))
@@ -149,19 +147,19 @@
(sm/decoder schema:file sm/json-transformer))
(def decode-page
(sm/decoder ctp/schema:page sm/json-transformer))
(sm/decoder ::ctp/page sm/json-transformer))
(def decode-shape
(sm/decoder cts/schema:shape sm/json-transformer))
(sm/decoder ::cts/shape sm/json-transformer))
(def decode-typography
(sm/decoder cty/schema:typography sm/json-transformer))
(sm/decoder ::cty/typography sm/json-transformer))
(def decode-tokens-lib
(sm/decoder ctob/schema:tokens-lib sm/json-transformer))
(sm/decoder cto/schema:tokens-lib sm/json-transformer))
(def decode-plugin-data
(sm/decoder ctpg/schema:plugin-data sm/json-transformer))
(sm/decoder ::ctpg/plugin-data sm/json-transformer))
(def decode-storage-object
(sm/decoder schema:storage-object sm/json-transformer))
@@ -175,31 +173,31 @@
(sm/check-fn schema:manifest))
(def validate-file
(sm/check-fn ctf/schema:file))
(sm/check-fn ::ctf/file))
(def validate-page
(sm/check-fn ctp/schema:page))
(sm/check-fn ::ctp/page))
(def validate-shape
(sm/check-fn cts/schema:shape))
(sm/check-fn ::cts/shape))
(def validate-media
(sm/check-fn ctf/schema:media))
(sm/check-fn ::ctf/media))
(def validate-color
(sm/check-fn ctcl/schema:library-color))
(def validate-component
(sm/check-fn ctc/schema:component))
(sm/check-fn ::ctc/component))
(def validate-typography
(sm/check-fn cty/schema:typography))
(sm/check-fn ::cty/typography))
(def validate-tokens-lib
(sm/check-fn ctob/schema:tokens-lib))
(sm/check-fn ::cto/tokens-lib))
(def validate-plugin-data
(sm/check-fn ctpg/schema:plugin-data))
(sm/check-fn ::ctpg/plugin-data))
(def validate-storage-object
(sm/check-fn schema:storage-object))
@@ -228,7 +226,6 @@
(db/tx-run! cfg (fn [cfg]
(cond-> (bfc/get-file cfg file-id
{:realize? true
:include-deleted? true
:lock-for-update? true})
detach?
(-> (ctf/detach-external-references file-id)
@@ -256,9 +253,9 @@
(write-entry! output path params)
(with-open [input (sto/get-object-data storage sobject)]
(.putNextEntry ^ZipOutputStream output (ZipEntry. (str "objects/" id ext)))
(.putNextEntry output (ZipEntry. (str "objects/" id ext)))
(io/copy input output :size (:size sobject))
(.closeEntry ^ZipOutputStream output))))))
(.closeEntry output))))))
(defn- export-file
[{:keys [::file-id ::output] :as cfg}]
@@ -286,12 +283,14 @@
(let [file (cond-> (select-keys file bfc/file-attrs)
(:options data)
(assoc :options (:options data)))
(assoc :options (:options data))
file (-> file
(dissoc :data)
(dissoc :deleted-at)
(encode-file))
:always
(dissoc :data))
file (cond-> file
:always
(encode-file))
path (str "files/" file-id ".json")]
(write-entry! output path file))
@@ -350,8 +349,7 @@
typography (encode-typography object)]
(write-entry! output path typography)))
(when (and tokens-lib
(not (ctob/empty-lib? tokens-lib)))
(when tokens-lib
(let [path (str "files/" file-id "/tokens.json")
encoded-tokens (encode-tokens-lib tokens-lib)]
(write-entry! output path encoded-tokens)))))
@@ -451,7 +449,7 @@
(defn- read-manifest
[^ZipFile input]
(let [entry (get-zip-entry input "manifest.json")]
(with-open [^AutoCloseable reader (zip-entry-reader input entry)]
(with-open [reader (zip-entry-reader input entry)]
(let [manifest (json/read reader :key-fn json/read-kebab-key)]
(decode-manifest manifest)))))
@@ -541,12 +539,12 @@
(defn- read-entry
[^ZipFile input entry]
(with-open [^AutoCloseable reader (zip-entry-reader input entry)]
(with-open [reader (zip-entry-reader input entry)]
(json/read reader :key-fn json/read-kebab-key)))
(defn- read-plain-entry
[^ZipFile input entry]
(with-open [^AutoCloseable reader (zip-entry-reader input entry)]
(with-open [reader (zip-entry-reader input entry)]
(json/read reader)))
(defn- read-file
@@ -714,7 +712,7 @@
:plugin-data plugin-data}))
(defn- import-file
[{:keys [::db/conn ::bfc/project-id] :as cfg} {file-id :id file-name :name}]
[{:keys [::bfc/project-id] :as cfg} {file-id :id file-name :name}]
(let [file-id' (bfc/lookup-index file-id)
file (read-file cfg file-id)
media (read-file-media cfg file-id)
@@ -727,48 +725,26 @@
:version (:version file)
::l/sync? true)
(vswap! bfc/*state* update :index bfc/update-index media :id)
(events/tap :progress {:section :file :name file-name})
(events/tap :progress {:section :media :file-id file-id})
(when media
;; Update index with media
(l/dbg :hint "update media index"
:file-id (str file-id')
:total (count media)
::l/sync? true)
(doseq [item media]
(let [params (-> item
(update :id bfc/lookup-index)
(assoc :file-id file-id')
(d/update-when :media-id bfc/lookup-index)
(d/update-when :thumbnail-id bfc/lookup-index))]
(vswap! bfc/*state* update :index bfc/update-index (map :id media))
(vswap! bfc/*state* update :media into media))
(l/dbg :hint "inserting media object"
:file-id (str file-id')
:id (str (:id params))
:media-id (str (:media-id params))
:thumbnail-id (str (:thumbnail-id params))
:old-id (str (:id item))
::l/sync? true)
(when thumbnails
(l/dbg :hint "update thumbnails index"
:file-id (str file-id')
:total (count thumbnails)
::l/sync? true)
(db/insert! conn :file-media-object params
::db/on-conflict-do-nothing? (::bfc/overwrite cfg))))
(events/tap :progress {:section :thumbnails :file-id file-id})
(doseq [item thumbnails]
(let [media-id (bfc/lookup-index (:media-id item))
object-id (-> (assoc item :file-id file-id')
(cth/fmt-object-id))
params {:file-id file-id'
:object-id object-id
:tag (:tag item)
:media-id media-id}]
(l/dbg :hint "inserting object thumbnail"
:file-id (str file-id')
:media-id (str media-id)
::l/sync? true)
(db/insert! conn :file-tagged-object-thumbnail params
::db/on-conflict-do-nothing? true)))
(events/tap :progress {:section :file :file-id file-id})
(vswap! bfc/*state* update :index bfc/update-index (map :media-id thumbnails))
(vswap! bfc/*state* update :thumbnails into thumbnails))
(let [data (-> (read-file-data cfg file-id)
(d/without-nils)
@@ -817,47 +793,95 @@
entries (keep (match-storage-entry-fn) entries)]
(doseq [{:keys [id entry]} entries]
(let [object (->> (read-entry input entry)
(decode-storage-object)
(validate-storage-object))
(let [object (->> (read-entry input entry)
(decode-storage-object)
(validate-storage-object))]
ext (cmedia/mtype->extension (:content-type object))
path (str "objects/" id ext)
content (->> path
(get-zip-entry input)
(zip-entry-storage-content input))]
(when (not= (:size object) (sto/get-size content))
(when (not= id (:id object))
(ex/raise :type :validation
:code :inconsistent-penpot-file
:hint "found corrupted storage object: size does not match"
:path path
:expected-size (:size object)
:found-size (sto/get-size content)))
:hint "the penpot file seems corrupt, found unexpected uuid (storage-object-id)"
:expected-id (str id)
:found-id (str (:id object))))
(when-let [hash (get object :hash)]
(when (not= hash (sto/get-hash content))
(let [ext (cmedia/mtype->extension (:content-type object))
path (str "objects/" id ext)
content (->> path
(get-zip-entry input)
(zip-entry-storage-content input))]
(when (not= (:size object) (sto/get-size content))
(ex/raise :type :validation
:code :inconsistent-penpot-file
:hint "found corrupted storage object: hash does not match"
:hint "found corrupted storage object: size does not match"
:path path
:expected-hash (:hash object)
:found-hash (sto/get-hash content))))
:expected-size (:size object)
:found-size (sto/get-size content)))
(let [params (-> object
(dissoc :id :size)
(assoc ::sto/content content)
(assoc ::sto/deduplicate? true)
(assoc ::sto/touched-at timestamp))
sobject (sto/put-object! storage params)]
(when-let [hash (get object :hash)]
(when (not= hash (sto/get-hash content))
(ex/raise :type :validation
:code :inconsistent-penpot-file
:hint "found corrupted storage object: hash does not match"
:path path
:expected-hash (:hash object)
:found-hash (sto/get-hash content))))
(l/dbg :hint "persisted storage object"
:id (str (:id sobject))
:prev-id (str id)
:bucket (:bucket params)
::l/sync? true)
(let [params (-> object
(dissoc :id :size)
(assoc ::sto/content content)
(assoc ::sto/deduplicate? true)
(assoc ::sto/touched-at timestamp))
sobject (sto/put-object! storage params)]
(vswap! bfc/*state* update :index assoc id (:id sobject)))))))
(l/dbg :hint "persisted storage object"
:id (str (:id sobject))
:prev-id (str id)
:bucket (:bucket params)
::l/sync? true)
(vswap! bfc/*state* update :index assoc id (:id sobject))))))))
(defn- import-file-media
[{:keys [::db/conn] :as cfg}]
(events/tap :progress {:section :media})
(doseq [item (:media @bfc/*state*)]
(let [params (-> item
(update :id bfc/lookup-index)
(update :file-id bfc/lookup-index)
(d/update-when :media-id bfc/lookup-index)
(d/update-when :thumbnail-id bfc/lookup-index))]
(l/dbg :hint "inserting file media object"
:old-id (str (:id item))
:id (str (:id params))
:file-id (str (:file-id params))
::l/sync? true)
(db/insert! conn :file-media-object params
::db/on-conflict-do-nothing? (::bfc/overwrite cfg)))))
(defn- import-file-thumbnails
[{:keys [::db/conn] :as cfg}]
(events/tap :progress {:section :thumbnails})
(doseq [item (:thumbnails @bfc/*state*)]
(let [file-id (bfc/lookup-index (:file-id item))
media-id (bfc/lookup-index (:media-id item))
object-id (-> (assoc item :file-id file-id)
(cth/fmt-object-id))
params {:file-id file-id
:object-id object-id
:tag (:tag item)
:media-id media-id}]
(l/dbg :hint "inserting file object thumbnail"
:file-id (str file-id)
:media-id (str media-id)
::l/sync? true)
(db/insert! conn :file-tagged-object-thumbnail params
{::db/on-conflict-do-nothing? true}))))
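These two helpers run only after every file has been imported, so all :media-id and :file-id lookups can resolve through the shared index; a sketch of the overall ordering used by import-files* below:

(import-file cfg file)        ;; per file: fills :media and :thumbnails in bfc/*state*
(import-storage-objects cfg)  ;; persists binary objects and fills the :index
(import-file-media cfg)       ;; inserts file_media_object rows
(import-file-thumbnails cfg)  ;; inserts file_tagged_object_thumbnail rows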
(defn- import-files*
[{:keys [::manifest] :as cfg}]
@@ -865,8 +889,6 @@
(vswap! bfc/*state* update :index bfc/update-index (:files manifest) :id)
(import-storage-objects cfg)
(let [files (get manifest :files)
result (reduce (fn [result {:keys [id] :as file}]
(let [name' (get file :name)
@@ -879,6 +901,10 @@
files)]
(import-file-relations cfg)
(import-storage-objects cfg)
(import-file-media cfg)
(import-file-thumbnails cfg)
(bfm/apply-pending-migrations! cfg)
result))
@@ -903,8 +929,9 @@
(binding [bfc/*options* cfg
bfc/*reference-file* ref-file]
(import-storage-objects cfg)
(import-file cfg file)
(import-storage-objects cfg)
(import-file-media cfg)
(bfc/invalidate-thumbnails cfg file-id)
(bfm/apply-pending-migrations! cfg)
@@ -981,8 +1008,8 @@
(try
(l/info :hint "start exportation" :export-id (str id))
(binding [bfc/*state* (volatile! (bfc/initial-state))]
(with-open [^AutoCloseable output (io/output-stream output)]
(with-open [^AutoCloseable output (ZipOutputStream. output)]
(with-open [output (io/output-stream output)]
(with-open [output (ZipOutputStream. output)]
(let [cfg (assoc cfg ::output output)]
(export-files cfg)
(export-storage-objects cfg)))))
@@ -1026,7 +1053,7 @@
(l/info :hint "import: started" :id (str id))
(try
(with-open [input (ZipFile. ^File (fs/file input))]
(with-open [input (ZipFile. (fs/file input))]
(import-files (assoc cfg ::bfc/input input)))
(catch Throwable cause
@@ -1041,6 +1068,6 @@
(defn get-manifest
[path]
(with-open [^AutoCloseable input (ZipFile. ^File (fs/file path))]
(with-open [input (ZipFile. (fs/file path))]
(-> (read-manifest input)
(validate-manifest))))

View File

@@ -52,7 +52,7 @@
:redis-uri "redis://redis/0"
:file-data-backend "legacy-db"
:file-storage-backend "db"
:objects-storage-backend "fs"
:objects-storage-fs-directory "assets"
@@ -98,9 +98,7 @@
[:http-server-max-body-size {:optional true} ::sm/int]
[:http-server-max-multipart-body-size {:optional true} ::sm/int]
[:http-server-io-threads {:optional true} ::sm/int]
[:http-server-max-worker-threads {:optional true} ::sm/int]
[:management-api-shared-key {:optional true} :string]
[:http-server-worker-threads {:optional true} ::sm/int]
[:telemetry-uri {:optional true} :string]
[:telemetry-with-taiga {:optional true} ::sm/boolean] ;; DELETE
@@ -151,6 +149,7 @@
[:quotes-team-access-requests-per-team {:optional true} ::sm/int]
[:quotes-team-access-requests-per-requester {:optional true} ::sm/int]
[:auth-data-cookie-domain {:optional true} :string]
[:auth-token-cookie-name {:optional true} :string]
[:auth-token-cookie-max-age {:optional true} ::ct/duration]
@@ -214,27 +213,26 @@
[:prepl-host {:optional true} :string]
[:prepl-port {:optional true} ::sm/int]
[:file-data-backend {:optional true} [:enum "db" "legacy-db" "storage"]]
[:file-storage-backend :string]
[:media-directory {:optional true} :string] ;; REVIEW
[:media-uri {:optional true} :string]
[:assets-path {:optional true} :string]
[:netty-io-threads {:optional true} ::sm/int]
[:executor-threads {:optional true} ::sm/int]
;; DEPRECATED
;; Legacy, will be removed in 2.5
[:assets-storage-backend {:optional true} :keyword]
[:storage-assets-fs-directory {:optional true} :string]
[:storage-assets-s3-bucket {:optional true} :string]
[:storage-assets-s3-region {:optional true} :keyword]
[:storage-assets-s3-endpoint {:optional true} ::sm/uri]
[:storage-assets-s3-io-threads {:optional true} ::sm/int]
[:objects-storage-backend {:optional true} :keyword]
[:objects-storage-fs-directory {:optional true} :string]
[:objects-storage-s3-bucket {:optional true} :string]
[:objects-storage-s3-region {:optional true} :keyword]
[:objects-storage-s3-endpoint {:optional true} ::sm/uri]]))
[:objects-storage-s3-endpoint {:optional true} ::sm/uri]
[:objects-storage-s3-io-threads {:optional true} ::sm/int]]))
(defn- parse-flags
[config]
@@ -319,9 +317,5 @@
([key default]
(c/get config key default)))
(defn logging-context
[]
{:version/backend (:full version)})
;; Set value for all new threads bindings.
(alter-var-root #'*assert* (constantly (contains? flags :backend-asserts)))

View File

@@ -298,7 +298,7 @@
(defn insert!
"A helper that builds an insert sql statement and executes it. By
default returns the inserted row with all the field; you can delimit
the returned columns with the `::sql/columns` option."
the returned columns with the `::columns` option."
[ds table params & {:as opts}]
(let [conn (get-connectable ds)
sql (sql/insert table params opts)
@@ -379,7 +379,9 @@
(defn is-row-deleted?
[{:keys [deleted-at]}]
(some? deleted-at))
(and (ct/inst? deleted-at)
(< (inst-ms deleted-at)
(inst-ms (ct/now)))))
(defn get*
"Retrieve a single row from database that matches a simple filters. Do
@@ -404,15 +406,15 @@
:hint "database object not found"))
row))
(defn get-with-sql
[ds sql & {:as opts}]
(let [rows
(cond->> (exec! ds sql opts)
(::remove-deleted opts true)
(remove is-row-deleted?)
(let [rows (cond->> (exec! ds sql opts)
(::remove-deleted opts true)
(remove is-row-deleted?)
:always
(not-empty))]
:always
(not-empty))]
(when (and (not rows) (::throw-if-not-exists opts true))
(ex/raise :type :not-found
@@ -421,6 +423,7 @@
(first rows)))
(def ^:private default-plan-opts
(-> default-opts
(assoc :fetch-size 1000)
@@ -575,10 +578,10 @@
[system f & params]
(cond
(connection? system)
(apply run! {::conn system} f params)
(run! {::conn system} f)
(pool? system)
(apply run! {::pool system} f params)
(run! {::pool system} f)
(::conn system)
(apply f system params)
@@ -704,12 +707,6 @@
(and (sql-exception? cause)
(= "40001" (.getSQLState ^java.sql.SQLException cause))))
(defn duplicate-key-error?
[cause]
(and (sql-exception? cause)
(= "23505" (.getSQLState ^java.sql.SQLException cause))))
(extend-protocol jdbc.prepare/SettableParameter
clojure.lang.Keyword
(set-parameter [^clojure.lang.Keyword v ^PreparedStatement s ^long i]

View File

@@ -53,15 +53,8 @@
opts (cond-> opts
(::order-by opts) (assoc :order-by (::order-by opts))
(::columns opts) (assoc :columns (::columns opts))
(or (::db/for-update opts)
(::for-update opts))
(assoc :suffix "FOR UPDATE")
(or (::db/for-share opts)
(::for-share opts))
(assoc :suffix "FOR SHARE"))]
(::for-update opts) (assoc :suffix "FOR UPDATE")
(::for-share opts) (assoc :suffix "FOR SHARE"))]
(sql/for-query table where-params opts))))
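With the ::db/for-update and ::db/for-share aliases dropped, callers must pass this namespace's own option keys; a hedged call sketch (the sql alias for this namespace is an assumption):

(db/get* conn :file
         {:id file-id}
         {::sql/for-update true}) ;; appends "FOR UPDATE" to the generated query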
(defn update

View File

@@ -7,7 +7,6 @@
(ns app.email
"Main api for send emails."
(:require
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.logging :as l]
@@ -94,44 +93,36 @@
headers)))
(defn- assign-body
[^MimeMessage mmsg {:keys [body charset attachments] :or {charset "utf-8"}}]
(let [mixed-mpart (MimeMultipart. "mixed")]
[^MimeMessage mmsg {:keys [body charset] :or {charset "utf-8"}}]
(let [mpart (MimeMultipart. "mixed")]
(cond
(string? body)
(let [text-part (MimeBodyPart.)]
(.setText text-part ^String body ^String charset)
(.addBodyPart mixed-mpart text-part))
(let [bpart (MimeBodyPart.)]
(.setContent bpart ^String body (str "text/plain; charset=" charset))
(.addBodyPart mpart bpart))
(vector? body)
(let [mmp (MimeMultipart. "alternative")
mbp (MimeBodyPart.)]
(.addBodyPart mpart mbp)
(.setContent mbp mmp)
(doseq [item body]
(let [mbp (MimeBodyPart.)]
(.setContent mbp
^String (:content item)
^String (str (:type item "text/plain") "; charset=" charset))
(.addBodyPart mmp mbp))))
(map? body)
(let [content-part (MimeBodyPart.)
alternative-mpart (MimeMultipart. "alternative")]
(when-let [content (get body "text/html")]
(let [html-part (MimeBodyPart.)]
(.setContent html-part ^String content
(str "text/html; charset=" charset))
(.addBodyPart alternative-mpart html-part)))
(when-let [content (get body "text/plain")]
(let [text-part (MimeBodyPart.)]
(.setText text-part ^String content ^String charset)
(.addBodyPart alternative-mpart text-part)))
(.setContent content-part alternative-mpart)
(.addBodyPart mixed-mpart content-part))
(let [bpart (MimeBodyPart.)]
(.setContent bpart
^String (:content body)
^String (str (:type body "text/plain") "; charset=" charset))
(.addBodyPart mpart bpart))
:else
(throw (IllegalArgumentException. "invalid email body provided")))
(doseq [[name content] attachments]
(prn "attachment" name)
(let [attachment-part (MimeBodyPart.)]
(.setFileName attachment-part ^String name)
(.setContent attachment-part ^String content (str "text/plain; charset=" charset))
(.addBodyPart mixed-mpart attachment-part)))
(.setContent mmsg mixed-mpart)
(throw (ex-info "Unsupported type" {:body body})))
(.setContent mmsg mpart)
mmsg))
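Per the cond above, the refactored assign-body accepts a plain string, a vector of alternative parts, or a single part map; a sketch of the three accepted shapes:

;; plain text
{:body "hello"}
;; multipart/alternative
{:body [{:type "text/plain" :content "hello"}
        {:type "text/html" :content "<p>hello</p>"}]}
;; a single explicit part
{:body {:type "text/html" :content "<p>hello</p>"}}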
(defn- opts->props
@@ -219,26 +210,24 @@
(ex/raise :type :internal
:code :missing-email-templates))
{:subject subj
:body (d/without-nils
{"text/plain" text
"text/html" html})}))
:body (into
[{:type "text/plain"
:content text}]
(when html
[{:type "text/html"
:content html}]))}))
(def ^:private schema:params
[:map {:title "Email Params"}
(def ^:private schema:context
[:map
[:to [:or ::sm/email [::sm/vec ::sm/email]]]
[:reply-to {:optional true} ::sm/email]
[:from {:optional true} ::sm/email]
[:lang {:optional true} ::sm/text]
[:subject {:optional true} ::sm/text]
[:priority {:optional true} [:enum :high :low]]
[:extra-data {:optional true} ::sm/text]
[:body {:optional true}
[:or :string [:map-of :string :string]]]
[:attachments {:optional true}
[:map-of :string :string]]])
[:extra-data {:optional true} ::sm/text]])
(def ^:private check-params
(sm/check-fn schema:params))
(def ^:private check-context
(sm/check-fn schema:context))
(defn template-factory
[& {:keys [id schema]}]
@@ -246,9 +235,9 @@
(let [check-fn (if schema
(sm/check-fn schema)
(constantly nil))]
(fn [params]
(let [params (-> params check-params check-fn)
email (build-email-template id params)]
(fn [context]
(let [context (-> context check-context check-fn)
email (build-email-template id context)]
(when-not email
(ex/raise :type :internal
:code :email-template-does-not-exists
@@ -256,40 +245,35 @@
:template-id id))
(cond-> (assoc email :id (name id))
(:extra-data params)
(assoc :extra-data (:extra-data params))
(:extra-data context)
(assoc :extra-data (:extra-data context))
(seq (:attachments params))
(assoc :attachments (:attachments params))
(:from context)
(assoc :from (:from context))
(:from params)
(assoc :from (:from params))
(:reply-to context)
(assoc :reply-to (:reply-to context))
(:reply-to params)
(assoc :reply-to (:reply-to params))
(:to params)
(assoc :to (:to params)))))))
(:to context)
(assoc :to (:to context)))))))
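A minimal factory usage sketch; the :register template id is hypothetical:

(def register-email
  (template-factory :id :register))

(render register-email {:to "user@example.com" :lang "en"})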
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; PUBLIC HIGH-LEVEL API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn render
[email-factory params]
(email-factory params))
[email-factory context]
(email-factory context))
(defn send!
"Schedule an already defined email to be sent using asynchronously
using worker task."
[{:keys [::conn ::factory] :as params}]
[{:keys [::conn ::factory] :as context}]
(assert (db/connectable? conn) "expected a valid database connection or pool")
(let [email (if factory
(factory params)
(-> params
(dissoc params)
(check-params)))]
(factory context)
(dissoc context ::conn))]
(wrk/submit! {::wrk/task :sendmail
::wrk/delay 0
::wrk/max-retries 4
@@ -359,10 +343,8 @@
(def ^:private schema:feedback
[:map
[:feedback-subject ::sm/text]
[:feedback-type ::sm/text]
[:feedback-content ::sm/text]
[:profile :map]])
[:subject ::sm/text]
[:content ::sm/text]])
(def user-feedback
"A profile feedback email."

View File

@@ -9,22 +9,43 @@
(:require
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.files.helpers :as cfh]
[app.common.files.migrations :as fmg]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.types.objects-map :as omap]
[app.common.types.path :as path]
[app.config :as cf]
[app.db :as db]
[app.db.sql :as-alias sql]
[app.storage :as sto]
[app.util.blob :as blob]
[app.util.objects-map :as omap.legacy]
[app.util.pointer-map :as pmap]))
[app.util.objects-map :as omap]
[app.util.pointer-map :as pmap]
[app.worker :as wrk]
[promesa.exec :as px]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; OBJECTS-MAP
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn enable-objects-map
[file & _opts]
(let [update-page
(fn [page]
(if (and (pmap/pointer-map? page)
(not (pmap/loaded? page)))
page
(update page :objects omap/wrap)))
update-data
(fn [fdata]
(update fdata :pages-index d/update-vals update-page))]
(-> file
(update :data update-data)
(update :features conj "fdata/objects-map"))))
(defn process-objects
"Apply a function to all objects-map on the file. Usualy used for convert
the objects-map instances to plain maps"
@@ -34,226 +55,20 @@
(fn [page]
(update page :objects
(fn [objects]
(if (or (omap/objects-map? objects)
(omap.legacy/objects-map? objects))
(if (omap/objects-map? objects)
(update-fn objects)
objects)))))
fdata))
(defn realize-objects
"Process a file and remove all instances of objects map realizing them
"Process a file and remove all instances of objects mao realizing them
to a plain data. Used in operation where is more efficient have the
whole file loaded in memory or we going to persist it in an
alterantive storage."
[_cfg file]
(update file :data process-objects (partial into {})))
(defn enable-objects-map
[file & _opts]
(let [update-page
(fn [page]
(update page :objects omap/wrap))
update-data
(fn [fdata]
(update fdata :pages-index d/update-vals update-page))]
(-> file
(update :data update-data)
(update :features conj "fdata/objects-map"))))
(defn disable-objects-map
[file & _opts]
(let [update-page
(fn [page]
(update page :objects #(into {} %)))
update-data
(fn [fdata]
(update fdata :pages-index d/update-vals update-page))]
(-> file
(update :data update-data)
(update :features disj "fdata/objects-map"))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; STORAGE
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defmulti resolve-file-data
(fn [_cfg file] (get file :backend "legacy-db")))
(defmethod resolve-file-data "legacy-db"
[_cfg {:keys [legacy-data] :as file}]
(-> file
(assoc :data legacy-data)
(dissoc :legacy-data)))
(defmethod resolve-file-data "db"
[_cfg file]
(dissoc file :legacy-data))
(defmethod resolve-file-data "storage"
[cfg {:keys [metadata] :as file}]
(let [storage (sto/resolve cfg ::db/reuse-conn true)
ref-id (:storage-ref-id metadata)
data (->> (sto/get-object storage ref-id)
(sto/get-object-bytes storage))]
(-> file
(assoc :data data)
(dissoc :legacy-data))))
(defn decode-file-data
[_cfg {:keys [data] :as file}]
(cond-> file
(bytes? data)
(assoc :data (blob/decode data))))
(def ^:private sql:insert-file-data
"INSERT INTO file_data (file_id, id, created_at, modified_at, deleted_at,
type, backend, metadata, data)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)")
(def ^:private sql:upsert-file-data
(str sql:insert-file-data
" ON CONFLICT (file_id, id)
DO UPDATE SET modified_at=?,
deleted_at=?,
backend=?,
metadata=?,
data=?"))
(defn- upsert-in-database
[cfg {:keys [id file-id created-at modified-at deleted-at type backend data metadata]}]
(let [created-at (or created-at (ct/now))
metadata (some-> metadata db/json)
modified-at (or modified-at created-at)]
(db/exec-one! cfg [sql:upsert-file-data
file-id id
created-at
modified-at
deleted-at
type
backend
metadata
data
modified-at
deleted-at
backend
metadata
data])))
(defn- handle-persistence
[cfg {:keys [type backend id file-id data] :as params}]
(cond
(= backend "storage")
(let [storage (sto/resolve cfg)
content (sto/content data)
sobject (sto/put-object! storage
{::sto/content content
::sto/touch true
:bucket "file-data"
:content-type "application/octet-stream"
:file-id file-id
:id id})
metadata {:storage-ref-id (:id sobject)}
params (-> params
(assoc :metadata metadata)
(assoc :data nil))]
(upsert-in-database cfg params))
(= backend "db")
(->> (dissoc params :metadata)
(upsert-in-database cfg))
(= backend "legacy-db")
(cond
(= type "main")
(do
(db/delete! cfg :file-data
{:id id :file-id file-id :type "main"}
{::db/return-keys false})
(db/update! cfg :file
{:data data}
{:id file-id}
{::db/return-keys false}))
(= type "snapshot")
(do
(db/delete! cfg :file-data
{:id id :file-id file-id :type "snapshot"}
{::db/return-keys false})
(db/update! cfg :file-change
{:data data}
{:file-id file-id :id id}
{::db/return-keys false}))
(= type "fragment")
(upsert-in-database cfg
(-> (dissoc params :metadata)
(assoc :backend "db")))
:else
(throw (RuntimeException. "not implemented")))
:else
(throw (IllegalArgumentException.
(str "backend '" backend "' not supported")))))
(defn process-metadata
[cfg metadata]
(when-let [storage-id (:storage-ref-id metadata)]
(let [storage (sto/resolve cfg ::db/reuse-conn true)]
(sto/touch-object! storage storage-id))))
(defn- default-backend
[backend]
(or backend (cf/get :file-data-backend)))
(def ^:private schema:metadata
[:map {:title "Metadata"}
[:storage-ref-id {:optional true} ::sm/uuid]])
(def decode-metadata-with-schema
(sm/decoder schema:metadata sm/json-transformer))
(defn decode-metadata
[metadata]
(some-> metadata
(db/decode-json-pgobject)
(decode-metadata-with-schema)))
(def ^:private schema:update-params
[:map {:closed true}
[:id ::sm/uuid]
[:type [:enum "main" "snapshot" "fragment"]]
[:file-id ::sm/uuid]
[:backend {:optional true} [:enum "db" "legacy-db" "storage"]]
[:metadata {:optional true} [:maybe schema:metadata]]
[:data {:optional true} bytes?]
[:created-at {:optional true} ::ct/inst]
[:modified-at {:optional true} [:maybe ::ct/inst]]
[:deleted-at {:optional true} [:maybe ::ct/inst]]])
(def ^:private check-update-params
(sm/check-fn schema:update-params :hint "invalid params received for update"))
(defn upsert!
"Create or update file data"
[cfg params & {:as opts}]
(let [params (-> (check-update-params params)
(update :backend default-backend))]
(some->> (:metadata params)
(process-metadata cfg))
(-> (handle-persistence cfg params)
(db/get-update-count)
(pos?))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; POINTER-MAP
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -261,10 +76,9 @@
(defn load-pointer
"A database loader pointer helper"
[cfg file-id id]
(let [fragment (some-> (db/get* cfg :file-data
{:id id :file-id file-id :type "fragment"}
{::sql/columns [:data :backend :id :metadata]})
(update :metadata decode-metadata))]
(let [fragment (db/get* cfg :file-data
{:id id :file-id file-id :type "fragment"}
{::sql/columns [:content :backend :id]})]
(l/trc :hint "load pointer"
:file-id (str file-id)
@@ -278,21 +92,22 @@
:file-id file-id
:fragment-id id))
(-> (resolve-file-data cfg fragment)
(get :data)
(blob/decode))))
;; FIXME: conditional thread scheduling for decoding big objects
(blob/decode (:data fragment))))
(defn persist-pointers!
"Persist all currently tracked pointer objects"
[cfg file-id]
(doseq [[id item] @pmap/*tracked*]
(when (pmap/modified? item)
(l/trc :hint "persist pointer" :file-id (str file-id) :id (str id))
(let [content (-> item deref blob/encode)]
(upsert! cfg {:id id
:file-id file-id
:type "fragment"
:data content})))))
(let [conn (db/get-connection cfg)]
(doseq [[id item] @pmap/*tracked*]
(when (pmap/modified? item)
(l/trc :hint "persist pointer" :file-id (str file-id) :id (str id))
(let [content (-> item deref blob/encode)]
(db/insert! conn :file-data
{:id id
:file-id file-id
:type "fragment"
:content content}))))))
(defn process-pointers
"Apply a function to all pointers on the file. Usuly used for
@@ -332,6 +147,53 @@
(d/update-when :components pmap/wrap))))
(update :features conj "fdata/pointer-map")))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; PATH-DATA
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn enable-path-data
"Enable the fdata/path-data feature on the file."
[file & _opts]
(letfn [(update-object [object]
(if (or (cfh/path-shape? object)
(cfh/bool-shape? object))
(update object :content path/content)
object))
(update-container [container]
(d/update-when container :objects d/update-vals update-object))]
(-> file
(update :data (fn [data]
(-> data
(update :pages-index d/update-vals update-container)
(d/update-when :components d/update-vals update-container))))
(update :features conj "fdata/path-data"))))
(defn disable-path-data
[file & _opts]
(letfn [(update-object [object]
(if (or (cfh/path-shape? object)
(cfh/bool-shape? object))
(update object :content vec)
object))
(update-container [container]
(d/update-when container :objects d/update-vals update-object))]
(when-let [conn db/*conn*]
(db/delete! conn :file-migration {:file-id (:id file)
:name "0003-convert-path-content"}))
(-> file
(update :data (fn [data]
(-> data
(update :pages-index d/update-vals update-container)
(d/update-when :components d/update-vals update-container))))
(update :features disj "fdata/path-data")
(update :migrations disj "0003-convert-path-content")
(vary-meta update ::fmg/migrated disj "0003-convert-path-content"))))
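At the shape level the two helpers are inverses; a minimal round-trip sketch using the same update calls:

(-> shape
    (update :content path/content) ;; enable: wrap as path data
    (update :content vec))         ;; disable: back to a plain vector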
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; GENERAL PURPOSE HELPERS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -342,3 +204,302 @@
(->> file
(realize-pointers cfg)
(realize-objects cfg)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; STORAGE
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defmulti resolve-file-data
(fn [_cfg file] (or (get file :backend) "db")))
(defmethod resolve-file-data "db"
[_cfg {:keys [legacy-data data] :as file}]
(if (and (some? legacy-data) (not data))
(-> file
(assoc :data legacy-data)
(dissoc :legacy-data))
(dissoc file :legacy-data)))
(defmethod resolve-file-data "storage"
[cfg object]
(let [storage (sto/resolve cfg ::db/reuse-conn true)
ref-id (-> object :metadata :storage-ref-id)
data (->> (sto/get-object storage ref-id)
(sto/get-object-bytes storage))]
(-> object
(assoc :data data)
(dissoc :legacy-data))))
(defn decode-file-data
[{:keys [::wrk/executor]} {:keys [data] :as file}]
(cond-> file
(bytes? data)
(assoc :data (px/invoke! executor #(blob/decode data)))))
(def ^:private sql:insert-file-data
"INSERT INTO file_data (file_id, id, created_at, modified_at,
type, backend, metadata, data)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)")
(def ^:private sql:upsert-file-data
(str sql:insert-file-data
" ON CONFLICT (file_id, id)
DO UPDATE SET modified_at=?,
backend=?,
metadata=?,
data=?;"))
(defn- create-in-database
[cfg {:keys [id file-id created-at modified-at type backend data metadata]}]
(let [metadata (some-> metadata db/json)
created-at (or created-at (ct/now))
modified-at (or modified-at created-at)]
(db/exec-one! cfg [sql:insert-file-data
file-id id
created-at
modified-at
type
backend
metadata
data])))
(defn- upsert-in-database
[cfg {:keys [id file-id created-at modified-at type backend data metadata]}]
(let [metadata (some-> metadata db/json)
created-at (or created-at (ct/now))
modified-at (or modified-at created-at)]
(db/exec-one! cfg [sql:upsert-file-data
file-id id
created-at
modified-at
type
backend
metadata
data
modified-at
backend
metadata
data])))
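Both database helpers default the timestamps when the caller omits them, and the upsert repeats modified-at, backend, metadata and data at the tail of the parameter vector to feed the DO UPDATE SET placeholders; a hedged call sketch:

(upsert-in-database cfg {:id id
                         :file-id file-id
                         :type "main"
                         :backend "db"
                         :data (blob/encode data)})
;; created-at and modified-at both default to (ct/now)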
(defmulti ^:private handle-persistence
(fn [_cfg params] (:backend params)))
(defmethod handle-persistence "db"
[_ params]
(dissoc params :metadata))
(defmethod handle-persistence "storage"
[{:keys [::sto/storage] :as cfg}
{:keys [id file-id data] :as params}]
(let [content (sto/content data)
sobject (sto/put-object! storage
{::sto/content content
::sto/touch true
:bucket "file-data"
:content-type "application/octet-stream"
:file-id file-id
:id id})
metadata {:storage-ref-id (:id sobject)}]
(-> params
(assoc :metadata metadata)
(assoc :data nil))))
(defn- process-metadata
[cfg metadata]
(when-let [storage-id (:storage-ref-id metadata)]
(let [storage (sto/resolve cfg ::db/reuse-conn true)]
(sto/touch-object! storage storage-id))))
(defn- default-backend
[backend]
(or backend (cf/get :file-storage-backend "db")))
(def ^:private schema:metadata
[:map {:title "Metadata"}
[:storage-ref-id {:optional true} ::sm/uuid]])
(def decode-metadata-with-schema
(sm/decoder schema:metadata sm/json-transformer))
(defn decode-metadata
[metadata]
(some-> metadata
(db/decode-json-pgobject)
(decode-metadata-with-schema)))
(def ^:private schema:update-params
[:map {:closed true}
[:id ::sm/uuid]
[:type [:enum "main" "snapshot"]]
[:file-id ::sm/uuid]
[:backend {:optional true} [:enum "db" "storage"]]
[:metadata {:optional true} [:maybe schema:metadata]]
[:data {:optional true} bytes?]
[:created-at {:optional true} ::ct/inst]
[:modified-at {:optional true} ::ct/inst]])
(def ^:private check-update-params
(sm/check-fn schema:update-params :hint "invalid params received for update"))
(defn update!
[cfg params & {:keys [throw-if-not-exists?]}]
(let [params (-> (check-update-params params)
(update :backend default-backend))]
(some->> (:metadata params) (process-metadata cfg))
(let [result (handle-persistence cfg params)
result (if throw-on-conflict?
(create-in-database cfg result)
(upsert-in-database cfg result))]
(-> result db/get-update-count pos?))))
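A hedged usage sketch for the new entry point, assuming a cfg map that carries the usual ::db/conn and ::sto/storage keys:

(update! cfg {:id file-id
              :file-id file-id
              :type "main"
              :backend "storage"
              :data (blob/encode data)})
;; => true when a row was inserted or updated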
(defn create!
[cfg params]
(update! cfg params :throw-on-conflict? true))
(def ^:private schema:delete-params
[:map {:closed true}
[:id ::sm/uuid]
[:type [:enum "main" "snapshot"]]
[:file-id ::sm/uuid]])
(def check-delete-params
(sm/check-fn schema:delete-params :hint "invalid params received for delete"))
(defn delete!
[cfg params]
(when-let [fdata (db/get* cfg :file-data
(check-delete-params params))]
(some->> (get fdata :metadata)
(decode-metadata)
(process-metadata cfg))
(-> (db/delete! cfg :file-data params)
(db/get-update-count)
(pos?))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; SCRIPTS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def ^:private sql:get-unmigrated-files
"SELECT f.id, f.data, f.created_at, f.modified_at
FROM file AS f
WHERE f.data IS NOT NULL
ORDER BY f.modified_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn migrate-files-to-storage
"Migrate the current existing files to store data in new storage
tables."
[system & {:keys [chunk-size] :or {chunk-size 100}}]
(db/tx-run! system
(fn [{:keys [::db/conn]}]
(reduce (fn [total {:keys [id data index created-at modified-at]}]
(l/dbg :hint "migrating file" :file-id (str id))
(db/update! conn :file {:data nil} {:id id} ::db/return-keys false)
(db/insert! conn :file-data
{:backend "db"
:metadata nil
:type "main"
:data data
:created-at created-at
:modified-at modified-at
:file-id id
:id id}
{::db/return-keys false})
(inc total))
0
(db/plan conn [sql:get-unmigrated-files chunk-size]
{:fetch-size 1})))))
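The migration helpers are meant to be run in bounded chunks, e.g. from a REPL; an invocation sketch where the app.main/system var name is an assumption:

;; returns the number of files migrated in this chunk
(migrate-files-to-storage app.main/system :chunk-size 500)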
(def ^:private sql:get-migrated-files
"SELECT f.id, f.data
FROM file_data AS f
WHERE f.data IS NOT NULL
AND f.id = f.file_id
ORDER BY f.id ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn rollback-files-from-storage
"Migrate back to the file table storage."
[system & {:keys [chunk-size] :or {chunk-size 100}}]
(db/tx-run! system
(fn [{:keys [::db/conn]}]
(reduce (fn [total {:keys [id data]}]
(l/dbg :hint "rollback file" :file-id (str id))
(db/update! conn :file {:data data} {:id id} ::db/return-keys false)
(db/delete! conn :file-data {:id id} ::db/return-keys false)
(inc total))
0
(db/plan conn [sql:get-migrated-files chunk-size]
{:fetch-size 1})))))
(def ^:private sql:get-unmigrated-snapshots
"SELECT fc.id, fc.data, fc.file_id, fc.created_at, fc.updated_at AS modified_at
FROM file_change AS fc
WHERE fc.data IS NOT NULL
AND fc.label IS NOT NULL
ORDER BY fc.id ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn migrate-snapshots-to-storage
"Migrate the current existing files to store data in new storage
tables."
[system & {:keys [chunk-size] :or {chunk-size 100}}]
(db/tx-run! system
(fn [{:keys [::db/conn]}]
(reduce (fn [total {:keys [id file-id data created-at modified-at]}]
(l/dbg :hint "migrating snapshot" :file-id (str file-id) :id (str id))
(db/update! conn :file-change {:data nil} {:id id :file-id file-id} ::db/return-keys false)
(db/insert! conn :file-data
{:backend "db"
:metadata nil
:type "snapshot"
:data data
:created-at created-at
:modified-at modified-at
:file-id file-id
:id id}
{::db/return-keys false})
(inc total))
0
(db/plan conn [sql:get-unmigrated-snapshots chunk-size]
{:fetch-size 1})))))
(def ^:private sql:get-migrated-snapshots
"SELECT f.id, f.data, f.file_id
FROM file_data AS f
WHERE f.data IS NOT NULL
AND f.type = 'snapshot'
AND f.id != f.file_id
ORDER BY f.id ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn rollback-snapshots-from-storage
"Migrate back to the file table storage."
[system & {:keys [chunk-size] :or {chunk-size 100}}]
(db/tx-run! system
(fn [{:keys [::db/conn]}]
(db/exec! conn ["SET statement_timeout = 0"])
(db/exec! conn ["SET idle_in_transaction_session_timeout = 0"])
(reduce (fn [total {:keys [id file-id data]}]
(l/dbg :hint "rollback snapshot" :file-id (str id) :id (str id))
(db/update! conn :file-change {:data data} {:id id :file-id file-id} ::db/return-keys false)
(db/delete! conn :file-data {:id id :file-id file-id} ::db/return-keys false)
(inc total))
0
(db/plan conn [sql:get-migrated-snapshots chunk-size]
{:fetch-size 1})))))

View File

@@ -22,7 +22,8 @@
[app.storage :as sto]
[app.util.blob :as blob]
[app.worker :as wrk]
[cuerdas.core :as str]))
[cuerdas.core :as str]
[promesa.exec :as px]))
(def sql:snapshots
"SELECT c.id,
@@ -40,7 +41,7 @@
c.file_id,
c.data AS legacy_data,
fd.data AS data,
coalesce(fd.backend, 'legacy-db') AS backend,
coalesce(fd.backend, 'db') AS backend,
fd.metadata AS metadata
FROM file_change AS c
LEFT JOIN file_data AS fd ON (fd.file_id = c.file_id
@@ -48,28 +49,11 @@
AND fd.type = 'snapshot')
WHERE c.label IS NOT NULL")
(defn- decode-snapshot
[snapshot]
(some-> snapshot
(-> (d/update-when :metadata fdata/decode-metadata)
(d/update-when :migrations db/decode-pgarray [])
(d/update-when :features db/decode-pgarray #{}))))
(def ^:private sql:get-snapshot
(str sql:snapshots " AND c.file_id = ? AND c.id = ?"))
(def ^:private sql:get-minimal-file
"SELECT f.id,
f.revn,
f.modified_at,
f.deleted_at,
fd.backend AS backend,
fd.metadata AS metadata
FROM file AS f
LEFT JOIN file_data AS fd ON (fd.file_id = f.id AND fd.id = f.id)
WHERE f.id = ?")
(defn- get-minimal-file
[cfg id & {:as opts}]
(-> (db/get-with-sql cfg [sql:get-minimal-file id] opts)
(d/update-when :metadata fdata/decode-metadata)))
(def ^:private sql:get-snapshots
(str sql:snapshots " AND c.file_id = ?"))
(def ^:private sql:get-snapshot-without-data
(str "WITH snapshots AS (" sql:snapshots ")"
@@ -81,46 +65,48 @@
c.deleted_at,
c.profile_id,
c.created_by,
c.locked_by,
c.features,
c.metadata,
c.migrations,
c.version,
c.file_id
FROM snapshots AS c
WHERE c.id = ?
AND CASE WHEN c.created_by = 'user'
THEN c.deleted_at IS NULL
WHEN c.created_by = 'system'
THEN c.deleted_at IS NULL OR c.deleted_at >= ?::timestamptz
END"))
WHERE c.id = ?"))
(defn- decode-snapshot
[snapshot]
(some-> snapshot (-> (d/update-when :metadata fdata/decode-metadata)
(d/update-when :migrations db/decode-pgarray [])
(d/update-when :features db/decode-pgarray #{}))))
(def sql:get-minimal-file
"SELECT f.id,
f.revn,
f.modified_at,
f.deleted_at,
fd.backend AS backend,
fd.metadata AS metadata
FROM file AS f
LEFT JOIN file_data AS fd ON (fd.file_id = f.id AND fd.id = f.id)
WHERE f.id = ?")
(defn get-minimal-file
[cfg id & {:as opts}]
(-> (db/get-with-sql cfg [sql:get-minimal-file id] opts)
(d/update-when :metadata fdata/decode-metadata)))
(defn get-minimal-snapshot
[cfg snapshot-id]
(let [now (ct/now)]
(-> (db/get-with-sql cfg [sql:get-snapshot-without-data snapshot-id now]
{::db/remove-deleted false})
(decode-snapshot))))
(-> (db/get-with-sql cfg [sql:get-snapshot-without-data snapshot-id])
(decode-snapshot)))
(def ^:private sql:get-snapshot
(str sql:snapshots
" AND c.file_id = ?
AND c.id = ?
AND CASE WHEN c.created_by = 'user'
THEN (c.deleted_at IS NULL)
WHEN c.created_by = 'system'
THEN (c.deleted_at IS NULL OR c.deleted_at >= ?::timestamptz)
END"))
(defn- get-snapshot
(defn get-snapshot
"Get snapshot with decoded data"
[cfg file-id snapshot-id]
(let [now (ct/now)]
(->> (db/get-with-sql cfg [sql:get-snapshot file-id snapshot-id now]
{::db/remove-deleted false})
(decode-snapshot)
(fdata/resolve-file-data cfg)
(fdata/decode-file-data cfg))))
(->> (db/get-with-sql cfg [sql:get-snapshot file-id snapshot-id])
(decode-snapshot)
(fdata/resolve-file-data cfg)
(fdata/decode-file-data cfg)))
(def ^:private sql:get-visible-snapshots
(str "WITH "
@@ -128,30 +114,22 @@
"snapshots2 AS (
SELECT c.id,
c.label,
c.revn,
c.version,
c.created_at,
c.modified_at,
c.created_by,
c.locked_by,
c.profile_id,
c.deleted_at
c.profile_id
FROM snapshots1 AS c
WHERE c.file_id = ?
AND (c.deleted_at IS NULL OR deleted_at > now())
), snapshots3 AS (
(SELECT * FROM snapshots2
WHERE created_by = 'system'
AND (deleted_at IS NULL OR
deleted_at >= ?::timestamptz)
LIMIT 500)
(SELECT * FROM snapshots2 WHERE created_by = 'system' LIMIT 1000)
UNION ALL
(SELECT * FROM snapshots2
WHERE created_by = 'user'
AND deleted_at IS NULL
LIMIT 500)
(SELECT * FROM snapshots2 WHERE created_by != 'system' LIMIT 1000)
)
SELECT * FROM snapshots3
ORDER BY created_at DESC"))
ORDER BY created_at DESC;"))
(defn get-visible-snapshots
"Return a list of snapshots fecheable from the API, it has a limited
@@ -159,9 +137,8 @@
snapshots. It returns a vector ordered by the snapshot date of
creation."
[cfg file-id]
(let [now (ct/now)]
(->> (db/exec! cfg [sql:get-visible-snapshots file-id now])
(mapv decode-snapshot))))
(->> (db/exec! cfg [sql:get-visible-snapshots file-id])
(mapv decode-snapshot)))
(def ^:private schema:decoded-file
[:map {:title "DecodedFile"}
@@ -188,9 +165,19 @@
[:modified-at ::ct/inst]
[:created-at ::ct/inst]])
(def ^:private schema:snapshot-params
[:map {:title "SnapshotParams"}
[:id ::sm/uuid]
[:file-id ::sm/uuid]
[:label ::sm/text]
[:modified-at {:optional true} ::ct/inst]])
(def ^:private check-snapshot
(sm/check-fn schema:snapshot))
(def ^:private check-snapshot-params
(sm/check-fn schema:snapshot-params))
(def ^:private check-decoded-file
(sm/check-fn schema:decoded-file))
@@ -202,62 +189,49 @@
(str/rtrim "Z"))]
(str "snapshot-" ts)))
(def ^:private schema:create-params
[:map {:title "SnapshotCreateParams"}
[:profile-id ::sm/uuid]
[:created-by {:optional true} [:enum "user" "system"]]
[:label {:optional true} ::sm/text]
[:session-id {:optional true} ::sm/uuid]
[:modified-at {:optional true} ::ct/inst]
[:deleted-at {:optional true} ::ct/inst]])
(def ^:private check-create-params
(sm/check-fn schema:create-params))
(defn create!
"Create a file snapshot; expects a non-encoded file"
[cfg file & {:as params}]
(let [{:keys [label created-by deleted-at profile-id session-id]}
(check-create-params params)
"Create a file snapshot; expects a non-encoded file."
[cfg file & {:keys [label created-by deleted-at profile-id session-id]
:or {deleted-at :default
created-by "system"}}]
file
(check-decoded-file file)
(let [file (check-decoded-file file)
created-by
(or created-by "system")
snapshot-id
(uuid/next)
snapshot-id (uuid/next)
created-at (ct/now)
deleted-at (cond
(= deleted-at :default)
(ct/plus (ct/now) (cf/get-deletion-delay))
created-at
(ct/now)
(ct/inst? deleted-at)
deleted-at
deleted-at
(or deleted-at
(if (= created-by "system")
(ct/in-future (cf/get-deletion-delay))
nil))
:else
nil)
label
(or label (generate-snapshot-label))
label (or label (generate-snapshot-label))
data (px/invoke! (::wrk/executor cfg) #(blob/encode (:data file)))
features (:features file)
migrations (:migrations file)
snapshot
(cond-> {:id snapshot-id
:revn (:revn file)
:version (:version file)
:file-id (:id file)
:features (:features file)
:migrations (:migrations file)
:label label
:created-at created-at
:modified-at created-at
:created-by created-by}
snapshot {:id snapshot-id
:revn (:revn file)
:version (:version file)
:file-id (:id file)
:features features
:migrations migrations
:label label
:created-at created-at
:modified-at created-at
:created-by created-by}
deleted-at
(assoc :deleted-at deleted-at)
snapshot (cond-> snapshot
deleted-at
(assoc :deleted-at deleted-at)
:always
(check-snapshot))]
:always
(check-snapshot))]
(db/insert! cfg :file-change
(-> snapshot
@@ -269,43 +243,25 @@
(dissoc :modified-at))
{::db/return-keys false})
(fdata/upsert! cfg
(fdata/create! cfg
{:id snapshot-id
:file-id (:id file)
:type "snapshot"
:data (blob/encode (:data file))
:data data
:created-at created-at
:deleted-at deleted-at})
:modified-at created-at})
snapshot))
(def ^:private schema:update-params
[:map {:title "SnapshotUpdateParams"}
[:id ::sm/uuid]
[:file-id ::sm/uuid]
[:label ::sm/text]
[:modified-at {:optional true} ::ct/inst]])
(def ^:private check-update-params
(sm/check-fn schema:update-params))
(defn update!
[cfg params]
(let [{:keys [id file-id label modified-at]}
(check-update-params params)
(check-snapshot-params params)
modified-at
(or modified-at (ct/now))]
(db/update! cfg :file-data
{:deleted-at nil
:modified-at modified-at}
{:file-id file-id
:id id
:type "snapshot"}
{::db/return-keys false})
(-> (db/update! cfg :file-change
{:label label
:created-by "user"
@@ -370,7 +326,7 @@
;; In the same way, on resetting the file data we need to restore
;; the applied migrations from the moment the snapshot was taken
(bfc/update-file! cfg file ::bfc/reset-migrations? true)
(bfc/update-file! cfg file ::bfc/reset-migrations true)
;; FIXME: this should be split into separate functions; we should
;; not have inline SQL here.
@@ -394,47 +350,17 @@
vern)))
(defn delete!
[cfg & {:keys [id file-id deleted-at]}]
(assert (uuid? id) "missing id")
(assert (uuid? file-id) "missing file-id")
(assert (ct/inst? deleted-at) "missing deleted-at")
[cfg {:keys [id file-id]}]
(let [deleted-at (ct/now)]
(db/update! cfg :file-change
{:deleted-at deleted-at}
{:id id :file-id file-id}
{::db/return-keys false})
true))
(wrk/submit! {::db/conn (db/get-connection cfg)
::wrk/task :delete-object
::wrk/params {:object :snapshot
:deleted-at deleted-at
:file-id file-id
:id id}})
(db/update! cfg :file-change
{:deleted-at deleted-at}
{:id id :file-id file-id}
{::db/return-keys false})
true)
(def ^:private sql:get-snapshots
(str sql:snapshots " AND c.file_id = ?"))
(defn lock-by!
[conn id profile-id]
(-> (db/update! conn :file-change
{:locked-by profile-id}
{:id id}
{::db/return-keys false})
(db/get-update-count)
(pos?)))
(defn unlock!
[conn id]
(-> (db/update! conn :file-change
{:locked-by nil}
{:id id}
{::db/return-keys false})
(db/get-update-count)
(pos?)))
(defn reduce-snapshots
"Process the file snapshots using efficient reduction; the file
reduction comes with all snapshots, including those marked as deleted"
"Process the file snapshots using efficient reduction."
[cfg file-id xform f init]
(let [conn (db/get-connection cfg)
xform (comp
@@ -444,3 +370,4 @@
(->> (db/plan conn [sql:get-snapshots file-id] {:fetch-size 1})
(transduce xform f init))))
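A minimal reduction sketch that collects snapshot labels (the supplied xform composes with the decoding steps set up above):

(reduce-snapshots cfg file-id (map :label) conj [])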

View File

@@ -17,9 +17,7 @@
[app.http.awsns :as-alias awsns]
[app.http.debug :as-alias debug]
[app.http.errors :as errors]
[app.http.management :as mgmt]
[app.http.middleware :as mw]
[app.http.security :as sec]
[app.http.session :as session]
[app.http.websocket :as-alias ws]
[app.main :as-alias main]
@@ -27,7 +25,9 @@
[app.rpc :as-alias rpc]
[app.rpc.doc :as-alias rpc.doc]
[app.setup :as-alias setup]
[app.worker :as wrk]
[integrant.core :as ig]
[promesa.exec :as px]
[reitit.core :as r]
[reitit.middleware :as rr]
[yetti.adapter :as yt]
@@ -54,8 +54,6 @@
[:map
[::port ::sm/int]
[::host ::sm/text]
[::io-threads {:optional true} ::sm/int]
[::max-worker-threads {:optional true} ::sm/int]
[::max-body-size {:optional true} ::sm/int]
[::max-multipart-body-size {:optional true} ::sm/int]
[::router {:optional true} [:fn r/router?]]
@@ -66,41 +64,31 @@
(assert (sm/check schema:server-params params)))
(defmethod ig/init-key ::server
[_ {:keys [::handler ::router ::host ::port ::mtx/metrics] :as cfg}]
[_ {:keys [::handler ::router ::host ::port ::wrk/executor] :as cfg}]
(l/info :hint "starting http server" :port port :host host)
(let [on-dispatch
(fn [_ start-at-ns]
(let [timing (- (System/nanoTime) start-at-ns)
timing (int (/ timing 1000000))]
(mtx/run! metrics
:id :http-server-dispatch-timing
:val timing)))
(let [options {:http/port port
:http/host host
:http/max-body-size (::max-body-size cfg)
:http/max-multipart-body-size (::max-multipart-body-size cfg)
:xnio/direct-buffers false
:xnio/io-threads (or (::io-threads cfg)
(max 3 (px/get-available-processors)))
:xnio/dispatch executor
:ring/compat :ring2
:socket/backlog 4069}
options
{:http/port port
:http/host host
:http/max-body-size (::max-body-size cfg)
:http/max-multipart-body-size (::max-multipart-body-size cfg)
:xnio/direct-buffers false
:xnio/io-threads (::io-threads cfg)
:xnio/max-worker-threads (::max-worker-threads cfg)
:ring/compat :ring2
:events/on-dispatch on-dispatch
:socket/backlog 4069}
handler (cond
(some? router)
(router-handler router)
handler
(cond
(some? router)
(router-handler router)
(some? handler)
handler
(some? handler)
handler
:else
(throw (UnsupportedOperationException. "handler or router are required")))
:else
(throw (UnsupportedOperationException. "handler or router are required")))
server
(yt/server handler (d/without-nils options))]
options (d/without-nils options)
server (yt/server handler options)]
(assoc cfg ::server (yt/start! server))))
@@ -155,7 +143,6 @@
[::debug/routes schema:routes]
[::mtx/routes schema:routes]
[::awsns/routes schema:routes]
[::mgmt/routes schema:routes]
::session/manager
::setup/props
::db/pool])
@@ -168,7 +155,6 @@
[_ cfg]
(rr/router
[["" {:middleware [[mw/server-timing]
[sec/sec-fetch-metadata]
[mw/params]
[mw/format-response]
[session/soft-auth cfg]
@@ -184,13 +170,9 @@
["/webhooks"
(::awsns/routes cfg)]
["/management"
(::mgmt/routes cfg)]
(::ws/routes cfg)
["/api" {:middleware [[mw/cors]
[sec/client-header-check]]}
["/api" {:middleware [[mw/cors]]}
(::oidc/routes cfg)
(::rpc.doc/routes cfg)
(::rpc/routes cfg)]]]))

View File

@@ -14,18 +14,18 @@
[app.tokens :as tokens]
[yetti.request :as yreq]))
(def header-re #"(?i)^Token\s+(.*)")
(def header-re #"^Token\s+(.*)")
(defn get-token
(defn- get-token
[request]
(some->> (yreq/get-header request "authorization")
(re-matches header-re)
(second)))
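For reference, the regex only strips the Token prefix and second picks the capture group; note that with the (?i) flag dropped in this hunk, a lowercase "token" prefix no longer matches. A quick REPL sketch:
(re-matches header-re "Token abc123")
;; => ["Token abc123" "abc123"]
(second (re-matches header-re "Token abc123"))
;; => "abc123"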
(defn- decode-token
[cfg token]
[props token]
(when token
(tokens/verify cfg {:token token :iss "access-token"})))
(tokens/verify props {:token token :iss "access-token"})))
(def sql:get-token-data
"SELECT perms, profile_id, expires_at
@@ -43,11 +43,11 @@
(defn- wrap-soft-auth
"Soft Authentication, will be executed synchronously on the undertow
worker thread."
[handler cfg]
[handler {:keys [::setup/props]}]
(letfn [(handle-request [request]
(try
(let [token (get-token request)
claims (decode-token cfg token)]
claims (decode-token props token)]
(cond-> request
(map? claims)
(assoc ::id (:tid claims))))

View File

@@ -17,9 +17,11 @@
[app.main :as-alias main]
[app.setup :as-alias setup]
[app.tokens :as tokens]
[app.worker :as-alias wrk]
[clojure.data.json :as j]
[cuerdas.core :as str]
[integrant.core :as ig]
[promesa.exec :as px]
[yetti.request :as yreq]
[yetti.response :as-alias yres]))
@@ -38,8 +40,8 @@
[_ cfg]
(letfn [(handler [request]
(let [data (-> request yreq/body slurp)]
(handle-request cfg data)
{::yres/status 200}))]
(px/run! :vthread (partial handle-request cfg data)))
{::yres/status 200})]
["/sns" {:handler handler
:allowed-methods #{:post}}]))
@@ -107,7 +109,7 @@
[cfg headers]
(let [tdata (get headers "x-penpot-data")]
(when-not (str/empty? tdata)
(let [result (tokens/verify cfg {:token tdata :iss :profile-identity})]
(let [result (tokens/verify (::setup/props cfg) {:token tdata :iss :profile-identity})]
(:profile-id result)))))
(defn- parse-notification

View File

@@ -27,7 +27,6 @@
[app.rpc.commands.profile :as profile]
[app.rpc.commands.teams :as teams]
[app.setup :as-alias setup]
[app.setup.clock :as clock]
[app.srepl.main :as srepl]
[app.storage :as-alias sto]
[app.storage.tmp :as tmp]
@@ -50,17 +49,11 @@
(defn index-handler
[_cfg _request]
(let [{:keys [clock offset]} @clock/current]
{::yres/status 200
::yres/headers {"content-type" "text/html"}
::yres/body (-> (io/resource "app/templates/debug.tmpl")
(tmpl/render {:version (:full cf/version)
:current-clock (str clock)
:current-offset (if offset
(ct/format-duration offset)
"NO OFFSET")
:current-time (ct/format-inst (ct/now) :http)
:supported-features cfeat/supported-features}))}))
{::yres/status 200
::yres/headers {"content-type" "text/html"}
::yres/body (-> (io/resource "app/templates/debug.tmpl")
(tmpl/render {:version (:full cf/version)
:supported-features cfeat/supported-features}))})
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; FILE CHANGES
@@ -397,6 +390,34 @@
::yres/headers {"content-type" "text/plain"}
::yres/body (str/ffmt "PROFILE '%' ACTIVATED" (:email profile))}))))))
(defn- reset-file-version
[cfg {:keys [params] :as request}]
(let [file-id (some-> params :file-id d/parse-uuid)
version (some-> params :version d/parse-integer)]
(when-not (contains? params :force)
(ex/raise :type :validation
:code :missing-force
:hint "missing force checkbox"))
(when (nil? file-id)
(ex/raise :type :validation
:code :invalid-file-id
:hint "provided invalid file id"))
(when (nil? version)
(ex/raise :type :validation
:code :invalid-version
:hint "provided invalid version"))
(db/tx-run! cfg srepl/process-file! file-id #(assoc % :version version))
{::yres/status 200
::yres/headers {"content-type" "text/plain"}
::yres/body "OK"}))
(defn- handle-team-features
[cfg {:keys [params] :as request}]
(let [team-id (some-> params :team-id d/parse-uuid)
@@ -441,24 +462,6 @@
::yres/headers {"content-type" "text/plain"}
::yres/body "OK"}))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; VIRTUAL CLOCK
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- set-virtual-clock
[_ {:keys [params] :as request}]
(let [offset (some-> params :offset str/trim not-empty ct/duration)
reset? (contains? params :reset)]
(if (= "production" (cf/get :tenant))
{::yres/status 501
::yres/body "OPERATION NOT ALLOWED"}
(do
(if (or reset? (zero? (inst-ms offset)))
(clock/set-offset! nil)
(clock/set-offset! offset))
{::yres/status 302
::yres/headers {"location" "/dbg"}}))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; OTHER SMALL VIEWS/HANDLERS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -545,10 +548,10 @@
["/error/:id" {:handler (partial error-handler cfg)}]
["/error" {:handler (partial error-list-handler cfg)}]
["/actions" {:middleware [[errors]]}
["/set-virtual-clock"
{:handler (partial set-virtual-clock cfg)}]
["/resend-email-verification"
{:handler (partial resend-email-notification cfg)}]
["/reset-file-version"
{:handler (partial reset-file-version cfg)}]
["/handle-team-features"
{:handler (partial handle-team-features cfg)}]
["/file-export" {:handler (partial export-handler cfg)}]

View File

@@ -25,14 +25,15 @@
(let [claims (-> {}
(into (::session/token-claims request))
(into (::actoken/token-claims request)))]
(-> (cf/logging-context)
(assoc :request/path (:path request))
(assoc :request/method (:method request))
(assoc :request/params (:params request))
(assoc :request/user-agent (yreq/get-header request "user-agent"))
(assoc :request/ip-addr (inet/parse-request request))
(assoc :request/profile-id (:uid claims))
(assoc :version/frontend (or (yreq/get-header request "x-frontend-version") "unknown")))))
{:request/path (:path request)
:request/method (:method request)
:request/params (:params request)
:request/user-agent (yreq/get-header request "user-agent")
:request/ip-addr (inet/parse-request request)
:request/profile-id (:uid claims)
:version/frontend (or (yreq/get-header request "x-frontend-version") "unknown")
:version/backend (:full cf/version)}))
(defmulti handle-error
(fn [cause _ _]
@@ -60,7 +61,8 @@
::yres/body data}
(binding [l/*context* (request->context request)]
(l/wrn :hint "restriction error" :cause err)
(l/err :hint "restriction error"
:cause err)
{::yres/status 400
::yres/body data}))))

View File

@@ -1,250 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.http.management
"Internal mangement HTTP API"
(:require
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.schema.generators :as sg]
[app.common.time :as ct]
[app.config :as cf]
[app.db :as db]
[app.http.access-token :refer [get-token]]
[app.main :as-alias main]
[app.rpc.commands.profile :as cmd.profile]
[app.setup :as-alias setup]
[app.tokens :as tokens]
[app.worker :as-alias wrk]
[integrant.core :as ig]
[yetti.response :as-alias yres]))
;; ---- ROUTES
(declare ^:private authenticate)
(declare ^:private get-customer)
(declare ^:private update-customer)
(defmethod ig/assert-key ::routes
[_ params]
(assert (db/pool? (::db/pool params)) "expect valid database pool"))
(def ^:private auth
{:name ::auth
:compile
(fn [_ _]
(fn [handler shared-key]
(if shared-key
(fn [request]
(let [token (get-token request)]
(if (= token shared-key)
(handler request)
{::yres/status 403})))
(fn [_ _]
{::yres/status 403}))))})
(def ^:private default-system
{:name ::default-system
:compile
(fn [_ _]
(fn [handler cfg]
(fn [request]
(handler cfg request))))})
(def ^:private transaction
{:name ::transaction
:compile
(fn [data _]
(when (:transaction data)
(fn [handler]
(fn [cfg request]
(db/tx-run! cfg handler request)))))})
(defmethod ig/init-key ::routes
[_ cfg]
["" {:middleware [[auth (cf/get :management-api-shared-key)]
[default-system cfg]
[transaction]]}
["/authenticate"
{:handler authenticate
:allowed-methods #{:post}}]
["/get-customer"
{:handler get-customer
:transaction true
:allowed-methods #{:post}}]
["/update-customer"
{:handler update-customer
:allowed-methods #{:post}
:transaction true}]])
;; ---- HELPERS
(defn- coercer
[schema & {:as opts}]
(let [decode-fn (sm/decoder schema sm/json-transformer)
check-fn (sm/check-fn schema opts)]
(fn [data]
(-> data decode-fn check-fn))))
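The returned function decodes JSON-shaped input first (e.g. uuid strings to uuids) and then validates, raising with the supplied :type and :hint on failure. A minimal sketch with an illustrative one-field schema and uuid:
(def coerce-params
  (coercer [:map [:id ::sm/uuid]]
           :type :validation
           :hint "invalid params"))

(coerce-params {:id "f6c9f1b0-0000-0000-0000-000000000000"})
;; => {:id #uuid "f6c9f1b0-0000-0000-0000-000000000000"}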
;; ---- API: AUTHENTICATE
(defn- authenticate
[cfg request]
(let [token (-> request :params :token)
result (tokens/verify cfg {:token token :iss "authentication"})]
{::yres/status 200
::yres/body result}))
;; ---- API: GET-CUSTOMER
(def ^:private schema:get-customer
[:map [:id ::sm/uuid]])
(def ^:private coerce-get-customer-params
(coercer schema:get-customer
:type :validation
:hint "invalid data provided for `get-customer` rpc call"))
(def ^:private sql:get-customer-slots
"WITH teams AS (
SELECT tpr.team_id AS id,
tpr.profile_id AS profile_id
FROM team_profile_rel AS tpr
WHERE tpr.is_owner IS true
AND tpr.profile_id = ?
), teams_with_slots AS (
SELECT tpr.team_id AS id,
count(*) AS total
FROM team_profile_rel AS tpr
WHERE tpr.team_id IN (SELECT id FROM teams)
AND tpr.can_edit IS true
GROUP BY 1
ORDER BY 2
)
SELECT max(total) AS total FROM teams_with_slots;")
(defn- get-customer-slots
[cfg profile-id]
(let [result (db/exec-one! cfg [sql:get-customer-slots profile-id])]
(:total result)))
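In plain terms: collect the teams the profile owns, count the editors (can_edit) in each, and return the largest count. A hedged call sketch:
(get-customer-slots cfg profile-id)
;; => 5, e.g. when the profile owns teams with 3 and 5 editors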
(defn- get-customer
[cfg request]
(let [profile-id (-> request :params coerce-get-customer-params :id)
profile (cmd.profile/get-profile cfg profile-id)
result {:id (get profile :id)
:name (get profile :fullname)
:email (get profile :email)
:num-editors (get-customer-slots cfg profile-id)
:subscription (-> profile :props :subscription)}]
{::yres/status 200
::yres/body result}))
;; ---- API: UPDATE-CUSTOMER
(def ^:private schema:timestamp
(sm/type-schema
{:type ::timestamp
:pred ct/inst?
:type-properties
{:title "inst"
:description "The same as :app.common.time/inst but encodes to epoch"
:error/message "should be an instant"
:gen/gen (->> (sg/small-int)
(sg/fmap (fn [v] (ct/inst v))))
:decode/string ct/inst
:encode/string inst-ms
:decode/json ct/inst
:encode/json inst-ms}}))
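On the wire the value is epoch milliseconds; in memory it is an instant. A round-trip sketch reusing sm/decoder as the coercer above does (the exact decoder API is assumed to match):
(def decode-ts (sm/decoder schema:timestamp sm/json-transformer))
(decode-ts 1700000000000)
;; => the instant 2023-11-14T22:13:20Z; encoding via inst-ms is the inverse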
(def ^:private schema:subscription
[:map {:title "Subscription"}
[:id ::sm/text]
[:customer-id ::sm/text]
[:type [:enum
"unlimited"
"professional"
"enterprise"]]
[:status [:enum
"active"
"canceled"
"incomplete"
"incomplete_expired"
"past_due"
"paused"
"trialing"
"unpaid"]]
[:billing-period [:enum
"month"
"day"
"week"
"year"]]
[:quantity :int]
[:description [:maybe ::sm/text]]
[:created-at schema:timestamp]
[:start-date [:maybe schema:timestamp]]
[:ended-at [:maybe schema:timestamp]]
[:trial-end [:maybe schema:timestamp]]
[:trial-start [:maybe schema:timestamp]]
[:cancel-at [:maybe schema:timestamp]]
[:canceled-at [:maybe schema:timestamp]]
[:current-period-end [:maybe schema:timestamp]]
[:current-period-start [:maybe schema:timestamp]]
[:cancel-at-period-end :boolean]
[:cancellation-details
[:map {:title "CancellationDetails"}
[:comment [:maybe ::sm/text]]
[:reason [:maybe ::sm/text]]
[:feedback [:maybe
[:enum
"customer_service"
"low_quality"
"missing_feature"
"other"
"switched_service"
"too_complex"
"too_expensive"
"unused"]]]]]])
(def ^:private schema:update-customer
[:map
[:id ::sm/uuid]
[:subscription [:maybe schema:subscription]]])
(def ^:private coerce-update-customer-params
(coercer schema:update-customer
:type :validation
:hint "invalid data provided for `update-customer` rpc call"))
(defn- update-customer
[cfg request]
(let [{:keys [id subscription]}
(-> request :params coerce-update-customer-params)
{:keys [props] :as profile}
(cmd.profile/get-profile cfg id ::db/for-update true)
props
(assoc props :subscription subscription)]
(l/dbg :hint "update customer"
:profile-id (str id)
:subscription-type (get subscription :type)
:subscription-status (get subscription :status)
:subscription-quantity (get subscription :quantity))
(db/update! cfg :profile
{:props (db/tjson props)}
{:id id}
{::db/return-keys false})
{::yres/status 201
::yres/body nil}))

View File

@@ -1,55 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.http.security
"Additional security layer middlewares"
(:require
[app.config :as cf]
[yetti.request :as yreq]
[yetti.response :as yres]))
(def ^:private safe-methods
#{:get :head :options})
(defn- wrap-sec-fetch-metadata
"Sec-Fetch metadata security layer middleware"
[handler]
(fn [request]
(let [site (yreq/get-header request "sec-fetch-site")]
(cond
(= site "same-origin")
(handler request)
(or (= site "same-site")
(= site "cross-site"))
(if (contains? safe-methods (yreq/method request))
(handler request)
{::yres/status 403})
:else
(handler request)))))
(def sec-fetch-metadata
{:name ::sec-fetch-metadata
:compile (fn [_ _]
(when (contains? cf/flags :sec-fetch-metadata-middleware)
wrap-sec-fetch-metadata))})
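The decision table for this middleware, as written, is worth spelling out; note the compile fn returns nil when the sec-fetch-metadata-middleware flag is off, which disables the middleware entirely:
;; sec-fetch-site header      method   outcome
;; "same-origin"              any      handler runs
;; "same-site"/"cross-site"   :get     handler runs (safe method)
;; "same-site"/"cross-site"   :post    403
;; absent (old UA, curl)      any      handler runs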
(defn- wrap-client-header-check
"Check for a penpot custom header to be present as additional CSRF
protection"
[handler]
(fn [request]
(let [client (yreq/get-header request "x-client")]
(if (some? client)
(handler request)
{::yres/status 403}))))
(def client-header-check
{:name ::client-header-check
:compile (fn [_ _]
(when (contains? cf/flags :client-header-check-middleware)
wrap-client-header-check))})

View File

@@ -11,6 +11,7 @@
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uri :as u]
[app.config :as cf]
[app.db :as db]
[app.db.sql :as sql]
@@ -71,7 +72,7 @@
(sm/validator schema:params))
(defn- prepare-session-params
[params key]
[key params]
(assert (string? key) "expected key to be a string")
(assert (not (str/blank? key)) "expected key to be not empty")
(assert (valid-params? params) "expected valid params")
@@ -89,9 +90,7 @@
(db/exec-one! pool (sql/select :http-session {:id token})))
(write! [_ key params]
(let [params (-> params
(assoc :created-at (ct/now))
(prepare-session-params key))]
(let [params (prepare-session-params key params)]
(db/insert! pool :http-session params)
params))
@@ -114,9 +113,7 @@
(get @cache token))
(write! [_ key params]
(let [params (-> params
(assoc :created-at (ct/now))
(prepare-session-params key))]
(let [params (prepare-session-params key params)]
(swap! cache assoc key params)
params))
@@ -147,23 +144,27 @@
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(declare ^:private assign-auth-token-cookie)
(declare ^:private assign-auth-data-cookie)
(declare ^:private clear-auth-token-cookie)
(declare ^:private clear-auth-data-cookie)
(declare ^:private gen-token)
(defn create-fn
[{:keys [::manager] :as cfg} profile-id]
[{:keys [::manager ::setup/props]} profile-id]
(assert (manager? manager) "expected valid session manager")
(assert (uuid? profile-id) "expected valid uuid for profile-id")
(fn [request response]
(let [uagent (yreq/get-header request "user-agent")
params {:profile-id profile-id
:user-agent uagent}
token (gen-token cfg params)
:user-agent uagent
:created-at (ct/now)}
token (gen-token props params)
session (write! manager token params)]
(l/trc :hint "create" :profile-id (str profile-id))
(l/trace :hint "create" :profile-id (str profile-id))
(-> response
(assign-auth-token-cookie session)))))
(assign-auth-token-cookie session)
(assign-auth-data-cookie session)))))
(defn delete-fn
[{:keys [::manager]}]
@@ -171,22 +172,23 @@
(fn [request response]
(let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
cookie (yreq/get-cookie request cname)]
(l/trc :hint "delete" :profile-id (:profile-id request))
(l/trace :hint "delete" :profile-id (:profile-id request))
(some->> (:value cookie) (delete! manager))
(-> response
(assoc :status 204)
(assoc :body nil)
(clear-auth-token-cookie)))))
(clear-auth-token-cookie)
(clear-auth-data-cookie)))))
(defn- gen-token
[cfg {:keys [profile-id created-at]}]
(tokens/generate cfg {:iss "authentication"
:iat created-at
:uid profile-id}))
[props {:keys [profile-id created-at]}]
(tokens/generate props {:iss "authentication"
:iat created-at
:uid profile-id}))
(defn- decode-token
[cfg token]
[props token]
(when token
(tokens/verify cfg {:token token :iss "authentication"})))
(tokens/verify props {:token token :iss "authentication"})))
(defn- get-token
[request]
@@ -206,18 +208,18 @@
(neg? (compare default-renewal-max-age elapsed)))))
(defn- wrap-soft-auth
[handler {:keys [::manager] :as cfg}]
[handler {:keys [::manager ::setup/props]}]
(assert (manager? manager) "expected valid session manager")
(letfn [(handle-request [request]
(try
(let [token (get-token request)
claims (decode-token cfg token)]
claims (decode-token props token)]
(cond-> request
(map? claims)
(-> (assoc ::token-claims claims)
(assoc ::token token))))
(catch Throwable cause
(l/trc :hint "exception on decoding malformed token" :cause cause)
(l/trace :hint "exception on decoding malformed token" :cause cause)
request)))]
(fn [request]
@@ -237,7 +239,8 @@
(if (renew-session? session)
(let [session (update! manager session)]
(-> response
(assign-auth-token-cookie session)))
(assign-auth-token-cookie session)
(assign-auth-data-cookie session)))
response))))
(def soft-auth
@@ -253,7 +256,7 @@
(defn- assign-auth-token-cookie
[response {token :id updated-at :updated-at}]
(let [max-age (cf/get :auth-token-cookie-max-age default-cookie-max-age)
created-at updated-at
created-at (or updated-at (ct/now))
renewal (ct/plus created-at default-renewal-max-age)
expires (ct/plus created-at max-age)
secure? (contains? cf/flags :secure-session-cookies)
@@ -270,15 +273,53 @@
:secure secure?}]
(update response :cookies assoc name cookie)))
(defn- assign-auth-data-cookie
[response {profile-id :profile-id updated-at :updated-at}]
(let [max-age (cf/get :auth-token-cookie-max-age default-cookie-max-age)
domain (cf/get :auth-data-cookie-domain)
cname default-auth-data-cookie-name
created-at (or updated-at (ct/now))
renewal (ct/plus created-at default-renewal-max-age)
expires (ct/plus created-at max-age)
comment (str "Renewal at: " (ct/format-inst renewal :rfc1123))
secure? (contains? cf/flags :secure-session-cookies)
strict? (contains? cf/flags :strict-session-cookies)
cors? (contains? cf/flags :cors)
cookie {:domain domain
:expires expires
:path "/"
:comment comment
:value (u/map->query-string {:profile-id profile-id})
:same-site (if cors? :none (if strict? :strict :lax))
:secure secure?}]
(cond-> response
(string? domain)
(update :cookies assoc cname cookie))))
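The cookie value is a plain query-string so non-Clojure consumers can parse it without transit, and the cookie is only set when :auth-data-cookie-domain is configured. A value sketch (the uuid is illustrative, and the encoding is assumed from u/map->query-string):
(u/map->query-string {:profile-id #uuid "f6c9f1b0-0000-0000-0000-000000000000"})
;; => "profile-id=f6c9f1b0-0000-0000-0000-000000000000"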
(defn- clear-auth-token-cookie
[response]
(let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name)]
(update response :cookies assoc cname {:path "/" :value "" :max-age 0})))
(defn- clear-auth-data-cookie
[response]
(let [cname default-auth-data-cookie-name
domain (cf/get :auth-data-cookie-domain)]
(cond-> response
(string? domain)
(update :cookies assoc cname {:domain domain :path "/" :value "" :max-age 0}))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; TASK: SESSION GC
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; FIXME: MOVE
(defmethod ig/assert-key ::tasks/gc
[_ params]
(assert (db/pool? (::db/pool params)) "expected valid database pool")
@@ -291,23 +332,22 @@
(def ^:private
sql:delete-expired
"DELETE FROM http_session
WHERE updated_at < ?::timestamptz
"delete from http_session
where updated_at < now() - ?::interval
or (updated_at is null and
created_at < ?::timestamptz)")
created_at < now() - ?::interval)")
(defn- collect-expired-tasks
[{:keys [::db/conn ::tasks/max-age]}]
(let [threshold (ct/minus (ct/now) max-age)
result (-> (db/exec-one! conn [sql:delete-expired threshold threshold])
(db/get-update-count))]
(l/dbg :task "gc"
:hint "clean http sessions"
:deleted result)
(let [interval (db/interval max-age)
result (db/exec-one! conn [sql:delete-expired interval interval])
result (:next.jdbc/update-count result)]
(l/debug :task "gc"
:hint "clean http sessions"
:deleted result)
result))
(defmethod ig/init-key ::tasks/gc
[_ {:keys [::tasks/max-age] :as cfg}]
(l/dbg :hint "initializing session gc task" :max-age max-age)
(fn [_]
(db/tx-run! cfg collect-expired-tasks)))
(l/debug :hint "initializing session gc task" :max-age max-age)
(fn [_] (db/tx-run! cfg collect-expired-tasks)))

View File

@@ -33,7 +33,7 @@
(println "event:" (d/name name))
(println "data:" (t/encode-str data {:type :json-verbose}))
(println))]
(.getBytes ^String data "UTF-8"))
(.getBytes data "UTF-8"))
(catch Throwable cause
(l/err :hint "unexpected error on encoding value on sse stream"
:cause cause)
@@ -44,8 +44,7 @@
(def default-headers
{"Content-Type" "text/event-stream;charset=UTF-8"
"Cache-Control" "no-cache, no-store, max-age=0, must-revalidate"
"Pragma" "no-cache"
"X-Accel-Buffering" "no"})
"Pragma" "no-cache"})
(defn response
[handler & {:keys [buf] :or {buf 32} :as opts}]
@@ -55,7 +54,7 @@
::yres/body (yres/stream-body
(fn [_ output]
(let [channel (sp/chan :buf buf :xf (keep encode))
listener (events/spawn-listener
listener (events/start-listener
channel
(partial write! output)
(partial pu/close! output))]

View File

@@ -9,6 +9,7 @@
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.transit :as t]
[app.common.uuid :as uuid]
[app.config :as cf]
@@ -52,8 +53,9 @@
(defn- send!
[{:keys [::uri] :as cfg} events]
(let [token (tokens/generate cfg
(let [token (tokens/generate (::setup/props cfg)
{:iss "authentication"
:iat (ct/now)
:uid uuid/zero})
body (t/encode {:events events})
headers {"content-type" "application/transit+json"

View File

@@ -20,7 +20,6 @@
[app.http.awsns :as http.awsns]
[app.http.client :as-alias http.client]
[app.http.debug :as-alias http.debug]
[app.http.management :as mgmt]
[app.http.session :as-alias session]
[app.http.session.tasks :as-alias session.tasks]
[app.http.websocket :as http.ws]
@@ -42,7 +41,6 @@
[app.svgo :as-alias svgo]
[app.util.cron]
[app.worker :as-alias wrk]
[app.worker.executor]
[clojure.test :as test]
[clojure.tools.namespace.repl :as repl]
[cuerdas.core :as str]
@@ -149,11 +147,23 @@
::mdef/labels []
::mdef/type :histogram}
:http-server-dispatch-timing
{::mdef/name "penpot_http_server_dispatch_timing"
::mdef/help "Histogram of dispatch handler"
::mdef/labels []
::mdef/type :histogram}})
:executors-active-threads
{::mdef/name "penpot_executors_active_threads"
::mdef/help "Current number of threads available in the executor service."
::mdef/labels ["name"]
::mdef/type :gauge}
:executors-completed-tasks
{::mdef/name "penpot_executors_completed_tasks_total"
::mdef/help "Approximate number of completed tasks by the executor."
::mdef/labels ["name"]
::mdef/type :counter}
:executors-running-threads
{::mdef/name "penpot_executors_running_threads"
::mdef/help "Current number of threads with state RUNNING."
::mdef/labels ["name"]
::mdef/type :gauge}})
(def system-config
{::db/pool
@@ -165,12 +175,14 @@
::db/max-size (cf/get :database-max-pool-size 60)
::mtx/metrics (ig/ref ::mtx/metrics)}
;; Default netty IO pool (shared between several services)
::wrk/netty-io-executor
{:threads (cf/get :netty-io-threads)}
;; Default thread pool for IO operations
::wrk/executor
{}
::wrk/netty-executor
{:threads (cf/get :executor-threads)}
::wrk/monitor
{::mtx/metrics (ig/ref ::mtx/metrics)
::wrk/executor (ig/ref ::wrk/executor)
::wrk/name "default"}
:app.migrations/migrations
{::db/pool (ig/ref ::db/pool)}
@@ -181,27 +193,17 @@
::mtx/routes
{::mtx/metrics (ig/ref ::mtx/metrics)}
::rds/client
{::rds/uri
(cf/get :redis-uri)
::wrk/netty-executor
(ig/ref ::wrk/netty-executor)
::wrk/netty-io-executor
(ig/ref ::wrk/netty-io-executor)}
::rds/pool
{::rds/client (ig/ref ::rds/client)
::mtx/metrics (ig/ref ::mtx/metrics)}
::rds/redis
{::rds/uri (cf/get :redis-uri)
::mtx/metrics (ig/ref ::mtx/metrics)
::wrk/executor (ig/ref ::wrk/executor)}
::mbus/msgbus
{::wrk/executor (ig/ref ::wrk/netty-executor)
::rds/client (ig/ref ::rds/client)
::mtx/metrics (ig/ref ::mtx/metrics)}
{::wrk/executor (ig/ref ::wrk/executor)
::rds/redis (ig/ref ::rds/redis)}
:app.storage.tmp/cleaner
{::wrk/executor (ig/ref ::wrk/netty-executor)}
{::wrk/executor (ig/ref ::wrk/executor)}
::sto.gc-deleted/handler
{::db/pool (ig/ref ::db/pool)
@@ -229,10 +231,9 @@
::http/host (cf/get :http-server-host)
::http/router (ig/ref ::http/router)
::http/io-threads (cf/get :http-server-io-threads)
::http/max-worker-threads (cf/get :http-server-max-worker-threads)
::http/max-body-size (cf/get :http-server-max-body-size)
::http/max-multipart-body-size (cf/get :http-server-max-multipart-body-size)
::mtx/metrics (ig/ref ::mtx/metrics)}
::wrk/executor (ig/ref ::wrk/executor)}
::ldap/provider
{:host (cf/get :ldap-host)
@@ -272,10 +273,6 @@
::email/blacklist (ig/ref ::email/blacklist)
::email/whitelist (ig/ref ::email/whitelist)}
::mgmt/routes
{::db/pool (ig/ref ::db/pool)
::setup/props (ig/ref ::setup/props)}
:app.http/router
{::session/manager (ig/ref ::session/manager)
::db/pool (ig/ref ::db/pool)
@@ -284,7 +281,6 @@
::setup/props (ig/ref ::setup/props)
::mtx/routes (ig/ref ::mtx/routes)
::oidc/routes (ig/ref ::oidc/routes)
::mgmt/routes (ig/ref ::mgmt/routes)
::http.debug/routes (ig/ref ::http.debug/routes)
::http.assets/routes (ig/ref ::http.assets/routes)
::http.ws/routes (ig/ref ::http.ws/routes)
@@ -310,24 +306,23 @@
::rpc/climit
{::mtx/metrics (ig/ref ::mtx/metrics)
::wrk/executor (ig/ref ::wrk/netty-executor)
::wrk/executor (ig/ref ::wrk/executor)
::climit/config (cf/get :rpc-climit-config)
::climit/enabled (contains? cf/flags :rpc-climit)}
:app.rpc/rlimit
{::wrk/executor (ig/ref ::wrk/netty-executor)}
{::wrk/executor (ig/ref ::wrk/executor)}
:app.rpc/methods
{::http.client/client (ig/ref ::http.client/client)
::db/pool (ig/ref ::db/pool)
::rds/pool (ig/ref ::rds/pool)
::wrk/executor (ig/ref ::wrk/netty-executor)
::wrk/executor (ig/ref ::wrk/executor)
::session/manager (ig/ref ::session/manager)
::ldap/provider (ig/ref ::ldap/provider)
::sto/storage (ig/ref ::sto/storage)
::mtx/metrics (ig/ref ::mtx/metrics)
::mbus/msgbus (ig/ref ::mbus/msgbus)
::rds/client (ig/ref ::rds/client)
::rds/redis (ig/ref ::rds/redis)
::rpc/climit (ig/ref ::rpc/climit)
::rpc/rlimit (ig/ref ::rpc/rlimit)
@@ -340,7 +335,7 @@
:app.rpc.doc/routes
{:app.rpc/methods (ig/ref :app.rpc/methods)}
::rpc/routes
:app.rpc/routes
{::rpc/methods (ig/ref :app.rpc/methods)
::db/pool (ig/ref ::db/pool)
::session/manager (ig/ref ::session/manager)
@@ -432,9 +427,6 @@
;; module requires the migrations to run before initialization.
::migrations (ig/ref :app.migrations/migrations)}
::setup/clock
{}
:app.loggers.audit.archive-task/handler
{::setup/props (ig/ref ::setup/props)
::db/pool (ig/ref ::db/pool)
@@ -478,14 +470,13 @@
(cf/get :objects-storage-s3-bucket))
::sto.s3/io-threads (or (cf/get :storage-assets-s3-io-threads)
(cf/get :objects-storage-s3-io-threads))
::wrk/netty-io-executor
(ig/ref ::wrk/netty-io-executor)}
::wrk/executor (ig/ref ::wrk/executor)}
:app.storage.fs/backend
{::sto.fs/directory (or (cf/get :storage-assets-fs-directory)
(cf/get :objects-storage-fs-directory))}})
(def worker-config
{::wrk/cron
{::wrk/registry (ig/ref ::wrk/registry)
@@ -521,7 +512,7 @@
:task :audit-log-gc})]}
::wrk/dispatcher
{::rds/client (ig/ref ::rds/client)
{::rds/redis (ig/ref ::rds/redis)
::mtx/metrics (ig/ref ::mtx/metrics)
::db/pool (ig/ref ::db/pool)
::wrk/tenant (cf/get :tenant)}
@@ -530,7 +521,7 @@
{::wrk/parallelism (cf/get ::worker-default-parallelism 1)
::wrk/queue :default
::wrk/tenant (cf/get :tenant)
::rds/client (ig/ref ::rds/client)
::rds/redis (ig/ref ::rds/redis)
::wrk/registry (ig/ref ::wrk/registry)
::mtx/metrics (ig/ref ::mtx/metrics)
::db/pool (ig/ref ::db/pool)}
@@ -539,7 +530,7 @@
{::wrk/parallelism (cf/get ::worker-webhook-parallelism 1)
::wrk/queue :webhooks
::wrk/tenant (cf/get :tenant)
::rds/client (ig/ref ::rds/client)
::rds/redis (ig/ref ::rds/redis)
::wrk/registry (ig/ref ::wrk/registry)
::mtx/metrics (ig/ref ::mtx/metrics)
::db/pool (ig/ref ::db/pool)}})

View File

@@ -38,13 +38,15 @@
org.im4java.core.Info))
(def schema:upload
[:map {:title "Upload"}
[:filename :string]
[:size ::sm/int]
[:path ::fs/path]
[:mtype {:optional true} :string]
[:headers {:optional true}
[:map-of :string :string]]])
(sm/register!
^{::sm/type ::upload}
[:map {:title "Upload"}
[:filename :string]
[:size ::sm/int]
[:path ::fs/path]
[:mtype {:optional true} :string]
[:headers {:optional true}
[:map-of :string :string]]]))
(def ^:private schema:input
[:map {:title "Input"}
@@ -116,7 +118,7 @@
(defn- parse-svg
[text]
(let [text (strip-doctype text)]
(dm/with-open [istream (IOUtils/toInputStream ^String text "UTF-8")]
(dm/with-open [istream (IOUtils/toInputStream text "UTF-8")]
(xml/parse istream secure-parser-factory))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

View File

@@ -446,9 +446,6 @@
{:name "0140-add-locked-by-column-to-file-change-table"
:fn (mg/resource "app/migrations/sql/0140-add-locked-by-column-to-file-change-table.sql")}
{:name "0141-add-idx-to-file-library-rel"
:fn (mg/resource "app/migrations/sql/0141-add-idx-to-file-library-rel.sql")}
{:name "0141-add-file-data-table.sql"
:fn (mg/resource "app/migrations/sql/0141-add-file-data-table.sql")}])

View File

@@ -10,8 +10,8 @@
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.pprint]
[app.srepl.fixes.media-refs :refer [process-file]]
[app.srepl.main :as srepl]
[app.srepl.procs.media-refs]
[clojure.edn :as edn]))
(def ^:private required-services
@@ -20,10 +20,7 @@
:app.storage/storage
:app.metrics/metrics
:app.db/pool
:app.worker/netty-io-executor])
(def default-options
{:rollback? false})
:app.worker/executor])
(defn -main
[& [options]]
@@ -31,20 +28,22 @@
(let [config-var (requiring-resolve 'app.main/system-config)
start-var (requiring-resolve 'app.main/start-custom)
stop-var (requiring-resolve 'app.main/stop)
config (select-keys @config-var required-services)
options (if (string? options)
(ex/ignoring (edn/read-string options))
{})
options (-> (merge default-options options)
(assoc :proc-fn #'app.srepl.procs.media-refs/fix-media-refs))]
config (select-keys @config-var required-services)]
(start-var config)
(l/inf :hint "executing media-refs migration" :options options)
(srepl/process! options)
(let [options (if (string? options)
(ex/ignoring (edn/read-string options))
{})]
(l/inf :hint "executing media-refs migration" :options options)
(srepl/process-files! process-file options))
(stop-var)
(System/exit 0))
(catch Throwable cause
(ex/print-throwable cause)
(flush)
(System/exit -1))))

View File

@@ -4,9 +4,8 @@ CREATE TABLE file_data (
created_at timestamptz NOT NULL DEFAULT now(),
modified_at timestamptz NOT NULL DEFAULT now(),
deleted_at timestamptz NULL,
type text NOT NULL,
type text NULL,
backend text NULL,
metadata jsonb NULL,
@@ -14,7 +13,7 @@ CREATE TABLE file_data (
PRIMARY KEY (file_id, id)
) PARTITION BY HASH (file_id);
) PARTITION BY HASH (file_id, id);
CREATE TABLE file_data_00 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 0);
CREATE TABLE file_data_01 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 1);
@@ -32,7 +31,3 @@ CREATE TABLE file_data_12 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, RE
CREATE TABLE file_data_13 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 13);
CREATE TABLE file_data_14 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 14);
CREATE TABLE file_data_15 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 15);
CREATE INDEX file_data__deleted_at__idx
ON file_data (deleted_at, file_id, id)
WHERE deleted_at IS NOT NULL;

View File

@@ -1,2 +0,0 @@
CREATE INDEX IF NOT EXISTS file_library_rel__library_file_id__idx
ON file_library_rel (library_file_id);

View File

@@ -16,6 +16,7 @@
[app.redis :as rds]
[app.worker :as wrk]
[integrant.core :as ig]
[promesa.core :as p]
[promesa.exec :as px]
[promesa.exec.csp :as sp]))
@@ -58,16 +59,14 @@
(assoc ::timeout (ct/duration {:seconds 30})))})
(def ^:private schema:params
[:map
::rds/client
::wrk/executor])
[:map ::rds/redis ::wrk/executor])
(defmethod ig/assert-key ::msgbus
[_ params]
(assert (sm/check schema:params params)))
(defmethod ig/init-key ::msgbus
[_ {:keys [::buffer-size ::wrk/executor ::timeout] :as cfg}]
[_ {:keys [::buffer-size ::wrk/executor ::timeout ::rds/redis] :as cfg}]
(l/info :hint "initialize msgbus" :buffer-size buffer-size)
(let [cmd-ch (sp/chan :buf buffer-size)
rcv-ch (sp/chan :buf (sp/dropping-buffer buffer-size))
@@ -75,9 +74,8 @@
:xf xform-prefix-topic)
state (agent {})
;; Open persistent connections to redis
pconn (rds/connect cfg :timeout timeout)
sconn (rds/connect-pubsub cfg :timeout timeout)
pconn (rds/connect redis :type :default :timeout timeout)
sconn (rds/connect redis :type :pubsub :timeout timeout)
_ (set-error-handler! state #(l/error :cause % :hint "unexpected error on agent" ::l/sync? true))
_ (set-error-mode! state :continue)
@@ -191,13 +189,14 @@
(defn- create-listener
[rcv-ch]
{:on-message (fn [_ topic message]
(rds/pubsub-listener
:on-message (fn [_ topic message]
;; There is no back pressure, so we use a sliding
;; buffer for cases where the pubsub broker sends
;; more messages than we can process.
(let [val {:topic topic :message (t/decode-str message)}]
(let [val {:topic topic :message (t/decode message)}]
(when-not (sp/offer! rcv-ch val)
(l/warn :msg "dropping message on subscription loop"))))})
(l/warn :msg "dropping message on subscription loop"))))))
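sp/offer! is the non-blocking put: it returns immediately (false when the value is not accepted) instead of parking the Redis callback thread, so a slow consumer costs a dropped message and a warning rather than a stalled pubsub connection. The pattern, in sketch form (promesa.exec.csp is assumed to mirror core.async semantics):
(when-not (sp/offer! rcv-ch {:topic topic :message msg})
  (l/warn :msg "dropping message")) ;; the producer thread never blocks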
(defn- process-input
[{:keys [::state ::wrk/executor] :as cfg} topic message]
@@ -217,7 +216,8 @@
(rds/add-listener sconn (create-listener rcv-ch))
(px/thread
{:name "penpot/msgbus"}
{:name "penpot/msgbus/io-loop"
:virtual true}
(try
(loop []
(let [timeout-ch (sp/timeout-chan 1000)
@@ -263,7 +263,7 @@
intended to be used in core.async go blocks."
[{:keys [::pconn] :as cfg} {:keys [topic message]}]
(try
(rds/publish pconn topic (t/encode-str message))
(p/await! (rds/publish pconn topic (t/encode message)))
(catch InterruptedException cause
(throw cause))
(catch Throwable cause

View File

@@ -6,22 +6,23 @@
(ns app.redis
"The msgbus abstraction implemented using redis as underlying backend."
(:refer-clojure :exclude [eval get set run!])
(:refer-clojure :exclude [eval])
(:require
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.generic-pool :as gpool]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.metrics :as mtx]
[app.redis.script :as-alias rscript]
[app.worker :as wrk]
[app.worker.executor]
[app.util.cache :as cache]
[app.worker :as-alias wrk]
[clojure.core :as c]
[clojure.java.io :as io]
[cuerdas.core :as str]
[integrant.core :as ig])
[integrant.core :as ig]
[promesa.core :as p]
[promesa.exec :as px])
(:import
clojure.lang.MapEntry
io.lettuce.core.KeyValue
@@ -31,10 +32,12 @@
io.lettuce.core.RedisException
io.lettuce.core.RedisURI
io.lettuce.core.ScriptOutputType
io.lettuce.core.SetArgs
io.lettuce.core.api.StatefulConnection
io.lettuce.core.api.StatefulRedisConnection
io.lettuce.core.api.async.RedisAsyncCommands
io.lettuce.core.api.async.RedisScriptingAsyncCommands
io.lettuce.core.api.sync.RedisCommands
io.lettuce.core.api.sync.RedisScriptingCommands
io.lettuce.core.codec.ByteArrayCodec
io.lettuce.core.codec.RedisCodec
io.lettuce.core.codec.StringCodec
io.lettuce.core.pubsub.RedisPubSubListener
@@ -42,238 +45,244 @@
io.lettuce.core.pubsub.api.sync.RedisPubSubCommands
io.lettuce.core.resource.ClientResources
io.lettuce.core.resource.DefaultClientResources
io.netty.channel.nio.NioEventLoopGroup
io.netty.util.HashedWheelTimer
io.netty.util.Timer
io.netty.util.concurrent.EventExecutorGroup
java.lang.AutoCloseable
java.time.Duration))
(set! *warn-on-reflection* true)
(def ^:const MAX-EVAL-RETRIES 18)
(declare ^:private initialize-resources)
(declare ^:private shutdown-resources)
(declare ^:private impl-eval)
(def default-timeout
(ct/duration "10s"))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; IMPL & PRIVATE API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defprotocol IRedis
(-connect [_ options])
(-get-or-connect [_ key options]))
(defprotocol IConnection
(-set-timeout [_ timeout] "set connection timeout")
(-get-timeout [_] "get current timeout")
(-reset-timeout [_] "reset to default timeout"))
(defprotocol IDefaultConnection
"Public API of default redis connection"
(-publish [_ topic message])
(-rpush [_ key payload])
(-blpop [_ timeout keys])
(-eval [_ script])
(-get [_ key])
(-set [_ key val args])
(-del [_ key-or-keys])
(-ping [_]))
(publish [_ topic message])
(rpush [_ key payload])
(blpop [_ timeout keys])
(eval [_ script]))
(defprotocol IPubSubConnection
(-add-listener [_ listener])
(-subscribe [_ topics])
(-unsubscribe [_ topics]))
(add-listener [_ listener])
(subscribe [_ topics])
(unsubscribe [_ topics]))
(def ^:private default-codec
(def default-codec
(RedisCodec/of StringCodec/UTF8 ByteArrayCodec/INSTANCE))
(def string-codec
(RedisCodec/of StringCodec/UTF8 StringCodec/UTF8))
(defn- impl-eval
[cmd cache metrics script]
(let [keys (into-array String (map str (::rscript/keys script)))
vals (into-array String (map str (::rscript/vals script)))
sname (::rscript/name script)
(sm/register!
{:type ::connection
:pred #(satisfies? IConnection %)
:type-properties
{:title "connection"
:description "redis connection instance"}})
read-script
(fn []
(-> script ::rscript/path io/resource slurp))
(sm/register!
{:type ::pubsub-connection
:pred #(satisfies? IPubSubConnection %)
:type-properties
{:title "connection"
:description "redis connection instance"}})
load-script
(fn []
(let [id (.scriptLoad ^RedisScriptingCommands cmd
^String (read-script))]
(swap! cache assoc sname id)
(l/trc :hint "load script" :name sname :id id)
(defn redis?
[o]
(satisfies? IRedis o))
id))
(sm/register!
{:type ::redis
:pred redis?})
eval-script
(fn [id]
(try
(let [tpoint (ct/tpoint)
result (.evalsha ^RedisScriptingCommands cmd
^String id
^ScriptOutputType ScriptOutputType/MULTI
^"[Ljava.lang.String;" keys
^"[Ljava.lang.String;" vals)
elapsed (tpoint)]
(def ^:private schema:script
[:map {:title "script"}
[::rscript/name qualified-keyword?]
[::rscript/path ::sm/text]
[::rscript/keys {:optional true} [:vector :any]]
[::rscript/vals {:optional true} [:vector :any]]])
(mtx/run! metrics {:id :redis-eval-timing
:labels [(name sname)]
:val (inst-ms elapsed)})
(def valid-script?
(sm/lazy-validator schema:script))
(l/trc :hint "eval script"
:name (name sname)
:id id
:params (str/join "," (::rscript/vals script))
:elapsed (ct/format-duration elapsed))
(defmethod ig/expand-key ::redis
[k v]
(let [cpus (px/get-available-processors)
threads (max 1 (int (* cpus 0.2)))]
{k (-> (d/without-nils v)
(assoc ::timeout (ct/duration "10s"))
(assoc ::io-threads (max 3 threads))
(assoc ::worker-threads (max 3 threads)))}))
result)
(def ^:private schema:redis-params
[:map {:title "redis-params"}
::wrk/executor
::mtx/metrics
[::uri ::sm/uri]
[::worker-threads ::sm/int]
[::io-threads ::sm/int]
[::timeout ::ct/duration]])
(catch io.lettuce.core.RedisNoScriptException _cause
::load)
(defmethod ig/assert-key ::redis
[_ params]
(assert (sm/check schema:redis-params params)))
(catch Throwable cause
(when-let [on-error (::rscript/on-error script)]
(on-error cause))
(throw cause))))
(defmethod ig/init-key ::redis
[_ params]
(initialize-resources params))
eval-script'
(fn [id]
(loop [id id
retries 0]
(if (> retries MAX-EVAL-RETRIES)
(ex/raise :type :internal
:code ::max-eval-retries-reached
:hint (str "unable to eval redis script " sname))
(let [result (eval-script id)]
(if (= result ::load)
(recur (load-script)
(inc retries))
result)))))]
(defmethod ig/halt-key! ::redis
[_ instance]
(d/close! instance))
(if-let [id (c/get @cache sname)]
(eval-script' id)
(-> (load-script)
(eval-script')))))
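This is the classic EVALSHA fallback, bounded here by MAX-EVAL-RETRIES. In sketch form:
;; sha := cache[script-name], or SCRIPT LOAD source on a cache miss
;; EVALSHA sha keys vals
;; on a NOSCRIPT error (e.g. after a Redis restart or SCRIPT FLUSH):
;;   SCRIPT LOAD again, update the cache, retry (at most MAX-EVAL-RETRIES)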
(defn- initialize-resources
"Initialize redis connection resources"
[{:keys [::uri ::io-threads ::worker-threads ::wrk/executor ::mtx/metrics] :as params}]
(deftype Connection [^StatefulRedisConnection conn
^RedisCommands cmd
^Duration timeout
cache metrics]
AutoCloseable
(close [_]
(ex/ignoring (.close conn)))
(l/inf :hint "initialize redis resources"
:uri (str uri)
:io-threads io-threads
:worker-threads worker-threads)
IConnection
(-set-timeout [_ timeout]
(.setTimeout conn ^Duration timeout))
(let [timer (HashedWheelTimer.)
resources (.. (DefaultClientResources/builder)
(ioThreadPoolSize ^long io-threads)
(computationThreadPoolSize ^long worker-threads)
(timer ^Timer timer)
(build))
(-reset-timeout [_]
(.setTimeout conn timeout))
redis-uri (RedisURI/create ^String (str uri))
(-get-timeout [_]
(.getTimeout conn))
shutdown (fn [client conn]
(ex/ignoring (.close ^StatefulConnection conn))
(ex/ignoring (.close ^RedisClient client))
(l/trc :hint "disconnect" :hid (hash client)))
IDefaultConnection
(-publish [_ topic message]
(.publish cmd ^String topic ^String message))
on-remove (fn [key val cause]
(l/trace :hint "evict connection (cache)" :key key :reason cause)
(some-> val d/close!))
(-rpush [_ key elements]
(try
(let [vals (make-array String (count elements))]
(loop [i 0 xs (seq elements)]
(when xs
(aset ^"[[Ljava.lang.String;" vals i ^String (first xs))
(recur (inc i) (next xs))))
cache (cache/create :executor executor
:on-remove on-remove
:keepalive "5m")]
(reify
java.lang.AutoCloseable
(close [_]
(ex/ignoring (cache/invalidate! cache))
(ex/ignoring (.shutdown ^ClientResources resources))
(ex/ignoring (.stop ^Timer timer)))
(.rpush cmd
^String key
^"[[Ljava.lang.String;" vals))
IRedis
(-get-or-connect [this key options]
(let [create (fn [_] (-connect this options))]
(cache/get cache key create)))
(catch RedisCommandInterruptedException cause
(throw (InterruptedException. (ex-message cause))))))
(-connect [_ options]
(let [timeout (or (:timeout options) (::timeout params))
codec (get options :codec default-codec)
type (get options :type :default)
client (RedisClient/create ^ClientResources resources
^RedisURI redis-uri)]
(-blpop [_ keys timeout]
(try
(let [keys (into-array String keys)]
(when-let [res (.blpop cmd
^double timeout
^"[Ljava.lang.String;" keys)]
(MapEntry/create
(.getKey ^KeyValue res)
(.getValue ^KeyValue res))))
(catch RedisCommandInterruptedException cause
(throw (InterruptedException. (ex-message cause))))))
(l/trc :hint "connect" :hid (hash client))
(if (= type :pubsub)
(let [conn (.connectPubSub ^RedisClient client
^RedisCodec codec)]
(.setTimeout ^StatefulConnection conn
^Duration timeout)
(reify
IPubSubConnection
(add-listener [_ listener]
(assert (instance? RedisPubSubListener listener) "expected listener instance")
(.addListener ^StatefulRedisPubSubConnection conn
^RedisPubSubListener listener))
(-get [_ key]
(assert (string? key) "key expected to be string")
(.get cmd ^String key))
(subscribe [_ topics]
(try
(let [topics (into-array String (map str topics))
cmd (.sync ^StatefulRedisPubSubConnection conn)]
(.subscribe ^RedisPubSubCommands cmd topics))
(catch RedisCommandInterruptedException cause
(throw (InterruptedException. (ex-message cause))))))
(-set [_ key val args]
(.set cmd
^String key
^bytes val
^SetArgs args))
(-del [_ keys]
(let [keys (into-array String keys)]
(.del cmd ^String/1 keys)))
(-ping [_]
(.ping cmd))
(-eval [_ script]
(impl-eval cmd cache metrics script)))
(unsubscribe [_ topics]
(try
(let [topics (into-array String (map str topics))
cmd (.sync ^StatefulRedisPubSubConnection conn)]
(.unsubscribe ^RedisPubSubCommands cmd topics))
(catch RedisCommandInterruptedException cause
(throw (InterruptedException. (ex-message cause))))))
(deftype SubscriptionConnection [^StatefulRedisPubSubConnection conn
^RedisPubSubCommands cmd
^Duration timeout]
AutoCloseable
(close [_]
(ex/ignoring (.close conn)))
AutoCloseable
(close [_] (shutdown client conn))))
IConnection
(-set-timeout [_ timeout]
(.setTimeout conn ^Duration timeout))
(let [conn (.connect ^RedisClient client ^RedisCodec codec)]
(.setTimeout ^StatefulConnection conn ^Duration timeout)
(reify
IConnection
(publish [_ topic message]
(assert (string? topic) "expected topic to be string")
(assert (bytes? message) "expected message to be a byte array")
(-reset-timeout [_]
(.setTimeout conn timeout))
(let [pcomm (.async ^StatefulRedisConnection conn)]
(.publish ^RedisAsyncCommands pcomm ^String topic ^bytes message)))
(-get-timeout [_]
(.getTimeout conn))
(rpush [_ key payload]
(assert (or (and (vector? payload)
(every? bytes? payload))
(bytes? payload)))
(try
(let [cmd (.sync ^StatefulRedisConnection conn)
data (if (vector? payload) payload [payload])
vals (make-array (. Class (forName "[B")) (count data))]
IPubSubConnection
(-add-listener [_ listener]
(.addListener conn ^RedisPubSubListener listener))
(loop [i 0 xs (seq data)]
(when xs
(aset ^"[[B" vals i ^bytes (first xs))
(recur (inc i) (next xs))))
(-subscribe [_ topics]
(try
(let [topics (into-array String topics)]
(.subscribe cmd topics))
(catch RedisCommandInterruptedException cause
(throw (InterruptedException. (ex-message cause))))))
(.rpush ^RedisCommands cmd
^String key
^"[[B" vals))
(-unsubscribe [_ topics]
(try
(let [topics (into-array String topics)]
(.unsubscribe cmd topics))
(catch RedisCommandInterruptedException cause
(throw (InterruptedException. (ex-message cause)))))))
(catch RedisCommandInterruptedException cause
(throw (InterruptedException. (ex-message cause))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; PUBLIC API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(blpop [_ timeout keys]
(try
(let [keys (into-array Object (map str keys))
cmd (.sync ^StatefulRedisConnection conn)
timeout (/ (double (inst-ms timeout)) 1000.0)]
(when-let [res (.blpop ^RedisCommands cmd
^double timeout
^"[Ljava.lang.String;" keys)]
(MapEntry/create
(.getKey ^KeyValue res)
(.getValue ^KeyValue res))))
(catch RedisCommandInterruptedException cause
(throw (InterruptedException. (ex-message cause))))))
(defn build-set-args
[options]
(reduce-kv (fn [^SetArgs args k v]
(case k
:ex (if (instance? Duration v)
(.ex args ^Duration v)
(.ex args (long v)))
:px (.px args (long v))
:nx (if v (.nx args) args)
:keep-ttl (if v (.keepttl args) args)))
(SetArgs.)
options))
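reduce-kv folds the options map into a mutable lettuce SetArgs builder, chaining one builder call per recognized key; since the case has no default branch, an unknown key would throw. Hedged usage sketches:
(build-set-args {:ex 30 :nx true})
;; => SetArgs equivalent of: SET key val EX 30 NX
(build-set-args {:px 1500 :keep-ttl true})
;; => SetArgs equivalent of: SET key val PX 1500 KEEPTTL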
(eval [_ script]
(assert (valid-script? script) "expected valid script")
(impl-eval conn metrics script))
AutoCloseable
(close [_] (shutdown client conn))))))))))
(defn connect
[instance & {:as opts}]
(assert (satisfies? IRedis instance) "expected valid redis instance")
(-connect instance opts))
(defn get-or-connect
[instance key & {:as opts}]
(assert (satisfies? IRedis instance) "expected valid redis instance")
(-get-or-connect instance key opts))
(defn pubsub-listener
[& {:keys [on-message on-subscribe on-unsubscribe]}]
@@ -302,172 +311,61 @@
(when on-unsubscribe
(on-unsubscribe nil topic count)))))
(defn connect
[cfg & {:as options}]
(assert (contains? cfg ::mtx/metrics) "missing ::mtx/metrics on provided system")
(assert (contains? cfg ::client) "missing ::rds/client on provided system")
(def ^:private scripts-cache (atom {}))
(let [state (::client cfg)
(defn- impl-eval
[^StatefulRedisConnection connection metrics script]
(let [cmd (.async ^StatefulRedisConnection connection)
keys (into-array String (map str (::rscript/keys script)))
vals (into-array String (map str (::rscript/vals script)))
sname (::rscript/name script)]
cache (::cache state)
client (::client state)
timeout (or (some-> (:timeout options) ct/duration)
(::timeout state))
(letfn [(on-error [cause]
(if (instance? io.lettuce.core.RedisNoScriptException cause)
(do
(l/error :hint "no script found" :name sname :cause cause)
(->> (load-script)
(p/mcat eval-script)))
(if-let [on-error (::rscript/on-error script)]
(on-error cause)
(p/rejected cause))))
conn (.connect ^RedisClient client
^RedisCodec default-codec)
cmd (.sync ^StatefulRedisConnection conn)]
(eval-script [sha]
(let [tpoint (ct/tpoint)]
(->> (.evalsha ^RedisScriptingAsyncCommands cmd
^String sha
^ScriptOutputType ScriptOutputType/MULTI
^"[Ljava.lang.String;" keys
^"[Ljava.lang.String;" vals)
(p/fmap (fn [result]
(let [elapsed (tpoint)]
(mtx/run! metrics {:id :redis-eval-timing
:labels [(name sname)]
:val (inst-ms elapsed)})
(l/trace :hint "eval script"
:name (name sname)
:sha sha
:params (str/join "," (::rscript/vals script))
:elapsed (ct/format-duration elapsed))
result)))
(p/merr on-error))))
(.setTimeout ^StatefulRedisConnection conn ^Duration timeout)
(->Connection conn cmd timeout cache (::mtx/metrics cfg))))
(read-script []
(-> script ::rscript/path io/resource slurp))
(defn connect-pubsub
[cfg & {:as options}]
(let [state (::client cfg)
client (::client state)
(load-script []
(l/trace :hint "load script" :name sname)
(->> (.scriptLoad ^RedisScriptingAsyncCommands cmd
^String (read-script))
(p/fmap (fn [sha]
(swap! scripts-cache assoc sname sha)
sha))))]
timeout (or (some-> (:timeout options) ct/duration)
(::timeout state))
conn (.connectPubSub ^RedisClient client
^RedisCodec default-codec)
cmd (.sync ^StatefulRedisPubSubConnection conn)]
(.setTimeout ^StatefulRedisPubSubConnection conn
^Duration timeout)
(->SubscriptionConnection conn cmd timeout)))
(defn get
[conn key]
(assert (string? key) "key must be string instance")
(try
(-get conn key)
(catch RedisCommandTimeoutException cause
(l/err :hint "timeout on get redis key" :key key :cause cause)
nil)))
(defn set
([conn key val]
(set conn key val nil))
([conn key val args]
(assert (string? key) "key must be string instance")
(assert (string? val) "val must be string instance")
(let [args (cond
(or (instance? SetArgs args)
(nil? args))
args
(map? args)
(build-set-args args)
:else
(throw (IllegalArgumentException. "invalid args")))]
(try
(-set conn key val args)
(catch RedisCommandTimeoutException cause
(l/err :hint "timeout on set redis key" :key key :cause cause)
nil)))))
(defn del
[conn key-or-keys]
(let [keys (if (vector? key-or-keys) key-or-keys [key-or-keys])]
(assert (every? string? keys) "only string keys allowed")
(try
(-del conn keys)
(catch RedisCommandTimeoutException cause
(l/err :hint "timeout on del redis key" :key key :cause cause)
nil))))
(defn ping
[conn]
(-ping conn))
(defn blpop
[conn key-or-keys timeout]
(let [keys (if (vector? key-or-keys) key-or-keys [key-or-keys])
timeout (cond
(ct/duration? timeout)
(/ (double (inst-ms timeout)) 1000.0)
(double? timeout)
timeout
(int? timeout)
(/ (double timeout) 1000.0)
:else
0)]
(assert (every? string? keys) "only string keys allowed")
(-blpop conn keys timeout)))
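Lettuce expects the BLPOP timeout as seconds in a double, so the wrapper normalizes its input: durations and integers are read as milliseconds and divided by 1000, doubles pass through, and anything else becomes 0 (block forever). Quick conversion examples:
;; 2500 (int, ms)        -> 2.5 seconds
;; (ct/duration "3s")    -> 3.0 seconds
;; 0.5 (double)          -> 0.5 seconds, passed through unchanged
(blpop conn "my-queue" 2500) ;; waits up to 2.5s for a value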
(defn rpush
[conn key elements]
(assert (string? key) "key must be string instance")
(assert (every? string? elements) "elements should be all strings")
(let [elements (vec elements)]
(-rpush conn key elements)))
(defn publish
[conn topic payload]
(assert (string? topic) "expected topic to be string")
(assert (string? payload) "expected payload to be a string")
(-publish conn topic payload))
(def ^:private schema:script
[:map {:title "script"}
[::rscript/name qualified-keyword?]
[::rscript/path ::sm/text]
[::rscript/keys {:optional true} [:vector :any]]
[::rscript/vals {:optional true} [:vector :any]]])
(def ^:private valid-script?
(sm/lazy-validator schema:script))
(defn eval
[conn script]
(assert (valid-script? script) "expected valid script")
(-eval conn script))
(defn add-listener
[conn listener]
(let [listener (cond
(map? listener)
(pubsub-listener listener)
(instance? RedisPubSubListener listener)
listener
:else
(throw (IllegalArgumentException. "invalid listener provided")))]
(-add-listener conn listener)))
(defn subscribe
[conn topic-or-topics]
(let [topics (if (vector? topic-or-topics) topic-or-topics [topic-or-topics])]
(assert (every? string? topics))
(-subscribe conn topics)))
(defn unsubscribe
[conn topic-or-topics]
(let [topics (if (vector? topic-or-topics) topic-or-topics [topic-or-topics])]
(assert (every? string? topics))
(-unsubscribe conn topics)))
(defn set-timeout
[conn timeout]
(let [timeout (ct/duration timeout)]
(-set-timeout conn timeout)))
(defn get-timeout
[conn]
(-get-timeout conn))
(defn reset-timeout
[conn]
(-reset-timeout conn))
(p/await!
(if-let [sha (get @scripts-cache sname)]
(eval-script sha)
(->> (load-script)
(p/mapcat eval-script)))))))
(defn timeout-exception?
[cause]
@@ -476,121 +374,3 @@
(defn exception?
[cause]
(instance? RedisException cause))
(defn get-pooled
[cfg]
(let [pool (::pool cfg)]
(gpool/get pool)))
(defn close
[o]
(.close ^AutoCloseable o))
(defn pool
[cfg & {:as options}]
(gpool/create :create-fn (partial connect cfg options)
:destroy-fn close
:dispose-fn -reset-timeout))
(defn run!
[cfg f & args]
(if (gpool/pool? cfg)
(apply f {::pool cfg} f args)
(let [pool (::pool cfg)]
(with-open [^AutoCloseable conn (gpool/get pool)]
(apply f (assoc cfg ::conn @conn) args)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; INITIALIZATION
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defmethod ig/expand-key ::client
[k v]
{k (-> (d/without-nils v)
(assoc ::timeout (ct/duration "10s")))})
(def ^:private schema:client
[:map {:title "RedisClient"}
[::timer [:fn #(instance? HashedWheelTimer %)]]
[::cache ::sm/atom]
[::timeout ::ct/duration]
[::resources [:fn #(instance? DefaultClientResources %)]]])
(def check-client
(sm/check-fn schema:client))
(sm/register! ::client schema:client)
(sm/register!
{:type ::pool
:pred gpool/pool?})
(def ^:private schema:client-params
[:map {:title "redis-params"}
::wrk/netty-io-executor
::wrk/netty-executor
[::uri ::sm/uri]
[::timeout ::ct/duration]])
(def ^:private check-client-params
(sm/check-fn schema:client-params))
(defmethod ig/assert-key ::client
[_ params]
(check-client-params params))
(defmethod ig/init-key ::client
[_ {:keys [::uri ::wrk/netty-io-executor ::wrk/netty-executor] :as params}]
(l/inf :hint "initialize redis client" :uri (str uri))
(let [timer (HashedWheelTimer.)
cache (atom {})
resources (.. (DefaultClientResources/builder)
(eventExecutorGroup ^EventExecutorGroup netty-executor)
;; We provide lettuce with a shared event loop
;; group instance instead of letting lettuce
;; create its own
(eventLoopGroupProvider
(reify io.lettuce.core.resource.EventLoopGroupProvider
(allocate [_ _] netty-io-executor)
(threadPoolSize [_]
(.executorCount ^NioEventLoopGroup netty-io-executor))
(release [_ _ _ _ _]
;; Do nothing
)
(shutdown [_ _ _ _]
;; Do nothing
)))
(timer ^Timer timer)
(build))
redis-uri (RedisURI/create ^String (str uri))
client (RedisClient/create ^ClientResources resources
^RedisURI redis-uri)]
{::client client
::cache cache
::timer timer
::timeout default-timeout
::resources resources}))
(defmethod ig/halt-key! ::client
[_ {:keys [::client ::timer ::resources]}]
(ex/ignoring (.shutdown ^RedisClient client))
(ex/ignoring (.shutdown ^ClientResources resources))
(ex/ignoring (.stop ^Timer timer)))
(defmethod ig/assert-key ::pool
[_ {:keys [::client]}]
(check-client client))
(defmethod ig/init-key ::pool
[_ cfg]
(pool cfg {:timeout (ct/duration 2000)}))
(defmethod ig/halt-key! ::pool
[_ instance]
(.close ^java.lang.AutoCloseable instance))

View File

@@ -23,7 +23,6 @@
[app.main :as-alias main]
[app.metrics :as mtx]
[app.msgbus :as-alias mbus]
[app.redis :as rds]
[app.rpc.climit :as climit]
[app.rpc.cond :as cond]
[app.rpc.helpers :as rph]
@@ -64,13 +63,9 @@
(let [mdata (meta result)
response (if (fn? result)
(result request)
(let [result (rph/unwrap result)
status (::http/status mdata 200)
headers (cond-> (::http/headers mdata {})
(yres/stream-body? result)
(assoc "content-type" "application/octet-stream"))]
{::yres/status status
::yres/headers headers
(let [result (rph/unwrap result)]
{::yres/status (::http/status mdata 200)
::yres/headers (::http/headers mdata {})
::yres/body result}))]
(-> response
(handle-response-transformation request mdata)
@@ -266,7 +261,6 @@
::session/manager
::http.client/client
::db/pool
::rds/pool
::mbus/msgbus
::sto/storage
::mtx/metrics

View File

@@ -21,6 +21,7 @@
[clojure.set :as set]
[datoteka.fs :as fs]
[integrant.core :as ig]
[promesa.exec :as px]
[promesa.exec.bulkhead :as pbh])
(:import
clojure.lang.ExceptionInfo
@@ -288,9 +289,13 @@
(get-limits cfg)))
(defn invoke!
"Run a function in context of climit."
[{:keys [::rpc/climit] :as cfg} f params]
"Run a function in context of climit.
Intended to be used in virtual threads."
[{:keys [::executor ::rpc/climit] :as cfg} f params]
(let [f (if climit
(build-exec-chain cfg f)
(let [f (if (some? executor)
(fn [cfg params] (px/await! (px/submit! executor (fn [] (f cfg params)))))
f)]
(build-exec-chain cfg f))
f)]
(f cfg params)))
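A consumer-side sketch of invoke!: attach the concurrency-limit id, label and executor to cfg, then invoke the work fn. This mirrors the upload-photo call that appears later in this changeset, where generate-thumbnail! takes [cfg file]:
(let [params (-> cfg
                 (assoc ::climit/id [[:process-image/by-profile profile-id]
                                     [:process-image/global]])
                 (assoc ::climit/label "upload-photo")
                 (assoc ::climit/executor executor)
                 (climit/invoke! generate-thumbnail! file))]
  (sto/put-object! storage params))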

View File

@@ -23,14 +23,14 @@
(dissoc row :perms))
(defn create-access-token
[{:keys [::db/conn] :as cfg} profile-id name expiration]
(let [token-id (uuid/next)
expires-at (some-> expiration (ct/in-future))
created-at (ct/now)
token (tokens/generate cfg {:iss "access-token"
:iat created-at
:tid token-id})
[{:keys [::db/conn ::setup/props]} profile-id name expiration]
(let [created-at (ct/now)
token-id (uuid/next)
token (tokens/generate props {:iss "access-token"
:tid token-id
:iat created-at})
expires-at (some-> expiration ct/in-future)
token (db/insert! conn :access-token
{:id token-id
:name name

View File

@@ -6,7 +6,6 @@
(ns app.rpc.commands.auth
(:require
[app.auth :as auth]
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
@@ -63,7 +62,7 @@
(ex/raise :type :validation
:code :account-without-password
:hint "the current account does not have password")
(let [result (auth/verify-password password (:password profile))]
(let [result (profile/verify-password cfg password (:password profile))]
(when (:update result)
(l/trc :hint "updating profile password"
:id (str (:id profile))
@@ -99,7 +98,7 @@
(profile/strip-private-attrs))
invitation (when-let [token (:invitation-token params)]
(tokens/verify cfg {:token token :iss :team-invitation}))
(tokens/verify (::setup/props cfg) {:token token :iss :team-invitation}))
;; If the invitation member-id does not match the profile-id, we just proceed to ignore the
;; invitation because invitations match exactly; and the user can't login with another email and
@@ -153,11 +152,11 @@
(defn recover-profile
[{:keys [::db/conn] :as cfg} {:keys [token password]}]
(letfn [(validate-token [token]
(let [tdata (tokens/verify cfg {:token token :iss :password-recovery})]
(let [tdata (tokens/verify (::setup/props cfg) {:token token :iss :password-recovery})]
(:profile-id tdata)))
(update-password [conn profile-id]
(let [pwd (auth/derive-password password)]
(let [pwd (profile/derive-password cfg password)]
(db/update! conn :profile {:password pwd :is-active true} {:id profile-id})
nil))]
@@ -192,7 +191,7 @@
:hint "registration disabled"))
(when (contains? params :invitation-token)
(let [invitation (tokens/verify cfg
(let [invitation (tokens/verify (::setup/props cfg)
{:token (:invitation-token params)
:iss :team-invitation})]
(when-not (= (:email params) (:member-email invitation))
@@ -249,7 +248,7 @@
:props {:newsletter-updates (or accept-newsletter-updates false)}}
params (d/without-nils params)
token (tokens/generate cfg params)]
token (tokens/generate (::setup/props cfg) params)]
(with-meta {:token token}
{::audit/profile-id uuid/zero})))
@@ -315,13 +314,16 @@
(-> (db/insert! conn :profile params)
(profile/decode-row))
(catch org.postgresql.util.PSQLException cause
(if (db/duplicate-key-error? cause)
(ex/raise :type :validation
:code :email-already-exists
:hint "email already exists"
:cause cause)
(throw cause))))))
(let [state (.getSQLState cause)]
(if (not= state "23505")
(throw cause)
(do
(l/error :hint "not an error" :cause cause)
(ex/raise :type :validation
:code :email-already-exists
:hint "email already exists"
:cause cause))))))))
(defn create-profile-rels!
[conn {:keys [id] :as profile}]
@@ -340,14 +342,14 @@
(defn send-email-verification!
[{:keys [::db/conn] :as cfg} profile]
(let [vtoken (tokens/generate cfg
(let [vtoken (tokens/generate (::setup/props cfg)
{:iss :verify-email
:exp (ct/in-future "72h")
:profile-id (:id profile)
:email (:email profile)})
;; NOTE: this token is mainly used for identifying possible
;; complaints on the SNS webhook
ptoken (tokens/generate cfg
ptoken (tokens/generate (::setup/props cfg)
{:iss :profile-identity
:profile-id (:id profile)
:exp (ct/in-future {:days 30})})]
@@ -361,7 +363,7 @@
(defn register-profile
[{:keys [::db/conn ::wrk/executor] :as cfg} {:keys [token] :as params}]
(let [claims (tokens/verify cfg {:token token :iss :prepared-register})
(let [claims (tokens/verify (::setup/props cfg) {:token token :iss :prepared-register})
params (into claims params)
profile (if-let [profile-id (:profile-id claims)]
@@ -376,7 +378,7 @@
(not (contains? cf/flags :email-verification)))
params (-> params
(assoc :is-active is-active)
(update :password auth/derive-password))
(update :password #(profile/derive-password cfg %)))
profile (->> (create-profile! conn params)
(create-profile-rels! conn))]
(vary-meta profile assoc :created true))))
@@ -384,7 +386,7 @@
created? (-> profile meta :created true?)
invitation (when-let [token (:invitation-token params)]
(tokens/verify cfg {:token token :iss :team-invitation}))
(tokens/verify (::setup/props cfg) {:token token :iss :team-invitation}))
props (-> (audit/profile->props profile)
(assoc :from-invitation (some? invitation)))
@@ -417,7 +419,7 @@
(= (:email profile)
(:member-email invitation)))
(let [claims (assoc invitation :member-id (:id profile))
token (tokens/generate cfg claims)]
token (tokens/generate (::setup/props cfg) claims)]
(-> {:invitation-token token}
(rph/with-transform (session/create-fn cfg (:id profile)))
(rph/with-meta {::audit/replace-props props
@@ -491,14 +493,14 @@
(defn- request-profile-recovery
[{:keys [::db/conn] :as cfg} {:keys [email] :as params}]
(letfn [(create-recovery-token [{:keys [id] :as profile}]
(let [token (tokens/generate cfg
(let [token (tokens/generate (::setup/props cfg)
{:iss :password-recovery
:exp (ct/in-future "15m")
:profile-id id})]
(assoc profile :token token)))
(send-email-notification [conn profile]
(let [ptoken (tokens/generate cfg
(let [ptoken (tokens/generate (::setup/props cfg)
{:iss :profile-identity
:profile-id (:id profile)
:exp (ct/in-future {:days 30})})]

View File

@@ -25,10 +25,11 @@
[app.rpc.commands.projects :as projects]
[app.rpc.commands.teams :as teams]
[app.rpc.doc :as-alias doc]
[app.rpc.helpers :as rph]
[app.tasks.file-gc]
[app.util.services :as sv]
[app.worker :as-alias wrk]))
[app.worker :as-alias wrk]
[promesa.exec :as px]
[yetti.response :as yres]))
(set! *warn-on-reflection* true)
@@ -44,7 +45,7 @@
(defn stream-export-v1
[cfg {:keys [file-id include-libraries embed-assets] :as params}]
(rph/stream
(yres/stream-body
(fn [_ output-stream]
(try
(-> cfg
@@ -59,7 +60,7 @@
(defn stream-export-v3
[cfg {:keys [file-id include-libraries embed-assets] :as params}]
(rph/stream
(yres/stream-body
(fn [_ output-stream]
(try
(-> cfg
@@ -79,16 +80,21 @@
::sm/params schema:export-binfile}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id version file-id] :as params}]
(files/check-read-permissions! pool profile-id file-id)
(let [version (or version 1)]
(case (int version)
1 (stream-export-v1 cfg params)
2 (throw (ex-info "not-implemented" {}))
3 (stream-export-v3 cfg params))))
(fn [_]
(let [version (or version 1)
body (case (int version)
1 (stream-export-v1 cfg params)
2 (throw (ex-info "not-implemented" {}))
3 (stream-export-v3 cfg params))]
{::yres/status 200
::yres/headers {"content-type" "application/octet-stream"}
::yres/body body})))
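The same streaming response shape in isolation, as a minimal sketch; it assumes clojure.java.io aliased as io and a hypothetical input-stream-able source, with yres/stream-body wrapping a writer callback that receives the raw output stream:
(fn [_]
  {::yres/status 200
   ::yres/headers {"content-type" "application/octet-stream"}
   ::yres/body (yres/stream-body
                (fn [_ output-stream]
                  ;; write the export payload directly to the client
                  (io/copy (io/input-stream source) output-stream)))})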
;; --- Command: import-binfile
(defn- import-binfile
[{:keys [::db/pool] :as cfg} {:keys [profile-id project-id version name file]}]
[{:keys [::db/pool ::wrk/executor] :as cfg} {:keys [profile-id project-id version name file]}]
(let [team (teams/get-team pool
:profile-id profile-id
:project-id project-id)
@@ -99,9 +105,13 @@
(assoc ::bfc/name name)
(assoc ::bfc/input (:path file)))
;; NOTE: the import process performs some operations that are
;; not very friendly with virtual threads, so to avoid
;; unexpected blocking of other concurrent operations we dispatch
;; that operation to a dedicated executor.
result (case (int version)
1 (bf.v1/import-files! cfg)
3 (bf.v3/import-files! cfg))]
1 (px/invoke! executor (partial bf.v1/import-files! cfg))
3 (px/invoke! executor (partial bf.v3/import-files! cfg)))]
(db/update! pool :project
{:modified-at (ct/now)}
@@ -117,7 +127,7 @@
[:project-id ::sm/uuid]
[:file-id {:optional true} ::sm/uuid]
[:version {:optional true} ::sm/int]
[:file media/schema:upload]])
[:file ::media/upload]])
(sv/defmethod ::import-binfile
"Import a penpot file in a binary format. If `file-id` is provided,

View File

@@ -6,7 +6,6 @@
(ns app.rpc.commands.comments
(:require
[app.binfile.common :as bfc]
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
@@ -164,16 +163,34 @@
(def xf-decode-row
(map decode-row))
(def ^:private
sql:get-file
"SELECT f.id, f.modified_at, f.revn, f.features, f.name,
f.project_id, p.team_id, f.data,
f.data_ref_id, f.data_backend
FROM file as f
INNER JOIN project as p on (p.id = f.project_id)
WHERE f.id = ?
AND (f.deleted_at IS NULL OR f.deleted_at > now())")
(defn- get-file
"A specialized version of get-file for comments module."
[cfg file-id page-id]
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)]
(let [file (bfc/get-file cfg file-id)
data (get file :data)]
(-> file
(assoc :page-name (dm/get-in data [:pages-index page-id :name]))
(assoc :page-id page-id)
(dissoc :data)))))
(let [file (db/exec-one! cfg [sql:get-file file-id])]
(when-not file
(ex/raise :type :not-found
:code :object-not-found
:hint "file not found"))
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)]
(let [file (->> file
(feat.fdata/resolve-file-data cfg)
(feat.fdata/decode-file-data cfg))
data (get file :data)]
(-> file
(assoc :page-name (dm/get-in data [:pages-index page-id :name]))
(assoc :page-id page-id)
(dissoc :data))))))
;; FIXME: rename
(defn- get-comment-thread
@@ -234,39 +251,34 @@
(files/check-comment-permissions! conn profile-id file-id share-id)
(get-comment-threads conn profile-id file-id))))
(defn- get-comment-threads-sql
[where]
(str/ffmt
"SELECT DISTINCT ON (ct.id)
ct.*,
pf.fullname AS owner_fullname,
pf.email AS owner_email,
pf.photo_id AS owner_photo_id,
p.team_id AS team_id,
f.name AS file_name,
f.project_id AS project_id,
first_value(c.content) OVER w AS content,
(SELECT count(1)
FROM comment AS c
WHERE c.thread_id = ct.id) AS count_comments,
(SELECT count(1)
FROM comment AS c
WHERE c.thread_id = ct.id
AND c.created_at >= coalesce(cts.modified_at, ct.created_at)) AS count_unread_comments
FROM comment_thread AS ct
INNER JOIN comment AS c ON (c.thread_id = ct.id)
INNER JOIN file AS f ON (f.id = ct.file_id)
INNER JOIN project AS p ON (p.id = f.project_id)
LEFT JOIN comment_thread_status AS cts ON (cts.thread_id = ct.id AND cts.profile_id = ?)
LEFT JOIN profile AS pf ON (ct.owner_id = pf.id)
WHERE f.deleted_at IS NULL
AND p.deleted_at IS NULL
%1
WINDOW w AS (PARTITION BY c.thread_id ORDER BY c.created_at ASC)"
where))
(def ^:private sql:comment-threads
"SELECT DISTINCT ON (ct.id)
ct.*,
pf.fullname AS owner_fullname,
pf.email AS owner_email,
pf.photo_id AS owner_photo_id,
p.team_id AS team_id,
f.name AS file_name,
f.project_id AS project_id,
first_value(c.content) OVER w AS content,
(SELECT count(1)
FROM comment AS c
WHERE c.thread_id = ct.id) AS count_comments,
(SELECT count(1)
FROM comment AS c
WHERE c.thread_id = ct.id
AND c.created_at >= coalesce(cts.modified_at, ct.created_at)) AS count_unread_comments
FROM comment_thread AS ct
INNER JOIN comment AS c ON (c.thread_id = ct.id)
INNER JOIN file AS f ON (f.id = ct.file_id)
INNER JOIN project AS p ON (p.id = f.project_id)
LEFT JOIN comment_thread_status AS cts ON (cts.thread_id = ct.id AND cts.profile_id = ?)
LEFT JOIN profile AS pf ON (ct.owner_id = pf.id)
WINDOW w AS (PARTITION BY c.thread_id ORDER BY c.created_at ASC)")
(def ^:private sql:comment-threads-by-file-id
(get-comment-threads-sql "AND ct.file_id = ?"))
(str "WITH threads AS (" sql:comment-threads ")"
"SELECT * FROM threads WHERE file_id = ?"))
(defn- get-comment-threads
[conn profile-id file-id]
@@ -275,30 +287,7 @@
;; --- COMMAND: Get Unread Comment Threads
(def ^:private sql:unread-all-comment-threads-by-team
(str "WITH threads AS ("
(get-comment-threads-sql "AND p.team_id = ?")
")"
"SELECT t.* FROM threads AS t
WHERE t.count_unread_comments > 0"))
(def ^:private sql:unread-partial-comment-threads-by-team
(str "WITH threads AS ("
(get-comment-threads-sql "AND p.team_id = ? AND (ct.owner_id = ? OR ? = ANY(ct.mentions))")
")"
"SELECT t.* FROM threads AS t
WHERE t.count_unread_comments > 0"))
(defn- get-unread-comment-threads
[cfg profile-id team-id]
(let [profile (-> (db/get cfg :profile {:id profile-id} ::db/remove-deleted false)
(profile/decode-row))
notify (or (-> profile :props :notifications :dashboard-comments) :all)
result (case notify
:all (db/exec! cfg [sql:unread-all-comment-threads-by-team profile-id team-id])
:partial (db/exec! cfg [sql:unread-partial-comment-threads-by-team profile-id team-id profile-id profile-id])
[])]
(into [] xf-decode-row result)))
(declare ^:private get-unread-comment-threads)
(def ^:private
schema:get-unread-comment-threads
@@ -309,8 +298,41 @@
{::doc/added "1.15"
::sm/params schema:get-unread-comment-threads}
[cfg {:keys [::rpc/profile-id team-id] :as params}]
(teams/check-read-permissions! cfg profile-id team-id)
(get-unread-comment-threads cfg profile-id team-id))
(db/run!
cfg
(fn [{:keys [::db/conn]}]
(teams/check-read-permissions! conn profile-id team-id)
(get-unread-comment-threads conn profile-id team-id))))
(def sql:unread-all-comment-threads-by-team
(str "WITH threads AS (" sql:comment-threads ")"
"SELECT * FROM threads WHERE count_unread_comments > 0 AND team_id = ?"))
;; The partial configuration retrieves only comments created by the user and
;; threads that mention the user.
(def sql:unread-partial-comment-threads-by-team
(str "WITH threads AS (" sql:comment-threads ")"
"SELECT * FROM threads
WHERE count_unread_comments > 0
AND team_id = ?
AND (owner_id = ? OR ? = ANY(mentions))"))
(defn- get-unread-comment-threads
[conn profile-id team-id]
(let [profile (-> (db/get conn :profile {:id profile-id})
(profile/decode-row))
notify (or (-> profile :props :notifications :dashboard-comments) :all)]
(case notify
:all
(->> (db/exec! conn [sql:unread-all-comment-threads-by-team profile-id team-id])
(into [] xf-decode-row))
:partial
(->> (db/exec! conn [sql:unread-partial-comment-threads-by-team profile-id team-id profile-id profile-id])
(into [] xf-decode-row))
[])))
;; --- COMMAND: Get Single Comment Thread
@@ -321,17 +343,16 @@
[:id ::sm/uuid]
[:share-id {:optional true} [:maybe ::sm/uuid]]])
(def ^:private sql:get-comment-thread
(get-comment-threads-sql "AND ct.file_id = ? AND ct.id = ?"))
(sv/defmethod ::get-comment-thread
{::doc/added "1.15"
::sm/params schema:get-comment-thread}
[cfg {:keys [::rpc/profile-id file-id id share-id] :as params}]
(db/run! cfg (fn [{:keys [::db/conn]}]
(files/check-comment-permissions! conn profile-id file-id share-id)
(some-> (db/exec-one! conn [sql:get-comment-thread profile-id file-id id])
(decode-row)))))
(let [sql (str "WITH threads AS (" sql:comment-threads ")"
"SELECT * FROM threads WHERE id = ? AND file_id = ?")]
(-> (db/exec-one! conn [sql profile-id id file-id])
(decode-row))))))
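The WITH-threads composition used above generalizes to any filter over columns the base query already selects; a hypothetical variant (not part of this changeset) would look like:
(def ^:private sql:comment-threads-by-project
  (str "WITH threads AS (" sql:comment-threads ")"
       "SELECT * FROM threads WHERE project_id = ?"))
(defn- get-comment-threads-by-project
  ;; parameter order: the placeholder inside the CTE (profile-id)
  ;; binds first, then the wrapper filter (project-id)
  [conn profile-id project-id]
  (->> (db/exec! conn [sql:comment-threads-by-project profile-id project-id])
       (into [] xf-decode-row)))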
;; --- COMMAND: Retrieve Comments

View File

@@ -7,7 +7,6 @@
(ns app.rpc.commands.demo
"A demo specific mutations."
(:require
[app.auth :refer [derive-password]]
[app.common.exceptions :as ex]
[app.common.time :as ct]
[app.config :as cf]
@@ -15,6 +14,7 @@
[app.loggers.audit :as audit]
[app.rpc :as-alias rpc]
[app.rpc.commands.auth :as auth]
[app.rpc.commands.profile :as profile]
[app.rpc.doc :as-alias doc]
[app.util.services :as sv]
[buddy.core.codecs :as bc]
@@ -45,9 +45,8 @@
params {:email email
:fullname fullname
:is-active true
:is-demo true
:deleted-at (ct/in-future (cf/get-deletion-delay))
:password (derive-password password)
:password (profile/derive-password cfg password)
:props {}}
profile (db/tx-run! cfg (fn [{:keys [::db/conn]}]
(->> (auth/create-profile! conn params)

View File

@@ -7,7 +7,6 @@
(ns app.rpc.commands.feedback
"A general purpose feedback module."
(:require
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.schema :as sm]
[app.config :as cf]
@@ -22,11 +21,8 @@
(def ^:private schema:send-user-feedback
[:map {:title "send-user-feedback"}
[:subject [:string {:max 500}]]
[:content [:string {:max 2500}]]
[:type {:optional true} :string]
[:error-href {:optional true} [:string {:max 2500}]]
[:error-report {:optional true} :string]])
[:subject [:string {:max 400}]]
[:content [:string {:max 2500}]]])
(sv/defmethod ::send-user-feedback
{::doc/added "1.18"
@@ -43,26 +39,16 @@
(defn- send-user-feedback!
[pool profile params]
(let [destination
(or (cf/get :user-feedback-destination)
;; LEGACY
(cf/get :feedback-destination))
attachments
(d/without-nils
{"error-report.txt" (:error-report params)})]
(let [dest (or (cf/get :user-feedback-destination)
;; LEGACY
(cf/get :feedback-destination))]
(eml/send! {::eml/conn pool
::eml/factory eml/user-feedback
:from (cf/get :smtp-default-from)
:to destination
:from dest
:to dest
:profile profile
:reply-to (:email profile)
:email (:email profile)
:attachments attachments
:feedback-subject (:subject params)
:feedback-type (:type params "not-specified")
:feedback-content (:content params)
:feedback-error-href (:error-href params)
:profile profile})
:subject (:subject params)
:content (:content params)})
nil))

View File

@@ -17,7 +17,6 @@
[app.common.schema :as sm]
[app.common.schema.desc-js-like :as-alias smdj]
[app.common.time :as ct]
[app.common.transit :as t]
[app.common.types.components-list :as ctkl]
[app.common.types.file :as ctf]
[app.common.uri :as uri]
@@ -26,11 +25,8 @@
[app.db.sql :as-alias sql]
[app.features.fdata :as feat.fdata]
[app.features.logical-deletion :as ldel]
[app.http.sse :as sse]
[app.loggers.audit :as-alias audit]
[app.loggers.webhooks :as-alias webhooks]
[app.msgbus :as mbus]
[app.redis :as rds]
[app.rpc :as-alias rpc]
[app.rpc.commands.projects :as projects]
[app.rpc.commands.teams :as teams]
@@ -39,7 +35,6 @@
[app.rpc.helpers :as rph]
[app.rpc.permissions :as perms]
[app.util.blob :as blob]
[app.util.events :as events]
[app.util.pointer-map :as pmap]
[app.util.services :as sv]
[app.worker :as wrk]
@@ -79,16 +74,13 @@
;; --- FILE PERMISSIONS
(def ^:private sql:file-permissions
"select fpr.is_owner,
fpr.is_admin,
fpr.can_edit
from file_profile_rel as fpr
inner join file as f on (f.id = fpr.file_id)
where fpr.file_id = ?
and fpr.profile_id = ?
and f.deleted_at is null
union all
select tpr.is_owner,
tpr.is_admin,
@@ -98,7 +90,6 @@
inner join file as f on (p.id = f.project_id)
where f.id = ?
and tpr.profile_id = ?
and f.deleted_at is null
union all
select ppr.is_owner,
ppr.is_admin,
@@ -106,8 +97,7 @@
from project_profile_rel as ppr
inner join file as f on (f.project_id = ppr.project_id)
where f.id = ?
and ppr.profile_id = ?
and f.deleted_at is null")
and ppr.profile_id = ?")
(defn get-file-permissions
[conn profile-id file-id]
@@ -201,7 +191,7 @@
(def schema:permissions-mixin
[:map {:title "PermissionsMixin"}
[:permissions perms/schema:permissions]])
[:permissions ::perms/permissions]])
(def schema:file-with-permissions
[:merge {:title "FileWithPermissions"}
@@ -212,7 +202,8 @@
schema:get-file
[:map {:title "get-file"}
[:features {:optional true} ::cfeat/features]
[:id ::sm/uuid]])
[:id ::sm/uuid]
[:project-id {:optional true} ::sm/uuid]])
(defn get-minimal-file
[cfg id & {:as opts}]
@@ -265,23 +256,13 @@
(cfeat/check-client-features! (:features params))
(cfeat/check-file-features! (:features file)))
(as-> file file
;; This operation is needed for backward compatibility with
;; frontends that do not support the pointer-map resolution
;; mechanism; this just resolves the pointers on the backend and
;; returns a complete file
(if (and (contains? (:features file) "fdata/pointer-map")
(not (contains? (:features params) "fdata/pointer-map")))
(feat.fdata/realize-pointers cfg file)
file)
;; This operation is needed for backward compatibility with
;; frontends that do not support the objects-map mechanism; this
;; just converts all objects-map instances to plain maps
(if (and (contains? (:features file) "fdata/objects-map")
(not (contains? (:features params) "fdata/objects-map")))
(feat.fdata/realize-objects cfg file)
file)))))
;; This operation is needed for backward compatibility with frontends that
;; do not support the pointer-map resolution mechanism; this just resolves the
;; pointers on the backend and returns a complete file.
(if (and (contains? (:features file) "fdata/pointer-map")
(not (contains? (:features params) "fdata/pointer-map")))
(feat.fdata/realize-pointers cfg file)
file))))
;; --- COMMAND QUERY: get-file-fragment (by id)
@@ -290,7 +271,7 @@
[:id ::sm/uuid]
[:file-id ::sm/uuid]
[:created-at ::ct/inst]
[:content ::sm/any]])
[:data any?]])
(def schema:get-file-fragment
[:map {:title "get-file-fragment"}
@@ -355,8 +336,9 @@
::sm/params schema:get-project-files
::sm/result schema:files}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id project-id]}]
(projects/check-read-permissions! pool profile-id project-id)
(get-project-files pool project-id))
(dm/with-open [conn (db/open pool)]
(projects/check-read-permissions! conn profile-id project-id)
(get-project-files conn project-id)))
;; --- COMMAND QUERY: has-file-libraries
@@ -390,39 +372,6 @@
(:has-libraries row)))
;; --- COMMAND QUERY: get-library-usage
(declare get-library-usage)
(def schema:get-library-usage
[:map {:title "get-library-usage"}
[:file-id ::sm/uuid]])
(sv/defmethod ::get-library-usage
"Gets the number of files that use the specified library."
{::doc/added "2.10.0"
::sm/params schema:get-library-usage
::sm/result ::sm/int}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id]}]
(dm/with-open [conn (db/open pool)]
(check-read-permissions! pool profile-id file-id)
(get-library-usage conn file-id)))
(def ^:private sql:get-library-usage
"SELECT COUNT(*) AS used
FROM file_library_rel AS flr
JOIN file AS fl ON (flr.library_file_id = fl.id)
WHERE flr.library_file_id = ?::uuid
AND (fl.deleted_at IS NULL OR
fl.deleted_at > now())")
(defn- get-library-usage
[conn file-id]
(let [row (db/exec-one! conn [sql:get-library-usage file-id])]
{:used-in (:used row)}))
;; --- QUERY COMMAND: get-page
(defn- prune-objects
@@ -514,136 +463,66 @@
;; --- COMMAND QUERY: get-team-shared-files
(defn- get-components-with-variants
"Return a set with all the variant-ids, and a list of components, but
with only one component by variant.
Returns a vector of unique components and a set of all variant ids"
[fdata]
(loop [variant-ids #{}
components' []
components (ctkl/components-seq fdata)]
(if-let [{:keys [variant-id] :as component} (first components)]
(cond
(nil? variant-id)
(recur variant-ids
(conj components' component)
(rest components))
(contains? variant-ids variant-id)
(recur variant-ids
components'
(rest components))
:else
(recur (conj variant-ids variant-id)
(conj components' component)
(rest components)))
[(d/index-by :id components') variant-ids])))
(defn- sample-assets
[assets limit]
(let [assets (into [] (map val) assets)]
{:count (count assets)
:sample (->> assets
(sort-by #(str/lower (:name %)))
(into [] (take limit)))}))
(defn- calculate-library-summary
"Calculate the file library summary (counters and samples)"
[{:keys [data] :as file}]
(let [load-objects
(fn [sample]
(mapv #(ctf/load-component-objects data %) sample))
[components variant-ids]
(get-components-with-variants data)
components-sample
(-> (sample-assets components 4)
(update :sample load-objects))]
{:components components-sample
:variants {:count (count variant-ids)}
:colors (sample-assets (:colors data) 3)
:typographies (sample-assets (:typographies data) 3)}))
(def ^:private file-summary-cache-key-ttl
(ct/duration {:days 30}))
(def file-summary-cache-key-prefix
"penpot.library-summary.")
(defn- get-file-with-summary
"Get a file without data with a summary of its local library content"
[cfg id]
(let [get-from-cache
(fn [{:keys [::rds/conn]} cache-key]
(when-let [result (rds/get conn cache-key)]
(let [file (bfc/get-file cfg id :load-data? false)
summary (t/decode-str result)]
(-> (assoc file :library-summary summary)
(dissoc :data)))))
calculate-from-db
(fn []
(let [file (bfc/get-file cfg id)
result (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
(calculate-library-summary file))]
(-> file
(assoc :library-summary result)
(dissoc :legacy-data)
(dissoc :data))))
persist-to-cache
(fn [{:keys [::rds/conn]} data cache-key]
(rds/set conn cache-key (t/encode-str data)
(rds/build-set-args {:ex file-summary-cache-key-ttl})))]
(if (contains? cf/flags :redis-cache)
(let [cache-key (str file-summary-cache-key-prefix id)]
(or (rds/run! cfg get-from-cache cache-key)
(let [file (calculate-from-db)]
(rds/run! cfg persist-to-cache (:library-summary file) cache-key)
file)))
(calculate-from-db))))
(def ^:private sql:team-shared-files
"WITH file_library_agg AS (
SELECT flr.file_id,
coalesce(array_agg(flr.library_file_id) filter (WHERE flr.library_file_id IS NOT NULL), '{}') AS library_file_ids
FROM file_library_rel flr
GROUP BY flr.file_id
)
"select f.id,
f.revn,
f.vern,
f.data,
f.project_id,
f.created_at,
f.modified_at,
f.data_backend,
f.data_ref_id,
f.name,
f.version,
f.is_shared,
ft.media_id,
p.team_id
from file as f
inner join project as p on (p.id = f.project_id)
left join file_thumbnail as ft on (ft.file_id = f.id and ft.revn = f.revn and ft.deleted_at is null)
where f.is_shared = true
and f.deleted_at is null
and p.deleted_at is null
and p.team_id = ?
order by f.modified_at desc")
SELECT f.id,
fla.library_file_ids,
ft.media_id AS thumbnail_id
FROM file AS f
INNER JOIN project AS p ON (p.id = f.project_id)
LEFT JOIN file_thumbnail AS ft ON (ft.file_id = f.id AND ft.revn = f.revn AND ft.deleted_at IS NULL)
LEFT JOIN file_library_agg AS fla ON (fla.file_id = f.id)
WHERE f.is_shared = true
AND f.deleted_at IS NULL
AND p.deleted_at IS NULL
AND p.team_id = ?
ORDER BY f.modified_at DESC")
(defn- get-library-summary
[cfg {:keys [id data] :as file}]
(letfn [(assets-sample [assets limit]
(let [sorted-assets (->> (vals assets)
(sort-by #(str/lower (:name %))))]
{:count (count sorted-assets)
:sample (into [] (take limit sorted-assets))}))]
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
(let [load-objects (fn [component]
(ctf/load-component-objects data component))
components-sample (-> (assets-sample (ctkl/components data) 4)
(update :sample #(mapv load-objects %)))]
{:components components-sample
:media (assets-sample (:media data) 3)
:colors (assets-sample (:colors data) 3)
:typographies (assets-sample (:typographies data) 3)}))))
(defn- get-team-shared-files
[{:keys [::db/conn] :as cfg} {:keys [team-id profile-id]}]
(teams/check-read-permissions! conn profile-id team-id)
(let [process-row
(fn [{:keys [id library-file-ids]}]
(let [file (get-file-with-summary cfg id)]
(assoc file :library-file-ids (db/decode-pgarray library-file-ids #{}))))
xform
(map process-row)]
(->> (db/plan conn [sql:team-shared-files team-id] {:fetch-size 1})
(transduce xform conj #{}))))
(->> (db/exec! conn [sql:team-shared-files team-id])
(into #{} (comp
;; NOTE: this decode operation is a workaround for a
;; quick fix; it should be approached with a more
;; efficient implementation, since for now it loads all
;; the files in memory.
(map (partial bfc/decode-file cfg))
(map (fn [row]
(if-let [media-id (:media-id row)]
(-> row
(dissoc :media-id)
(assoc :thumbnail-id media-id))
(dissoc row :media-id))))
(map #(assoc % :library-summary (get-library-summary cfg %)))
(map #(dissoc % :data))))))
(def ^:private schema:get-team-shared-files
[:map {:title "get-team-shared-files"}
@@ -656,28 +535,6 @@
[cfg {:keys [::rpc/profile-id] :as params}]
(db/tx-run! cfg get-team-shared-files (assoc params :profile-id profile-id)))
;; --- COMMAND QUERY: get-file-summary
(defn- get-file-summary
[cfg id]
(let [file (get-file-with-summary cfg id)]
(-> (:library-summary file)
(assoc :name (:name file)))))
(def ^:private
schema:get-file-summary
[:map {:title "get-file-summary"}
[:id ::sm/uuid]])
(sv/defmethod ::get-file-summary
"Retrieve a file summary by its ID. Only authenticated users."
{::doc/added "1.20"
::sm/params schema:get-file-summary}
[cfg {:keys [::rpc/profile-id id] :as params}]
(check-read-permissions! cfg profile-id id)
(get-file-summary cfg id))
;; --- COMMAND QUERY: get-file-libraries
(def ^:private schema:get-file-libraries
@@ -766,61 +623,46 @@
(get-team-recent-files conn team-id)))
;; --- COMMAND QUERY: get-team-deleted-files
;; --- COMMAND QUERY: get-file-summary
(def sql:team-deleted-files
"WITH deleted_files AS (
SELECT f.id,
f.revn,
f.vern,
f.project_id,
f.created_at,
f.modified_at,
f.name,
f.is_shared,
f.deleted_at AS will_be_deleted_at,
ft.media_id AS thumbnail_id,
row_number() OVER w AS row_num,
p.team_id
FROM file AS f
INNER JOIN project AS p ON (p.id = f.project_id)
LEFT JOIN file_thumbnail AS ft on (ft.file_id = f.id
AND ft.revn = f.revn
AND ft.deleted_at is null)
WHERE p.team_id = ?
AND (p.deleted_at > ?::timestamptz OR
f.deleted_at > ?::timestamptz)
WINDOW w AS (PARTITION BY f.project_id
ORDER BY f.modified_at DESC)
ORDER BY f.modified_at DESC
)
SELECT * FROM deleted_files")
(defn- get-file-summary
[{:keys [::db/conn] :as cfg} {:keys [profile-id id project-id] :as params}]
(check-read-permissions! conn profile-id id)
(let [team (teams/get-team conn
:profile-id profile-id
:project-id project-id
:file-id id)
(defn get-team-deleted-files
[conn team-id]
(let [now (ct/now)]
(db/exec! conn [sql:team-deleted-files team-id now now])))
file (bfc/get-file cfg id
:project-id project-id
:read-only? true)]
(def ^:private schema:get-team-deleted-files
[:map {:title "get-team-deleted-files"}
[:team-id ::sm/uuid]])
(-> (cfeat/get-team-enabled-features cf/flags team)
(cfeat/check-client-features! (:features params))
(cfeat/check-file-features! (:features file)))
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
{:name (:name file)
:components-count (count (ctkl/components-seq (:data file)))
:graphics-count (count (get-in file [:data :media] []))
:colors-count (count (get-in file [:data :colors] []))
:typography-count (count (get-in file [:data :typographies] []))})))
(sv/defmethod ::get-file-summary
"Retrieve a file summary by its ID. Only authenticated users."
{::doc/added "1.20"
::sm/params schema:get-file}
[cfg {:keys [::rpc/profile-id] :as params}]
(db/tx-run! cfg get-file-summary (assoc params :profile-id profile-id)))
(sv/defmethod ::get-team-deleted-files
{::doc/added "2.12"
::sm/params schema:get-team-deleted-files}
[cfg {:keys [::rpc/profile-id team-id]}]
(db/run! cfg (fn [{:keys [::db/conn]}]
(teams/check-read-permissions! conn profile-id team-id)
(get-team-deleted-files conn team-id))))
;; --- COMMAND QUERY: get-file-info
(defn- get-file-info
[{:keys [::db/conn] :as cfg} {:keys [id] :as params}]
(db/get conn :file
{:id id}
{::sql/columns [:id :deleted-at]}))
(db/get* conn :file
{:id id}
{::sql/columns [:id]}))
(sv/defmethod ::get-file-info
"Retrieve minimal file info by its ID."
@@ -1032,14 +874,7 @@
(let [team (teams/get-team conn
:profile-id profile-id
:file-id id)
file (mark-file-deleted conn team id)
msgbus (::mbus/msgbus cfg)]
(mbus/pub! msgbus
:topic id
:message {:type :file-deleted
:file-id id
:profile-id profile-id})
file (mark-file-deleted conn team id)]
(rph/with-meta (rph/wrap)
{::audit/props {:project-id (:project-id file)
@@ -1072,7 +907,6 @@
[:library-id ::sm/uuid]])
(sv/defmethod ::link-file-to-library
"Link a file to a library. Returns the recursive list of libraries used by that library"
{::doc/added "1.17"
::webhooks/event? true
::sm/params schema:link-file-to-library}
@@ -1086,8 +920,7 @@
(fn [{:keys [::db/conn]}]
(check-edition-permissions! conn profile-id file-id)
(check-edition-permissions! conn profile-id library-id)
(link-file-to-library conn params)
(bfc/get-libraries cfg [library-id]))))
(link-file-to-library conn params))))
;; --- MUTATION COMMAND: unlink-file-from-library
@@ -1161,118 +994,3 @@
(check-edition-permissions! conn profile-id file-id)
(-> (ignore-sync conn params)
(update :features db/decode-pgarray #{})))
;; --- MUTATION COMMAND: delete-files-immediately
(def ^:private sql:delete-team-files
"UPDATE file AS uf SET deleted_at = ?::timestamptz
FROM (
SELECT f.id
FROM file AS f
JOIN project AS p ON (p.id = f.project_id)
JOIN team AS t ON (t.id = p.team_id)
WHERE t.deleted_at IS NULL
AND t.id = ?
AND f.id = ANY(?::uuid[])
) AS subquery
WHERE uf.id = subquery.id
RETURNING uf.id, uf.deleted_at;")
(def ^:private schema:permanently-delete-team-files
[:map {:title "permanently-delete-team-files"}
[:team-id ::sm/uuid]
[:ids [::sm/set ::sm/uuid]]])
(sv/defmethod ::permanently-delete-team-files
"Mark the specified files to be deleted immediatelly on the
specified team. The team-id on params will be used to filter and
check writable permissons on team."
{::doc/added "2.12"
::sm/params schema:permanently-delete-team-files
::db/transaction true}
[{:keys [::db/conn]} {:keys [::rpc/profile-id ::rpc/request-at team-id ids]}]
(teams/check-edition-permissions! conn profile-id team-id)
(reduce (fn [acc {:keys [id deleted-at]}]
(wrk/submit! {::db/conn conn
::wrk/task :delete-object
::wrk/params {:object :file
:deleted-at deleted-at
:id id}})
(conj acc id))
#{}
(db/plan conn [sql:delete-team-files request-at team-id
(db/create-array conn "uuid" ids)])))
;; --- MUTATION COMMAND: restore-files-immediately
(def ^:private sql:resolve-editable-files
"SELECT f.id
FROM file AS f
JOIN project AS p ON (p.id = f.project_id)
JOIN team AS t ON (t.id = p.team_id)
WHERE t.deleted_at IS NULL
AND t.id = ?
AND f.id = ANY(?::uuid[])")
(defn- restore-file
[conn file-id]
(db/update! conn :file
{:deleted-at nil
:has-media-trimmed false}
{:id file-id}
{::db/return-keys false})
(db/update! conn :file-media-object
{:deleted-at nil}
{:file-id file-id}
{::db/return-keys false})
(db/update! conn :file-change
{:deleted-at nil}
{:file-id file-id}
{::db/return-keys false})
(db/update! conn :file-data
{:deleted-at nil}
{:file-id file-id}
{::db/return-keys false})
(db/update! conn :file-thumbnail
{:deleted-at nil}
{:file-id file-id}
{::db/return-keys false})
(db/update! conn :file-tagged-object-thumbnail
{:deleted-at nil}
{:file-id file-id}
{::db/return-keys false}))
(defn- restore-deleted-team-files
[{:keys [::db/conn]} {:keys [::rpc/profile-id team-id ids]}]
(teams/check-edition-permissions! conn profile-id team-id)
(reduce (fn [affected {:keys [id]}]
(let [index (inc (count affected))]
(events/tap :progress {:file-id id :index index :total (count ids)})
(restore-file conn id)
(conj affected id)))
#{}
(db/plan conn [sql:resolve-editable-files team-id
(db/create-array conn "uuid" ids)])))
(def ^:private schema:restore-deleted-team-files
[:map {:title "restore-deleted-team-files"}
[:team-id ::sm/uuid]
[:ids [::sm/set ::sm/uuid]]])
(sv/defmethod ::restore-deleted-team-files
"Removes the deletion mark from the specified files (and respective projects)."
{::doc/added "2.12"
::sse/stream? true
::sm/params schema:restore-deleted-team-files}
[cfg params]
(sse/response #(db/tx-run! cfg restore-deleted-team-files params)))

View File

@@ -46,7 +46,6 @@
(binding [pmap/*tracked* (pmap/create-tracked)
cfeat/*current* features]
(let [file (ctf/make-file {:id id
:project-id project-id
:name name
@@ -115,15 +114,14 @@
;; FIXME: IMPORTANT: this code can have race conditions; because
;; we have no locks for updating the team, creating two files
;; concurrently can lead to lost team feature updates
(when-let [features (-> features
(set/difference (:features team))
(set/difference cfeat/no-team-inheritable-features)
(not-empty))]
(let [features (-> features
(set/union (:features team))
(set/difference cfeat/no-team-inheritable-features)
(into-array))]
(let [features (->> features
(set/union (:features team))
(db/create-array conn "text"))]
(db/update! conn :team
{:features features}
{:id (:id team)}

View File

@@ -9,16 +9,13 @@
[app.binfile.common :as bfc]
[app.common.exceptions :as ex]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.db :as db]
[app.db.sql :as-alias sql]
[app.features.file-snapshots :as fsnap]
[app.features.logical-deletion :as ldel]
[app.main :as-alias main]
[app.msgbus :as mbus]
[app.rpc :as-alias rpc]
[app.rpc.commands.files :as files]
[app.rpc.commands.teams :as teams]
[app.rpc.doc :as-alias doc]
[app.rpc.quotes :as quotes]
[app.util.services :as sv]))
@@ -73,17 +70,10 @@
::db/transaction true}
[{:keys [::db/conn ::mbus/msgbus] :as cfg} {:keys [::rpc/profile-id file-id id] :as params}]
(files/check-edition-permissions! conn profile-id file-id)
(let [file (bfc/get-file cfg file-id)
team (teams/get-team conn
:profile-id profile-id
:file-id file-id)
delay (ldel/get-deletion-delay team)]
(let [file (bfc/get-file cfg file-id)]
(fsnap/create! cfg file
{:profile-id profile-id
:deleted-at (ct/in-future delay)
:created-by "system"})
(let [vern (fsnap/restore! cfg file-id id)]
;; Send to the clients a notification to reload the file
(mbus/pub! msgbus
@@ -125,20 +115,7 @@
:file-id (:file-id snapshot)
:snapshot-id id
:profile-id profile-id))
(when (and (some? (:locked-by snapshot))
(not= (:locked-by snapshot) profile-id))
(ex/raise :type :validation
:code :snapshot-is-locked
:file-id (:file-id snapshot)
:snapshot-id id
:profile-id profile-id))
(let [team (teams/get-team conn
:profile-id profile-id
:file-id (:file-id snapshot))
delay (ldel/get-deletion-delay team)]
(fsnap/delete! conn (assoc snapshot :deleted-at (ct/in-future delay))))))
(fsnap/delete! conn snapshot)))
;;; Lock/unlock version endpoints
@@ -146,6 +123,15 @@
[:map {:title "lock-file-snapshot"}
[:id ::sm/uuid]])
;; MOVE to fsnap
(defn- lock-file-snapshot!
[conn snapshot-id profile-id]
(db/update! conn :file-change
{:locked-by profile-id}
{:id snapshot-id}
{::db/return-keys false})
nil)
(sv/defmethod ::lock-file-snapshot
{::doc/added "1.20"
::sm/params schema:lock-file-snapshot
@@ -179,12 +165,22 @@
:profile-id profile-id
:locked-by (:locked-by snapshot)))
(fsnap/lock-by! conn id profile-id)))
(lock-file-snapshot! conn id profile-id)))
(def ^:private schema:unlock-file-snapshot
[:map {:title "unlock-file-snapshot"}
[:id ::sm/uuid]])
;; MOVE to fsnap
(defn- unlock-file-snapshot!
[conn snapshot-id]
(db/update! conn :file-change
{:locked-by nil}
{:id snapshot-id}
{::db/return-keys false})
nil)
(sv/defmethod ::unlock-file-snapshot
{::doc/added "1.20"
::sm/params schema:unlock-file-snapshot
@@ -217,4 +213,4 @@
:snapshot-id id
:profile-id profile-id))
(fsnap/unlock! conn id)))
(unlock-file-snapshot! conn id)))

View File

@@ -272,7 +272,7 @@
[:map {:title "create-file-object-thumbnail"}
[:file-id ::sm/uuid]
[:object-id [:string {:max 250}]]
[:media media/schema:upload]
[:media ::media/upload]
[:tag {:optional true} [:string {:max 50}]]])
(sv/defmethod ::create-file-object-thumbnail
@@ -383,7 +383,7 @@
[:map {:title "create-file-thumbnail"}
[:file-id ::sm/uuid]
[:revn ::sm/int]
[:media media/schema:upload]])
[:media ::media/upload]])
(sv/defmethod ::create-file-thumbnail
"Creates or updates the file thumbnail. Mainly used for paint the

View File

@@ -20,6 +20,7 @@
[app.config :as cf]
[app.db :as db]
[app.features.fdata :as fdata]
[app.features.file-migrations :as feat.fmigr]
[app.features.file-snapshots :as fsnap]
[app.features.logical-deletion :as ldel]
[app.http.errors :as errors]
@@ -27,7 +28,6 @@
[app.loggers.webhooks :as webhooks]
[app.metrics :as mtx]
[app.msgbus :as mbus]
[app.redis :as rds]
[app.rpc :as-alias rpc]
[app.rpc.climit :as climit]
[app.rpc.commands.files :as files]
@@ -37,7 +37,9 @@
[app.util.blob :as blob]
[app.util.pointer-map :as pmap]
[app.util.services :as sv]
[clojure.set :as set]))
[app.worker :as wrk]
[clojure.set :as set]
[promesa.exec :as px]))
(declare ^:private get-lagged-changes)
(declare ^:private send-notifications!)
@@ -45,7 +47,6 @@
(declare ^:private update-file*)
(declare ^:private process-changes-and-validate)
(declare ^:private take-snapshot?)
(declare ^:private invalidate-caches!)
;; PUBLIC API; intended to be used outside of this module
(declare update-file!)
@@ -63,10 +64,10 @@
[:revn {:min 0} ::sm/int]
[:vern {:min 0} ::sm/int]
[:features {:optional true} ::cfeat/features]
[:changes {:optional true} [:vector cpc/schema:change]]
[:changes {:optional true} [:vector ::cpc/change]]
[:changes-with-metadata {:optional true}
[:vector [:map
[:changes [:vector cpc/schema:change]]
[:changes [:vector ::cpc/change]]
[:hint-origin {:optional true} :keyword]
[:hint-events {:optional true} [:vector [:string {:max 250}]]]]]]
[:skip-validate {:optional true} ::sm/boolean]])
@@ -75,7 +76,7 @@
schema:update-file-result
[:vector {:title "update-file-result"}
[:map
[:changes [:vector cpc/schema:change]]
[:changes [:vector ::cpc/change]]
[:file-id ::sm/uuid]
[:id ::sm/uuid]
[:revn {:min 0} ::sm/int]
@@ -160,6 +161,7 @@
tpoint (ct/tpoint)]
(when (not= (:vern params)
(:vern file))
(ex/raise :type :validation
@@ -176,16 +178,15 @@
:context {:incoming-revn (:revn params)
:stored-revn (:revn file)}))
;; When the newly computed features do not match exactly the
;; features defined on the team row, we update it
(when-let [features (-> features
(set/difference (:features team))
(set/difference cfeat/no-team-inheritable-features)
(not-empty))]
(let [features (-> features
(set/union (:features team))
(set/difference cfeat/no-team-inheritable-features)
(into-array))]
(let [features (->> features
(set/union (:features team))
(db/create-array conn "text"))]
(db/update! conn :team
{:features features}
{:id (:id team)}
@@ -209,7 +210,7 @@
Follow the inner implementation to `update-file-data!` function.
Only intended for internal use on this module."
[{:keys [::db/conn ::timestamp] :as cfg}
[{:keys [::db/conn ::wrk/executor ::timestamp] :as cfg}
{:keys [profile-id file team features changes session-id skip-validate] :as params}]
(binding [pmap/*tracked* (pmap/create-tracked)
@@ -224,12 +225,19 @@
revn
(get file :revn)
;; We create a new lexical scope to clearly delimit the result of
;; executing this update-file operation and all its side effects
file
(binding [cfeat/*current* features
cfeat/*previous* (:features file)]
(update-file-data! cfg file
process-changes-and-validate
changes skip-validate))
(px/invoke! executor
(fn []
;; Process the file data on a separate thread
;; to avoid doing the CPU-intensive operation
;; on a vthread.
(binding [cfeat/*current* features
cfeat/*previous* (:features file)]
(update-file-data! cfg file
process-changes-and-validate
changes skip-validate))))
deleted-at
(ct/plus timestamp (ct/duration {:hours 1}))]
@@ -240,7 +248,6 @@
(fsnap/create! cfg file
{:label label
:created-by "system"
:deleted-at deleted-at
:profile-id profile-id
:session-id session-id})))
@@ -263,9 +270,6 @@
(persist-file! cfg file)
(when (contains? cf/flags :redis-cache)
(invalidate-caches! cfg file))
;; Send asynchronous notifications
(send-notifications! cfg params file)
@@ -277,10 +281,20 @@
:project-id (:project-id file)
:team-id (:team-id file)}}))))
;; FIXME: DEPRECATED
(defn update-file!
"A public api that allows apply a transformation to a file with all context setup."
[{:keys [::db/conn] :as cfg} file-id update-fn & args]
(let [file (get-file cfg file-id)
file (apply update-file-data! cfg file update-fn args)]
(feat.fmigr/upsert-migrations! conn file)
(persist-file! cfg file)))
(defn get-file
"Get not-decoded file, only decodes the features set."
[cfg id]
(bfc/get-file cfg id :decode? false :lock-for-share? true))
;; FIXME: lock for share
(bfc/get-file cfg id :decode? false :lock-for-update? true))
(defn persist-file!
"Function responsible of persisting already encoded file. Should be
@@ -306,17 +320,11 @@
(bfc/update-file! cfg file)))
(defn- invalidate-caches!
[cfg {:keys [id] :as file}]
(rds/run! cfg (fn [{:keys [::rds/conn]}]
(let [key (str files/file-summary-cache-key-prefix id)]
(rds/del conn key)))))
(defn- attach-snapshot
"Attach snapshot data to the file. This should be called before the
upcoming file operations are applied to the file."
[cfg migrated? file]
(let [snapshot (if migrated? file (fdata/realize cfg file))]
[file migrated? cfg]
(let [snapshot (if migrated? file (update file :data (partial fdata/realize cfg)))]
(assoc file ::snapshot snapshot)))
(defn- update-file-data!
@@ -348,14 +356,14 @@
;; before applying the migration to the file
file
(cond-> file
;; need-migration?
;; (->> (fdata/realize cfg))
need-migration?
(->> (fdata/realize cfg))
need-migration?
(fmg/migrate-file libs)
take-snapshot?
(->> (attach-snapshot cfg need-migration?)))]
(attach-snapshot need-migration? cfg))]
(apply update-fn cfg file args)))
@@ -385,6 +393,7 @@
(not skip-validate))
(bfc/get-resolved-file-libraries cfg file))
;; The main purpose of this atom is to provide a contextual state
;; for the changes subsystem where optionally some hints can
;; be provided for the changes processing. Right now we are

View File

@@ -26,7 +26,9 @@
[app.rpc.helpers :as rph]
[app.rpc.quotes :as quotes]
[app.storage :as sto]
[app.util.services :as sv]))
[app.util.services :as sv]
[app.worker :as-alias wrk]
[promesa.exec :as px]))
(def valid-weight #{100 200 300 400 500 600 700 800 900 950})
(def valid-style #{"normal" "italic"})
@@ -35,13 +37,14 @@
(def ^:private
schema:get-font-variants
[:and
[:map {:title "get-font-variants"}
[:team-id {:optional true} ::sm/uuid]
[:file-id {:optional true} ::sm/uuid]
[:project-id {:optional true} ::sm/uuid]
[:share-id {:optional true} ::sm/uuid]]
[::sm/contains-any #{:team-id :file-id :project-id}]])
[:schema {:title "get-font-variants"}
[:and
[:map
[:team-id {:optional true} ::sm/uuid]
[:file-id {:optional true} ::sm/uuid]
[:project-id {:optional true} ::sm/uuid]
[:share-id {:optional true} ::sm/uuid]]
[::sm/contains-any #{:team-id :file-id :project-id}]]])
(sv/defmethod ::get-font-variants
{::doc/added "1.18"
@@ -103,7 +106,7 @@
(create-font-variant cfg (assoc params :profile-id profile-id)))))
(defn create-font-variant
[{:keys [::sto/storage ::db/conn]} {:keys [data] :as params}]
[{:keys [::sto/storage ::db/conn ::wrk/executor]} {:keys [data] :as params}]
(letfn [(generate-missing! [data]
(let [data (media/run {:cmd :generate-fonts :input data})]
(when (and (not (contains? data "font/otf"))
@@ -155,7 +158,7 @@
:otf-file-id (:id otf)
:ttf-file-id (:id ttf)}))]
(let [data (generate-missing! data)
(let [data (px/invoke! executor (partial generate-missing! data))
assets (persist-fonts-files! data)
result (insert-font-variant! assets)]
(vary-meta result assoc ::audit/replace-props (update params :data (comp vec keys))))))

View File

@@ -38,7 +38,7 @@
::doc/added "1.15"
::doc/module :auth
::sm/params schema:login-with-ldap}
[{:keys [::ldap/provider] :as cfg} params]
[{:keys [::setup/props ::ldap/provider] :as cfg} params]
(when-not provider
(ex/raise :type :restriction
:code :ldap-not-initialized
@@ -60,11 +60,11 @@
;; user comes from team-invitation process; in this case,
;; regenerate token and send back to the user a new invitation
;; token (and mark current session as logged).
(let [claims (tokens/verify cfg {:token token :iss :team-invitation})
(let [claims (tokens/verify props {:token token :iss :team-invitation})
claims (assoc claims
:member-id (:id profile)
:member-email (:email profile))
token (tokens/generate cfg claims)]
token (tokens/generate props claims)]
(-> {:invitation-token token}
(rph/with-transform (session/create-fn cfg (:id profile)))
(rph/with-meta {::audit/props (:props profile)

View File

@@ -28,14 +28,16 @@
[app.setup :as-alias setup]
[app.setup.templates :as tmpl]
[app.storage.tmp :as tmp]
[app.util.services :as sv]))
[app.util.services :as sv]
[app.worker :as-alias wrk]
[promesa.exec :as px]))
;; --- COMMAND: Duplicate File
(defn duplicate-file
[{:keys [::db/conn ::bfc/timestamp] :as cfg} {:keys [profile-id file-id name reset-shared-flag] :as params}]
(let [;; We don't touch the original file on duplication
file (bfc/get-file cfg file-id :realize? true)
file (bfc/get-file cfg file-id)
project-id (:project-id file)
file (-> file
(update :id bfc/lookup-index)
@@ -311,14 +313,15 @@
;; Update the modification date of the all affected projects
;; ensuring that the destination project is the most recent one.
(loop [project-ids (into (list project-id) source)
modified-at (ct/now)]
(when-let [project-id (first project-ids)]
(db/update! conn :project
{:modified-at modified-at}
{:id project-id})
(recur (rest project-ids)
(ct/plus modified-at 10))))
(doseq [project-id (into (list project-id) source)]
;; NOTE: as this is executed on a virtual thread, sleeping does
;; not cause major issues, and allows an easy way to set a
;; truly different modification date for each file.
(px/sleep 10)
(db/update! conn :project
{:modified-at (ct/now)}
{:id project-id}))
nil))
@@ -393,7 +396,12 @@
;; --- COMMAND: Clone Template
(defn clone-template
[{:keys [::db/pool] :as cfg} {:keys [project-id profile-id] :as params} template]
[{:keys [::db/pool ::wrk/executor] :as cfg} {:keys [project-id profile-id] :as params} template]
;; NOTE: the import process performs some operations
;; that are not very friendly with virtual threads, so to
;; avoid unexpected blocking of other concurrent operations
;; we dispatch that operation to a dedicated executor.
(let [template (tmp/tempfile-from template
:prefix "penpot.template."
:suffix ""
@@ -411,8 +419,8 @@
(assoc ::bfc/features (cfeat/get-team-enabled-features cf/flags team)))
result (if (= format :binfile-v3)
(bf.v3/import-files! cfg)
(bf.v1/import-files! cfg))]
(px/invoke! executor (partial bf.v3/import-files! cfg))
(px/invoke! executor (partial bf.v1/import-files! cfg)))]
(db/tx-run! cfg
(fn [{:keys [::db/conn] :as cfg}]

View File

@@ -24,8 +24,10 @@
[app.storage :as sto]
[app.storage.tmp :as tmp]
[app.util.services :as sv]
[app.worker :as-alias wrk]
[cuerdas.core :as str]
[datoteka.io :as io]))
[datoteka.io :as io]
[promesa.exec :as px]))
(def default-max-file-size
(* 1024 1024 10)) ; 10 MiB
@@ -46,7 +48,7 @@
[:file-id ::sm/uuid]
[:is-local ::sm/boolean]
[:name [:string {:max 250}]]
[:content media/schema:upload]])
[:content ::media/upload]])
(sv/defmethod ::upload-file-media-object
{::doc/added "1.17"
@@ -151,9 +153,9 @@
(assoc ::image (process-main-image info)))))
(defn- create-file-media-object
[{:keys [::sto/storage ::db/conn] :as cfg}
[{:keys [::sto/storage ::db/conn ::wrk/executor] :as cfg}
{:keys [id file-id is-local name content]}]
(let [result (process-image content)
(let [result (px/invoke! executor (partial process-image content))
image (sto/put-object! storage (::image result))
thumb (when-let [params (::thumb result)]
(sto/put-object! storage params))]

View File

@@ -30,13 +30,16 @@
[app.tokens :as tokens]
[app.util.services :as sv]
[app.worker :as wrk]
[cuerdas.core :as str]))
[cuerdas.core :as str]
[promesa.exec :as px]))
(declare check-profile-existence!)
(declare decode-row)
(declare derive-password)
(declare filter-props)
(declare get-profile)
(declare strip-private-attrs)
(declare verify-password)
(def schema:props-notifications
[:map {:title "props-notifications"}
@@ -107,9 +110,7 @@
(defn get-profile
"Get profile by id. Throws not-found exception if no profile found."
[conn id & {:as opts}]
;; NOTE: We need to set ::db/remove-deleted to false because demo profiles
;; are created with a deleted-at value already set
(-> (db/get-by-id conn :profile id (assoc opts ::db/remove-deleted false))
(-> (db/get-by-id conn :profile id opts)
(decode-row)))
;; --- MUTATION: Update Profile (own)
@@ -130,7 +131,9 @@
;; NOTE: we need to retrieve the profile regardless of whether we
;; use it or not, for explicit locking and to avoid concurrent
;; updates of the same row/object.
(let [profile (get-profile conn profile-id ::db/for-update true)
(let [profile (-> (db/get-by-id conn :profile profile-id ::sql/for-update true)
(decode-row))
;; Update the profile map with direct params
profile (-> profile
(assoc :fullname fullname)
@@ -140,9 +143,9 @@
(db/update! conn :profile
{:fullname fullname
:lang lang
:theme theme}
{:id profile-id}
{::db/return-keys false})
:theme theme
:props (db/tjson (:props profile))}
{:id profile-id})
(-> profile
(strip-private-attrs)
@@ -191,7 +194,7 @@
[{:keys [::db/conn] :as cfg} {:keys [profile-id old-password] :as params}]
(let [profile (db/get-by-id conn :profile profile-id ::sql/for-update true)]
(when (and (not= (:password profile) "!")
(not (:valid (auth/verify-password old-password (:password profile)))))
(not (:valid (verify-password cfg old-password (:password profile)))))
(ex/raise :type :validation
:code :old-password-not-match))
profile))
@@ -200,7 +203,7 @@
[{:keys [::db/conn] :as cfg} {:keys [id password] :as profile}]
(when-not (db/read-only? conn)
(db/update! conn :profile
{:password (auth/derive-password password)}
{:password (derive-password cfg password)}
{:id id})
nil))
@@ -225,22 +228,21 @@
(defn- update-notifications!
[{:keys [::db/conn] :as cfg} {:keys [profile-id dashboard-comments email-comments email-invites]}]
(let [profile
(get-profile conn profile-id ::db/for-update true)
(let [profile (get-profile conn profile-id)
notifications
{:dashboard-comments dashboard-comments
:email-comments email-comments
:email-invites email-invites}
:email-invites email-invites}]
props
(-> (get profile :props)
(assoc :notifications notifications))]
(db/update!
conn :profile
{:props
(-> (:props profile)
(assoc :notifications notifications)
(db/tjson))}
{:id (:id profile)})
(db/update! conn :profile
{:props (db/tjson props)}
{:id profile-id}
{::db/return-keys false})
nil))
;; --- MUTATION: Update Photo
@@ -251,7 +253,7 @@
(def ^:private
schema:update-profile-photo
[:map {:title "update-profile-photo"}
[:file media/schema:upload]])
[:file ::media/upload]])
(sv/defmethod ::update-profile-photo
{:doc/added "1.1"
@@ -302,11 +304,12 @@
:content-type (:mtype thumb)}))
(defn upload-photo
[{:keys [::sto/storage] :as cfg} {:keys [file] :as params}]
[{:keys [::sto/storage ::wrk/executor] :as cfg} {:keys [file] :as params}]
(let [params (-> cfg
(assoc ::climit/id [[:process-image/by-profile (:profile-id params)]
[:process-image/global]])
(assoc ::climit/label "upload-photo")
(assoc ::climit/executor executor)
(climit/invoke! generate-thumbnail! file))]
(sto/put-object! storage params)))
@@ -347,12 +350,12 @@
(defn- request-email-change!
[{:keys [::db/conn] :as cfg} {:keys [profile email] :as params}]
(let [token (tokens/generate cfg
(let [token (tokens/generate (::setup/props cfg)
{:iss :change-email
:exp (ct/in-future "15m")
:profile-id (:id profile)
:email email})
ptoken (tokens/generate cfg
ptoken (tokens/generate (::setup/props cfg)
{:iss :profile-identity
:profile-id (:id profile)
:exp (ct/in-future {:days 30})})]
@@ -408,7 +411,7 @@
(defn update-profile-props
[{:keys [::db/conn] :as cfg} profile-id props]
(let [profile (get-profile conn profile-id ::db/for-update true)
(let [profile (get-profile conn profile-id ::sql/for-update true)
props (reduce-kv (fn [props k v]
;; We don't accept namespaced keys
(if (simple-ident? k)
@@ -421,17 +424,16 @@
(db/update! conn :profile
{:props (db/tjson props)}
{:id profile-id}
{::db/return-keys false})
{:id profile-id})
(filter-props props)))
(sv/defmethod ::update-profile-props
{::doc/added "1.0"
::sm/params schema:update-profile-props
::db/transaction true}
::sm/params schema:update-profile-props}
[cfg {:keys [::rpc/profile-id props]}]
(update-profile-props cfg profile-id props))
(db/tx-run! cfg (fn [cfg]
(update-profile-props cfg profile-id props))))
;; --- MUTATION: Delete Profile
@@ -469,29 +471,6 @@
(-> (rph/wrap nil)
(rph/with-transform (session/delete-fn cfg)))))
(def sql:get-subscription-editors
"SELECT DISTINCT
p.id,
p.fullname AS name,
p.email AS email
FROM team_profile_rel AS tpr1
JOIN team as t
ON tpr1.team_id = t.id
JOIN team_profile_rel AS tpr2
ON (tpr1.team_id = tpr2.team_id)
JOIN profile AS p
ON (tpr2.profile_id = p.id)
WHERE tpr1.profile_id = ?
AND tpr1.is_owner IS true
AND tpr2.can_edit IS true
AND t.deleted_at IS NULL")
(sv/defmethod ::get-subscription-usage
{::doc/added "2.9"}
[cfg {:keys [::rpc/profile-id]}]
(let [editors (db/exec! cfg [sql:get-subscription-editors profile-id])]
{:editors editors}))
;; --- HELPERS
(def sql:owned-teams
@@ -549,6 +528,15 @@
[props]
(into {} (filter (fn [[k _]] (simple-ident? k))) props))
(defn derive-password
[{:keys [::wrk/executor]} password]
(when password
(px/invoke! executor (partial auth/derive-password password))))
(defn verify-password
[{:keys [::wrk/executor]} password password-data]
(px/invoke! executor (partial auth/verify-password password password-data)))
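These helpers offload the CPU-expensive hashing onto the worker executor via `px/invoke!`. A minimal usage sketch, assuming `cfg` carries a `::wrk/executor` as elsewhere in this namespace:

(let [password-data (derive-password cfg "s3cret")] ;; nil-safe: nil password yields nil
  ;; blocks the calling thread only while the executor runs the verification
  (verify-password cfg "s3cret" password-data))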
(defn decode-row
[{:keys [props] :as row}]
(cond-> row
View File
@@ -70,27 +70,7 @@
;; --- QUERY: Get projects
(def ^:private sql:projects
"SELECT p.*,
coalesce(tpp.is_pinned, false) as is_pinned,
(SELECT count(*) FROM file AS f
WHERE f.project_id = p.id
AND f.deleted_at is null) AS count,
(SELECT count(*) FROM file AS f
WHERE f.project_id = p.id) AS total_count
FROM project AS p
INNER JOIN team AS t ON (t.id = p.team_id)
LEFT JOIN team_project_profile_rel AS tpp
ON (tpp.project_id = p.id AND
tpp.team_id = p.team_id AND
tpp.profile_id = ?)
WHERE p.team_id = ?
AND t.deleted_at is null
ORDER BY p.modified_at DESC")
(defn get-projects
[conn profile-id team-id]
(db/exec! conn [sql:projects profile-id team-id]))
(declare get-projects)
(def ^:private schema:get-projects
[:map {:title "get-projects"}
@@ -98,11 +78,32 @@
(sv/defmethod ::get-projects
{::doc/added "1.18"
::doc/changes [["2.12" "This endpoint now return deleted but recoverable projects"]]
::sm/params schema:get-projects}
[cfg {:keys [::rpc/profile-id team-id]}]
(teams/check-read-permissions! cfg profile-id team-id)
(get-projects cfg profile-id team-id))
[{:keys [::db/pool]} {:keys [::rpc/profile-id team-id]}]
(dm/with-open [conn (db/open pool)]
(teams/check-read-permissions! conn profile-id team-id)
(get-projects conn profile-id team-id)))
(def sql:projects
"select p.*,
coalesce(tpp.is_pinned, false) as is_pinned,
(select count(*) from file as f
where f.project_id = p.id
and deleted_at is null) as count
from project as p
inner join team as t on (t.id = p.team_id)
left join team_project_profile_rel as tpp
on (tpp.project_id = p.id and
tpp.team_id = p.team_id and
tpp.profile_id = ?)
where p.team_id = ?
and p.deleted_at is null
and t.deleted_at is null
order by p.modified_at desc")
(defn get-projects
[conn profile-id team-id]
(db/exec! conn [sql:projects profile-id team-id]))
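A usage sketch for the restored query function, assuming an open connection; the `?` placeholders bind `profile-id` and then `team-id`, in that order:

;; One row per non-deleted project, newest first, with :is-pinned
;; resolved for the requesting profile.
(get-projects conn profile-id team-id)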
;; --- QUERY: Get all projects
View File
@@ -12,7 +12,7 @@
[app.common.features :as cfeat]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.types.team :as types.team]
[app.common.types.team :as tt]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
@@ -37,14 +37,14 @@
;; --- Helpers & Specs
(def ^:private sql:team-permissions
"SELECT tpr.is_owner,
"select tpr.is_owner,
tpr.is_admin,
tpr.can_edit
FROM team_profile_rel AS tpr
JOIN team AS t ON (t.id = tpr.team_id)
WHERE tpr.profile_id = ?
AND tpr.team_id = ?
AND t.deleted_at IS NULL")
from team_profile_rel as tpr
join team as t on (t.id = tpr.team_id)
where tpr.profile_id = ?
and tpr.team_id = ?
and t.deleted_at is null")
(defn get-permissions
[conn profile-id team-id]
@@ -443,18 +443,13 @@
[:team-id ::sm/uuid]])
(def sql:team-invitations
"SELECT email_to AS email,
role,
(valid_until < ?::timestamptz) AS expired
FROM team_invitation
WHERE team_id = ?
ORDER BY valid_until DESC, created_at DESC")
"select email_to as email, role, (valid_until < now()) as expired
from team_invitation where team_id = ? order by valid_until desc, created_at desc")
(defn get-team-invitations
[conn team-id]
(let [now (ct/now)]
(->> (db/exec! conn [sql:team-invitations now team-id])
(mapv #(update % :role keyword)))))
(->> (db/exec! conn [sql:team-invitations team-id])
(mapv #(update % :role keyword))))
(sv/defmethod ::get-team-invitations
{::doc/added "1.17"
@@ -508,7 +503,7 @@
(let [features (-> (cfeat/get-enabled-features cf/flags)
(set/difference cfeat/frontend-only-features)
(set/difference cfeat/no-team-inheritable-features))
(cfeat/check-client-features! (:features params)))
params (-> params
(assoc :profile-id profile-id)
(assoc :features features))
@@ -634,7 +629,7 @@
;; assign owner role to new profile
(db/update! conn :team-profile-rel
(get types.team/permissions-for-role :owner)
(get tt/permissions-for-role :owner)
{:team-id id :profile-id reassign-to}))
;; and finally, if all other conditions does not match and the
@@ -747,7 +742,7 @@
:team-id team-id
:role role})
(let [params (get types.team/permissions-for-role role)]
(let [params (get tt/permissions-for-role role)]
;; Only allow single owner on team
(when (= role :owner)
(db/update! conn :team-profile-rel
@@ -765,7 +760,7 @@
[:map {:title "update-team-member-role"}
[:team-id ::sm/uuid]
[:member-id ::sm/uuid]
[:role types.team/schema:role]])
[:role ::tt/role]])
(sv/defmethod ::update-team-member-role
{::doc/added "1.17"
@@ -815,7 +810,7 @@
(def ^:private schema:update-team-photo
[:map {:title "update-team-photo"}
[:team-id ::sm/uuid]
[:file media/schema:upload]])
[:file ::media/upload]])
(sv/defmethod ::update-team-photo
{::doc/added "1.17"
View File
@@ -43,7 +43,7 @@
(defn- create-invitation-token
[cfg {:keys [profile-id valid-until team-id member-id member-email role]}]
(tokens/generate cfg
(tokens/generate (::setup/props cfg)
{:iss :team-invitation
:exp valid-until
:profile-id profile-id
@@ -54,8 +54,12 @@
(defn- create-profile-identity-token
[cfg profile-id]
(assert (uuid? profile-id) "expected valid uuid for profile-id")
(tokens/generate cfg
(dm/assert!
"expected valid uuid for profile-id"
(uuid? profile-id))
(tokens/generate (::setup/props cfg)
{:iss :profile-identity
:profile-id profile-id
:exp (ct/in-future {:days 30})}))
@@ -71,7 +75,7 @@
[:map
[:id ::sm/uuid]
[:fullname :string]]]
[:role types.team/schema:role]
[:role ::types.team/role]
[:email ::sm/email]])
(def ^:private check-create-invitation-params
@@ -220,112 +224,62 @@
(def ^:private xf:map-email (map :email))
(defn- create-team-invitations
"Unified function to handle both create and resend team invitations.
Accepts either:
- emails (set) + role (single role for all emails)
- invitations (vector of {:email :role} maps)"
[{:keys [::db/conn] :as cfg} {:keys [profile team role emails invitations] :as params}]
(let [;; Normalize input to a consistent format: [{:email :role}]
invitation-data (cond
;; Case 1: emails + single role (create invitations style)
(and emails role)
(map (fn [email] {:email email :role role}) emails)
[{:keys [::db/conn] :as cfg} {:keys [profile team role emails] :as params}]
(let [emails (set emails)
;; Case 2: invitations with individual roles (resend invitations style)
(some? invitations)
invitations
join-requests (->> (get-valid-access-request-profiles conn (:id team))
(d/index-by :email))
:else
(throw (ex-info "Invalid parameters: must provide either emails+role or invitations" {})))
team-members (into #{} xf:map-email
(teams/get-team-members conn (:id team)))
invitation-emails (into #{} (map :email) invitation-data)
join-requests (->> (get-valid-access-request-profiles conn (:id team))
(d/index-by :email))
team-members (into #{} xf:map-email
(teams/get-team-members conn (:id team)))
invitations (into #{}
(comp
;; We don't re-send invitations to
;; already existing members
(remove #(contains? team-members (:email %)))
invitations (into #{}
(comp
                           ;; We don't re-send invitations to
;; already existing members
(remove team-members)
;; We don't send invitations to
;; join-requested members
(remove #(contains? join-requests (:email %)))
(map (fn [{:keys [email role]}]
(create-invitation cfg
(-> params
(assoc :email email)
(assoc :role role)))))
(remove nil?))
invitation-data)]
(remove join-requests)
(map (fn [email] (assoc params :email email)))
(keep (partial create-invitation cfg)))
emails)]
;; For requested invitations, do not send invitation emails, add
;; the user directly to the team
(->> join-requests
(filter #(contains? invitation-emails (key %)))
(map (fn [[email member]]
(let [role (:role (first (filter #(= (:email %) email) invitation-data)))]
(add-member-to-team conn profile team role member))))
(doall))
(filter #(contains? emails (key %)))
(map val)
(run! (partial add-member-to-team conn profile team role)))
invitations))
(def ^:private schema:create-team-invitations
[:and
[:map {:title "create-team-invitations"}
[:team-id ::sm/uuid]
;; Support both formats:
;; 1. emails (set) + role (single role for all)
;; 2. invitations (vector of {:email :role} maps)
[:emails {:optional true} [::sm/set ::sm/email]]
[:role {:optional true} types.team/schema:role]
[:invitations {:optional true} [:vector [:map
[:email ::sm/email]
[:role types.team/schema:role]]]]]
;; Ensure exactly one format is provided
[:fn (fn [params]
(let [has-emails-role (and (contains? params :emails)
(contains? params :role))
has-invitations (contains? params :invitations)]
(and (or has-emails-role has-invitations)
(not (and has-emails-role has-invitations)))))]])
[:map {:title "create-team-invitations"}
[:team-id ::sm/uuid]
[:role ::types.team/role]
[:emails [::sm/set ::sm/email]]])
(def ^:private max-invitations-by-request-threshold
"The number of invitations can be sent in a single rpc request"
25)
(sv/defmethod ::create-team-invitations
"A rpc call that allows to send single or multiple invitations to join the team.
Supports two parameter formats:
1. emails (set) + role (single role for all emails)
2. invitations (vector of {:email :role} maps for individual roles)"
"A rpc call that allow to send a single or multiple invitations to
join the team."
{::doc/added "1.17"
::doc/module :teams
::sm/params schema:create-team-invitations}
[cfg {:keys [::rpc/profile-id team-id role emails] :as params}]
[cfg {:keys [::rpc/profile-id team-id emails] :as params}]
(let [perms (teams/get-permissions cfg profile-id team-id)
profile (db/get-by-id cfg :profile profile-id)
;; Determine which format is being used
using-emails-format? (and emails role)
;; Handle both parameter formats
emails (if using-emails-format?
(into #{} (map profile/clean-email) emails)
#{})
;; Calculate total invitation count for both formats
invitation-count (if using-emails-format?
(count emails)
(count (:invitations params)))]
emails (into #{} (map profile/clean-email) emails)]
(when-not (:is-admin perms)
(ex/raise :type :validation
:code :insufficient-permissions))
(when (> invitation-count max-invitations-by-request-threshold)
(when (> (count emails) max-invitations-by-request-threshold)
(ex/raise :type :validation
:code :max-invitations-by-request
:hint "the maximum of invitation on single request is reached"
@@ -334,7 +288,7 @@
(-> cfg
(assoc ::quotes/profile-id profile-id)
(assoc ::quotes/team-id team-id)
(assoc ::quotes/incr invitation-count)
(assoc ::quotes/incr (count emails))
(quotes/check! {::quotes/id ::quotes/invitations-per-team}
{::quotes/id ::quotes/profiles-per-team}))
@@ -350,12 +304,7 @@
(-> params
(assoc :profile profile)
(assoc :team team)
;; Pass parameters in the correct format for the unified function
(cond-> using-emails-format?
;; If using emails+role format, ensure both are present
(assoc :emails emails :role role)
;; If using invitations format, the :invitations key is already in params
(not using-emails-format?) identity)))]
(assoc :emails emails)))]
(with-meta {:total (count invitations)
:invitations invitations}
@@ -369,7 +318,7 @@
[:features {:optional true} ::cfeat/features]
[:id {:optional true} ::sm/uuid]
[:emails [::sm/set ::sm/email]]
[:role types.team/schema:role]])
[:role ::types.team/role]])
(sv/defmethod ::create-team-with-invitations
{::doc/added "1.17"
@@ -454,7 +403,7 @@
[:map {:title "update-team-invitation-role"}
[:team-id ::sm/uuid]
[:email ::sm/email]
[:role types.team/schema:role]])
[:role ::types.team/role]])
(sv/defmethod ::update-team-invitation-role
{::doc/added "1.17"
@@ -518,7 +467,7 @@
(defn- check-existing-team-access-request
"Checks if an existing team access request is still valid"
[{:keys [::db/conn]} team-id profile-id]
[conn team-id profile-id]
(when-let [request (db/get* conn :team-access-request
{:team-id team-id
:requester-id profile-id})]
@@ -536,8 +485,8 @@
(defn- upsert-team-access-request
"Create or update team access request for provided team and profile-id"
[{:keys [::db/conn] :as cfg} team-id requester-id]
(check-existing-team-access-request cfg team-id requester-id)
[conn team-id requester-id]
(check-existing-team-access-request conn team-id requester-id)
(let [valid-until (ct/in-future {:hours 24})
auto-join-until (ct/in-future {:days 7})
request-id (uuid/next)]
@@ -599,7 +548,7 @@
(teams/check-email-bounce conn (:email team-owner) false)
(teams/check-email-spam conn (:email team-owner) true)
(let [request (upsert-team-access-request cfg team-id profile-id)
(let [request (upsert-team-access-request conn team-id profile-id)
factory (cond
(and (some? file) (:is-default team) is-viewer)
eml/request-file-access-yourpenpot-view
View File
@@ -38,7 +38,7 @@
::doc/module :auth
::sm/params schema:verify-token}
[cfg {:keys [token] :as params}]
(let [claims (tokens/verify cfg {:token token})]
(let [claims (tokens/verify (::setup/props cfg) {:token token})]
(db/tx-run! cfg process-token params claims)))
(defmethod process-token :change-email
@@ -128,7 +128,7 @@
[:iss :keyword]
[:exp ::ct/inst]
[:profile-id ::sm/uuid]
[:role types.team/schema:role]
[:role ::types.team/role]
[:team-id ::sm/uuid]
[:member-email ::sm/email]
[:member-id {:optional true} ::sm/uuid]])
View File
@@ -166,6 +166,9 @@
:servers [{:url (str/ffmt "%/api/rpc" (cf/get :public-uri))
;; :description "penpot backend"
}]
:security
{:api_key []}
:paths paths
:components {:schemas @definitions}}))
View File
@@ -11,7 +11,7 @@
[app.common.data.macros :as dm]
[app.http :as-alias http]
[app.rpc :as-alias rpc]
[yetti.response :as yres]))
[yetti.response :as-alias yres]))
;; A utility wrapper object used to wrap service responses that do not
;; implement the IObj interface, making it possible to attach metadata to
@@ -78,8 +78,3 @@
(let [exp (if (integer? max-age) max-age (inst-ms max-age))
val (dm/fmt "max-age=%" (int (/ exp 1000.0)))]
(update response ::yres/headers assoc "cache-control" val)))))
(defn stream
"A convenience allias for yetti.response/stream-body"
[f]
(yres/stream-body f))
View File
@@ -10,14 +10,15 @@
[app.common.exceptions :as ex]
[app.common.schema :as sm]))
(def schema:permissions
[:map {:title "Permissions"}
[:type {:gen/elements [:membership :share-link]} :keyword]
[:is-owner ::sm/boolean]
[:is-admin ::sm/boolean]
[:can-edit ::sm/boolean]
[:can-read ::sm/boolean]
[:is-logged ::sm/boolean]])
(sm/register!
^{::sm/type ::permissions}
[:map {:title "Permissions"}
[:type {:gen/elements [:membership :share-link]} :keyword]
[:is-owner ::sm/boolean]
[:is-admin ::sm/boolean]
[:can-edit ::sm/boolean]
[:can-read ::sm/boolean]
[:is-logged ::sm/boolean]])
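With the schema registered under `::permissions`, it can be referenced by keyword wherever a schema is expected. An illustrative check, assuming `sm/valid?` resolves registered keyword schemas as it does elsewhere in this diff:

(sm/valid? ::permissions
           {:type      :membership
            :is-owner  false
            :is-admin  true
            :can-edit  true
            :can-read  true
            :is-logged true})
;; => true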
(def valid-roles
#{:admin :owner :editor :viewer})
View File
@@ -102,7 +102,8 @@
::wrk/label "quotes-notification"
::wrk/params {:to (vec admins)
:subject subject
:body content}}))))
:body [{:type "text/plain"
:content content}]}}))))
(defn- generic-check!
[{:keys [::db/conn ::incr ::quote-sql ::count-sql ::default ::target] :or {incr 1} :as params}]
View File
@@ -66,6 +66,13 @@
[integrant.core :as ig]
[promesa.exec :as px]))
(def ^:private default-timeout
(ct/duration 400))
(def ^:private default-options
{:codec rds/string-codec
:timeout default-timeout})
(def ^:private bucket-rate-limit-script
{::rscript/name ::bucket-rate-limit
::rscript/path "app/rpc/rlimit/bucket.lua"})
@@ -170,11 +177,11 @@
:hint (str/ffmt "looks like '%' does not have a valid format" opts))))
(defmethod process-limit :bucket
[rconn user-id now {:keys [::key ::params ::service ::capacity ::interval ::rate] :as limit}]
[redis user-id now {:keys [::key ::params ::service ::capacity ::interval ::rate] :as limit}]
(let [script (-> bucket-rate-limit-script
(assoc ::rscript/keys [(str key "." service "." user-id)])
(assoc ::rscript/vals (conj params (->seconds now))))
result (rds/eval rconn script)
result (rds/eval redis script)
allowed? (boolean (nth result 0))
remaining (nth result 1)
reset (* (/ (inst-ms interval) rate)
@@ -192,13 +199,13 @@
(assoc ::lresult/remaining remaining))))
(defmethod process-limit :window
[rconn user-id now {:keys [::nreq ::unit ::key ::service] :as limit}]
[redis user-id now {:keys [::nreq ::unit ::key ::service] :as limit}]
(let [ts (ct/truncate now unit)
ttl (ct/diff now (ct/plus ts {unit 1}))
script (-> window-rate-limit-script
(assoc ::rscript/keys [(str key "." service "." user-id "." (ct/format-inst ts))])
(assoc ::rscript/vals [nreq (->seconds ttl)]))
result (rds/eval rconn script)
result (rds/eval redis script)
allowed? (boolean (nth result 0))
remaining (nth result 1)]
(l/trace :hint "limit processed"
@@ -213,9 +220,9 @@
(assoc ::lresult/remaining remaining)
(assoc ::lresult/reset (ct/plus ts {unit 1})))))
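The window key embeds the truncated timestamp, so every request inside the same unit shares one counter that expires when the window rolls over. A worked sketch of the timestamp arithmetic above, assuming `:hours` is a valid value for `unit`:

(let [now (ct/now)
      ts  (ct/truncate now :hours)               ;; start of the current window
      ttl (ct/diff now (ct/plus ts {:hours 1}))] ;; counter lifetime until rollover
  [ts ttl])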
(defn- process-limits
[rconn user-id limits now]
(let [results (into [] (map (partial process-limit rconn user-id now)) limits)
(defn- process-limits!
[redis user-id limits now]
(let [results (into [] (map (partial process-limit redis user-id now)) limits)
remaining (->> results
(d/index-by ::name ::lresult/remaining)
(uri/map->query-string))
@@ -252,25 +259,34 @@
(some-> request inet/parse-request)
uuid/zero)))
(defn- process-request'
[{:keys [::rds/conn] :as cfg} limits params]
(try
(let [uid (get-uid params)
result (process-limits conn uid limits (ct/now))]
(if (contains? cf/flags :soft-rpc-rlimit)
{::enabled false}
result))
(catch Throwable cause
(l/error :hint "error on processing rate-limit" :cause cause)
{::enabled false})))
(defn- process-request
[{:keys [::rpc/rlimit ::skey ::sname] :as cfg} params]
(defn process-request!
[{:keys [::rpc/rlimit ::rds/redis ::skey ::sname] :as cfg} params]
(when-let [limits (get-limits rlimit skey sname)]
(rds/run! cfg process-request' limits params)))
(let [redis (rds/get-or-connect redis ::rpc/rlimit default-options)
uid (get-uid params)
          ;; FIXME: why not classic try/catch?
result (ex/try! (process-limits! redis uid limits (ct/now)))]
(l/trc :hint "process-limits"
:service sname
           :remaining (::remaining result)
:reset (::reset result))
(cond
(ex/exception? result)
(do
(l/error :hint "error on processing rate-limit" :cause result)
{::enabled false})
(contains? cf/flags :soft-rpc-rlimit)
{::enabled false}
:else
result))))
(defn wrap
[{:keys [::rpc/rlimit] :as cfg} f mdata]
[{:keys [::rpc/rlimit ::rds/redis] :as cfg} f mdata]
(assert (rds/redis? redis) "expected a valid redis instance")
(assert (or (nil? rlimit) (valid-rlimit-instance? rlimit)) "expected a valid rlimit instance")
(if rlimit
@@ -282,7 +298,7 @@
(fn [hcfg params]
(if @enabled
(let [result (process-request cfg params)]
(let [result (process-request! cfg params)]
(if (::enabled result)
(if (::allowed result)
(-> (f hcfg params)
@@ -383,7 +399,7 @@
(when-let [path (cf/get :rpc-rlimit-config)]
(and (fs/exists? path) (fs/regular-file? path) path)))
(defmethod ig/assert-key ::rpc/rlimit
(defmethod ig/assert-key :app.rpc/rlimit
[_ {:keys [::wrk/executor]}]
(assert (sm/valid? ::wrk/executor executor) "expect valid executor"))
View File
@@ -1,56 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.setup.clock
"A service/module that manages the system clock and allows runtime
modification of time offset (useful for testing and time adjustments)."
(:require
[app.common.logging :as l]
[app.common.time :as ct]
[app.setup :as-alias setup]
[integrant.core :as ig])
(:import
java.time.Clock
java.time.Duration
java.time.Instant
java.time.ZoneId))
(defonce current
(atom {:clock (Clock/systemDefaultZone)
:offset nil}))
(defmethod ig/init-key ::setup/clock
[_ _]
(add-watch current ::common
(fn [_ _ _ {:keys [clock offset]}]
(let [clock (if (ct/duration? offset)
(Clock/offset ^Clock clock
^Duration offset)
clock)]
(l/wrn :hint "altering clock" :clock (str clock))
(alter-var-root #'ct/*clock* (constantly clock))))))
(defmethod ig/halt-key! ::setup/clock
[_ _]
(remove-watch current ::common))
(defn fixed
"Get fixed clock, mainly used in tests"
[instant]
(Clock/fixed ^Instant (ct/inst instant)
^ZoneId (ZoneId/of "Z")))
(defn set-offset!
[duration]
(swap! current assoc :offset (some-> duration ct/duration)))
(defn set-clock!
([]
(swap! current assoc :clock (Clock/systemDefaultZone)))
([clock]
(when (instance? Clock clock)
(swap! current assoc :clock clock))))
View File
@@ -7,7 +7,7 @@
(ns app.srepl.cli
"PREPL API for external usage (CLI or ADMIN)"
(:require
[app.auth :refer [derive-password]]
[app.auth :as auth]
[app.common.exceptions :as ex]
[app.common.schema :as sm]
[app.common.schema.generators :as sg]
@@ -54,7 +54,7 @@
(some-> (get-current-system)
(db/tx-run!
(fn [{:keys [::db/conn] :as system}]
(let [password (derive-password password)
(let [password (cmd.profile/derive-password system password)
params {:id (uuid/next)
:email email
:fullname fullname
@@ -74,7 +74,7 @@
(assoc :fullname fullname)
(some? password)
(assoc :password (derive-password password))
(assoc :password (auth/derive-password password))
(some? is-active)
(assoc :is-active is-active))]
@@ -124,12 +124,13 @@
(defmethod exec-command "derive-password"
[{:keys [password]}]
(derive-password password))
(auth/derive-password password))
(defmethod exec-command "authenticate"
[{:keys [token]}]
(when-let [system (get-current-system)]
(tokens/verify system {:token token :iss "authentication"})))
(let [props (get system ::setup/props)]
(tokens/verify props {:token token :iss "authentication"}))))
(def ^:private schema:get-customer
[:map [:id ::sm/uuid]])
View File
@@ -0,0 +1,278 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.srepl.fixes
"A misc of fix functions"
(:refer-clojure :exclude [parse-uuid])
(:require
[app.binfile.common :as bfc]
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.files.changes :as cpc]
[app.common.files.helpers :as cfh]
[app.common.files.repair :as cfr]
[app.common.files.validate :as cfv]
[app.common.logging :as l]
[app.common.types.component :as ctk]
[app.common.types.container :as ctn]
[app.common.types.file :as ctf]
[app.common.uuid :as uuid]
[app.db :as db]
[app.features.fdata :as feat.fdata]
[app.srepl.helpers :as h]))
(defn disable-fdata-features
[{:keys [id features] :as file} _]
(when (or (contains? features "fdata/pointer-map")
(contains? features "fdata/objects-map"))
(l/warn :hint "disable fdata features" :file-id (str id))
(-> file
(update :data feat.fdata/process-pointers deref)
(update :data feat.fdata/process-objects (partial into {}))
(update :features disj "fdata/pointer-map" "fdata/objects-map"))))
(def sql:get-fdata-files
"SELECT id FROM file
WHERE deleted_at is NULL
AND (features @> '{fdata/pointer-map}' OR
features @> '{fdata/objects-map}')
ORDER BY created_at DESC")
(defn find-fdata-pointers
[{:keys [id features data] :as file} _]
(when (contains? features "fdata/pointer-map")
(let [pointers (feat.fdata/get-used-pointer-ids data)]
(l/warn :hint "found pointers" :file-id (str id) :pointers pointers)
nil)))
(defn repair-file-media
"A helper intended to be used with `srepl.main/process-files!` that
  fixes all not properly referenced file-media-object rows for a file"
[{:keys [id data] :as file} & _]
(let [conn (db/get-connection h/*system*)
used (cfh/collect-used-media data)
ids (db/create-array conn "uuid" used)
sql "SELECT * FROM file_media_object WHERE id = ANY(?)"
rows (db/exec! conn [sql ids])
index (reduce (fn [index media]
(if (not= (:file-id media) id)
(let [media-id (uuid/next)]
(l/wrn :hint "found not referenced media"
:file-id (str id)
:media-id (str (:id media)))
(db/insert! conn :file-media-object
(-> media
(assoc :file-id id)
(assoc :id media-id)))
(assoc index (:id media) media-id))
index))
{}
rows)]
(when (seq index)
(binding [bfc/*state* (atom {:index index})]
(update file :data (fn [fdata]
(-> fdata
(update :pages-index #'bfc/relink-shapes)
(update :components #'bfc/relink-shapes)
(update :media #'bfc/relink-media)
(d/without-nils))))))))
(defn repair-file
"Internal helper for validate and repair the file. The operation is
applied multiple times untile file is fixed or max iteration counter
is reached (default 10)"
[file libs & {:keys [max-iterations] :or {max-iterations 10}}]
(let [validate-and-repair
(fn [file libs iteration]
(when-let [errors (not-empty (cfv/validate-file file libs))]
(l/trc :hint "repairing file"
:file-id (str (:id file))
:iteration iteration
:errors (count errors))
(let [changes (cfr/repair-file file libs errors)]
(-> file
(update :revn inc)
(update :data cpc/process-changes changes)))))
process-file
(fn [file libs]
(loop [file file
iteration 0]
(if (< iteration max-iterations)
(if-let [file (validate-and-repair file libs iteration)]
(recur file (inc iteration))
file)
(do
(l/wrn :hint "max retry num reached on repairing file"
:file-id (str (:id file))
:iteration iteration)
file))))
file'
(process-file file libs)]
(when (not= (:revn file) (:revn file'))
(l/trc :hint "file repaired" :file-id (str (:id file))))
file'))
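A hedged invocation sketch; `file` and `libs` are assumed to be already loaded and decoded (for instance via the binfile helpers this namespace imports):

(let [file' (repair-file file libs :max-iterations 5)]
  ;; :revn is bumped once per repair pass, so equality means
  ;; the file was already valid
  (when (not= (:revn file) (:revn file'))
    (l/inf :hint "file was repaired" :file-id (str (:id file)))))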
(defn fix-touched-shapes-group
[file _]
;; Remove :shapes-group from the touched elements
(letfn [(fix-fdata [data]
(-> data
(update :pages-index update-vals fix-container)))
(fix-container [container]
(d/update-when container :objects update-vals fix-shape))
(fix-shape [shape]
(d/update-when shape :touched
(fn [touched]
(disj touched :shapes-group))))]
file (-> file
(update :data fix-fdata))))
(defn add-swap-slots
[file libs _opts]
;; Detect swapped copies and try to generate a valid swap-slot.
(letfn [(process-fdata [data]
;; Walk through all containers in the file, both pages and deleted components.
(reduce process-container data (ctf/object-containers-seq data)))
(process-container [data container]
;; Walk through all shapes in depth-first tree order.
(l/dbg :hint "Processing container" :type (:type container) :name (:name container))
(let [root-shape (ctn/get-container-root container)]
(ctf/update-container data
container
#(reduce process-shape % (ctn/get-direct-children container root-shape)))))
(process-shape [container shape]
;; Look for head copies in the first level (either component roots or inside main components).
;; Even if they have been swapped, we don't add slot to them because there is no way to know
;; the original shape. Only children.
(if (and (ctk/instance-head? shape)
(ctk/in-component-copy? shape)
(nil? (ctk/get-swap-slot shape)))
(process-copy-head container shape)
(reduce process-shape container (ctn/get-direct-children container shape))))
(process-copy-head [container head-shape]
;; Process recursively all children, comparing each one with the corresponding child in the main
;; component, looking by position. If the shape-ref does not point to the found child, then it has
;; been swapped and need to set up a slot.
(l/trc :hint "Processing copy-head" :id (:id head-shape) :name (:name head-shape))
(let [component-shape (ctf/find-ref-shape file container libs head-shape :include-deleted? true :with-context? true)
component-container (:container (meta component-shape))]
(loop [container container
children (map #(ctn/get-shape container %) (:shapes head-shape))
component-children (map #(ctn/get-shape component-container %) (:shapes component-shape))]
(let [child (first children)
component-child (first component-children)]
(if (or (nil? child) (nil? component-child))
container
(let [container (if (and (not (ctk/is-main-of? component-child child))
(nil? (ctk/get-swap-slot child))
(ctk/instance-head? child))
(let [slot (guess-swap-slot component-child component-container)]
(l/dbg :hint "child" :id (:id child) :name (:name child) :slot slot)
(ctn/update-shape container (:id child) #(ctk/set-swap-slot % slot)))
container)]
(recur (process-copy-head container child)
(rest children)
(rest component-children))))))))
(guess-swap-slot [shape container]
;; To guess the slot, we must follow the chain until we find the definitive main. But
;; we cannot navigate by shape-ref, because main shapes may also have been swapped. So
;; chain by position, too.
(if-let [slot (ctk/get-swap-slot shape)]
slot
(if-not (ctk/in-component-copy? shape)
(:id shape)
(let [head-copy (ctn/get-component-shape (:objects container) shape)]
(if (= (:id head-copy) (:id shape))
(:id shape)
(let [head-main (ctf/find-ref-shape file
container
libs
head-copy
:include-deleted? true
:with-context? true)
container-main (:container (meta head-main))
shape-main (find-match-by-position shape
head-copy
container
head-main
container-main)]
(guess-swap-slot shape-main container-main)))))))
(find-match-by-position [shape-copy head-copy container-copy head-main container-main]
;; Find the shape in the main that has the same position under its parent than
;; the copy under its one. To get the parent we must process recursively until
;; the component head, because mains may also have been swapped.
(let [parent-copy (ctn/get-shape container-copy (:parent-id shape-copy))
parent-main (if (= (:id parent-copy) (:id head-copy))
head-main
(find-match-by-position parent-copy
head-copy
container-copy
head-main
container-main))
index (cfh/get-position-on-parent (:objects container-copy)
(:id shape-copy))
shape-main-id (dm/get-in parent-main [:shapes index])]
(ctn/get-shape container-main shape-main-id)))]
file (-> file
(update :data process-fdata))))
(defn fix-find-duplicated-slots
[file _]
;; Find the shapes whose children have duplicated slots
(let [check-duplicate-swap-slot
(fn [shape page]
(let [shapes (map #(get (:objects page) %) (:shapes shape))
slots (->> (map #(ctk/get-swap-slot %) shapes)
(remove nil?))
counts (frequencies slots)]
#_(when (some (fn [[_ count]] (> count 1)) counts)
(l/trc :info "This shape has children with the same swap slot" :id (:id shape) :file-id (str (:id file))))
(some (fn [[_ count]] (> count 1)) counts)))
count-slots-shape
(fn [page shape]
(if (ctk/instance-root? shape)
(check-duplicate-swap-slot shape page)
false))
count-slots-page
(fn [page]
(->> (:objects page)
(vals)
(mapv #(count-slots-shape page %))
(filter true?)
count))
count-slots-data
(fn [data]
(->> (:pages-index data)
(vals)
(mapv count-slots-page)
(reduce +)))
num-missing-slots (count-slots-data (:data file))]
(when (pos? num-missing-slots)
(l/trc :info (str "Shapes with children with the same swap slot: " num-missing-slots) :file-id (str (:id file))))
file))
View File
@@ -0,0 +1,88 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.srepl.fixes.lost-colors
"A collection of adhoc fixes scripts."
(:require
[app.binfile.common :as bfc]
[app.common.logging :as l]
[app.common.types.color :as types.color]
[app.db :as db]
[app.srepl.helpers :as h]))
(def sql:get-affected-files
"SELECT fm.file_id AS id FROM file_migration AS fm WHERE fm.name = '0008-fix-library-colors-v2'")
(def sql:get-matching-snapshot
"SELECT * FROM file_change
WHERE file_id = ?
AND created_at <= ?
AND label IS NOT NULL
AND data IS NOT NULL
ORDER BY created_at DESC
LIMIT 2")
(defn get-affected-migration
[conn file-id]
(db/get* conn :file-migration
{:name "0008-fix-library-colors-v2"
:file-id file-id}))
(defn get-last-valid-snapshot
[conn migration]
(let [[snapshot] (db/exec! conn [sql:get-matching-snapshot
(:file-id migration)
(:created-at migration)])]
(when snapshot
(let [snapshot (assoc snapshot :id (:file-id snapshot))]
(bfc/decode-file h/*system* snapshot)))))
(defn restore-color
[{:keys [data] :as snapshot} color]
(when-let [scolor (get-in data [:colors (:id color)])]
(-> (select-keys scolor types.color/library-color-attrs)
(types.color/check-library-color))))
(defn restore-missing-colors
[{:keys [id] :as file} & _opts]
(l/inf :hint "process file" :file-id (str id) :name (:name file) :has-colors (-> file :data :colors not-empty boolean))
(if-let [colors (-> file :data :colors not-empty)]
(let [migration (get-affected-migration h/*system* id)]
(if-let [snapshot (get-last-valid-snapshot h/*system* migration)]
(do
(l/inf :hint "using snapshot" :snapshot (:label snapshot))
(let [colors (reduce-kv (fn [colors color-id color]
(if-let [result (restore-color snapshot color)]
(do
(l/inf :hint "restored color" :file-id (str id) :color-id (str color-id))
(assoc colors color-id result))
(do
(l/wrn :hint "ignoring color" :file-id (str id) :color (pr-str color))
colors)))
colors
colors)
file (-> file
(update :data assoc :colors colors)
(update :migrations disj "0008-fix-library-colors-v2"))]
(db/delete! h/*system* :file-migration
{:name "0008-fix-library-colors-v2"
:file-id (:id file)})
file))
(do
(db/delete! h/*system* :file-migration
{:name "0008-fix-library-colors-v2"
:file-id (:id file)})
nil)))
(do
(db/delete! h/*system* :file-migration
{:name "0008-fix-library-colors-v2"
:file-id (:id file)})
nil)))
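This fix follows the one-file-at-a-time shape expected by the srepl processing helpers. A hedged sketch of running it, assuming the `process-files!` helper from `app.srepl.main` that appears later in this diff:

;; Dry run by default (rollback? true); pass false to persist the changes.
(process-files! restore-missing-colors :rollback? true :max-jobs 2)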
View File
@@ -4,11 +4,10 @@
;;
;; Copyright (c) KALEIDOS INC
(ns app.srepl.procs.media-refs
(ns app.srepl.fixes.media-refs
(:require
[app.binfile.common :as bfc]
[app.common.files.helpers :as cfh]
[app.common.logging :as l]
[app.srepl.helpers :as h]))
(defn- collect-media-refs
@@ -38,22 +37,7 @@
(let [media-refs (collect-media-refs (:data file))]
(bfc/update-media-references! cfg file media-refs)))
(def ^:private sql:get-files
"SELECT f.id
FROM file AS f
LEFT JOIN file_migration AS fm ON (fm.file_id = f.id AND fm.name = 'internal/procs/media-refs')
WHERE fm.name IS NULL
ORDER BY f.project_id")
(defn fix-media-refs
{:query sql:get-files}
[cfg {:keys [id]} & {:as options}]
(l/inf :hint "processing file" :id (str id))
(h/process-file! cfg id
(fn [file _opts]
(update-all-media-references cfg file))
(assoc options
::bfc/reset-migrations? true
::h/validate? false))
(h/mark-migrated! cfg id "internal/procs/media-refs"))
(defn process-file
[file _opts]
(let [system (h/get-current-system)]
(update-all-media-references system file)))
View File
@@ -142,19 +142,8 @@
0
snap)))
(defn mark-migrated!
"A helper that inserts an entry in the file migration table for make
file migrated for the specified migration label."
[system file-id label]
(db/insert! system :file-migration
{:file-id file-id
:name label}
{::db/return-keys false}))
(defn process-file!
[system file-id update-fn
& {:keys [::snapshot-label ::validate? ::with-libraries?]
:or {validate? true} :as opts}]
[system file-id update-fn & {:keys [label validate? with-libraries?] :or {validate? true} :as opts}]
(let [file (bfc/get-file system file-id
:lock-for-update? true
:realize? true)
@@ -174,12 +163,12 @@
(when validate?
(cfv/validate-file-schema! file'))
(when (string? snapshot-label)
(when (string? label)
(fsnap/create! system file
{:label snapshot-label
{:label label
:deleted-at (ct/in-future {:days 30})
:created-by "admin"}))
(let [file' (update file' :revn inc)]
(bfc/update-file! system file' opts)
(bfc/update-file! system file')
true))))
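A usage sketch of the simplified helper as invoked from `app.srepl.main`; the snapshot is only taken when a string `label` is supplied:

(h/process-file! system file-id
                 (fn [file _opts]
                   ;; hypothetical no-op transform, for illustration only
                   file)
                 :label "before-fix"
                 :validate? true)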
View File
@@ -5,6 +5,7 @@
;; Copyright (c) KALEIDOS INC
(ns app.srepl.main
"A collection of adhoc fixes scripts."
#_:clj-kondo/ignore
(:require
[app.auth :refer [derive-password]]
@@ -15,7 +16,7 @@
[app.common.features :as cfeat]
[app.common.files.validate :as cfv]
[app.common.logging :as l]
[app.common.pprint :as pp]
[app.common.pprint :as p]
[app.common.schema :as sm]
[app.common.spec :as us]
[app.common.time :as ct]
@@ -34,12 +35,11 @@
[app.rpc.commands.profile :as profile]
[app.rpc.commands.projects :as projects]
[app.rpc.commands.teams :as teams]
[app.srepl.fixes :as fixes]
[app.srepl.helpers :as h]
[app.srepl.procs.file-repair :as procs.file-repair]
[app.util.blob :as blob]
[app.util.pointer-map :as pmap]
[app.worker :as wrk]
[clojure.datafy :refer [datafy]]
[clojure.java.io :as io]
[clojure.pprint :refer [print-table]]
[clojure.stacktrace :as strace]
@@ -47,7 +47,6 @@
[cuerdas.core :as str]
[datoteka.fs :as fs]
[promesa.exec :as px]
[promesa.exec.csp :as sp]
[promesa.exec.semaphore :as ps]
[promesa.util :as pu]))
@@ -58,7 +57,7 @@
(defn print-tasks
[]
(let [tasks (:app.worker/registry main/system)]
(pp/pprint (keys tasks) :level 200)))
(p/pprint (keys tasks) :level 200)))
(defn run-task!
([tname]
@@ -130,23 +129,42 @@
(defn reset-password!
"Reset a password to a specific one for a concrete user or all users
if email is `:all` keyword."
[& {:keys [email password]}]
(assert (string? email) "expected email")
(assert (string? password) "expected password")
[& {:keys [email password] :or {password "123123"} :as params}]
(when-not email
(throw (IllegalArgumentException. "email is mandatory")))
(some-> main/system
(db/tx-run!
(fn [{:keys [::db/conn] :as system}]
(let [password (derive-password password)
email (str/lower email)]
(-> (db/exec-one! conn ["update profile set password=? where email=?" password email])
(db/get-update-count)
(pos?)))))))
(let [password (derive-password password)]
(if (= email :all)
(db/exec! conn ["update profile set password=?" password])
(let [email (str/lower email)]
(db/exec! conn ["update profile set password=? where email=?" password email]))))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; FEATURES
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(declare process-file!)
(defn enable-objects-map-feature-on-file!
[file-id & {:as opts}]
(process-file! file-id fdata/enable-objects-map opts))
(defn enable-pointer-map-feature-on-file!
[file-id & {:as opts}]
(process-file! file-id fdata/enable-pointer-map opts))
(defn enable-path-data-feature-on-file!
[file-id & {:as opts}]
(process-file! file-id fdata/enable-path-data opts))
(defn enable-storage-features-on-file!
[file-id & {:as opts}]
(enable-objects-map-feature-on-file! file-id opts)
(enable-pointer-map-feature-on-file! file-id opts))
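A usage sketch for the feature togglers, assuming `file-id` is a uuid (or a string parsed with `h/parse-uuid`) and that the option map is forwarded to `process-file!`:

;; Enables both storage-related fdata features; each one runs as
;; its own process-file! invocation.
(enable-storage-features-on-file! file-id :rollback? false)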
(defn enable-team-feature!
[team-id feature & {:keys [skip-check] :or {skip-check false}}]
(when (and (not skip-check) (not (contains? cfeat/supported-features feature)))
@@ -397,19 +415,24 @@
(println (sm/humanize-explain explain))
(ex/print-throwable cause))))))))
(defn repair-file!
"Repair the list of errors detected by validation."
[file-id & {:keys [rollback?] :or {rollback? true} :as opts}]
(let [system (assoc main/system ::db/rollback rollback?)
file-id (h/parse-uuid file-id)
opts (assoc opts :with-libraries? true)]
(db/tx-run! system h/process-file! file-id fixes/repair-file opts)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; PROCESSING
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn repair-file!
"Repair the list of errors detected by validation."
[file-id & {:keys [rollback?] :or {rollback? true} :as options}]
(let [system (assoc main/system ::db/rollback rollback?)
file-id (h/parse-uuid file-id)
options (assoc options ::h/with-libraries? true)]
(db/tx-run! system h/process-file! file-id procs.file-repair/repair-file options)))
(def sql:get-files
"SELECT id FROM file
WHERE deleted_at is NULL
ORDER BY created_at DESC")
(defn update-file!
(defn process-file!
"Apply a function to the file. Optionally save the changes or not.
The function receives the decoded and migrated file data."
[file-id update-fn & {:keys [rollback?] :or {rollback? true} :as opts}]
@@ -420,127 +443,175 @@
db/*conn* (db/get-connection system)]
(h/process-file! system file-id update-fn opts))))))
(defn process!
[& {:keys [max-items
max-jobs
rollback?
query
proc-fn
buffer]
:or {max-items Long/MAX_VALUE
rollback? true
max-jobs 1
buffer 128}
:as opts}]
(defn process-team-files!
"Apply a function to each file of the specified team."
[team-id update-fn & {:keys [rollback? label] :or {rollback? true} :as opts}]
(let [team-id (h/parse-uuid team-id)
opts (dissoc opts :label)]
(db/tx-run! (assoc main/system ::db/rollback rollback?)
(fn [{:keys [::db/conn] :as system}]
(when (string? label)
(h/take-team-snapshot! system team-id label))
(l/inf :hint "process start"
(binding [h/*system* system
db/*conn* (db/get-connection system)]
(->> (h/get-and-lock-team-files conn team-id)
(reduce (fn [result file-id]
(if (h/process-file! system file-id update-fn opts)
(inc result)
result))
0)))))))
(defn process-files!
"Apply a function to all files in the database"
[update-fn & {:keys [max-items
max-jobs
rollback?
query]
:or {max-jobs 1
max-items Long/MAX_VALUE
rollback? true
query sql:get-files}
:as opts}]
(l/dbg :hint "process:start"
:rollback rollback?
:max-jobs max-jobs
:max-items max-items)
(let [tpoint (ct/tpoint)
max-jobs (or max-jobs (px/get-available-processors))
query (or query
(:query (meta proc-fn))
(throw (ex-info "missing query" {})))
query (if (vector? query) query [query])
factory (px/thread-factory :virtual false :prefix "penpot/file-process/")
executor (px/cached-executor :factory factory)
sjobs (ps/create :permits max-jobs)
proc-fn (if (var? proc-fn)
(deref proc-fn)
proc-fn)
process-file
(fn [file-id idx tpoint]
(let [thread-id (px/get-thread-id)]
(try
(l/trc :hint "process:file:start"
:tid thread-id
:file-id (str file-id)
:index idx)
(let [system (assoc main/system ::db/rollback rollback?)]
(db/tx-run! system (fn [system]
(binding [h/*system* system
db/*conn* (db/get-connection system)]
(h/process-file! system file-id update-fn opts)))))
in-ch (sp/chan :buf buffer)
(catch Throwable cause
(l/wrn :hint "unexpected error on processing file (skiping)"
:tid thread-id
:file-id (str file-id)
:index idx
:cause cause))
(finally
(when-let [pause (:pause opts)]
(Thread/sleep (int pause)))
worker-fn
(fn [worker-id]
(l/dbg :hint "worker started"
:id worker-id)
(ps/release! sjobs)
(let [elapsed (ct/format-duration (tpoint))]
(l/trc :hint "process:file:end"
:tid thread-id
:file-id (str file-id)
:index idx
:elapsed elapsed))))))
(loop []
(when-let [[index item] (sp/<! in-ch)]
(l/dbg :hint "process item" :worker-id worker-id :index index :item item)
(try
(-> main/system
(assoc ::db/rollback rollback?)
(db/tx-run! (fn [system]
(binding [h/*system* system
db/*conn* (db/get-connection system)]
(proc-fn system item opts)))))
process-file*
(fn [idx file-id]
(ps/acquire! sjobs)
(px/run! executor (partial process-file file-id idx (ct/tpoint)))
(inc idx))
(catch Throwable cause
(l/wrn :hint "unexpected error on processing item (skiping)"
:worker-id worker-id
:item item
:cause cause))
(finally
(when-let [pause (:pause opts)]
(Thread/sleep (int pause)))))
(recur)))
(l/dbg :hint "worker stoped"
:id worker-id))
enqueue-item
(fn [index row]
(sp/>! in-ch [index (into {} row)])
(inc index))
process-items
process-files
(fn [{:keys [::db/conn] :as system}]
(db/exec! conn ["SET statement_timeout = 0"])
(db/exec! conn ["SET idle_in_transaction_session_timeout = 0"])
(->> (db/plan conn query {:fetch-size (* max-jobs 3)})
(transduce (take max-items)
(completing enqueue-item)
0))
(sp/close! in-ch))
threads
(->> (range max-jobs)
(map (fn [idx]
(px/fn->thread (partial worker-fn idx)
:name (str "pentpot/process/" idx))))
(doall))]
(try
(->> (db/plan conn [query])
(transduce (comp
(take max-items)
(map :id))
(completing process-file*)
0))
(finally
;; Close and await tasks
(pu/close! executor))))]
(try
(db/tx-run! main/system process-items)
;; Await threads termination
(doseq [thread threads]
(px/await! thread))
(db/tx-run! main/system process-files)
(catch Throwable cause
(l/dbg :hint "process:error" :cause cause))
(finally
(let [elapsed (ct/format-duration (tpoint))]
(l/inf :hint "process end"
(l/dbg :hint "process:end"
:rollback rollback?
:elapsed elapsed))))))
(defn process!
"Apply a function to all files in the database"
[& {:keys [max-jobs
rollback?
max-items
chunk-size
proc-fn]
:or {max-items Long/MAX_VALUE
chunk-size 100
rollback? true}
:as opts}]
(let [tpoint (ct/tpoint)
max-jobs (or max-jobs (px/get-available-processors))
processed (atom 0)
opts (-> opts
(assoc :chunk-size chunk-size)
(dissoc :rollback?)
(dissoc :proc-fn)
(dissoc :max-jobs)
(dissoc :max-items))
start-job
(fn [jid]
(l/dbg :hint "start job thread" :jid jid)
(px/sleep 1000)
(loop []
(let [result (-> main/system
(assoc ::db/rollback rollback?)
(proc-fn opts))]
(let [total (swap! processed + result)]
(l/dbg :hint "chunk processed" :jid jid :total total :chunk result ::l/sync? true)
(when (and (pos? result)
(< total max-items))
(recur))))))]
(l/dbg :hint "process:start"
:rollback rollback?
:max-jobs max-jobs
:max-items max-items)
(try
(let [jobs (->> (range max-jobs)
(map (fn [jid] (px/fn->thread (partial start-job jid))))
(doall))]
(doseq [job jobs]
(.join ^java.lang.Thread job)))
(catch Throwable cause
(l/dbg :hint "process:error" :cause cause))
(finally
(let [elapsed (ct/format-duration (tpoint))]
(l/dbg :hint "process:end"
:processed @processed
:rollback rollback?
:elapsed elapsed))))))
(defn process-file!
"A specialized, file specific process! alternative"
[& {:keys [id] :as opts}]
(let [id (h/parse-uuid id)]
(-> opts
(assoc :query ["select id from file where id = ?" id])
(assoc :max-items 1)
(assoc :max-jobs 1)
(process!))))
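A hedged sketch of the single-file entry point; `my-proc` is a hypothetical processing function with the `(fn [system item opts] ...)` shape that `process!` expects:

(process-file! :id "3f1c6a2e-0000-0000-0000-000000000000" ;; illustrative uuid
               :proc-fn my-proc
               :rollback? true)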
(defn mark-file-as-trimmed
[id]
(let [id (h/parse-uuid id)]
(db/tx-run! main/system (fn [cfg]
(-> (db/update! cfg :file
{:has-media-trimmed true}
{:id id}
{::db/return-keys false})
(db/get-update-count)
(pos?))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; DELETE/RESTORE OBJECTS (WITH CASCADE, SOFT)
@@ -567,12 +638,39 @@
:id file-id})))
:deleted))
(defn- restore-file*
[{:keys [::db/conn]} file-id]
(db/update! conn :file
{:deleted-at nil
:has-media-trimmed false}
{:id file-id})
;; Fragments are not handled here because they
;; use the database cascade operation and they
;; are not marked for deletion with objects-gc
;; task
(db/update! conn :file-media-object
{:deleted-at nil}
{:file-id file-id})
;; Mark thumbnails to be deleted
(db/update! conn :file-thumbnail
{:deleted-at nil}
{:file-id file-id})
(db/update! conn :file-tagged-object-thumbnail
{:deleted-at nil}
{:file-id file-id})
:restored)
(defn restore-file!
"Mark a file and all related objects as not deleted"
[file-id]
(let [file-id (h/parse-uuid file-id)]
(db/tx-run! main/system
(fn [{:keys [::db/conn] :as system}]
(fn [system]
(when-let [file (db/get* system :file
{:id file-id}
{::db/remove-deleted false
@@ -586,9 +684,7 @@
:cause "explicit call to restore-file!"}
::audit/tracked-at (ct/now)})
(#'files/restore-file conn file-id))
:restored))))
(restore-file* system file-id))))))
(defn delete-project!
"Mark a project for deletion"
@@ -621,7 +717,7 @@
(doseq [{:keys [id]} (db/query conn :file
{:project-id project-id}
{::sql/columns [:id]})]
(#'files/restore-file conn id))
(restore-file* cfg id))
:restored)
@@ -799,6 +895,19 @@
(with-open [reader (io/reader path)]
(process-data! system deleted-at (line-seq reader))))))))
(defn process-chunks
"A generic function that executes the specified proc iterativelly
until 0 results is returned"
[cfg proc-fn & params]
(loop [total 0]
(let [result (apply proc-fn cfg params)]
(if (pos? result)
(do
(l/trc :hint "chunk processed" :size result :total total)
(recur (+ total result)))
total))))
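A usage sketch; `sql:delete-chunk` below is a hypothetical statement that removes a bounded batch of rows, so the returned update count acts as the loop's termination signal:

(process-chunks system
                (fn [system]
                  ;; hypothetical chunk step: returns the number of rows affected
                  (-> (db/exec-one! system [sql:delete-chunk])
                      (db/get-update-count))))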
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; CASCADE FIXING
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
View File
@@ -1,141 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.srepl.procs.fdata-storage
(:require
[app.common.logging :as l]
[app.db :as db]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; SNAPSHOTS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def sql:get-unmigrated-snapshots
"SELECT fc.id, fc.file_id
FROM file_change AS fc
WHERE fc.data IS NOT NULL
AND fc.label IS NOT NULL
ORDER BY fc.id ASC")
(def sql:get-migrated-snapshots
"SELECT f.id, f.file_id
FROM file_data AS f
WHERE f.data IS NOT NULL
AND f.type = 'snapshot'
AND f.id != f.file_id
ORDER BY f.id ASC")
(defn migrate-snapshot-to-storage
"Migrate the current existing files to store data in new storage
tables."
{:query sql:get-unmigrated-snapshots}
[{:keys [::db/conn]} {:keys [id file-id]} & {:as options}]
(let [{:keys [id file-id data created-at updated-at]}
(db/get* conn :file-change {:id id :file-id file-id}
::db/for-update true
::db/remove-deleted false)]
(when data
(l/inf :hint "migrating snapshot" :file-id (str file-id) :id (str id))
(db/update! conn :file-change
{:data nil}
{:id id :file-id file-id}
{::db/return-keys false})
(db/insert! conn :file-data
{:backend "db"
:metadata nil
:type "snapshot"
:data data
:created-at created-at
:modified-at updated-at
:file-id file-id
:id id}
{::db/return-keys false}))))
(defn rollback-snapshot-from-storage
"Migrate back to the file table storage."
{:query sql:get-unmigrated-snapshots}
[{:keys [::db/conn]} {:keys [id file-id]} & {:as opts}]
(when-let [{:keys [id file-id data]}
(db/get* conn :file-data {:id id :file-id file-id :type "snapshot"}
::db/for-update true
::db/remove-deleted false)]
(l/inf :hint "rollback snapshot" :file-id (str file-id) :id (str id))
(db/update! conn :file-change
{:data data}
{:id id :file-id file-id}
{::db/return-keys false})
(db/delete! conn :file-data
{:id id :file-id file-id :type "snapshot"}
{::db/return-keys false})))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; FILES
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def sql:get-unmigrated-files
"SELECT f.id
FROM file AS f
WHERE f.data IS NOT NULL
ORDER BY f.modified_at ASC")
(def sql:get-migrated-files
"SELECT f.id, f.file_id
FROM file_data AS f
WHERE f.data IS NOT NULL
AND f.id = f.file_id
ORDER BY f.id ASC")
(defn migrate-file-to-storage
"Migrate the current existing files to store data in new storage
tables."
{:query sql:get-unmigrated-files}
[{:keys [::db/conn] :as cfg} {:keys [id]} & {:as opts}]
(let [{:keys [id data created-at modified-at]}
(db/get* conn :file {:id id}
::db/for-update true
::db/remove-deleted false)]
(when data
(l/inf :hint "migrating file" :file-id (str id))
(db/update! conn :file {:data nil} {:id id} ::db/return-keys false)
(db/insert! conn :file-data
{:backend "db"
:metadata nil
:type "main"
:data data
:created-at created-at
:modified-at modified-at
:file-id id
:id id}
{::db/return-keys false}))
(let [snapshots-sql
(str "WITH snapshots AS (" sql:get-unmigrated-snapshots ") "
"SELECT s.* FROM snapshots AS s WHERE s.file_id = ?")]
(run! (fn [params]
(migrate-snapshot-to-storage cfg params opts))
(db/plan cfg [snapshots-sql id])))))
(defn rollback-file-from-storage
"Migrate back to the file table storage."
{:query sql:get-migrated-files}
[{:keys [::db/conn] :as cfg} {:keys [id]} & {:as opts}]
(when-let [{:keys [id data]}
(db/get* conn :file-data {:id id :file-id id :type "main"}
::db/for-update true
::db/remove-deleted false)]
(l/inf :hint "rollback file" :file-id (str id))
(db/update! conn :file {:data data} {:id id} ::db/return-keys false)
(db/delete! conn :file-data {:file-id id :id id :type "main"} ::db/return-keys false)
(let [snapshots-sql
(str "WITH snapshots AS (" sql:get-migrated-snapshots ") "
"SELECT s.* FROM snapshots AS s WHERE s.file_id = ?")]
(run! (fn [params]
(rollback-snapshot-from-storage cfg params opts))
(db/plan cfg [snapshots-sql id])))))
View File
@@ -1,60 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.srepl.procs.file-repair
(:require
[app.common.files.changes :as cfc]
[app.common.files.repair :as cfr]
[app.common.files.validate :as cfv]
[app.common.logging :as l]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; GENERAL PURPOSE REPAIR
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn repair-file
"Internal helper for validate and repair the file. The operation is
applied multiple times untile file is fixed or max iteration counter
is reached (default 10).
This function should not be used directly, it is used throught the
app.srepl.main/repair-file! helper. In practical terms this function
is private and implementation detail."
[file libs & {:keys [max-iterations] :or {max-iterations 10}}]
(let [validate-and-repair
(fn [file libs iteration]
(when-let [errors (not-empty (cfv/validate-file file libs))]
(l/trc :hint "repairing file"
:file-id (str (:id file))
:iteration iteration
:errors (count errors))
(let [changes (cfr/repair-file file libs errors)]
(-> file
(update :revn inc)
(update :data cfc/process-changes changes)))))
process-file
(fn [file libs]
(loop [file file
iteration 0]
(if (< iteration max-iterations)
(if-let [file (validate-and-repair file libs iteration)]
(recur file (inc iteration))
file)
(do
(l/wrn :hint "max retry num reached on repairing file"
:file-id (str (:id file))
:iteration iteration)
file))))
file'
(process-file file libs)]
(when (not= (:revn file) (:revn file'))
(l/trc :hint "file repaired" :file-id (str (:id file))))
file'))
View File
@@ -1,57 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.srepl.procs.path-data
(:require
[app.binfile.common :as bfc]
[app.common.data :as d]
[app.common.files.helpers :as cfh]
[app.common.logging :as l]
[app.srepl.helpers :as h]))
(def ^:private sql:get-files-with-path-data
"SELECT id FROM file WHERE features @> '{fdata/path-data}'")
(defn disable
"A script responsible for remove the path data type from file data and
allow file to be open in older penpot versions.
Should be used only in cases when you want to downgrade to an older
penpot version for some reason."
{:query sql:get-files-with-path-data}
[cfg {:keys [id]} & {:as options}]
(l/inf :hint "disabling path-data" :file-id (str id))
(let [update-object
(fn [object]
(if (or (cfh/path-shape? object)
(cfh/bool-shape? object))
(update object :content vec)
object))
update-container
(fn [container]
(d/update-when container :objects d/update-vals update-object))
update-file
(fn [file & _opts]
(-> file
(update :data (fn [data]
(-> data
(update :pages-index d/update-vals update-container)
(d/update-when :components d/update-vals update-container))))
(update :features disj "fdata/path-data")
(update :migrations disj
"0003-convert-path-content-v2"
"0003-convert-path-content")))
options
(-> options
(assoc ::bfc/reset-migrations? true)
(assoc ::h/validate? false))]
(h/process-file! cfg id update-file options)))
View File
@@ -27,9 +27,7 @@
(defn get-legacy-backend
[]
(when-let [name (cf/get :assets-storage-backend)]
(l/wrn :hint "using deprecated configuration, please read 2.11 release notes"
:href "https://github.com/penpot/penpot/releases/tag/2.11.0")
(let [name (cf/get :assets-storage-backend)]
(case name
:assets-fs :fs
:assets-s3 :s3
View File
@@ -137,12 +137,14 @@
(if-let [{:keys [id] :as object} (first objects)]
(if (has-refs? conn object)
(do
(l/debug :id (str id)
(l/debug :hint "processing object"
:id (str id)
:status "freeze"
:bucket bucket)
(recur (conj to-freeze id) to-delete (rest objects)))
(do
(l/debug :id (str id)
(l/debug :hint "processing object"
:id (str id)
:status "delete"
:bucket bucket)
(recur to-freeze (conj to-delete id) (rest objects))))
@@ -195,9 +197,14 @@
deleted 0]
(if-let [chunk (get-chunk pool)]
(let [[nfo ndo] (db/tx-run! cfg process-chunk! chunk)]
(recur (long (+ freezed nfo))
(long (+ deleted ndo))))
{:freeze freezed :delete deleted})))
(recur (+ freezed nfo)
(+ deleted ndo)))
(do
(l/inf :hint "task finished"
:to-freeze freezed
:to-delete deleted)
{:freeze freezed :delete deleted}))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; HANDLER
View File
@@ -31,13 +31,13 @@
java.time.Duration
java.util.Collection
java.util.Optional
java.util.concurrent.atomic.AtomicLong
org.reactivestreams.Subscriber
software.amazon.awssdk.core.ResponseBytes
software.amazon.awssdk.core.async.AsyncRequestBody
software.amazon.awssdk.core.async.AsyncResponseTransformer
software.amazon.awssdk.core.async.BlockingInputStreamAsyncRequestBody
software.amazon.awssdk.core.client.config.ClientAsyncConfiguration
software.amazon.awssdk.core.client.config.SdkAdvancedAsyncClientOption
software.amazon.awssdk.http.nio.netty.NettyNioAsyncHttpClient
software.amazon.awssdk.http.nio.netty.SdkEventLoopGroup
software.amazon.awssdk.regions.Region
@@ -87,11 +87,12 @@
(def ^:private schema:config
[:map {:title "s3-backend-config"}
::wrk/netty-io-executor
::wrk/executor
[::region {:optional true} :keyword]
[::bucket {:optional true} ::sm/text]
[::prefix {:optional true} ::sm/text]
[::endpoint {:optional true} ::sm/uri]])
[::endpoint {:optional true} ::sm/uri]
[::io-threads {:optional true} ::sm/int]])
(defmethod ig/expand-key ::backend
[k v]
@@ -109,7 +110,6 @@
presigner (build-s3-presigner params)]
(assoc params
::sto/type :s3
::counter (AtomicLong. 0)
::client @client
::presigner presigner
::close-fn #(.close ^java.lang.AutoCloseable client)))))
@@ -121,7 +121,7 @@
(defmethod ig/halt-key! ::backend
[_ {:keys [::close-fn]}]
(when (fn? close-fn)
(close-fn)))
(px/run! close-fn)))
(def ^:private schema:backend
[:map {:title "s3-backend"}
@@ -198,16 +198,19 @@
(Region/of (name region)))
(defn- build-s3-client
[{:keys [::region ::endpoint ::wrk/netty-io-executor]}]
[{:keys [::region ::endpoint ::io-threads ::wrk/executor]}]
(let [aconfig (-> (ClientAsyncConfiguration/builder)
(.advancedOption SdkAdvancedAsyncClientOption/FUTURE_COMPLETION_EXECUTOR executor)
(.build))
sconfig (-> (S3Configuration/builder)
(cond-> (some? endpoint) (.pathStyleAccessEnabled true))
(.build))
thr-num (or io-threads (min 16 (px/get-available-processors)))
hclient (-> (NettyNioAsyncHttpClient/builder)
(.eventLoopGroup (SdkEventLoopGroup/create netty-io-executor))
(.eventLoopGroupBuilder (-> (SdkEventLoopGroup/builder)
(.numberOfThreads (int thr-num))))
(.connectionAcquisitionTimeout default-timeout)
(.connectionTimeout default-timeout)
(.readTimeout default-timeout)
@@ -259,7 +262,7 @@
(.close ^InputStream input))))
(defn- make-request-body
[counter content]
[executor content]
(let [size (impl/get-size content)]
(reify
AsyncRequestBody
@@ -269,19 +272,16 @@
(^void subscribe [_ ^Subscriber subscriber]
(let [delegate (AsyncRequestBody/forBlockingInputStream (long size))
input (io/input-stream content)]
(px/thread-call (partial write-input-stream delegate input)
{:name (str "penpot/storage/" (.getAndIncrement ^AtomicLong counter))})
(px/run! executor (partial write-input-stream delegate input))
(.subscribe ^BlockingInputStreamAsyncRequestBody delegate
^Subscriber subscriber))))))
(defn- put-object
[{:keys [::client ::bucket ::prefix ::counter]} {:keys [id] :as object} content]
[{:keys [::client ::bucket ::prefix ::wrk/executor]} {:keys [id] :as object} content]
(let [path (dm/str prefix (impl/id->path id))
mdata (meta object)
mtype (:content-type mdata "application/octet-stream")
rbody (make-request-body counter content)
rbody (make-request-body executor content)
request (.. (PutObjectRequest/builder)
(bucket bucket)
(contentType mtype)

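To make the new ::io-threads option concrete, a hedged configuration sketch; the bucket and prefix values are placeholders, the keys follow schema:config above, and per that schema a ::wrk/executor entry is also required:

;; Illustrative backend params; when ::io-threads is omitted,
;; build-s3-client falls back to (min 16 (px/get-available-processors)).
{::region     :eu-west-1
 ::bucket     "penpot-assets"
 ::prefix     "some-prefix/"
 ::io-threads 8}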

@@ -44,7 +44,7 @@
[_ cfg]
(fs/create-dir default-tmp-dir)
(px/fn->thread (partial io-loop cfg)
{:name "penpot/storage/tmp-cleaner"}))
{:name "penpot/storage/tmp-cleaner" :virtual true}))
(defmethod ig/halt-key! ::cleaner
[_ thread]

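A small sketch of the promesa call above and what the new :virtual flag changes; the body function here is a stand-in for the real io-loop:

;; fn->thread starts a thread running the 0-arity fn and returns it;
;; :virtual true requests a JDK virtual thread, which is cheap to keep
;; parked while the cleaner sleeps between runs.
(def cleaner
  (px/fn->thread #(Thread/sleep 60000)
                 {:name "penpot/storage/tmp-cleaner" :virtual true}))

;; The returned thread is what ig/halt-key! receives on shutdown,
;; presumably to interrupt it:
(.interrupt ^Thread cleaner)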

@@ -10,7 +10,6 @@
[app.common.logging :as l]
[app.common.time :as ct]
[app.db :as db]
[app.db.sql :as-alias sql]
[app.rpc.commands.files :as files]
[app.rpc.commands.profile :as profile]
[integrant.core :as ig]))
@@ -20,28 +19,10 @@
(defmulti delete-object
(fn [_ props] (:object props)))
(defmethod delete-object :snapshot
[{:keys [::db/conn] :as cfg} {:keys [id file-id deleted-at]}]
(l/trc :obj "snapshot" :id (str id) :file-id (str file-id)
:deleted-at (ct/format-inst deleted-at))
(db/update! conn :file-change
{:deleted-at deleted-at}
{:id id :file-id file-id}
{::db/return-keys false})
(db/update! conn :file-data
{:deleted-at deleted-at}
{:id id :file-id file-id :type "snapshot"}
{::db/return-keys false}))
(defmethod delete-object :file
[{:keys [::db/conn] :as cfg} {:keys [id deleted-at]}]
(when-let [file (db/get* conn :file {:id id}
{::db/remove-deleted false
::sql/columns [:id :is-shared]})]
(l/trc :obj "file" :id (str id)
(when-let [file (db/get* conn :file {:id id} {::db/remove-deleted false})]
(l/trc :hint "marking for deletion" :rel "file" :id (str id)
:deleted-at (ct/format-inst deleted-at))
(db/update! conn :file
@@ -62,35 +43,30 @@
;; Mark file change to be deleted
(db/update! conn :file-change
{:deleted-at deleted-at}
{:file-id id}
{::db/return-keys false})
{:file-id id})
;; Mark file data fragment to be deleted
(db/update! conn :file-data
(db/update! conn :file-data-fragment
{:deleted-at deleted-at}
{:file-id id}
{::db/return-keys false})
{:file-id id})
;; Mark file media objects to be deleted
(db/update! conn :file-media-object
{:deleted-at deleted-at}
{:file-id id}
{::db/return-keys false})
{:file-id id})
;; Mark thumbnails to be deleted
(db/update! conn :file-thumbnail
{:deleted-at deleted-at}
{:file-id id}
{::db/return-keys false})
{:file-id id})
(db/update! conn :file-tagged-object-thumbnail
{:deleted-at deleted-at}
{:file-id id}
{::db/return-keys false})))
{:file-id id})))
(defmethod delete-object :project
[{:keys [::db/conn] :as cfg} {:keys [id deleted-at]}]
(l/trc :obj "project" :id (str id)
(l/trc :hint "marking for deletion" :rel "project" :id (str id)
:deleted-at (ct/format-inst deleted-at))
(db/update! conn :project
@@ -107,7 +83,7 @@
(defmethod delete-object :team
[{:keys [::db/conn] :as cfg} {:keys [id deleted-at]}]
(l/trc :obj "team" :id (str id)
(l/trc :hint "marking for deletion" :rel "team" :id (str id)
:deleted-at (ct/format-inst deleted-at))
(db/update! conn :team
{:deleted-at deleted-at}
@@ -129,7 +105,7 @@
(defmethod delete-object :profile
[{:keys [::db/conn] :as cfg} {:keys [id deleted-at]}]
(l/trc :obj "profile" :id (str id)
(l/trc :hint "marking for deletion" :rel "profile" :id (str id)
:deleted-at (ct/format-inst deleted-at))
(db/update! conn :profile
@@ -144,7 +120,7 @@
(defmethod delete-object :default
[_cfg props]
(l/wrn :obj (:object props) :hint "no implementation found"))
(l/wrn :hint "no implementation found" :rel (:object props)))
(defmethod ig/assert-key ::handler
[_ params]

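Since delete-object dispatches on the :object key of the props map, supporting a new entity type is a matter of adding another method; a hypothetical sketch (the :comment-thread table is invented for illustration):

(defmethod delete-object :comment-thread
  [{:keys [::db/conn]} {:keys [id deleted-at]}]
  (l/trc :hint "marking for deletion" :rel "comment-thread" :id (str id)
         :deleted-at (ct/format-inst deleted-at))
  ;; Soft delete only; the chunked GC tasks perform the permanent
  ;; deletion later.
  (db/update! conn :comment-thread
              {:deleted-at deleted-at}
              {:id id}))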

@@ -32,7 +32,7 @@
(def ^:private sql:mark-file-media-object-deleted
"UPDATE file_media_object
SET deleted_at = ?
SET deleted_at = now()
WHERE file_id = ? AND id != ALL(?::uuid[])
RETURNING id")
@@ -43,7 +43,7 @@
(defn- clean-file-media!
"Performs the garbage collection of file media objects."
[{:keys [::db/conn ::timestamp] :as cfg} {:keys [id] :as file}]
[{:keys [::db/conn] :as cfg} {:keys [id] :as file}]
(let [used-media
(fsnap/reduce-snapshots cfg id xf:collect-used-media conj #{})
@@ -54,24 +54,27 @@
(db/create-array conn "uuid" used-media)
unused-media
(->> (db/exec! conn [sql:mark-file-media-object-deleted timestamp id used-media])
(->> (db/exec! conn [sql:mark-file-media-object-deleted id used-media])
(into #{} (map :id)))]
(l/dbg :hint "clean" :rel "file-media-object" :file-id (str id) :total (count unused-media))
(doseq [id unused-media]
(l/trc :obj "media-object"
:file-id (str id)
:id (str id)))
(l/trc :hint "mark deleted"
:rel "file-media-object"
:id (str id)
:file-id (str id)))
file))
(def ^:private sql:mark-file-object-thumbnails-deleted
"UPDATE file_tagged_object_thumbnail
SET deleted_at = ?
SET deleted_at = now()
WHERE file_id = ? AND object_id != ALL(?::text[])
RETURNING object_id")
(defn- clean-file-object-thumbnails!
[{:keys [::db/conn ::timestamp]} {:keys [data] :as file}]
[{:keys [::db/conn]} {:keys [data] :as file}]
(let [file-id (:id file)
using (->> (vals (:pages-index data))
(into #{} (comp
@@ -83,32 +86,38 @@
(thc/fmt-object-id file-id page-id id "frame")
(thc/fmt-object-id file-id page-id id "component")))))))
ids (into-array String using)
unused (->> (db/exec! conn [sql:mark-file-object-thumbnails-deleted timestamp file-id ids])
ids (db/create-array conn "uuid" using)
unused (->> (db/exec! conn [sql:mark-file-object-thumbnails-deleted file-id ids])
(into #{} (map :object-id)))]
(l/dbg :hint "clean" :rel "file-object-thumbnail" :file-id (str file-id) :total (count unused))
(doseq [object-id unused]
(l/trc :obj "object-thumbnail"
:file-id (str file-id)
:id object-id))
(l/trc :hint "mark deleted"
:rel "file-tagged-object-thumbnail"
:object-id object-id
:file-id (str file-id)))
file))
(def ^:private sql:mark-file-thumbnails-deleted
"UPDATE file_thumbnail
SET deleted_at = ?
SET deleted_at = now()
WHERE file_id = ? AND revn < ?
RETURNING revn")
(defn- clean-file-thumbnails!
[{:keys [::db/conn ::timestamp]} {:keys [id revn] :as file}]
(let [unused (->> (db/exec! conn [sql:mark-file-thumbnails-deleted timestamp id revn])
[{:keys [::db/conn]} {:keys [id revn] :as file}]
(let [unused (->> (db/exec! conn [sql:mark-file-thumbnails-deleted id revn])
(into #{} (map :revn)))]
(l/dbg :hint "clean" :rel "file-thumbnail" :file-id (str id) :total (count unused))
(doseq [revn unused]
(l/trc :obj "thumbnail"
:file-id (str id)
:revn revn))
(l/trc :hint "mark deleted"
:rel "file-thumbnail"
:revn revn
:file-id (str id)))
file))
@@ -159,21 +168,21 @@
(update file :data
(fn [data]
(reduce (fn [data id]
(l/trc :obj "component"
:file-id (str file-id)
:id (str id))
(l/trc :hint "delete component"
:component-id (str id)
:file-id (str file-id))
(ctkl/delete-component data id))
data
unused)))]
(l/dbg :hint "clean" :rel "components" :file-id (str file-id) :total (count unused))
file))
(def ^:private sql:mark-deleted-data-fragments
"UPDATE file_data
SET deleted_at = ?
"UPDATE file_data_fragment
SET deleted_at = now()
WHERE file_id = ?
AND id != ALL(?::uuid[])
AND type = 'fragment'
AND deleted_at IS NULL
RETURNING id")
@@ -182,16 +191,19 @@
(mapcat feat.fdata/get-used-pointer-ids)))
(defn- clean-fragments!
[{:keys [::db/conn ::timestamp]} {:keys [id] :as file}]
[{:keys [::db/conn]} {:keys [id] :as file}]
(let [used (into #{} xf:collect-pointers [file])
unused (->> (db/exec! conn [sql:mark-deleted-data-fragments timestamp id
unused (->> (db/exec! conn [sql:mark-deleted-data-fragments id
(db/create-array conn "uuid" used)])
(into #{} bfc/xf-map-id))]
(l/dbg :hint "clean" :rel "file-data-fragment" :file-id (str id) :total (count unused))
(doseq [id unused]
(l/trc :obj "fragment"
:file-id (str id)
:id (str id)))
(l/trc :hint "mark deleted"
:rel "file-data-fragment"
:id (str id)
:file-id (str id)))
file))
@@ -215,13 +227,14 @@
;; We should ensure that the scheduled file and the file being
;; processed have not changed since scheduling; for this reason we
;; compare the revn from props with the revn of the file retrieved
;; from the database
(when (or (nil? revn) (= revn (:revn file)))
(when (= revn (:revn file))
file)))
(defn- process-file!
[cfg {:keys [file-id] :as props}]
(if-let [file (get-file cfg props)]
(let [file (->> file
(bfc/decode-file cfg)
(bfl/clean-file)
(clean-media! cfg)
(clean-fragments! cfg))
@@ -249,11 +262,8 @@
(-> cfg
(assoc ::db/rollback (:rollback? props))
(db/tx-run! (fn [{:keys [::db/conn] :as cfg}]
(let [cfg (-> cfg
(update ::sto/storage sto/configure conn)
(assoc ::timestamp (ct/now)))
(let [cfg (update cfg ::sto/storage sto/configure conn)
processed? (process-file! cfg props)]
(when (and processed? (contains? cf/flags :tiered-file-data-storage))
(wrk/submit! (-> cfg
(assoc ::wrk/task :offload-file-data)

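All the mark-deleted statements in this file share one shape: flip deleted_at to now() for rows that are no longer referenced, and RETURNING their ids so the caller can count and log them. A generic sketch under placeholder table and column names:

(def ^:private sql:mark-unused-deleted
  "UPDATE some_table
      SET deleted_at = now()
    WHERE file_id = ?
      AND id != ALL(?::uuid[])
   RETURNING id")

(defn- mark-unused!
  ;; Returns the set of ids that were just soft-deleted.
  [conn file-id used-ids]
  (->> (db/exec! conn [sql:mark-unused-deleted file-id
                       (db/create-array conn "uuid" used-ids)])
       (into #{} (map :id))))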

@@ -28,14 +28,14 @@
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-profiles deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [id photo-id]}]
(l/trc :obj "profile" :id (str id))
(l/trc :hint "permanently delete" :rel "profile" :id (str id))
;; Mark as deleted the storage object
(some->> photo-id (sto/touch-object! storage))
(let [affected (-> (db/delete! conn :profile {:id id})
(db/get-update-count))]
(+ total affected)))
(db/delete! conn :profile {:id id})
(inc total))
0)))
(def ^:private sql:get-teams
@@ -51,7 +51,8 @@
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-teams deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [id photo-id deleted-at]}]
(l/trc :obj "team"
(l/trc :hint "permanently delete"
:rel "team"
:id (str id)
:deleted-at (ct/format-inst deleted-at))
@@ -59,9 +60,9 @@
(some->> photo-id (sto/touch-object! storage))
;; And finally, permanently delete the team.
(let [affected (-> (db/delete! conn :team {:id id})
(db/get-update-count))]
(+ total affected)))
(db/delete! conn :team {:id id})
(inc total))
0)))
(def ^:private sql:get-fonts
@@ -78,7 +79,8 @@
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-fonts deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [id team-id deleted-at] :as font}]
(l/trc :obj "font-variant"
(l/trc :hint "permanently delete"
:rel "team-font-variant"
:id (str id)
:team-id (str team-id)
:deleted-at (ct/format-inst deleted-at))
@@ -89,9 +91,10 @@
(some->> (:otf-file-id font) (sto/touch-object! storage))
(some->> (:ttf-file-id font) (sto/touch-object! storage))
(let [affected (-> (db/delete! conn :team-font-variant {:id id})
(db/get-update-count))]
(+ total affected)))
;; And finally, permanently delete the team font variant
(db/delete! conn :team-font-variant {:id id})
(inc total))
0)))
(def ^:private sql:get-projects
@@ -108,14 +111,16 @@
[{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg}]
(->> (db/plan conn [sql:get-projects deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [id team-id deleted-at]}]
(l/trc :obj "project"
(l/trc :hint "permanently delete"
:rel "project"
:id (str id)
:team-id (str team-id)
:deleted-at (ct/format-inst deleted-at))
(let [affected (-> (db/delete! conn :project {:id id})
(db/get-update-count))]
(+ total affected)))
;; And finally, permanently delete the project.
(db/delete! conn :project {:id id})
(inc total))
0)))
(def ^:private sql:get-files
@@ -134,14 +139,19 @@
[{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg}]
(->> (db/plan conn [sql:get-files deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [id deleted-at project-id] :as file}]
(l/trc :obj "file"
(l/trc :hint "permanently delete"
:rel "file"
:id (str id)
:project-id (str project-id)
:deleted-at (ct/format-inst deleted-at))
(let [affected (-> (db/delete! conn :file {:id id})
(db/get-update-count))]
(+ total affected)))
;; Delete associated file data
(fdata/delete! cfg {:file-id id :id id :type "main"})
;; And finally, permanently delete the file.
(db/delete! conn :file {:id id})
(inc total))
0)))
(def ^:private sql:get-file-thumbnails
@@ -158,7 +168,8 @@
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-file-thumbnails deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [file-id revn media-id deleted-at]}]
(l/trc :obj "file-thumbnail"
(l/trc :hint "permanently delete"
:rel "file-thumbnail"
:file-id (str file-id)
:revn revn
:deleted-at (ct/format-inst deleted-at))
@@ -166,9 +177,10 @@
;; Mark as deleted the storage object
(some->> media-id (sto/touch-object! storage))
(let [affected (-> (db/delete! conn :file-thumbnail {:file-id file-id :revn revn})
(db/get-update-count))]
(+ total affected)))
;; And finally, permanently delete the object
(db/delete! conn :file-thumbnail {:file-id file-id :revn revn})
(inc total))
0)))
(def ^:private sql:get-file-object-thumbnails
@@ -185,7 +197,8 @@
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-file-object-thumbnails deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [file-id object-id media-id deleted-at]}]
(l/trc :obj "file-object-thumbnail"
(l/trc :hint "permanently delete"
:rel "file-tagged-object-thumbnail"
:file-id (str file-id)
:object-id object-id
:deleted-at (ct/format-inst deleted-at))
@@ -193,10 +206,10 @@
;; Mark as deleted the storage object
(some->> media-id (sto/touch-object! storage))
(let [affected (-> (db/delete! conn :file-tagged-object-thumbnail
{:file-id file-id :object-id object-id})
(db/get-update-count))]
(+ total affected)))
;; And finally, permanently delete the object
(db/delete! conn :file-tagged-object-thumbnail {:file-id file-id :object-id object-id})
(inc total))
0)))
(def ^:private sql:get-file-media-objects
@@ -213,7 +226,8 @@
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-file-media-objects deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [id file-id deleted-at] :as fmo}]
(l/trc :obj "file-media-object"
(l/trc :hint "permanently delete"
:rel "file-media-object"
:id (str id)
:file-id (str file-id)
:deleted-at (ct/format-inst deleted-at))
@@ -222,14 +236,14 @@
(some->> (:media-id fmo) (sto/touch-object! storage))
(some->> (:thumbnail-id fmo) (sto/touch-object! storage))
(let [affected (-> (db/delete! conn :file-media-object {:id id})
(db/get-update-count))]
(+ total affected)))
(db/delete! conn :file-media-object {:id id})
(inc total))
0)))
(def ^:private sql:get-file-data
"SELECT file_id, id, type, deleted_at, metadata, backend
FROM file_data
(def ^:private sql:get-file-data-fragments
"SELECT file_id, id, deleted_at
FROM file_data_fragment
WHERE deleted_at IS NOT NULL
AND deleted_at < now() + ?::interval
ORDER BY deleted_at ASC
@@ -237,29 +251,21 @@
FOR UPDATE
SKIP LOCKED")
(defn- delete-file-data!
(defn- delete-file-data-fragments!
[{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg}]
(->> (db/plan conn [sql:get-file-data deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [file-id id type deleted-at metadata backend]}]
(some->> metadata
(fdata/decode-metadata)
(fdata/process-metadata cfg))
(l/trc :obj "file-data"
(->> (db/plan conn [sql:get-file-data-fragments deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [file-id id deleted-at]}]
(l/trc :hint "permanently delete"
:rel "file-data-fragment"
:id (str id)
:file-id (str file-id)
:type type
:backend backend
:deleted-at (ct/format-inst deleted-at))
(let [affected (-> (db/delete! conn :file-data
{:file-id file-id
:id id
:type type})
(db/get-update-count))]
(+ total affected)))
;; Delete associated file data
(fdata/delete! cfg {:file-id file-id :id id :type "fragment"})
(db/delete! conn :file-data-fragment {:file-id file-id :id id})
(inc total))
0)))
(def ^:private sql:get-file-change
@@ -276,14 +282,18 @@
[{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg}]
(->> (db/plan conn [sql:get-file-change deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [id file-id deleted-at] :as xlog}]
(l/trc :obj "file-change"
(l/trc :hint "permanently delete"
:rel "file-change"
:id (str id)
:file-id (str file-id)
:deleted-at (ct/format-inst deleted-at))
(let [affected (-> (db/delete! conn :file-change {:id id})
(db/get-update-count))]
(+ total affected)))
;; Delete associated file data, if it exists
(fdata/delete! cfg {:file-id file-id :id id :type "snapshot"})
(db/delete! conn :file-change {:id id})
(inc total))
0)))
(def ^:private deletion-proc-vars
@@ -291,8 +301,8 @@
#'delete-file-media-objects!
#'delete-file-object-thumbnails!
#'delete-file-thumbnails!
#'delete-file-data!
#'delete-file-changes!
#'delete-file-data-fragments!
#'delete-files!
#'delete-projects!
#'delete-fonts!
@@ -303,12 +313,11 @@
until 0 results is returned"
[cfg proc-fn]
(loop [total 0]
(let [result (db/tx-run! cfg
(fn [{:keys [::db/conn] :as cfg}]
(db/exec-one! conn ["SET LOCAL rules.deletion_protection TO off"])
(proc-fn cfg)))]
(let [result (db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
(db/exec-one! conn ["SET LOCAL rules.deletion_protection TO off"])
(proc-fn cfg)))]
(if (pos? result)
(recur (long (+ total result)))
(recur (+ total result))
total))))
(defmethod ig/assert-key ::handler
@@ -330,5 +339,7 @@
(if-let [proc-fn (first procs)]
(let [result (execute-proc! cfg proc-fn)]
(recur (rest procs)
(long (+ total result))))
{:processed total})))))
(+ total result)))
(do
(l/inf :hint "task finished" :deleted total)
{:processed total}))))))
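Worth noting for execute-proc! above: in PostgreSQL, SET LOCAL lasts only until the end of the current transaction, so the deletion-protection rule must be re-disabled inside every tx-run! iteration. Condensed, the loop looks like this:

;; Condensed from execute-proc!: each chunk runs in its own
;; transaction, and SET LOCAL resets at commit/rollback, hence the
;; statement is re-issued per iteration.
(loop [total 0]
  (let [result (db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
                                 (db/exec-one! conn ["SET LOCAL rules.deletion_protection TO off"])
                                 (proc-fn cfg)))]
    (if (pos? result)
      (recur (+ total result))
      total)))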

Some files were not shown because too many files have changed in this diff.