Mirror of https://github.com/penpot/penpot.git (synced 2026-01-05 21:08:56 -05:00)

Compare commits: 2.0.2...hiru-rxspy (1 commit)

Commit 582a6d0c03
@@ -11,11 +11,11 @@ jobs:
      - image: cimg/redis:7.0.5

    working_directory: ~/repo
    resource_class: medium+
    resource_class: large

    environment:
      JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
      NODE_OPTIONS: --max-old-space-size=4096
      # Customize the JVM maximum heap limit
      JVM_OPTS: -Xmx4g

    steps:
      - checkout
@@ -28,82 +28,50 @@ jobs:
      - v1-dependencies-

      - run: cd .clj-kondo && cat config.edn
      - run: cat .cljfmt.edn
      - run: clj-kondo --version

      - run:
          name: "fmt check backend [clj]"
          working_directory: "./backend"
          command: |
            yarn install
            yarn run fmt:clj:check

      - run:
          name: "fmt check exporter [clj]"
          working_directory: "./exporter"
          command: |
            yarn install
            yarn run fmt:clj:check

      - run:
          name: "fmt check common [clj]"
          working_directory: "./common"
          command: |
            yarn install
            yarn run fmt:clj:check

      - run:
          name: "fmt check frontend [clj]"
          name: frontend styles prettier
          working_directory: "./frontend"
          command: |
            yarn install
            yarn run fmt:clj:check
            yarn run lint:scss

      - run:
          name: common lint
          working_directory: "./common"
          command: |
            yarn install
            yarn run lint:clj
            clj-kondo --version
            clj-kondo --parallel --lint src/

      - run:
          name: frontend lint
          working_directory: "./frontend"
          command: |
            yarn install
            yarn run lint:scss
            yarn run lint:clj
            clj-kondo --version
            clj-kondo --parallel --lint src/

      - run:
          name: backend lint
          working_directory: "./backend"
          command: |
            yarn install
            yarn run lint:clj
            clj-kondo --version
            clj-kondo --parallel --lint src/

      - run:
          name: exporter lint
          working_directory: "./exporter"
          command: |
            yarn install
            yarn run lint:clj

      - run:
          name: "common tests"
          working_directory: "./common"
          name: common tests
          command: |
            yarn install
            yarn test
            clojure -X:dev:test :patterns '["common-tests.*-test"]'

      - run:
          name: "frontend tests"
          working_directory: "./frontend"
          command: |
            yarn install
            yarn test
          environment:
            PATH: /usr/local/nodejs/bin/:/usr/local/bin:/bin:/usr/bin
            JVM_OPTS: -Xmx4g
            NODE_OPTIONS: --max-old-space-size=4096

      - run:
          name: "backend tests"
          name: backend test
          working_directory: "./backend"
          command: |
            clojure -X:dev:test :patterns '["backend-tests.*-test"]'

@@ -113,6 +81,18 @@ jobs:
            PENPOT_TEST_DATABASE_USERNAME: penpot_test
            PENPOT_TEST_DATABASE_PASSWORD: penpot_test
            PENPOT_TEST_REDIS_URI: "redis://localhost/1"
            JVM_OPTS: -Xmx4g

      - run:
          name: frontend tests
          working_directory: "./frontend"
          command: |
            yarn install
            yarn test

          environment:
            PATH: /usr/local/nodejs/bin/:/usr/local/bin:/bin:/usr/bin
            NODE_OPTIONS: --max-old-space-size=4096

      - save_cache:
          paths:
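The CI job above repeats the same yarn-driven fmt, lint and test steps for each of the backend, exporter, common and frontend sub-projects. Purely as an illustration (this helper is not part of the repository), a small local runner in the same spirit could look like this, mirroring the command: blocks shown above:

# Illustrative helper: run the same per-project checks as the CI job above
# from a local checkout. Commands mirror the "command:" blocks in the config.
import subprocess
import sys

CHECKS = {
    "backend":  ["yarn install", "yarn run fmt:clj:check", "yarn run lint:clj"],
    "exporter": ["yarn install", "yarn run fmt:clj:check", "yarn run lint:clj"],
    "common":   ["yarn install", "yarn run fmt:clj:check", "yarn run lint:clj"],
    "frontend": ["yarn install", "yarn run fmt:clj:check", "yarn run lint:scss", "yarn run lint:clj"],
}

failed = False
for project, commands in CHECKS.items():
    for command in commands:
        print(f"==> {project}: {command}")
        result = subprocess.run(command, shell=True, cwd=project)
        if result.returncode != 0:
            failed = True

sys.exit(1 if failed else 0)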
@@ -4,8 +4,7 @@
|
||||
promesa.core/-> clojure.core/->
|
||||
promesa.exec.csp/go-loop clojure.core/loop
|
||||
rumext.v2/defc clojure.core/defn
|
||||
promesa.util/with-open clojure.core/with-open
|
||||
app.common.schema.generators/let clojure.core/let
|
||||
rumext.v2/fnc clojure.core/fn
|
||||
app.common.data/export clojure.core/def
|
||||
app.common.data.macros/get-in clojure.core/get-in
|
||||
app.common.data.macros/with-open clojure.core/with-open
|
||||
@@ -15,28 +14,19 @@
|
||||
:hooks
|
||||
{:analyze-call
|
||||
{app.common.data.macros/export hooks.export/export
|
||||
potok.core/reify hooks.export/potok-reify
|
||||
app.util.services/defmethod hooks.export/service-defmethod
|
||||
app.common.record/defrecord hooks.export/penpot-defrecord
|
||||
app.db/with-atomic hooks.export/penpot-with-atomic
|
||||
potok.v2.core/reify hooks.export/potok-reify
|
||||
rumext.v2/fnc hooks.export/rumext-fnc
|
||||
rumext.v2/lazy-component hooks.export/rumext-lazycomponent
|
||||
shadow.lazy/loadable hooks.export/rumext-lazycomponent
|
||||
}}
|
||||
|
||||
:output
|
||||
{:exclude-files
|
||||
["data_readers.clj"
|
||||
"src/app/util/perf.cljs"
|
||||
"src/app/common/logging.cljc"
|
||||
"src/app/common/exceptions.cljc"
|
||||
"^(?:backend|frontend|exporter|common)/build.clj"
|
||||
"^(?:backend|frontend|exporter|common)/deps.edn"
|
||||
"^(?:backend|frontend|exporter|common)/scripts/"
|
||||
"^(?:backend|frontend|exporter|common)/dev/"
|
||||
"^(?:backend|frontend|exporter|common)/test/"]
|
||||
|
||||
:linter-name true}
|
||||
"app/util/perf.cljs"
|
||||
"app/common/logging.cljc"
|
||||
"app/common/exceptions.cljc"]}
|
||||
|
||||
:linters
|
||||
{:unsorted-required-namespaces
|
||||
@@ -72,3 +62,4 @@
|
||||
:exclude-destructured-keys-in-fn-args false
|
||||
}
|
||||
}}
|
||||
|
||||
|
||||
@@ -37,9 +37,6 @@
|
||||
(api/token-node rsym)
|
||||
(api/vector-node [])]
|
||||
other))]
|
||||
|
||||
;; (prn (api/sexpr result))
|
||||
|
||||
{:node result})))
|
||||
|
||||
(defn penpot-with-atomic
|
||||
@@ -74,17 +71,6 @@
|
||||
{:node result})))
|
||||
|
||||
|
||||
(defn rumext-lazycomponent
|
||||
[{:keys [node]}]
|
||||
(let [[cname mdata params & body] (rest (:children node))
|
||||
[params body] (if (api/vector-node? mdata)
|
||||
[mdata (cons params body)]
|
||||
[params body])]
|
||||
(let [result (api/list-node [(api/token-node 'constantly) nil])]
|
||||
;; (prn (api/sexpr result))
|
||||
{:node result})))
|
||||
|
||||
|
||||
(defn penpot-defrecord
|
||||
[{:keys [:node]}]
|
||||
(let [[rnode rtype rparams & other] (:children node)
|
||||
|
||||
@@ -1,9 +0,0 @@
{:sort-ns-references? true
 :remove-multiple-non-indenting-spaces? false
 :remove-surrounding-whitespace? true
 :remove-consecutive-blank-lines? false
 :extra-indents {rumext.v2/fnc [[:inner 0]]
                 cljs.test/async [[:inner 0]]
                 promesa.exec/thread [[:inner 0]]
                 specify! [[:inner 0] [:inner 1]]}
 }
.gitignore (vendored)

@@ -1,12 +1,4 @@
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions
*-init.clj
*.css.json
*.jar
*.orig
*.penpot

@@ -23,7 +15,6 @@
/*.jpg
/*.md
/*.png
/*.svg
/*.sql
/*.txt
/*.yml

@@ -67,4 +58,3 @@
/web
clj-profiler/
node_modules
frontend/.storybook/preview-body.html
.vscode/settings.json (vendored)

@@ -1,9 +1,9 @@
{
  "files.exclude": {
    "**/.clj-kondo": true,
    "**/.cpcache": true,
    "**/.lsp": true,
    "**/.shadow-cljs": true,
    "**/node_modules": true
  }
  "files.exclude": {
    "**/.clj-kondo": true,
    "**/.cpcache": true,
    "**/.lsp": true,
    "**/.shadow-cljs": true,
    "**/node_modules": true
  }
}
.yarnrc.yml

@@ -1,11 +0,0 @@
enableGlobalCache: true

enableImmutableCache: false

enableImmutableInstalls: false

enableTelemetry: false

httpTimeout: 600000

nodeLinker: node-modules
150
CHANGES.md
150
CHANGES.md
@@ -1,158 +1,18 @@
|
||||
# CHANGELOG
|
||||
|
||||
## 2.0.2
|
||||
|
||||
### :sparkles: Enhancements
|
||||
|
||||
- Fix locking contention on cron subsystem (causes backend start blocking)
|
||||
- Fix locking contention on file object thumbails backend RPC calls
|
||||
|
||||
|
||||
## 2.0.1
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Fix different issues related to components v2 migrations including [Github #4443](https://github.com/penpot/penpot/issues/4443)
|
||||
|
||||
## 2.0.0 - I Just Can't Get Enough
|
||||
|
||||
### :rocket: Epics and highlights
|
||||
- Grid CSS layout [Taiga #4915](https://tree.taiga.io/project/penpot/epic/4915)
|
||||
- UI redesign [Taiga #4958](https://tree.taiga.io/project/penpot/epic/4958)
|
||||
- New components System [Taiga #2662](https://tree.taiga.io/project/penpot/epic/2662)
|
||||
- Swap components [Taiga #1331](https://tree.taiga.io/project/penpot/us/1331)
|
||||
- Images as fill [Taiga #2983](https://tree.taiga.io/project/penpot/us/2983)
|
||||
- HTML code generation [Taiga #5277](https://tree.taiga.io/project/penpot/us/5277)
|
||||
- Light and dark themes [Taiga #2287](https://tree.taiga.io/project/penpot/us/2287)
|
||||
## 1.20.0
|
||||
|
||||
### :boom: Breaking changes & Deprecations
|
||||
|
||||
- New strokes default to inside border [Taiga #6847](https://tree.taiga.io/project/penpot/issue/6847)
|
||||
|
||||
### :heart: Community contributions (Thank you!)
|
||||
- New Hausa, Yoruba and Igbo translations and update translation files (by All For Tech Empowerment Foundation) [Taiga #6950](https://tree.taiga.io/project/penpot/us/6950), [Taiga #6534](https://tree.taiga.io/project/penpot/us/6534)
|
||||
- Hide bounding-box when editing shape (by @VasilevsVV) [#3930](https://github.com/penpot/penpot/pull/3930)
|
||||
- CTRL + "+" to zoom into canvas instead of browser (by @audriu) [#3848](https://github.com/penpot/penpot/pull/3848)
|
||||
- Add dev deps.edn in the project root (by @PEZ) [#3794](https://github.com/penpot/penpot/pull/3794)
|
||||
- Allow passing overrides to frontend nginx config (by @m90) [#3602](https://github.com/penpot/penpot/pull/3602)
|
||||
- Update index.njk to remove typo (by @fdvmoreira) [#155](https://github.com/penpot/penpot-docs/pull/155)
|
||||
- Typo (by StephanEggermont) [#157](https://github.com/penpot/penpot-docs/pull/157)
|
||||
|
||||
### :sparkles: New features
|
||||
- Send comments with Ctrl+Enter / Cmd + Enter [Taiga #6085](https://tree.taiga.io/project/penpot/issue/6085)
|
||||
|
||||
- Select through stroke only rectangle [Taiga #5484](https://tree.taiga.io/project/penpot/issue/5484)
|
||||
- Stroke default position [Taiga #6847](https://tree.taiga.io/project/penpot/issue/6847)
|
||||
- Override browser Ctrl+ and Ctrl- zoom with Penpot Zoom [Taiga #3200](https://tree.taiga.io/project/penpot/us/3200)
|
||||
- Improve the way handlers work on flex layouts [Taiga #6598](https://tree.taiga.io/project/penpot/us/6598)
|
||||
- Add menu entry for toggle between light/dark theme [Taiga #6829](https://tree.taiga.io/project/penpot/issue/6829)
|
||||
- Switch themes shortcut [Taiga #6644](https://tree.taiga.io/project/penpot/us/6644)
|
||||
- Constraints section at design tab new position [Taiga #6830](https://tree.taiga.io/project/penpot/issue/6830)
|
||||
- [PICKER] File library colors order [Taiga #5399](https://tree.taiga.io/project/penpot/us/5399)
|
||||
- Onboarding invitations improvements [Taiga #5974](https://tree.taiga.io/project/penpot/us/5974)
|
||||
- [PERFORMANCE] Workspace thumbnails refactor [Taiga #5828](https://tree.taiga.io/project/penpot/us/5828)
|
||||
- [PERFORMANCE] Add performance optimizations to shape rendering [Taiga #5835](https://tree.taiga.io/project/penpot/us/5835)
|
||||
- [PERFORMANCE] Optimize SVG output [Taiga #4134](https://tree.taiga.io/project/penpot/us/4134)
|
||||
- [PERFORMANCE] Optimize svg on importation [Taiga #5879](https://tree.taiga.io/project/penpot/us/5879)
|
||||
- [PERFORMANCE] Optimization tasks related to design tab file [Taiga #5760](https://tree.taiga.io/project/penpot/us/5760)
|
||||
- [INSTALL] Ability to setup features by team [Taiga #6108](https://tree.taiga.io/project/penpot/us/6108)
|
||||
- [IMAGES] Keep aspect ratio option [Taiga #6933](https://tree.taiga.io/project/penpot/us/6933)
|
||||
- [INSPECT] UI review [Taiga #5687](https://tree.taiga.io/project/penpot/us/5687)
|
||||
- [GRID LAYOUT] Phase 1 [Taiga #4303](https://tree.taiga.io/project/penpot/us/4303)
|
||||
- [GRID LAYOUT] Inspect code for Grid [Taiga #5277](https://tree.taiga.io/project/penpot/us/5277)
|
||||
- [GRID LAYOUT] Phase 1 polishing [Taiga #5612](https://tree.taiga.io/project/penpot/us/5612)
|
||||
- [GRID LAYOUT] Improvements & Feedback [Taiga #6047](https://tree.taiga.io/project/penpot/us/6047)
|
||||
- [COMPONENTS] Naming of the main component [Taiga #5291](https://tree.taiga.io/project/penpot/us/5291)
|
||||
- [COMPONENTS] Rework inside of components - Library page [Taiga #2918](https://tree.taiga.io/project/penpot/us/2918)
|
||||
- [COMPONENTS] Update component when updating main instance [Taiga #3794](https://tree.taiga.io/project/penpot/us/3794)
|
||||
- [COMPONENTS] Main component new behavior [Taiga #3796](https://tree.taiga.io/project/penpot/us/3796)
|
||||
- [COMPONENTS] Main component look & feel [Taiga #5290](https://tree.taiga.io/project/penpot/us/5290)
|
||||
- [COMPONENTS] Library view [Taiga #2880](https://tree.taiga.io/project/penpot/us/2880)
|
||||
- [COMPONENTS] Positioning inside a component should relative, as in boards [Taiga #2826](https://tree.taiga.io/project/penpot/us/2826)
|
||||
- [COMPONENTS] Update message should show only if affecting at components that are being used at a file [Taiga #1397](https://tree.taiga.io/project/penpot/us/1397)
|
||||
- [COMPONENTS] Annotations [Taiga #4957](https://tree.taiga.io/project/penpot/us/4957)
|
||||
- [COMPONENTS] Synchronization order for nested components [Taiga #5439](https://tree.taiga.io/project/penpot/us/5439)
|
||||
- [COMPONENTS] Libraries modal zero case [Taiga #5294](https://tree.taiga.io/project/penpot/us/5294)
|
||||
- [COMPONENTS] Contextual menu casuistics [Taiga #5292](https://tree.taiga.io/project/penpot/us/5292)
|
||||
- [COMPONENTS] Libraries publishing flow review [Taiga #5293](https://tree.taiga.io/project/penpot/us/5293)
|
||||
- [COMPONENTS] Add loading text to Libraries modal [Taiga #6702](https://tree.taiga.io/project/penpot/us/6702)
|
||||
- [COMPONENTS] Components rename and organization in bulk [Taiga #2877](https://tree.taiga.io/project/penpot/us/2877)
|
||||
- [COMPONENTS] Info overlay about components V2 [Taiga #6276](https://tree.taiga.io/project/penpot/us/6276)
|
||||
- [REDESIGN] New styles basics [Taiga #4967](https://tree.taiga.io/project/penpot/us/4967)
|
||||
- [REDESIGN] Layers tab redesign [Taiga #4966](https://tree.taiga.io/project/penpot/us/4966)
|
||||
- [REDESIGN] Design tab phase 1 [Taiga #4982](https://tree.taiga.io/project/penpot/us/4966)
|
||||
- [REDESIGN] Assets tab redesign [Taiga #4984](https://tree.taiga.io/project/penpot/us/4984)
|
||||
- [REDESIGN] Palette panels (colors, typographies...) [Taiga #4983](https://tree.taiga.io/project/penpot/us/4983)
|
||||
- [REDESIGN] Workspace structure [Taiga #4988](https://tree.taiga.io/project/penpot/us/4988)
|
||||
- [REDESIGN] Shortcut tab [Taiga #4989](https://tree.taiga.io/project/penpot/us/4989)
|
||||
- [REDESIGN] Toolbar [Taiga #5500](https://tree.taiga.io/project/penpot/us/5500)
|
||||
- [REDESIGN] History tab [Taiga #5481](https://tree.taiga.io/project/penpot/us/5481)
|
||||
- [REDESIGN] Path options/toolbar [Taiga #5815](https://tree.taiga.io/project/penpot/us/5815)
|
||||
- [REDESIGN] Design tab phase 2 [Taiga #5814](https://tree.taiga.io/project/penpot/us/5814)
|
||||
- [REDESIGN] Design tab phase 3 and dashboard details [Taiga #5920](https://tree.taiga.io/project/penpot/us/5920)
|
||||
- [REDESIGN] Dashboard [Taiga #5164](https://tree.taiga.io/project/penpot/us/5164)
|
||||
- [REDESIGN] New Dashboard UI [Taiga #5869](https://tree.taiga.io/project/penpot/us/5869)
|
||||
- [REDESIGN] Prototype tab [Taiga #4985](https://tree.taiga.io/project/penpot/us/4985)
|
||||
- [REDESIGN] Code tab [Taiga #4986](https://tree.taiga.io/project/penpot/us/4986)
|
||||
- [REDESIGN] Modals and alert messages [Taiga #5915](https://tree.taiga.io/project/penpot/us/5915)
|
||||
- [REDESIGN] Comments page [Taiga #5917](https://tree.taiga.io/project/penpot/us/5917)
|
||||
- [REDESIGN] View Mode [Taiga #5163](https://tree.taiga.io/project/penpot/us/5163)
|
||||
- [REDESIGN] Miscellaneous tasks [Taiga #6050](https://tree.taiga.io/project/penpot/us/6050)
|
||||
- [REDESIGN] Swap components [Taiga #6739](https://tree.taiga.io/project/penpot/us/6739)
|
||||
- [REDESIGN] Font selector [Taiga #6677](https://tree.taiga.io/project/penpot/us/6677)
|
||||
- [REDESIGN] Colour system of alerts and notifications [Taiga #6746](https://tree.taiga.io/project/penpot/us/6746)
|
||||
- [REDESIGN] Review text in paragraphs for accessibility [Taiga #6703](https://tree.taiga.io/project/penpot/us/6703)
|
||||
- [REDESIGN] Interaction icons [Taiga #6880](https://tree.taiga.io/project/penpot/us/6880)
|
||||
- [REDESIGN] Panels visual separations [Taiga #6692](https://tree.taiga.io/project/penpot/us/6692)
|
||||
- [REDESIGN] Onboarding slides [Taiga #6678](https://tree.taiga.io/project/penpot/us/6678)
|
||||
|
||||
### :bug Bugs fixed
|
||||
- Fix pixelated thumbnails [Github #3681](https://github.com/penpot/penpot/issues/3681), [Github #3661](https://github.com/penpot/penpot/issues/3661)
|
||||
- Fix problem with not applying colors to boards [Github #3941](https://github.com/penpot/penpot/issues/3941)
|
||||
- Fix problem with path editor undoing changes [Github #3998](https://github.com/penpot/penpot/issues/3998)
|
||||
- [View mode] Open overlay places frame in the wrong position when paired with a fixed element [Taiga #6385](https://tree.taiga.io/project/penpot/issue/6385)
|
||||
- Flex Layout: Fit-content not recalculated after deleting an element [Taiga #5968](https://tree.taiga.io/project/penpot/issue/5968)
|
||||
- Selecting from Color Palette does not work for board when there is no existing fill [Taiga #6464](https://tree.taiga.io/project/penpot/issue/6464)
|
||||
- Color thumbnails are consistently rounded in the inspect code mode [Taiga #5886](https://tree.taiga.io/project/penpot/issue/5886)
|
||||
- Adding vector path points before first point of existing open path not working [Taiga #6593](https://tree.taiga.io/project/penpot/issue/6593)
|
||||
- Some image formats include the extension when importing [Taiga #5485](https://tree.taiga.io/project/penpot/issue/5485)
|
||||
- Gradient color tool doesn't work properly with flipped items [Taiga #6485](https://tree.taiga.io/project/penpot/issue/6485)
|
||||
- [TEXT] Align options are not shown when several text are selected [Taiga #5948](https://tree.taiga.io/project/penpot/issue/5948)
|
||||
- [VIEW MODE] Comments not working properly on multiple pages [Taiga #6281](https://tree.taiga.io/project/penpot/issue/6281)
|
||||
- [PERFORMANCE] Alignments are slow [Taiga #5865](https://tree.taiga.io/project/penpot/issue/5865)
|
||||
- [EXPORT] Exporting an element with a non-visible drop shadow displays the shadow either way [Taiga #6768](https://tree.taiga.io/project/penpot/issue/6768)
|
||||
- [SAFARI] Color picker cursor is not pointing correctly [Taiga #6733](https://tree.taiga.io/project/penpot/issue/6733)
|
||||
- [Import Files] When user has imported .penpot file with new file name of previously downloaded library file the default library file name is set for it [Taiga #5596](https://tree.taiga.io/project/penpot/issue/5596)
|
||||
- Issue when resizing a duotone FA icon [Taiga #5935](https://tree.taiga.io/project/penpot/issue/5935)
|
||||
- "Hide grid" keyboard shortcut broken [Taiga #5102](https://tree.taiga.io/project/penpot/issue/5102)
|
||||
- Picking a gradient color in recent colors for a new color in the assets tab crashes Penpot [Taiga #5601](https://tree.taiga.io/project/penpot/issue/5601)
|
||||
- Thumbnails not loading [Taiga #6012](https://tree.taiga.io/project/penpot/issue/6012)
|
||||
- Don't show signup link/form when registration is disabled. [Taiga #1196](https://tree.taiga.io/project/penpot/issue/1196)
|
||||
- Registration Page UI UX issue with small resolutions [Taiga #1693](https://tree.taiga.io/project/penpot/issue/1693)
|
||||
- [LOGIN] "E-Mail-Adress" input field is set to type 'text' instead of 'eMail [Taiga #1921](https://tree.taiga.io/project/penpot/issue/1921)
|
||||
- Handling correctly slashes "/" in emails [Taiga #4906](https://tree.taiga.io/project/penpot/issue/4906)
|
||||
- Tab character in texts crashes the app [Taiga #4418](https://tree.taiga.io/project/penpot/issue/4418)
|
||||
- Text does not match export [Taiga #4129](https://tree.taiga.io/project/penpot/issue/4129)
|
||||
- Scrollbars cover the layers carets [Taiga #4431](https://tree.taiga.io/project/penpot/issue/4431)
|
||||
- Horizontal ruler disappear when overlapping a board [Taiga #4138](https://tree.taiga.io/project/penpot/issue/4138)
|
||||
- Resize shape + Alt key is not working [Taiga #3447](https://tree.taiga.io/project/penpot/issue/3447)
|
||||
- Libraries images broken on premise [Taiga #4573](https://tree.taiga.io/project/penpot/issue/4573)
|
||||
- [VIEWER] Cannot scroll down in code </> mode [Taiga #4655](https://tree.taiga.io/project/penpot/issue/4655)
|
||||
- Strange cursor behavior after clicking viewport with text tool [Taiga #4363](https://tree.taiga.io/project/penpot/issue/4363)
|
||||
- Selected color affects all of them [Taiga #5285](https://tree.taiga.io/project/penpot/issue/5285)
|
||||
- Fix problem with shadow negative spread [Github #3421](https://github.com/penpot/penpot/issues/3421)
|
||||
- Fix problem with linked colors to strokes [Github #3522](https://github.com/penpot/penpot/issues/3522)
|
||||
- Fix problem with hand tool stuck [Github #3318](https://github.com/penpot/penpot/issues/3318)
|
||||
- Fix problem with fix scrolling on nested elements [Github #3508](https://github.com/penpot/penpot/issues/3508)
|
||||
- Fix problem when changing typography assets [Github #3683](https://github.com/penpot/penpot/issues/3683)
|
||||
- Internal error when you copy and paste some main components between files [Taiga #7397](https://tree.taiga.io/project/penpot/issue/7397)
|
||||
- Fix toolbar disappearing [Taiga #7411](https://tree.taiga.io/project/penpot/issue/7411)
|
||||
- Fix long text on tab breaks UI [Taiga Issue #7421](https://tree.taiga.io/project/penpot/issue/7421)
|
||||
### :bug: Bugs fixed
|
||||
|
||||
## 1.19.5
|
||||
### :arrow_up: Deps updates
|
||||
|
||||
### :bug: New features
|
||||
|
||||
- Fix problem with alignment performance
|
||||
### :heart: Community contributions by (Thank you!)
|
||||
|
||||
## 1.19.4
|
||||
|
||||
|
||||
@@ -109,7 +109,6 @@ Every sort of contribution will be very helpful to enhance Penpot. How you’ll
|
||||
- Create and [share Libraries & templates](https://penpot.app/libraries-templates.html) that will be helpful for the community
|
||||
- Become a [translator](https://help.penpot.app/contributing-guide/translations)
|
||||
- Give feedback: [Mail us](mailto:support@penpot.app)
|
||||
- **Contribute to Penpot's code:** [Watch this video](https://www.youtube.com/watch?v=TpN0osiY-8k) by Alejandro Alonso, CIO and developer at Penpot, where he gives us a hands-on demo of how to use Penpot’s repository and make changes in both front and back end
|
||||
|
||||
To find (almost) everything you need to know on how to contribute to Penpot, refer to the [contributing-guide](https://help.penpot.app/contributing-guide/).
|
||||
|
||||
|
||||
7
backend/.gitignore
vendored
7
backend/.gitignore
vendored
@@ -1,7 +0,0 @@
|
||||
.pnp.*
|
||||
.yarn/*
|
||||
!.yarn/patches
|
||||
!.yarn/plugins
|
||||
!.yarn/releases
|
||||
!.yarn/sdks
|
||||
!.yarn/versions
|
||||
@@ -3,10 +3,8 @@
|
||||
|
||||
:deps
|
||||
{penpot/common {:local/root "../common"}
|
||||
org.clojure/clojure {:mvn/version "1.12.0-alpha5"}
|
||||
org.clojure/tools.namespace {:mvn/version "1.4.4"}
|
||||
|
||||
com.github.luben/zstd-jni {:mvn/version "1.5.5-11"}
|
||||
org.clojure/clojure {:mvn/version "1.11.1"}
|
||||
com.github.luben/zstd-jni {:mvn/version "1.5.5-5"}
|
||||
|
||||
io.prometheus/simpleclient {:mvn/version "0.16.0"}
|
||||
io.prometheus/simpleclient_hotspot {:mvn/version "0.16.0"}
|
||||
@@ -17,24 +15,21 @@
|
||||
|
||||
io.prometheus/simpleclient_httpserver {:mvn/version "0.16.0"}
|
||||
|
||||
io.lettuce/lettuce-core {:mvn/version "6.3.0.RELEASE"}
|
||||
io.lettuce/lettuce-core {:mvn/version "6.2.6.RELEASE"}
|
||||
java-http-clj/java-http-clj {:mvn/version "0.4.3"}
|
||||
|
||||
funcool/yetti
|
||||
{:git/tag "v10.0"
|
||||
:git/sha "520613f"
|
||||
{:git/tag "v9.16"
|
||||
:git/sha "7df3e08"
|
||||
:git/url "https://github.com/funcool/yetti.git"
|
||||
:exclusions [org.slf4j/slf4j-api]}
|
||||
|
||||
com.github.seancorfield/next.jdbc {:mvn/version "1.3.909"}
|
||||
com.github.seancorfield/next.jdbc {:mvn/version "1.3.883"}
|
||||
metosin/reitit-core {:mvn/version "0.6.0"}
|
||||
nrepl/nrepl {:mvn/version "1.1.0"}
|
||||
cider/cider-nrepl {:mvn/version "0.44.0"}
|
||||
|
||||
org.postgresql/postgresql {:mvn/version "42.7.1"}
|
||||
org.xerial/sqlite-jdbc {:mvn/version "3.44.1.0"}
|
||||
org.postgresql/postgresql {:mvn/version "42.6.0"}
|
||||
|
||||
com.zaxxer/HikariCP {:mvn/version "5.1.0"}
|
||||
com.zaxxer/HikariCP {:mvn/version "5.0.1"}
|
||||
|
||||
io.whitfin/siphash {:mvn/version "2.0.0"}
|
||||
|
||||
@@ -43,7 +38,7 @@
|
||||
|
||||
com.github.ben-manes.caffeine/caffeine {:mvn/version "3.1.8"}
|
||||
|
||||
org.jsoup/jsoup {:mvn/version "1.17.2"}
|
||||
org.jsoup/jsoup {:mvn/version "1.16.1"}
|
||||
org.im4java/im4java
|
||||
{:git/tag "1.4.0-penpot-2"
|
||||
:git/sha "e2b3e16"
|
||||
@@ -52,13 +47,14 @@
|
||||
org.lz4/lz4-java {:mvn/version "1.8.0"}
|
||||
|
||||
org.clojars.pntblnk/clj-ldap {:mvn/version "0.0.17"}
|
||||
integrant/integrant {:mvn/version "0.8.1"}
|
||||
|
||||
dawran6/emoji {:mvn/version "0.1.5"}
|
||||
markdown-clj/markdown-clj {:mvn/version "1.11.7"}
|
||||
markdown-clj/markdown-clj {:mvn/version "1.11.4"}
|
||||
|
||||
;; Pretty Print specs
|
||||
pretty-spec/pretty-spec {:mvn/version "0.1.4"}
|
||||
software.amazon.awssdk/s3 {:mvn/version "2.22.12"}
|
||||
software.amazon.awssdk/s3 {:mvn/version "2.20.138"}
|
||||
}
|
||||
|
||||
:paths ["src" "resources" "target/classes"]
|
||||
@@ -66,6 +62,7 @@
|
||||
{:dev
|
||||
{:extra-deps
|
||||
{com.bhauman/rebel-readline {:mvn/version "RELEASE"}
|
||||
org.clojure/tools.namespace {:mvn/version "RELEASE"}
|
||||
clojure-humanize/clojure-humanize {:mvn/version "0.2.2"}
|
||||
org.clojure/data.csv {:mvn/version "RELEASE"}
|
||||
com.clojure-goes-fast/clj-async-profiler {:mvn/version "RELEASE"}
|
||||
@@ -91,8 +88,8 @@
|
||||
|
||||
:jmx-remote
|
||||
{:jvm-opts ["-Dcom.sun.management.jmxremote"
|
||||
"-Dcom.sun.management.jmxremote.port=9091"
|
||||
"-Dcom.sun.management.jmxremote.rmi.port=9091"
|
||||
"-Dcom.sun.management.jmxremote.port=9090"
|
||||
"-Dcom.sun.management.jmxremote.rmi.port=9090"
|
||||
"-Dcom.sun.management.jmxremote.local.only=false"
|
||||
"-Dcom.sun.management.jmxremote.authenticate=false"
|
||||
"-Dcom.sun.management.jmxremote.ssl=false"
|
||||
|
||||
@@ -7,9 +7,7 @@
|
||||
(ns user
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.debug :as debug]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.files.helpers :as cfh]
|
||||
[app.common.fressian :as fres]
|
||||
[app.common.geom.matrix :as gmt]
|
||||
[app.common.logging :as l]
|
||||
@@ -23,8 +21,7 @@
|
||||
[app.common.transit :as t]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.config :as cfg]
|
||||
[app.main :as main]
|
||||
[app.srepl.helpers :as srepl.helpers]
|
||||
[app.srepl.main :as srepl]
|
||||
@@ -44,7 +41,7 @@
|
||||
[clojure.walk :refer [macroexpand-all]]
|
||||
[criterium.core :as crit]
|
||||
[cuerdas.core :as str]
|
||||
[datoteka.fs :as fs]
|
||||
[datoteka.core]
|
||||
[integrant.core :as ig]
|
||||
[malli.core :as m]
|
||||
[malli.dev.pretty :as mdp]
|
||||
@@ -56,11 +53,9 @@
|
||||
[promesa.exec :as px]))
|
||||
|
||||
(repl/disable-reload! (find-ns 'integrant.core))
|
||||
(repl/disable-reload! (find-ns 'app.common.debug))
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
(add-tap #'debug/tap-handler)
|
||||
(defonce system nil)
|
||||
|
||||
;; --- Benchmarking Tools
|
||||
|
||||
@@ -99,14 +94,20 @@
|
||||
(defn- start
|
||||
[]
|
||||
(try
|
||||
(main/start)
|
||||
(alter-var-root #'system (fn [sys]
|
||||
(when sys (ig/halt! sys))
|
||||
(-> (merge main/system-config main/worker-config)
|
||||
(ig/prep)
|
||||
(ig/init))))
|
||||
:started
|
||||
(catch Throwable cause
|
||||
(ex/print-throwable cause))))
|
||||
|
||||
(defn- stop
|
||||
[]
|
||||
(main/stop)
|
||||
(alter-var-root #'system (fn [sys]
|
||||
(when sys (ig/halt! sys))
|
||||
nil))
|
||||
:stopped)
|
||||
|
||||
(defn restart
|
||||
@@ -119,29 +120,61 @@
|
||||
(stop)
|
||||
(repl/refresh-all :after 'user/start))
|
||||
|
||||
;; (defn compression-bench
|
||||
;; [data]
|
||||
;; (let [humanize (fn [v] (hum/filesize v :binary true :format " %.4f "))
|
||||
;; v1 (time (humanize (alength (blob/encode data {:version 1}))))
|
||||
;; v3 (time (humanize (alength (blob/encode data {:version 3}))))
|
||||
;; v4 (time (humanize (alength (blob/encode data {:version 4}))))
|
||||
;; v5 (time (humanize (alength (blob/encode data {:version 5}))))
|
||||
;; v6 (time (humanize (alength (blob/encode data {:version 6}))))
|
||||
;; ]
|
||||
;; (print-table
|
||||
;; [{
|
||||
;; :v1 v1
|
||||
;; :v3 v3
|
||||
;; :v4 v4
|
||||
;; :v5 v5
|
||||
;; :v6 v6
|
||||
;; }])))
|
||||
(defn compression-bench
|
||||
[data]
|
||||
(let [humanize (fn [v] (hum/filesize v :binary true :format " %.4f "))
|
||||
v1 (time (humanize (alength (blob/encode data {:version 1}))))
|
||||
v3 (time (humanize (alength (blob/encode data {:version 3}))))
|
||||
v4 (time (humanize (alength (blob/encode data {:version 4}))))
|
||||
v5 (time (humanize (alength (blob/encode data {:version 5}))))
|
||||
v6 (time (humanize (alength (blob/encode data {:version 6}))))
|
||||
]
|
||||
(print-table
|
||||
[{
|
||||
:v1 v1
|
||||
:v3 v3
|
||||
:v4 v4
|
||||
:v5 v5
|
||||
:v6 v6
|
||||
}])))
|
||||
|
||||
(defonce debug-tap
|
||||
(do
|
||||
(add-tap #(locking debug-tap
|
||||
(prn "tap debug:" %)))
|
||||
1))
|
||||
|
||||
|
||||
(defn calculate-frames
|
||||
[{:keys [data]}]
|
||||
(->> (vals (:pages-index data))
|
||||
(mapcat (comp vals :objects))
|
||||
(filter cfh/is-direct-child-of-root?)
|
||||
(filter cfh/frame-shape?)
|
||||
(count)))
|
||||
(sm/def! ::test
|
||||
[:map {:title "Foo"}
|
||||
[:x :int]
|
||||
[:y {:min 0} :double]
|
||||
[:bar
|
||||
[:map {:title "Bar"}
|
||||
[:z :string]
|
||||
[:v ::sm/uuid]]]
|
||||
[:items
|
||||
[:vector ::dt/instant]]])
|
||||
|
||||
(sm/def! ::test2
|
||||
[:multi {:title "Foo" :dispatch :type}
|
||||
[:x
|
||||
[:map {:title "FooX"}
|
||||
[:type [:= :x]]
|
||||
[:x :int]]]
|
||||
[:y
|
||||
[:map
|
||||
[:type [:= :x]]
|
||||
[:y [::sm/one-of #{:a :b :c}]]]]
|
||||
[:z
|
||||
[:map {:title "FooZ"}
|
||||
[:z
|
||||
[:multi {:title "Bar" :dispatch :type}
|
||||
[:a
|
||||
[:map
|
||||
[:type [:= :a]]
|
||||
[:a :int]]]
|
||||
[:b
|
||||
[:map
|
||||
[:type [:= :b]]
|
||||
[:b :int]]]]]]]])
|
||||
|
||||
@@ -1,26 +1,18 @@
|
||||
{
|
||||
"name": "backend",
|
||||
"version": "1.0.0",
|
||||
"license": "MPL-2.0",
|
||||
"author": "Kaleidos INC",
|
||||
"private": true,
|
||||
"packageManager": "yarn@4.0.2",
|
||||
"name": "uxbox-back",
|
||||
"version": "0.1.0",
|
||||
"description": "The Open-Source prototyping tool",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"build-emails": "./scripts/build-email-templates.sh"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/penpot/penpot"
|
||||
},
|
||||
"dependencies": {
|
||||
"luxon": "^3.4.2",
|
||||
"sax": "^1.2.4"
|
||||
"url": "git+https://github.com/uxbox/uxbox.git"
|
||||
},
|
||||
"author": "Uxbox",
|
||||
"license": "SEE LICENSE IN <LICENSE>",
|
||||
"devDependencies": {
|
||||
"nodemon": "^3.0.1",
|
||||
"source-map-support": "^0.5.21",
|
||||
"ws": "^8.13.0"
|
||||
},
|
||||
"scripts": {
|
||||
"fmt:clj:check": "cljfmt check --parallel=false src/ test/",
|
||||
"fmt:clj": "cljfmt fix --parallel=true src/ test/",
|
||||
"lint:clj": "clj-kondo --parallel --lint src/"
|
||||
"mjml": "^4.6.3"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,33 +1,30 @@
|
||||
[{:id "tutorial-for-beginners"
|
||||
[{:id "material-design-3"
|
||||
:name "Material Design 3"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/main/Material%20Design%203.penpot"}
|
||||
{:id "tutorial-for-beginners"
|
||||
:name "Tutorial for beginners"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/binary-files/tutorial-for-beginners.penpot"}
|
||||
{:id "lucide-icons"
|
||||
:name "Lucide Icons"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/binary-files/Lucide-icons.penpot"}
|
||||
{:id "font-awesome"
|
||||
:name "Font Awesome"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/binary-files/Font-Awesome.penpot"}
|
||||
{:id "plants-app"
|
||||
:name "Plants app"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/binary-files/Plants-app.penpot"}
|
||||
{:id "penpot-design-system"
|
||||
:name "Penpot Design System"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/binary-files/Penpot-Design-system.penpot"}
|
||||
{:id "flex-layout-playground"
|
||||
:name "Flex Layout Playground"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/main/Flex%20Layout%20Playground.penpot"}
|
||||
{:id "wireframing-kit"
|
||||
:name "Wireframing Kit"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/binary-files/wireframing-kit.penpot"}
|
||||
{:id "black-white-mobile-templates"
|
||||
:name "Black & White Mobile Templates"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/binary-files/Black-White-Mobile-Templates.penpot"}
|
||||
{:id "avataaars"
|
||||
:name "Avataaars"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/binary-files/Avataaars-by-Pablo-Stanley.penpot"}
|
||||
{:id "ux-notes"
|
||||
:name "UX Notes"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/binary-files/UX-Notes.penpot"}
|
||||
{:id "ant-design"
|
||||
:name "Ant Design UI Kit (lite)"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/binary-files/Ant-Design-UI-Kit-Lite.penpot"}
|
||||
{:id "cocomaterial"
|
||||
:name "Cocomaterial"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/binary-files/Cocomaterial.penpot"}
|
||||
{:id "circum-icons"
|
||||
:name "Circum Icons pack"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/binary-files/CircumIcons.penpot"}
|
||||
{:id "coreui"
|
||||
:name "CoreUI"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/main/CoreUI%20DesignSystem%20(DEMO).penpot"}
|
||||
{:id "whiteboarding-kit"
|
||||
:name "Whiteboarding Kit"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/binary-files/Whiteboarding-mapping-kit.penpot"}
|
||||
{:id "open-color-scheme"
|
||||
:name "Open Color Scheme"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/binary-files/Open-Color-Scheme.penpot"}
|
||||
{:id "flex-layout-playground"
|
||||
:name "Flex Layout Playground"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/binary-files/Flex-Layout-Playground.penpot"}]
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/binary-files/Whiteboarding-mapping-kit.penpot"}]
|
||||
|
||||
@@ -25,12 +25,6 @@
|
||||
<span>SCHEMA</span>
|
||||
</span>
|
||||
{% endif %}
|
||||
|
||||
{% if item.sse %}
|
||||
<span class="tag">
|
||||
<span>SSE</span>
|
||||
</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
<div class="rpc-row-detail hidden">
|
||||
|
||||
@@ -37,13 +37,6 @@

<h2>GENERAL NOTES</h2>

<h3>HTTP Transport & Methods</h3>
<p>HTTP is the transport used for accessing this API; all functions can be
called using the POST HTTP method; functions whose name starts with <b>get-</b>
can also use the GET HTTP method, which in many cases benefits from the HTTP
cache.</p>

<h3>Authentication</h3>
<p>The penpot backend currently offers two ways to authenticate the request:
<b>cookies</b> (the same mechanism that we use ourselves on accessing the API from the

@@ -63,17 +56,6 @@
desired content-type on the <b>`Accept`</b> header, the transit encoding is used
by default.</p>

<h3>SSE (Server-Sent Events)</h3>
<p>The methods marked with <b>SSE</b> return
an <a href="https://html.spec.whatwg.org/multipage/server-sent-events.html">SSE
formatted</a> stream in the response body, always with status 200. The events are
always encoded using `application/transit+json` encoding (for now no content
negotiation is possible on methods that return SSE streams).</p>
<p>On the javascript side you can use
the <a href="https://github.com/rexxars/eventsource-parser">eventsource-parser</a>
library for properly parsing the response body using the
standard <a href="https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API">Fetch
API</a>.</p>

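As a rough illustration of the notes above, the following Python sketch streams one of the SSE-marked RPC methods. The URL path, cookie name and payload are placeholders, not part of the documented API; the only guarantees taken from the text above are the 200 status, the SSE framing and the transit+json event encoding.

# Minimal sketch: stream an SSE response from an RPC method marked "SSE".
# The URL, auth cookie and payload below are hypothetical placeholders.
import requests

url = "https://example.org/api/rpc/command/some-sse-method"   # placeholder
headers = {"Accept": "text/event-stream"}
cookies = {"auth-token": "REPLACE-ME"}                        # placeholder

with requests.post(url, json={}, headers=headers, cookies=cookies, stream=True) as resp:
    resp.raise_for_status()
    for raw in resp.iter_lines(decode_unicode=True):
        # SSE frames are newline-delimited; payload lines start with "data:".
        if raw and raw.startswith("data:"):
            payload = raw[len("data:"):].strip()
            # Each payload is a transit+json encoded event; print it as-is here.
            print(payload)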
<h3>Limits</h3>
<p>The rate limit works on a per-user basis (this means that different api keys share
@@ -6,17 +6,11 @@ Debug Main Page
|
||||
|
||||
{% block content %}
|
||||
<nav>
|
||||
<div class="title">
|
||||
<h1>ADMIN DEBUG INTERFACE</h1>
|
||||
</div>
|
||||
<h1>Debug INDEX:</h1>
|
||||
<div>[<a href="/dbg/error">ERRORS</a>]</div>
|
||||
</nav>
|
||||
<main class="dashboard">
|
||||
<main class="index">
|
||||
<section class="widget">
|
||||
<fieldset>
|
||||
<legend>Error reports</legend>
|
||||
<desc><a href="/dbg/error">CLICK HERE TO SEE THE ERROR REPORTS</a> </desc>
|
||||
</fieldset>
|
||||
|
||||
<fieldset>
|
||||
<legend>Download file data:</legend>
|
||||
<desc>Given an FILE-ID, downloads the file data as file. The file data is encoded using transit.</desc>
|
||||
@@ -43,42 +37,9 @@ Debug Main Page
|
||||
<input type="checkbox" name="reuseid" />
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<input type="submit" value="Upload" />
|
||||
</div>
|
||||
<input type="submit" value="Upload" />
|
||||
</form>
|
||||
</fieldset>
|
||||
|
||||
<fieldset>
|
||||
<legend>Profile Management</legend>
|
||||
<form method="post" action="/dbg/actions/resend-email-verification">
|
||||
<div class="row">
|
||||
<input type="email" name="email" placeholder="example@example.com" value="" />
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<label for="force-verify">Are you sure?</label>
|
||||
<input id="force-verify" type="checkbox" name="force" />
|
||||
<br />
|
||||
<small>
|
||||
This is a just a security double check for prevent non intentional submits.
|
||||
</small>
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<input type="submit" name="resend" value="Resend Verification" />
|
||||
<input type="submit" name="verify" value="Verify" />
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<input type="submit" class="danger" name="block" value="Block" />
|
||||
<input type="submit" class="danger" name="unblock" value="Unblock" />
|
||||
</div>
|
||||
</form>
|
||||
</fieldset>
|
||||
|
||||
|
||||
|
||||
</section>
|
||||
|
||||
<section class="widget">
|
||||
@@ -146,37 +107,18 @@ Debug Main Page
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<input type="submit" name="upload" value="Upload" />
|
||||
</div>
|
||||
</form>
|
||||
</fieldset>
|
||||
</section>
|
||||
|
||||
<section class="widget">
|
||||
<fieldset>
|
||||
<legend>Reset file version</legend>
|
||||
<desc>Allows reset file data version to a specific number/</desc>
|
||||
|
||||
<form method="post" action="/dbg/actions/reset-file-version">
|
||||
<div class="row">
|
||||
<input type="text" style="width:300px" name="file-id" placeholder="file-id" />
|
||||
</div>
|
||||
<div class="row">
|
||||
<input type="number" style="width:100px" name="version" placeholder="version" value="32" />
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<label for="force-version">Are you sure?</label>
|
||||
<input id="force-version" type="checkbox" name="force" />
|
||||
<label>Ignore index errors?</label>
|
||||
<input type="checkbox" name="ignore-index-errors" checked/>
|
||||
<br />
|
||||
<small>
|
||||
This is a just a security double check for prevent non intentional submits.
|
||||
Do not break on index lookup errors (remap operation).
|
||||
Useful when importing a broken file that has broken
|
||||
relations or missing pieces.
|
||||
</small>
|
||||
</div>
|
||||
|
||||
|
||||
<div class="row">
|
||||
<input type="submit" value="Submit" />
|
||||
<input type="submit" name="upload" value="Upload" />
|
||||
</div>
|
||||
</form>
|
||||
</fieldset>
|
||||
|
||||
@@ -116,50 +116,29 @@ nav > div:not(:last-child) {
|
||||
width: unset;
|
||||
}
|
||||
|
||||
.dashboard {
|
||||
.index {
|
||||
margin-top: 40px;
|
||||
display: flex;
|
||||
}
|
||||
|
||||
.widget {
|
||||
.index > section {
|
||||
padding: 10px;
|
||||
background-color: #e3e3e3;
|
||||
max-width: 400px;
|
||||
margin: 5px;
|
||||
height: fit-content;
|
||||
}
|
||||
|
||||
.widget input[type=submit] {
|
||||
outline: none;
|
||||
border: 1px solid gray;
|
||||
border-radius: 2px;
|
||||
padding: 3px 5px;
|
||||
}
|
||||
|
||||
.widget input[type=submit].danger {
|
||||
outline: none;
|
||||
border: 1px solid red;
|
||||
border-radius: 2px;
|
||||
padding: 3px 5px;
|
||||
}
|
||||
|
||||
|
||||
|
||||
.widget > fieldset {
|
||||
padding: 10px;
|
||||
background-color: #f9f9f9;
|
||||
}
|
||||
|
||||
.widget > fieldset:not(:last-child) {
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
|
||||
|
||||
|
||||
.dashboard fieldset:not(:first-child) {
|
||||
.index fieldset:not(:first-child) {
|
||||
margin-top: 15px;
|
||||
}
|
||||
|
||||
/* .index > section:not(:last-child) { */
|
||||
/* margin-bottom: 10px; */
|
||||
/* } */
|
||||
|
||||
.widget > h2 {
|
||||
|
||||
.index > section > h2 {
|
||||
margin-top: 0px;
|
||||
}
|
||||
|
||||
|
||||
@@ -3,28 +3,12 @@
;; Optional: queue, omitted means Integer/MAX_VALUE
;; Optional: timeout, omitted means no timeout
;; Note: queue and timeout are mutually exclusive
{:update-file/global {:permits 20}
 :update-file/by-profile
 {:permits 1 :queue 5}
{:update-file-by-id {:permits 1 :queue 3}
 :update-file {:permits 20}

 :process-font/global {:permits 4}
 :process-font/by-profile {:permits 1}
 :derive-password {:permits 8}
 :process-font {:permits 4 :queue 32}
 :process-image {:permits 8 :queue 32}

 :process-image/global {:permits 8}
 :process-image/by-profile {:permits 1}

 :auth/global {:permits 8}

 :root/global
 {:permits 40}

 :root/by-profile
 {:permits 10}

 :file-thumbnail-ops/global
 {:permits 20}
 :file-thumbnail-ops/by-profile
 {:permits 2}

 :submit-audit-events/by-profile
 :submit-audit-events-by-profile
 {:permits 1 :queue 3}}
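To make the :permits / :queue semantics above concrete, here is a small Python sketch of the same idea (this is not Penpot's implementation, and it only models the queue variant, not timeouts): at most permits calls run concurrently, at most queue additional callers wait, and anything beyond that is rejected.

# Illustrative only: a permits+queue concurrency limiter in the spirit of
# the :permits / :queue settings above.
import threading

class ConcurrencyLimit:
    def __init__(self, permits, queue):
        self.permits = threading.BoundedSemaphore(permits)
        self.lock = threading.Lock()
        self.queue = queue
        self.waiting = 0

    def run(self, fn, *args):
        with self.lock:
            if self.waiting >= self.queue:
                raise RuntimeError("concurrency queue limit reached")
            self.waiting += 1
        try:
            self.permits.acquire()      # blocks until a permit is free
        finally:
            with self.lock:
                self.waiting -= 1
        try:
            return fn(*args)
        finally:
            self.permits.release()

# Example: mirrors {:permits 1 :queue 5} for per-profile file updates.
update_file_by_profile = ConcurrencyLimit(permits=1, queue=5)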
@@ -1,52 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<Configuration status="info" monitorInterval="30">
|
||||
<Appenders>
|
||||
<Console name="console" target="SYSTEM_OUT">
|
||||
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
|
||||
alwaysWriteExceptions="true" />
|
||||
</Console>
|
||||
|
||||
<RollingFile name="main" fileName="logs/main-latest.log" filePattern="logs/main-%i.log">
|
||||
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
|
||||
alwaysWriteExceptions="true" />
|
||||
<Policies>
|
||||
<SizeBasedTriggeringPolicy size="50M"/>
|
||||
</Policies>
|
||||
<DefaultRolloverStrategy max="20"/>
|
||||
</RollingFile>
|
||||
</Appenders>
|
||||
|
||||
<Loggers>
|
||||
<Logger name="io.lettuce" level="error" />
|
||||
<Logger name="com.zaxxer.hikari" level="error"/>
|
||||
<Logger name="org.postgresql" level="error" />
|
||||
|
||||
<Logger name="app.binfile" level="debug" />
|
||||
<Logger name="app.storage.tmp" level="info" />
|
||||
<Logger name="app.worker" level="trace" />
|
||||
<Logger name="app.msgbus" level="info" />
|
||||
<Logger name="app.http.websocket" level="info" />
|
||||
<Logger name="app.http.sse" level="info" />
|
||||
<Logger name="app.util.websocket" level="info" />
|
||||
<Logger name="app.redis" level="info" />
|
||||
<Logger name="app.rpc.rlimit" level="info" />
|
||||
<Logger name="app.rpc.climit" level="debug" />
|
||||
<Logger name="app.common.files.migrations" level="debug" />
|
||||
|
||||
<Logger name="app.loggers" level="debug" additivity="false">
|
||||
<AppenderRef ref="main" level="debug" />
|
||||
</Logger>
|
||||
|
||||
<Logger name="app" level="all" additivity="false">
|
||||
<AppenderRef ref="main" level="trace" />
|
||||
</Logger>
|
||||
|
||||
<Logger name="user" level="trace" additivity="false">
|
||||
<AppenderRef ref="main" level="trace" />
|
||||
</Logger>
|
||||
|
||||
<Root level="info">
|
||||
<AppenderRef ref="main" />
|
||||
</Root>
|
||||
</Loggers>
|
||||
</Configuration>
|
||||
@@ -6,13 +6,13 @@
|
||||
alwaysWriteExceptions="true" />
|
||||
</Console>
|
||||
|
||||
<RollingFile name="main" fileName="logs/main-latest.log" filePattern="logs/main-%i.log">
|
||||
<RollingFile name="main" fileName="logs/main.log" filePattern="logs/main-%i.log">
|
||||
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
|
||||
alwaysWriteExceptions="true" />
|
||||
<Policies>
|
||||
<SizeBasedTriggeringPolicy size="50M"/>
|
||||
</Policies>
|
||||
<DefaultRolloverStrategy max="20"/>
|
||||
<DefaultRolloverStrategy max="9"/>
|
||||
</RollingFile>
|
||||
</Appenders>
|
||||
|
||||
@@ -21,36 +21,32 @@
|
||||
<Logger name="com.zaxxer.hikari" level="error"/>
|
||||
<Logger name="org.postgresql" level="error" />
|
||||
|
||||
<Logger name="app.binfile" level="debug" />
|
||||
<Logger name="app.rpc.commands.binfile" level="debug" />
|
||||
<Logger name="app.storage.tmp" level="info" />
|
||||
<Logger name="app.worker" level="trace" />
|
||||
<Logger name="app.msgbus" level="info" />
|
||||
<Logger name="app.http.websocket" level="info" />
|
||||
<Logger name="app.http.sse" level="info" />
|
||||
<Logger name="app.util.websocket" level="info" />
|
||||
<Logger name="app.redis" level="info" />
|
||||
<Logger name="app.rpc.rlimit" level="info" />
|
||||
<Logger name="app.rpc.climit" level="debug" />
|
||||
<Logger name="app.rpc.climit" level="info" />
|
||||
<Logger name="app.rpc.mutations.files" level="info" />
|
||||
<Logger name="app.common.files.migrations" level="debug" />
|
||||
|
||||
<Logger name="app.loggers" level="debug" additivity="false">
|
||||
<AppenderRef ref="console" level="info" />
|
||||
<AppenderRef ref="main" level="debug" />
|
||||
</Logger>
|
||||
|
||||
<Logger name="app" level="all" additivity="false">
|
||||
<AppenderRef ref="main" level="trace" />
|
||||
<AppenderRef ref="console" level="debug" />
|
||||
</Logger>
|
||||
|
||||
<Logger name="user" level="trace" additivity="false">
|
||||
<AppenderRef ref="main" level="trace" />
|
||||
<AppenderRef ref="console" level="info" />
|
||||
</Logger>
|
||||
|
||||
<Root level="info">
|
||||
<AppenderRef ref="main" />
|
||||
<AppenderRef ref="console" level="info" />
|
||||
</Root>
|
||||
</Loggers>
|
||||
</Configuration>
|
||||
|
||||
@@ -1,65 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<Configuration status="info" monitorInterval="30">
|
||||
<Appenders>
|
||||
<Console name="console" target="SYSTEM_OUT">
|
||||
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
|
||||
alwaysWriteExceptions="true" />
|
||||
</Console>
|
||||
|
||||
<RollingFile name="main" fileName="logs/main-latest.log" filePattern="logs/main-%i.log">
|
||||
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
|
||||
alwaysWriteExceptions="true" />
|
||||
<Policies>
|
||||
<SizeBasedTriggeringPolicy size="50M"/>
|
||||
</Policies>
|
||||
<DefaultRolloverStrategy max="9"/>
|
||||
</RollingFile>
|
||||
|
||||
<RollingFile name="reports" fileName="logs/reports-latest.log" filePattern="logs/reports-%i.log">
|
||||
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
|
||||
alwaysWriteExceptions="true" />
|
||||
<Policies>
|
||||
<SizeBasedTriggeringPolicy size="100M"/>
|
||||
</Policies>
|
||||
<DefaultRolloverStrategy max="9"/>
|
||||
</RollingFile>
|
||||
</Appenders>
|
||||
|
||||
<Loggers>
|
||||
<Logger name="io.lettuce" level="error" />
|
||||
<Logger name="com.zaxxer.hikari" level="error"/>
|
||||
<Logger name="org.postgresql" level="error" />
|
||||
|
||||
<Logger name="app.rpc.commands.binfile" level="debug" />
|
||||
<Logger name="app.storage.tmp" level="info" />
|
||||
<Logger name="app.worker" level="trace" />
|
||||
<Logger name="app.msgbus" level="info" />
|
||||
<Logger name="app.http.websocket" level="info" />
|
||||
<Logger name="app.http.sse" level="info" />
|
||||
<Logger name="app.util.websocket" level="info" />
|
||||
<Logger name="app.redis" level="info" />
|
||||
<Logger name="app.rpc.rlimit" level="info" />
|
||||
<Logger name="app.rpc.climit" level="debug" />
|
||||
<Logger name="app.common.files.migrations" level="info" />
|
||||
|
||||
<Logger name="app.loggers" level="debug" additivity="false">
|
||||
<AppenderRef ref="main" level="debug" />
|
||||
</Logger>
|
||||
|
||||
<Logger name="app.features" level="all" additivity="true">
|
||||
<AppenderRef ref="reports" level="warn" />
|
||||
</Logger>
|
||||
|
||||
<Logger name="app" level="all" additivity="false">
|
||||
<AppenderRef ref="main" level="trace" />
|
||||
</Logger>
|
||||
|
||||
<Logger name="user" level="trace" additivity="false">
|
||||
<AppenderRef ref="main" level="trace" />
|
||||
</Logger>
|
||||
|
||||
<Root level="info">
|
||||
<AppenderRef ref="main" />
|
||||
</Root>
|
||||
</Loggers>
|
||||
</Configuration>
|
||||
@@ -11,9 +11,12 @@
|
||||
<Logger name="io.lettuce" level="error" />
|
||||
<Logger name="com.zaxxer.hikari" level="error" />
|
||||
<Logger name="org.postgresql" level="error" />
|
||||
|
||||
<Logger name="app.util" level="info" />
|
||||
<Logger name="app" level="info" additivity="false">
|
||||
<AppenderRef ref="console" level="info" />
|
||||
<AppenderRef ref="console" />
|
||||
</Logger>
|
||||
|
||||
<Root level="info">
|
||||
<AppenderRef ref="console" />
|
||||
</Root>
|
||||
|
||||
@@ -44,16 +44,11 @@ def send_eval(expr):
    s.send(b":repl/quit\n\n")

    with s.makefile() as f:
        while True:
            line = f.readline()
            result = json.loads(line)
            tag = result.get("tag", None)
            if tag == "ret":
                return result.get("val", None), result.get("exception", None)
            elif tag == "out":
                print(result.get("val"), end="")
            else:
                raise RuntimeError("unexpected response from PREPL")
        result = json.load(f)
        tag = result.get("tag", None)
        if tag != "ret":
            raise RuntimeError("unexpected response from PREPL")
        return result.get("val", None), result.get("exception", None)

def encode(val):
    return json.dumps(json.dumps(val))

@@ -65,7 +60,7 @@ def print_error(res):

def run_cmd(params):
    try:
        expr = "(app.srepl.cli/exec {})".format(encode(params))
        expr = "(app.srepl.ext/run-json-cmd {})".format(encode(params))
        res, failed = send_eval(expr)
        if failed:
            print_error(res)

@@ -145,15 +140,6 @@ def derive_password(password):
    res = run_cmd(params)
    print(f"Derived password: \"{res}\"")


def migrate_components_v2():
    params = {
        "cmd": "migrate-v2",
        "params": {}
    }

    run_cmd(params)


available_commands = (
    "create-profile",
    "update-profile",

@@ -231,5 +217,3 @@ elif args.action == "search-profile":
    email = input("Email: ")

    search_profile(email)
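For context, run_cmd above simply wraps a JSON payload into a PREPL evaluation of (app.srepl.ext/run-json-cmd ...). A command defined inside this script follows the {"cmd": ..., "params": {...}} shape shown by migrate_components_v2; the sketch below is illustrative only, and the "derive-password" command name and its parameter key are assumptions, not taken from the visible hunks.

# Illustrative sketch (would live inside manage.py, next to the helpers above):
# drive the PREPL-backed CLI through run_cmd with a hypothetical command shape.
def derive_password_example(password):
    params = {
        "cmd": "derive-password",          # assumed command name
        "params": {"password": password},  # assumed parameter key
    }
    res = run_cmd(params)                  # evaluates (app.srepl.ext/run-json-cmd ...) over PREPL
    print(f'Derived password: "{res}"')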
@@ -1,3 +0,0 @@
#!/usr/bin/env bash

clojure -J-Xms50m -J-Xmx256m -J-XX:+UseSerialGC -Sdeps '{:deps {reply/reply {:mvn/version "0.5.0"}}}' -M -m reply.main --attach localhost:6064 -e "(in-ns 'app.main)"
@@ -4,16 +4,15 @@ export PENPOT_HOST=devenv
|
||||
export PENPOT_TENANT=dev
|
||||
export PENPOT_FLAGS="\
|
||||
$PENPOT_FLAGS \
|
||||
enable-login-with-ldap \
|
||||
enable-registration
|
||||
enable-login-with-password
|
||||
enable-login-with-oidc \
|
||||
enable-login-with-google \
|
||||
enable-login-with-github \
|
||||
enable-login-with-gitlab \
|
||||
enable-backend-worker \
|
||||
enable-backend-asserts \
|
||||
enable-feature-fdata-pointer-map \
|
||||
enable-feature-fdata-objects-map \
|
||||
enable-fdata-storage-pointer-map \
|
||||
enable-fdata-storage-objets-map \
|
||||
enable-audit-log \
|
||||
enable-transit-readable-response \
|
||||
enable-demo-users \
|
||||
@@ -25,18 +24,7 @@ export PENPOT_FLAGS="\
|
||||
enable-rpc-rlimit \
|
||||
enable-soft-rpc-rlimit \
|
||||
enable-webhooks \
|
||||
enable-access-tokens \
|
||||
enable-file-validation \
|
||||
enable-file-schema-validation";
|
||||
|
||||
# Default deletion delay for devenv
|
||||
export PENPOT_DELETION_DELAY="24h"
|
||||
|
||||
# Setup default upload media file size to 100MiB
|
||||
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
|
||||
|
||||
# Setup default multipart upload size to 300MiB
|
||||
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
|
||||
enable-access-tokens";
|
||||
|
||||
# export PENPOT_DATABASE_URI="postgresql://172.17.0.1:5432/penpot"
|
||||
# export PENPOT_DATABASE_USERNAME="penpot"
|
||||
@@ -51,13 +39,10 @@ export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
|
||||
# export PENPOT_AUDIT_LOG_ARCHIVE_URI="http://localhost:6070/api/audit"
|
||||
|
||||
# Initialize MINIO config
mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin -q
mc admin user add penpot-s3 penpot-devenv penpot-devenv -q
mc admin user info penpot-s3 penpot-devenv | grep -F -q "readwrite"
if [ "$?" = "1" ]; then
    mc admin policy attach penpot-s3 readwrite --user=penpot-devenv -q
fi
mc mb penpot-s3/penpot -p -q
mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin
mc admin user add penpot-s3 penpot-devenv penpot-devenv
mc admin policy attach penpot-s3 readwrite --user=penpot-devenv
mc mb penpot-s3/penpot -p

export AWS_ACCESS_KEY_ID=penpot-devenv
export AWS_SECRET_ACCESS_KEY=penpot-devenv

@@ -65,23 +50,21 @@ export PENPOT_ASSETS_STORAGE_BACKEND=assets-s3
export PENPOT_STORAGE_ASSETS_S3_ENDPOINT=http://minio:9000
export PENPOT_STORAGE_ASSETS_S3_BUCKET=penpot
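Since the devenv points the assets-s3 backend at the local MinIO instance configured above, a quick way to double-check the bucket from Python is a short boto3 sketch like the following. This is only a verification aid, not part of the setup script; the endpoint and credentials mirror the devenv exports shown above and boto3 must be installed separately.

# Sketch: verify the devenv MinIO bucket used by the assets-s3 backend.
# Values mirror the exports above; adjust if your devenv differs.
import boto3

s3 = boto3.client(
    "s3",
    endpoint_url="http://minio:9000",
    aws_access_key_id="penpot-devenv",
    aws_secret_access_key="penpot-devenv",
)

# List the buckets and confirm "penpot" exists.
names = [b["Name"] for b in s3.list_buckets()["Buckets"]]
print("penpot bucket present:", "penpot" in names)

# List a few objects stored by the assets backend, if any.
resp = s3.list_objects_v2(Bucket="penpot", MaxKeys=5)
for obj in resp.get("Contents", []):
    print(obj["Key"], obj["Size"])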
#-J-Djdk.virtualThreadScheduler.parallelism=16
|
||||
|
||||
export OPTIONS="
|
||||
-A:jmx-remote -A:dev \
|
||||
-J-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-J-Djdk.attach.allowAttachSelf \
|
||||
-J-Dpolyglot.engine.WarnInterpreterOnly=false \
|
||||
-J-Dlog4j2.configurationFile=log4j2-devenv-repl.xml \
|
||||
-J-XX:+EnableDynamicAgentLoading \
|
||||
-J-Dlog4j2.configurationFile=log4j2-devenv.xml \
|
||||
-J-XX:-OmitStackTraceInFastThrow \
|
||||
-J-XX:+UnlockDiagnosticVMOptions \
|
||||
-J-XX:+DebugNonSafepoints \
|
||||
-J-Djdk.tracePinnedThreads=full"
|
||||
|
||||
# Enable preview
|
||||
export OPTIONS="$OPTIONS -J--enable-preview"
|
||||
-J-Djdk.tracePinnedThreads=full \
|
||||
-J--enable-preview";
|
||||
|
||||
# Setup HEAP
|
||||
# export OPTIONS="$OPTIONS -J-Xms50m -J-Xmx1024m"
|
||||
export OPTIONS="$OPTIONS -J-Xms50m -J-Xmx1024m"
|
||||
# export OPTIONS="$OPTIONS -J-Xms1100m -J-Xmx1100m -J-XX:+AlwaysPreTouch"
|
||||
|
||||
# Increase virtual thread pool size
|
||||
@@ -94,7 +77,7 @@ export OPTIONS="$OPTIONS -J--enable-preview"
|
||||
# export OPTIONS="$OPTIONS -J-Xint"
|
||||
|
||||
# Setup GC
|
||||
# export OPTIONS="$OPTIONS -J-XX:+UseG1GC"
|
||||
export OPTIONS="$OPTIONS -J-XX:+UseG1GC"
|
||||
|
||||
# Setup GC
|
||||
# export OPTIONS="$OPTIONS -J-XX:+UseZGC"
|
||||
|
||||
@@ -1,48 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
source /home/penpot/environ
|
||||
export PENPOT_FLAGS="$PENPOT_FLAGS disable-backend-worker"
|
||||
|
||||
export OPTIONS="
|
||||
-A:jmx-remote -A:dev \
|
||||
-J-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-J-Djdk.attach.allowAttachSelf \
|
||||
-J-Dlog4j2.configurationFile=log4j2-experiments.xml \
|
||||
-J-XX:-OmitStackTraceInFastThrow \
|
||||
-J-XX:+UnlockDiagnosticVMOptions \
|
||||
-J-XX:+DebugNonSafepoints \
|
||||
-J-Djdk.tracePinnedThreads=full \
|
||||
-J-XX:+UseTransparentHugePages \
|
||||
-J-XX:ReservedCodeCacheSize=1g \
|
||||
-J-Dpolyglot.engine.WarnInterpreterOnly=false \
|
||||
-J--enable-preview";
|
||||
|
||||
# Setup HEAP
|
||||
export OPTIONS="$OPTIONS -J-Xms320g -J-Xmx320g -J-XX:+AlwaysPreTouch"
|
||||
|
||||
export PENPOT_HTTP_SERVER_IO_THREADS=2
|
||||
export PENPOT_HTTP_SERVER_WORKER_THREADS=2
|
||||
|
||||
# Increase virtual thread pool size
|
||||
# export OPTIONS="$OPTIONS -J-Djdk.virtualThreadScheduler.parallelism=16"
|
||||
|
||||
# Disable C2 Compiler
|
||||
# export OPTIONS="$OPTIONS -J-XX:TieredStopAtLevel=1"
|
||||
|
||||
# Disable all compilers
|
||||
# export OPTIONS="$OPTIONS -J-Xint"
|
||||
|
||||
# Setup GC
|
||||
export OPTIONS="$OPTIONS -J-XX:+UseG1GC -J-Xlog:gc:logs/gc.log"
|
||||
|
||||
# Setup GC
|
||||
#export OPTIONS="$OPTIONS -J-XX:+UseZGC -J-XX:+ZGenerational -J-Xlog:gc:logs/gc.log"
|
||||
|
||||
# Enable ImageMagick v7.x support
|
||||
# export OPTIONS="-J-Dim4java.useV7=true $OPTIONS";
|
||||
|
||||
export OPTIONS_EVAL="nil"
|
||||
# export OPTIONS_EVAL="(set! *warn-on-reflection* true)"
|
||||
|
||||
set -ex
|
||||
exec clojure $OPTIONS -M -e "$OPTIONS_EVAL" -m rebel-readline.main
|
||||
@@ -18,9 +18,7 @@ if [ -f ./environ ]; then
|
||||
source ./environ
|
||||
fi
|
||||
|
||||
export JVM_OPTS="-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager -Dlog4j2.configurationFile=log4j2.xml -XX:-OmitStackTraceInFastThrow -Dpolyglot.engine.WarnInterpreterOnly=false --enable-preview $JVM_OPTS"
|
||||
export JVM_OPTS="-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager -Dlog4j2.configurationFile=log4j2.xml -XX:-OmitStackTraceInFastThrow --enable-preview $JVM_OPTS"
|
||||
|
||||
ENTRYPOINT=${1:-app.main};
|
||||
|
||||
set -ex
|
||||
exec $JAVA_CMD $JVM_OPTS -jar penpot.jar -m $ENTRYPOINT
|
||||
set -x
|
||||
exec $JAVA_CMD $JVM_OPTS "$@" -jar penpot.jar -m app.main
|
||||
|
||||
@@ -6,61 +6,33 @@ export PENPOT_FLAGS="\
|
||||
$PENPOT_FLAGS \
|
||||
enable-prepl-server \
|
||||
enable-urepl-server \
|
||||
enable-nrepl-server \
|
||||
enable-webhooks \
|
||||
enable-backend-asserts \
|
||||
enable-audit-log \
|
||||
enable-transit-readable-response \
|
||||
enable-demo-users \
|
||||
enable-feature-fdata-pointer-map \
|
||||
enable-feature-fdata-objects-map \
|
||||
enable-fdata-storage-pointer-map \
|
||||
enable-fdata-storage-objets-map \
|
||||
disable-secure-session-cookies \
|
||||
enable-rpc-climit \
|
||||
enable-smtp \
|
||||
enable-access-tokens \
|
||||
enable-file-validation \
|
||||
enable-file-schema-validation";
|
||||
|
||||
export OPTIONS="
|
||||
-A:jmx-remote -A:dev \
|
||||
-J-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-J-Djdk.attach.allowAttachSelf \
|
||||
-J-Dpolyglot.engine.WarnInterpreterOnly=false \
|
||||
-J-Dlog4j2.configurationFile=log4j2-devenv.xml \
|
||||
-J-XX:+EnableDynamicAgentLoading \
|
||||
-J-XX:-OmitStackTraceInFastThrow \
|
||||
-J-XX:+UnlockDiagnosticVMOptions \
|
||||
-J-XX:+DebugNonSafepoints"
|
||||
|
||||
# Default deletion delay for devenv
|
||||
export PENPOT_DELETION_DELAY="24h"
|
||||
|
||||
# Setup default upload media file size to 100MiB
|
||||
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
|
||||
|
||||
# Setup default multipart upload size to 300MiB
|
||||
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
|
||||
|
||||
# Enable ImageMagick v7.x support
|
||||
# export OPTIONS="-J-Dim4java.useV7=true $OPTIONS";
|
||||
|
||||
# Initialize MINIO config
|
||||
mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin -q
|
||||
mc admin user add penpot-s3 penpot-devenv penpot-devenv -q
|
||||
mc admin user info penpot-s3 penpot-devenv |grep -F -q "readwrite"
|
||||
if [ "$?" = "1" ]; then
|
||||
mc admin policy attach penpot-s3 readwrite --user=penpot-devenv -q
|
||||
fi
|
||||
mc mb penpot-s3/penpot -p -q
|
||||
|
||||
export AWS_ACCESS_KEY_ID=penpot-devenv
|
||||
export AWS_SECRET_ACCESS_KEY=penpot-devenv
|
||||
export PENPOT_ASSETS_STORAGE_BACKEND=assets-s3
|
||||
export PENPOT_STORAGE_ASSETS_S3_ENDPOINT=http://minio:9000
|
||||
export PENPOT_STORAGE_ASSETS_S3_BUCKET=penpot
|
||||
|
||||
entrypoint=${1:-app.main};
|
||||
enable-access-tokens";
|
||||
|
||||
set -ex
|
||||
|
||||
clojure $OPTIONS -A:dev -M -m $entrypoint;
|
||||
if [ "$1" = "--watch" ]; then
|
||||
echo "Start Watch..."
|
||||
|
||||
clojure -A:dev -M -m app.main &
|
||||
PID=$!
|
||||
|
||||
npx nodemon \
|
||||
--watch src \
|
||||
--watch ../common \
|
||||
--ext "clj" \
|
||||
--signal SIGKILL \
|
||||
--exec 'echo "(user/restart)" | nc -N localhost 6062'
|
||||
|
||||
kill -9 $PID
|
||||
else
|
||||
clojure -A:dev -M -m app.main
|
||||
fi
|
||||
|
||||
@@ -22,7 +22,6 @@
|
||||
[app.loggers.audit :as audit]
|
||||
[app.main :as-alias main]
|
||||
[app.rpc.commands.profile :as profile]
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.json :as json]
|
||||
[app.util.time :as dt]
|
||||
@@ -32,13 +31,13 @@
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]
|
||||
[ring.response :as-alias rres]))
|
||||
[yetti.response :as-alias yrs]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; HELPERS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn obfuscate-string
|
||||
(defn- obfuscate-string
|
||||
[s]
|
||||
(if (< (count s) 10)
|
||||
(apply str (take (count s) (repeat "*")))
|
||||
@@ -354,7 +353,8 @@
|
||||
(get-name [props]
|
||||
(let [attr-kw (cf/get :oidc-name-attr "name")
|
||||
attr-ph (parse-attr-path provider attr-kw)]
|
||||
(get-in props attr-ph)))]
|
||||
(get-in props attr-ph)))
|
||||
]
|
||||
|
||||
(let [props (qualify-props provider info)
|
||||
email (get-email props)]
|
||||
@@ -414,7 +414,7 @@
|
||||
::props]))
|
||||
|
||||
(defn get-info
|
||||
[{:keys [provider ::setup/props] :as cfg} {:keys [params] :as request}]
|
||||
[{:keys [provider ::main/props] :as cfg} {:keys [params] :as request}]
|
||||
(when-let [error (get params :error)]
|
||||
(ex/raise :type :internal
|
||||
:code :error-on-retrieving-code
|
||||
@@ -475,13 +475,12 @@
|
||||
[{:keys [::db/pool] :as cfg} info]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(some->> (:email info)
|
||||
(profile/clean-email)
|
||||
(profile/get-profile-by-email conn))))
|
||||
|
||||
(defn- redirect-response
|
||||
[uri]
|
||||
{::rres/status 302
|
||||
::rres/headers {"location" (str uri)}})
|
||||
{::yrs/status 302
|
||||
::yrs/headers {"location" (str uri)}})
|
||||
|
||||
(defn- generate-error-redirect
|
||||
[_ cause]
|
||||
@@ -509,7 +508,7 @@
|
||||
(if profile
|
||||
(let [sxf (session/create-fn cfg (:id profile))
|
||||
token (or (:invitation-token info)
|
||||
(tokens/generate (::setup/props cfg)
|
||||
(tokens/generate (::main/props cfg)
|
||||
{:iss :auth
|
||||
:exp (dt/in-future "15m")
|
||||
:profile-id (:id profile)}))
|
||||
@@ -537,7 +536,7 @@
|
||||
:iss :prepared-register
|
||||
:is-active true
|
||||
:exp (dt/in-future {:hours 48}))
|
||||
token (tokens/generate (::setup/props cfg) info)
|
||||
token (tokens/generate (::main/props cfg) info)
|
||||
params (d/without-nils
|
||||
{:token token
|
||||
:fullname (:fullname info)})
|
||||
@@ -552,14 +551,14 @@
|
||||
(defn- auth-handler
|
||||
[cfg {:keys [params] :as request}]
|
||||
(let [props (audit/extract-utm-params params)
|
||||
state (tokens/generate (::setup/props cfg)
|
||||
state (tokens/generate (::main/props cfg)
|
||||
{:iss :oauth
|
||||
:invitation-token (:invitation-token params)
|
||||
:props props
|
||||
:exp (dt/in-future "4h")})
|
||||
uri (build-auth-uri cfg state)]
|
||||
{::rres/status 200
|
||||
::rres/body {:redirect-uri uri}}))
|
||||
{::yrs/status 200
|
||||
::yrs/body {:redirect-uri uri}}))
|
||||
|
||||
(defn- callback-handler
|
||||
[cfg request]
|
||||
@@ -619,7 +618,7 @@
|
||||
[_]
|
||||
(s/keys :req [::session/manager
|
||||
::http/client
|
||||
::setup/props
|
||||
::main/props
|
||||
::db/pool
|
||||
::providers]))
|
||||
|
||||
|
||||
@@ -1,492 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.binfile.common
|
||||
"A binfile related file processing common code, used for different
|
||||
binfile format implementations and management rpc methods."
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.files.migrations :as fmg]
|
||||
[app.common.files.validate :as fval]
|
||||
[app.common.logging :as l]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.features.components-v2 :as feat.compv2]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.set :as set]
|
||||
[clojure.walk :as walk]
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
(def ^:dynamic *state* nil)
|
||||
(def ^:dynamic *options* nil)
|
||||
|
||||
(def xf-map-id
|
||||
(map :id))
|
||||
|
||||
(def xf-map-media-id
|
||||
(comp
|
||||
(mapcat (juxt :media-id
|
||||
:thumbnail-id
|
||||
:woff1-file-id
|
||||
:woff2-file-id
|
||||
:ttf-file-id
|
||||
:otf-file-id))
|
||||
(filter uuid?)))
|
||||
|
||||
(def into-vec
|
||||
(fnil into []))
|
||||
|
||||
(def conj-vec
|
||||
(fnil conj []))
|
||||
|
||||
(defn collect-storage-objects
|
||||
[state items]
|
||||
(update state :storage-objects into xf-map-media-id items))
|
||||
|
||||
(defn collect-summary
|
||||
[state key items]
|
||||
(update state key into xf-map-media-id items))
|
||||
|
||||
(defn lookup-index
|
||||
[id]
|
||||
(when id
|
||||
(let [val (get-in @*state* [:index id])]
|
||||
(l/trc :fn "lookup-index" :id (str id) :result (some-> val str) ::l/sync? true)
|
||||
(or val id))))
|
||||
|
||||
(defn remap-id
|
||||
[item key]
|
||||
(cond-> item
|
||||
(contains? item key)
|
||||
(update key lookup-index)))
|
||||
|
||||
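;; A minimal usage sketch (hypothetical uuids, not part of the original
;; code) showing how the dynamic index held in *state* drives id
;; remapping during import:
(comment
  (binding [*state* (volatile! {:index {#uuid "11111111-1111-1111-1111-111111111111"
                                        #uuid "22222222-2222-2222-2222-222222222222"}})]
    ;; a known id is remapped to the new id stored in the index
    (lookup-index #uuid "11111111-1111-1111-1111-111111111111")
    ;; an unknown id is returned unchanged
    (lookup-index #uuid "33333333-3333-3333-3333-333333333333")
    ;; remap-id only touches the key when it is present on the item
    (remap-id {:media-id #uuid "11111111-1111-1111-1111-111111111111"} :media-id)))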
(defn- index-object
|
||||
[index obj & attrs]
|
||||
(reduce (fn [index attr-fn]
|
||||
(let [old-id (attr-fn obj)
|
||||
new-id (if (::overwrite *options*) old-id (uuid/next))]
|
||||
(assoc index old-id new-id)))
|
||||
index
|
||||
attrs))
|
||||
|
||||
(defn update-index
|
||||
([index coll]
|
||||
(update-index index coll identity))
|
||||
([index coll attr]
|
||||
(reduce #(index-object %1 %2 attr) index coll)))
|
||||
|
||||
(defn decode-row
|
||||
"A generic decode row helper"
|
||||
[{:keys [data features] :as row}]
|
||||
(cond-> row
|
||||
features (assoc :features (db/decode-pgarray features #{}))
|
||||
data (assoc :data (blob/decode data))))
|
||||
|
||||
(defn get-file
|
||||
[cfg file-id]
|
||||
(db/run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)]
|
||||
(when-let [file (db/get* conn :file {:id file-id}
|
||||
{::db/remove-deleted false})]
|
||||
(-> file
|
||||
(decode-row)
|
||||
(update :data feat.fdata/process-pointers deref)
|
||||
(update :data feat.fdata/process-objects (partial into {}))))))))
|
||||
|
||||
(defn get-project
|
||||
[cfg project-id]
|
||||
(db/get cfg :project {:id project-id}))
|
||||
|
||||
(defn get-team
|
||||
[cfg team-id]
|
||||
(-> (db/get cfg :team {:id team-id})
|
||||
(decode-row)))
|
||||
|
||||
(defn get-fonts
|
||||
[cfg team-id]
|
||||
(db/query cfg :team-font-variant
|
||||
{:team-id team-id
|
||||
:deleted-at nil}))
|
||||
|
||||
(defn get-files-rels
|
||||
"Given a set of file-id's, return all matching relations with the libraries"
|
||||
[cfg ids]
|
||||
|
||||
(dm/assert!
|
||||
"expected a set of uuids"
|
||||
(and (set? ids)
|
||||
(every? uuid? ids)))
|
||||
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [ids (db/create-array conn "uuid" ids)
|
||||
sql (str "SELECT flr.* FROM file_library_rel AS flr "
|
||||
" JOIN file AS l ON (flr.library_file_id = l.id) "
|
||||
" WHERE flr.file_id = ANY(?) AND l.deleted_at IS NULL")]
|
||||
(db/exec! conn [sql ids])))))
|
||||
|
||||
(def ^:private sql:get-libraries
|
||||
"WITH RECURSIVE libs AS (
|
||||
SELECT fl.id
|
||||
FROM file AS fl
|
||||
JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
|
||||
WHERE flr.file_id = ANY(?)
|
||||
UNION
|
||||
SELECT fl.id
|
||||
FROM file AS fl
|
||||
JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
|
||||
JOIN libs AS l ON (flr.file_id = l.id)
|
||||
)
|
||||
SELECT DISTINCT l.id
|
||||
FROM libs AS l")
|
||||
|
||||
(defn get-libraries
|
||||
"Get all libraries ids related to provided file ids"
|
||||
[cfg ids]
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [ids' (db/create-array conn "uuid" ids)]
|
||||
(->> (db/exec! conn [sql:get-libraries ids'])
|
||||
(into #{} xf-map-id))))))
|
||||
|
||||
(defn get-file-object-thumbnails
|
||||
"Return all file object thumbnails for a given file."
|
||||
[cfg file-id]
|
||||
(db/query cfg :file-tagged-object-thumbnail
|
||||
{:file-id file-id
|
||||
:deleted-at nil}))
|
||||
|
||||
(defn get-file-thumbnail
|
||||
"Return the thumbnail for the specified file-id"
|
||||
[cfg {:keys [id revn]}]
|
||||
(db/get* cfg :file-thumbnail
|
||||
{:file-id id
|
||||
:revn revn
|
||||
:data nil}
|
||||
{::sql/columns [:media-id :file-id :revn]}))
|
||||
|
||||
|
||||
(def ^:private
|
||||
xform:collect-media-id
|
||||
(comp
|
||||
(map :objects)
|
||||
(mapcat vals)
|
||||
(mapcat (fn [obj]
|
||||
;; NOTE: because of a bug, we ended up with many shape types
;; having the ability to have a fill-image attribute (which was
;; initially designed for :path shapes).
(sequence
|
||||
(keep :id)
|
||||
(concat [(:fill-image obj)
|
||||
(:metadata obj)]
|
||||
(map :fill-image (:fills obj))
|
||||
(map :stroke-image (:strokes obj))
|
||||
(->> (:content obj)
|
||||
(tree-seq map? :children)
|
||||
(mapcat :fills)
|
||||
(map :fill-image))))))))
|
||||
|
||||
(defn collect-used-media
|
||||
"Given a fdata (file data), returns all media references."
|
||||
[data]
|
||||
(-> #{}
|
||||
(into xform:collect-media-id (vals (:pages-index data)))
|
||||
(into xform:collect-media-id (vals (:components data)))
|
||||
(into (keys (:media data)))))
|
||||
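;; A small sketch (hypothetical uuids) of what collect-used-media
;; extracts from a minimal fdata map:
(comment
  (collect-used-media
   {:pages-index {#uuid "00000000-0000-0000-0000-000000000001"
                  {:objects {#uuid "00000000-0000-0000-0000-000000000002"
                             {:type :image
                              :metadata {:id #uuid "00000000-0000-0000-0000-0000000000aa"}}}}}
    :media {#uuid "00000000-0000-0000-0000-0000000000bb" {:name "logo.png"}}})
  ;; => #{#uuid "00000000-0000-0000-0000-0000000000aa"
  ;;      #uuid "00000000-0000-0000-0000-0000000000bb"}
  )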
|
||||
(defn get-file-media
|
||||
[cfg {:keys [data id] :as file}]
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [ids (collect-used-media data)
|
||||
ids (db/create-array conn "uuid" ids)
|
||||
sql (str "SELECT * FROM file_media_object WHERE id = ANY(?)")]
|
||||
|
||||
;; We assoc the file-id again to the file-media-object row
;; because there are cases where used objects refer to other
;; files, and we need to ensure during the export process that
;; all ids match.
(->> (db/exec! conn [sql ids])
|
||||
(mapv #(assoc % :file-id id)))))))
|
||||
|
||||
(def ^:private sql:get-team-files
|
||||
"SELECT f.id FROM file AS f
|
||||
JOIN project AS p ON (p.id = f.project_id)
|
||||
WHERE p.team_id = ?")
|
||||
|
||||
(defn get-team-files
|
||||
"Get a set of file ids for the specified team-id"
|
||||
[{:keys [::db/conn]} team-id]
|
||||
(->> (db/exec! conn [sql:get-team-files team-id])
|
||||
(into #{} xf-map-id)))
|
||||
|
||||
(def ^:private sql:get-team-projects
|
||||
"SELECT p.id FROM project AS p
|
||||
WHERE p.team_id = ?
|
||||
AND p.deleted_at IS NULL")
|
||||
|
||||
(defn get-team-projects
|
||||
"Get a set of project ids for the team"
|
||||
[{:keys [::db/conn]} team-id]
|
||||
(->> (db/exec! conn [sql:get-team-projects team-id])
|
||||
(into #{} xf-map-id)))
|
||||
|
||||
(def ^:private sql:get-project-files
|
||||
"SELECT f.id FROM file AS f
|
||||
WHERE f.project_id = ?
|
||||
AND f.deleted_at IS NULL")
|
||||
|
||||
(defn get-project-files
|
||||
"Get a set of file ids for the project"
|
||||
[{:keys [::db/conn]} project-id]
|
||||
(->> (db/exec! conn [sql:get-project-files project-id])
|
||||
(into #{} xf-map-id)))
|
||||
|
||||
(defn- relink-shapes
|
||||
"A function responsible to analyze all file data and
|
||||
replace the old :component-file reference with the new
|
||||
ones, using the provided file-index."
|
||||
[data]
|
||||
(letfn [(process-map-form [form]
|
||||
(cond-> form
|
||||
;; Relink image shapes
|
||||
(and (map? (:metadata form))
|
||||
(= :image (:type form)))
|
||||
(update-in [:metadata :id] lookup-index)
|
||||
|
||||
;; Relink paths with fill image
|
||||
(map? (:fill-image form))
|
||||
(update-in [:fill-image :id] lookup-index)
|
||||
|
||||
;; This covers old shapes and the new :fills.
|
||||
(uuid? (:fill-color-ref-file form))
|
||||
(update :fill-color-ref-file lookup-index)
|
||||
|
||||
;; This covers the old shapes and the new :strokes
|
||||
(uuid? (:stroke-color-ref-file form))
|
||||
(update :stroke-color-ref-file lookup-index)
|
||||
|
||||
;; This covers all text shapes that have typography referenced
|
||||
(uuid? (:typography-ref-file form))
|
||||
(update :typography-ref-file lookup-index)
|
||||
|
||||
;; This covers the component instance links
|
||||
(uuid? (:component-file form))
|
||||
(update :component-file lookup-index)
|
||||
|
||||
;; This covers the shadows and grids (they have the
;; :file-id prop directly)
(uuid? (:file-id form))
|
||||
(update :file-id lookup-index)))
|
||||
|
||||
(process-form [form]
|
||||
(if (map? form)
|
||||
(try
|
||||
(process-map-form form)
|
||||
(catch Throwable cause
|
||||
(l/warn :hint "failed form" :form (pr-str form) ::l/sync? true)
|
||||
(throw cause)))
|
||||
form))]
|
||||
|
||||
(walk/postwalk process-form data)))
|
||||
|
||||
(defn- relink-media
|
||||
"A function responsible of process the :media attr of file data and
|
||||
remap the old ids with the new ones."
|
||||
[media]
|
||||
(reduce-kv (fn [res k v]
|
||||
(let [id (lookup-index k)]
|
||||
(if (uuid? id)
|
||||
(-> res
|
||||
(assoc id (assoc v :id id))
|
||||
(dissoc k))
|
||||
res)))
|
||||
media
|
||||
media))
|
||||
|
||||
(defn- relink-colors
|
||||
"A function responsible of process the :colors attr of file data and
|
||||
remap the old ids with the new ones."
|
||||
[colors]
|
||||
(reduce-kv (fn [res k v]
|
||||
(if (:image v)
|
||||
(update-in res [k :image :id] lookup-index)
|
||||
res))
|
||||
colors
|
||||
colors))
|
||||
|
||||
(defn embed-assets
|
||||
[cfg data file-id]
|
||||
(letfn [(walk-map-form [form state]
|
||||
(cond
|
||||
(uuid? (:fill-color-ref-file form))
|
||||
(do
|
||||
(vswap! state conj [(:fill-color-ref-file form) :colors (:fill-color-ref-id form)])
|
||||
(assoc form :fill-color-ref-file file-id))
|
||||
|
||||
(uuid? (:stroke-color-ref-file form))
|
||||
(do
|
||||
(vswap! state conj [(:stroke-color-ref-file form) :colors (:stroke-color-ref-id form)])
|
||||
(assoc form :stroke-color-ref-file file-id))
|
||||
|
||||
(uuid? (:typography-ref-file form))
|
||||
(do
|
||||
(vswap! state conj [(:typography-ref-file form) :typographies (:typography-ref-id form)])
|
||||
(assoc form :typography-ref-file file-id))
|
||||
|
||||
(uuid? (:component-file form))
|
||||
(do
|
||||
(vswap! state conj [(:component-file form) :components (:component-id form)])
|
||||
(assoc form :component-file file-id))
|
||||
|
||||
:else
|
||||
form))
|
||||
|
||||
(process-group-of-assets [data [lib-id items]]
|
||||
;; NOTE: there is a possibility that a shape refers to a
;; non-existent file because the file was removed. In this
;; case we just ignore the asset.
(if-let [lib (get-file cfg lib-id)]
|
||||
(reduce (partial process-asset lib) data items)
|
||||
data))
|
||||
|
||||
(process-asset [lib data [bucket asset-id]]
|
||||
(let [asset (get-in lib [:data bucket asset-id])
|
||||
;; Add a special case for colors that need to have
;; the :file-id prop correctly set (pending the
;; refactor that will remove it).
asset (cond-> asset
|
||||
(= bucket :colors) (assoc :file-id file-id))]
|
||||
(update data bucket assoc asset-id asset)))]
|
||||
|
||||
(let [assets (volatile! [])]
|
||||
(walk/postwalk #(cond-> % (map? %) (walk-map-form assets)) data)
|
||||
(->> (deref assets)
|
||||
(filter #(as-> (first %) $ (and (uuid? $) (not= $ file-id))))
|
||||
(d/group-by first rest)
|
||||
(reduce (partial process-group-of-assets) data)))))
|
||||
|
||||
(defn- fix-version
|
||||
[file]
|
||||
(let [file (fmg/fix-version file)]
|
||||
;; FIXME: We're temporarily activating all migrations because a
;; problem in the environments messed up the version numbers.
;; When this problem is fixed, delete the following line.
(if (> (:version file) 22)
|
||||
(assoc file :version 22)
|
||||
file)))
|
||||
|
||||
(defn process-file
|
||||
[{:keys [id] :as file}]
|
||||
(-> file
|
||||
(fix-version)
|
||||
(update :data (fn [fdata]
|
||||
(-> fdata
|
||||
(assoc :id id)
|
||||
(dissoc :recent-colors))))
|
||||
(fmg/migrate-file)
|
||||
(update :data (fn [fdata]
|
||||
(-> fdata
|
||||
(update :pages-index relink-shapes)
|
||||
(update :components relink-shapes)
|
||||
(update :media relink-media)
|
||||
(update :colors relink-colors)
|
||||
(d/without-nils))))))
|
||||
|
||||
(defn- upsert-file!
|
||||
[conn file]
|
||||
(let [sql (str "INSERT INTO file (id, project_id, name, revn, version, is_shared, data, created_at, modified_at) "
|
||||
"VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) "
|
||||
"ON CONFLICT (id) DO UPDATE SET data=?, version=?")]
|
||||
(db/exec-one! conn [sql
|
||||
(:id file)
|
||||
(:project-id file)
|
||||
(:name file)
|
||||
(:revn file)
|
||||
(:version file)
|
||||
(:is-shared file)
|
||||
(:data file)
|
||||
(:created-at file)
|
||||
(:modified-at file)
|
||||
(:data file)
|
||||
(:version file)])))
|
||||
|
||||
(defn persist-file!
|
||||
"Applies all the final validations and perist the file."
|
||||
[{:keys [::db/conn ::timestamp] :as cfg} {:keys [id] :as file}]
|
||||
|
||||
(dm/assert!
|
||||
"expected valid timestamp"
|
||||
(dt/instant? timestamp))
|
||||
|
||||
(let [file (-> file
|
||||
(assoc :created-at timestamp)
|
||||
(assoc :modified-at timestamp)
|
||||
(assoc :ignore-sync-until (dt/plus timestamp (dt/duration {:seconds 5})))
|
||||
(update :features
|
||||
(fn [features]
|
||||
(let [features (cfeat/check-supported-features! features)]
|
||||
(-> (::features cfg #{})
|
||||
(set/difference cfeat/frontend-only-features)
|
||||
(set/union features))))))
|
||||
|
||||
_ (when (contains? cf/flags :file-schema-validation)
|
||||
(fval/validate-file-schema! file))
|
||||
|
||||
_ (when (contains? cf/flags :soft-file-schema-validation)
|
||||
(let [result (ex/try! (fval/validate-file-schema! file))]
|
||||
(when (ex/exception? result)
|
||||
(l/error :hint "file schema validation error" :cause result))))
|
||||
|
||||
file (if (contains? (:features file) "fdata/objects-map")
|
||||
(feat.fdata/enable-objects-map file)
|
||||
file)
|
||||
|
||||
file (if (contains? (:features file) "fdata/pointer-map")
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)]
|
||||
(let [file (feat.fdata/enable-pointer-map file)]
|
||||
(feat.fdata/persist-pointers! cfg id)
|
||||
file))
|
||||
file)
|
||||
|
||||
params (-> file
|
||||
(update :features db/encode-pgarray conn "text")
|
||||
(update :data blob/encode))]
|
||||
|
||||
(if (::overwrite cfg)
|
||||
(upsert-file! conn params)
|
||||
(db/insert! conn :file params ::db/return-keys false))
|
||||
|
||||
file))
|
||||
|
||||
(defn apply-pending-migrations!
|
||||
"Apply alredy registered pending migrations to files"
|
||||
[cfg]
|
||||
(doseq [[feature file-id] (-> *state* deref :pending-to-migrate)]
|
||||
(case feature
|
||||
"components/v2"
|
||||
(feat.compv2/migrate-file! cfg file-id
|
||||
:validate? (::validate cfg true)
|
||||
:skip-on-graphic-error? true)
|
||||
|
||||
"fdata/shape-data-type"
|
||||
nil
|
||||
|
||||
(ex/raise :type :internal
|
||||
:code :no-migration-defined
|
||||
:hint (str/ffmt "no migation for feature '%' on file importation" feature)
|
||||
:feature feature))))
|
||||
@@ -1,779 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.binfile.v1
|
||||
"A custom, perfromance and efficiency focused binfile format impl"
|
||||
(:refer-clojure :exclude [assert])
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.fressian :as fres]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.media :as media]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.storage :as sto]
|
||||
[app.storage.tmp :as tmp]
|
||||
[app.tasks.file-gc]
|
||||
[app.util.events :as events]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.java.io :as jio]
|
||||
[clojure.set :as set]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[datoteka.io :as io]
|
||||
[promesa.util :as pu]
|
||||
[yetti.adapter :as yt])
|
||||
(:import
|
||||
com.github.luben.zstd.ZstdIOException
|
||||
com.github.luben.zstd.ZstdInputStream
|
||||
com.github.luben.zstd.ZstdOutputStream
|
||||
java.io.DataInputStream
|
||||
java.io.DataOutputStream
|
||||
java.io.InputStream
|
||||
java.io.OutputStream))
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; DEFAULTS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
;; Threshold (in bytes, 2 MiB) above which we switch from
;; in-memory byte arrays to temporary files.
(def temp-file-threshold
|
||||
(* 1024 1024 2))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; LOW LEVEL STREAM IO API
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def ^:const buffer-size (:xnio/buffer-size yt/defaults))
|
||||
(def ^:const penpot-magic-number 800099563638710213)
|
||||
|
||||
|
||||
;; A maximum (storage) object size allowed: 100MiB
|
||||
(def ^:const max-object-size
|
||||
(* 1024 1024 100))
|
||||
|
||||
(def ^:dynamic *position* nil)
|
||||
|
||||
(defn get-mark
|
||||
[id]
|
||||
(case id
|
||||
:header 1
|
||||
:stream 2
|
||||
:uuid 3
|
||||
:label 4
|
||||
:obj 5
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-mark-id
|
||||
:hint (format "invalid mark id %s" id))))
|
||||
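;; Rough on-stream layout, summarized from the writer functions below
;; (mark ids come from get-mark; sizes in bytes):
;;
;;   header : mark(1) + magic-number(8) + version(8)
;;   uuid   : mark(1) + word-high(8) + word-low(8)
;;   obj    : mark(1) + length(8) + <length> fressian-encoded bytes
;;   label  : mark(1) + obj
;;   stream : mark(1) + size(8) + <size> raw bytes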
|
||||
(defmacro assert
|
||||
[expr hint]
|
||||
`(when-not ~expr
|
||||
(ex/raise :type :validation
|
||||
:code :unexpected-condition
|
||||
:hint ~hint)))
|
||||
|
||||
(defmacro assert-mark
|
||||
[v type]
|
||||
`(let [expected# (get-mark ~type)
|
||||
val# (long ~v)]
|
||||
(when (not= val# expected#)
|
||||
(ex/raise :type :validation
|
||||
:code :unexpected-mark
|
||||
:hint (format "received mark %s, expected %s" val# expected#)))))
|
||||
|
||||
(defmacro assert-label
|
||||
[expr label]
|
||||
`(let [v# ~expr]
|
||||
(when (not= v# ~label)
|
||||
(ex/raise :type :assertion
|
||||
:code :unexpected-label
|
||||
:hint (format "received label %s, expected %s" v# ~label)))))
|
||||
|
||||
;; --- PRIMITIVE IO
|
||||
|
||||
(defn write-byte!
|
||||
[^DataOutputStream output data]
|
||||
(l/trace :fn "write-byte!" :data data :position @*position* ::l/sync? true)
|
||||
(.writeByte output (byte data))
|
||||
(swap! *position* inc))
|
||||
|
||||
(defn read-byte!
|
||||
[^DataInputStream input]
|
||||
(let [v (.readByte input)]
|
||||
(l/trace :fn "read-byte!" :val v :position @*position* ::l/sync? true)
|
||||
(swap! *position* inc)
|
||||
v))
|
||||
|
||||
(defn write-long!
|
||||
[^DataOutputStream output data]
|
||||
(l/trace :fn "write-long!" :data data :position @*position* ::l/sync? true)
|
||||
(.writeLong output (long data))
|
||||
(swap! *position* + 8))
|
||||
|
||||
|
||||
(defn read-long!
|
||||
[^DataInputStream input]
|
||||
(let [v (.readLong input)]
|
||||
(l/trace :fn "read-long!" :val v :position @*position* ::l/sync? true)
|
||||
(swap! *position* + 8)
|
||||
v))
|
||||
|
||||
(defn write-bytes!
|
||||
[^DataOutputStream output ^bytes data]
|
||||
(let [size (alength data)]
|
||||
(l/trace :fn "write-bytes!" :size size :position @*position* ::l/sync? true)
|
||||
(.write output data 0 size)
|
||||
(swap! *position* + size)))
|
||||
|
||||
(defn read-bytes!
|
||||
[^InputStream input ^bytes buff]
|
||||
(let [size (alength buff)
|
||||
readed (.readNBytes input buff 0 size)]
|
||||
(l/trace :fn "read-bytes!" :expected (alength buff) :readed readed :position @*position* ::l/sync? true)
|
||||
(swap! *position* + readed)
|
||||
readed))
|
||||
|
||||
;; --- COMPOSITE IO
|
||||
|
||||
(defn write-uuid!
|
||||
[^DataOutputStream output id]
|
||||
(l/trace :fn "write-uuid!" :position @*position* :WRITTEN? (.size output) ::l/sync? true)
|
||||
|
||||
(doto output
|
||||
(write-byte! (get-mark :uuid))
|
||||
(write-long! (uuid/get-word-high id))
|
||||
(write-long! (uuid/get-word-low id))))
|
||||
|
||||
(defn read-uuid!
|
||||
[^DataInputStream input]
|
||||
(l/trace :fn "read-uuid!" :position @*position* ::l/sync? true)
|
||||
(let [m (read-byte! input)]
|
||||
(assert-mark m :uuid)
|
||||
(let [a (read-long! input)
|
||||
b (read-long! input)]
|
||||
(uuid/custom a b))))
|
||||
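;; A minimal round-trip sketch for the uuid codec above (in-memory
;; streams; not part of the original code):
(comment
  (let [baos (java.io.ByteArrayOutputStream.)
        id   (uuid/next)]
    (binding [*position* (atom 0)]
      (write-uuid! (java.io.DataOutputStream. baos) id)
      (= id (read-uuid! (java.io.DataInputStream.
                         (java.io.ByteArrayInputStream.
                          (.toByteArray baos))))))))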
|
||||
(defn write-obj!
|
||||
[^DataOutputStream output data]
|
||||
(l/trace :fn "write-obj!" :position @*position* ::l/sync? true)
|
||||
(let [^bytes data (fres/encode data)]
|
||||
(doto output
|
||||
(write-byte! (get-mark :obj))
|
||||
(write-long! (alength data))
|
||||
(write-bytes! data))))
|
||||
|
||||
(defn read-obj!
|
||||
[^DataInputStream input]
|
||||
(l/trace :fn "read-obj!" :position @*position* ::l/sync? true)
|
||||
(let [m (read-byte! input)]
|
||||
(assert-mark m :obj)
|
||||
(let [size (read-long! input)]
|
||||
(assert (pos? size) "incorrect header size found on reading header")
|
||||
(let [buff (byte-array size)]
|
||||
(read-bytes! input buff)
|
||||
(fres/decode buff)))))
|
||||
|
||||
(defn write-label!
|
||||
[^DataOutputStream output label]
|
||||
(l/trace :fn "write-label!" :label label :position @*position* ::l/sync? true)
|
||||
(doto output
|
||||
(write-byte! (get-mark :label))
|
||||
(write-obj! label)))
|
||||
|
||||
(defn read-label!
|
||||
[^DataInputStream input]
|
||||
(l/trace :fn "read-label!" :position @*position* ::l/sync? true)
|
||||
(let [m (read-byte! input)]
|
||||
(assert-mark m :label)
|
||||
(read-obj! input)))
|
||||
|
||||
(defn write-header!
|
||||
[^OutputStream output version]
|
||||
(l/trace :fn "write-header!"
|
||||
:version version
|
||||
:position @*position*
|
||||
::l/sync? true)
|
||||
(let [vers (-> version name (subs 1) parse-long)
|
||||
output (io/data-output-stream output)]
|
||||
(doto output
|
||||
(write-byte! (get-mark :header))
|
||||
(write-long! penpot-magic-number)
|
||||
(write-long! vers))))
|
||||
|
||||
(defn read-header!
|
||||
[^InputStream input]
|
||||
(l/trace :fn "read-header!" :position @*position* ::l/sync? true)
|
||||
(let [input (io/data-input-stream input)
|
||||
mark (read-byte! input)
|
||||
mnum (read-long! input)
|
||||
vers (read-long! input)]
|
||||
|
||||
(when (or (not= mark (get-mark :header))
|
||||
(not= mnum penpot-magic-number))
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-penpot-file
|
||||
:hint "invalid penpot file"))
|
||||
|
||||
(keyword (str "v" vers))))
|
||||
|
||||
(defn copy-stream!
|
||||
[^OutputStream output ^InputStream input ^long size]
|
||||
(let [written (io/copy! input output :size size)]
|
||||
(l/trace :fn "copy-stream!" :position @*position* :size size :written written ::l/sync? true)
|
||||
(swap! *position* + written)
|
||||
written))
|
||||
|
||||
(defn write-stream!
|
||||
[^DataOutputStream output stream size]
|
||||
(l/trace :fn "write-stream!" :position @*position* ::l/sync? true :size size)
|
||||
(doto output
|
||||
(write-byte! (get-mark :stream))
|
||||
(write-long! size))
|
||||
|
||||
(copy-stream! output stream size))
|
||||
|
||||
(defn read-stream!
|
||||
[^DataInputStream input]
|
||||
(l/trace :fn "read-stream!" :position @*position* ::l/sync? true)
|
||||
(let [m (read-byte! input)
|
||||
s (read-long! input)
|
||||
p (tmp/tempfile :prefix "penpot.binfile.")]
|
||||
(assert-mark m :stream)
|
||||
|
||||
(when (> s max-object-size)
|
||||
(ex/raise :type :validation
|
||||
:code :max-file-size-reached
|
||||
:hint (str/ffmt "unable to import storage object with size % bytes" s)))
|
||||
|
||||
(if (> s temp-file-threshold)
|
||||
(with-open [^OutputStream output (io/output-stream p)]
|
||||
(let [readed (io/copy! input output :offset 0 :size s)]
|
||||
(l/trace :fn "read-stream*!" :expected s :readed readed :position @*position* ::l/sync? true)
|
||||
(swap! *position* + readed)
|
||||
[s p]))
|
||||
[s (io/read-as-bytes input :size s)])))
|
||||
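;; Summary of the branches above: payloads larger than
;; temp-file-threshold are spooled to a temporary file and returned as
;; [size tmpfile-path]; smaller ones are returned as [size byte-array].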
|
||||
(defmacro assert-read-label!
|
||||
[input expected-label]
|
||||
`(let [readed# (read-label! ~input)
|
||||
expected# ~expected-label]
|
||||
(when (not= readed# expected#)
|
||||
(ex/raise :type :validation
|
||||
:code :unexpected-label
|
||||
:hint (format "unexpected label found: %s, expected: %s" readed# expected#)))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; API
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
;; --- HELPERS
|
||||
|
||||
(defn zstd-input-stream
|
||||
^InputStream
|
||||
[input]
|
||||
(ZstdInputStream. ^InputStream input))
|
||||
|
||||
(defn zstd-output-stream
|
||||
^OutputStream
|
||||
[output & {:keys [level] :or {level 0}}]
|
||||
(ZstdOutputStream. ^OutputStream output (int level)))
|
||||
|
||||
(defn- get-files
|
||||
[cfg ids]
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [sql (str "SELECT id FROM file "
|
||||
" WHERE id = ANY(?) ")
|
||||
ids (db/create-array conn "uuid" ids)]
|
||||
(->> (db/exec! conn [sql ids])
|
||||
(into [] (map :id))
|
||||
(not-empty))))))
|
||||
|
||||
;; --- EXPORT WRITER
|
||||
|
||||
(defmulti write-export ::version)
|
||||
(defmulti write-section ::section)
|
||||
|
||||
(defn write-export!
|
||||
[{:keys [::include-libraries ::embed-assets] :as cfg}]
|
||||
(when (and include-libraries embed-assets)
|
||||
(throw (IllegalArgumentException.
|
||||
"the `include-libraries` and `embed-assets` are mutally excluding options")))
|
||||
|
||||
(write-export cfg))
|
||||
|
||||
(defmethod write-export :default
|
||||
[{:keys [::output] :as options}]
|
||||
(write-header! output :v1)
|
||||
(pu/with-open [output (zstd-output-stream output :level 12)
|
||||
output (io/data-output-stream output)]
|
||||
(binding [bfc/*state* (volatile! {})]
|
||||
(run! (fn [section]
|
||||
(l/dbg :hint "write section" :section section ::l/sync? true)
|
||||
(write-label! output section)
|
||||
(let [options (-> options
|
||||
(assoc ::output output)
|
||||
(assoc ::section section))]
|
||||
(binding [bfc/*options* options]
|
||||
(write-section options))))
|
||||
|
||||
[:v1/metadata :v1/files :v1/rels :v1/sobjects]))))
|
||||
|
||||
(defmethod write-section :v1/metadata
|
||||
[{:keys [::output ::ids ::include-libraries] :as cfg}]
|
||||
(if-let [fids (get-files cfg ids)]
|
||||
(let [lids (when include-libraries
|
||||
(bfc/get-libraries cfg ids))
|
||||
ids (into fids lids)]
|
||||
(write-obj! output {:version cf/version :files ids})
|
||||
(vswap! bfc/*state* assoc :files ids))
|
||||
(ex/raise :type :not-found
|
||||
:code :files-not-found
|
||||
:hint "unable to retrieve files for export")))
|
||||
|
||||
(defmethod write-section :v1/files
|
||||
[{:keys [::output ::embed-assets ::include-libraries] :as cfg}]
|
||||
|
||||
;; Initialize SIDS with empty vector
|
||||
(vswap! bfc/*state* assoc :sids [])
|
||||
|
||||
(doseq [file-id (-> bfc/*state* deref :files)]
|
||||
(let [detach? (and (not embed-assets) (not include-libraries))
|
||||
thumbnails (->> (bfc/get-file-object-thumbnails cfg file-id)
|
||||
(mapv #(dissoc % :file-id)))
|
||||
|
||||
file (cond-> (bfc/get-file cfg file-id)
|
||||
detach?
|
||||
(-> (ctf/detach-external-references file-id)
|
||||
(dissoc :libraries))
|
||||
|
||||
embed-assets
|
||||
(update :data #(bfc/embed-assets cfg % file-id))
|
||||
|
||||
:always
|
||||
(assoc :thumbnails thumbnails))
|
||||
|
||||
media (bfc/get-file-media cfg file)]
|
||||
|
||||
(l/dbg :hint "write penpot file"
|
||||
:id (str file-id)
|
||||
:name (:name file)
|
||||
:thumbnails (count thumbnails)
|
||||
:features (:features file)
|
||||
:media (count media)
|
||||
::l/sync? true)
|
||||
|
||||
(doseq [item media]
|
||||
(l/dbg :hint "write penpot file media object" :id (:id item) ::l/sync? true))
|
||||
|
||||
(doseq [item thumbnails]
|
||||
(l/dbg :hint "write penpot file object thumbnail" :media-id (str (:media-id item)) ::l/sync? true))
|
||||
|
||||
(doto output
|
||||
(write-obj! file)
|
||||
(write-obj! media))
|
||||
|
||||
(vswap! bfc/*state* update :sids into bfc/xf-map-media-id media)
|
||||
(vswap! bfc/*state* update :sids into bfc/xf-map-media-id thumbnails))))
|
||||
|
||||
(defmethod write-section :v1/rels
|
||||
[{:keys [::output ::include-libraries] :as cfg}]
|
||||
(let [ids (-> bfc/*state* deref :files set)
|
||||
rels (when include-libraries
|
||||
(bfc/get-files-rels cfg ids))]
|
||||
(l/dbg :hint "found rels" :total (count rels) ::l/sync? true)
|
||||
(write-obj! output rels)))
|
||||
|
||||
(defmethod write-section :v1/sobjects
|
||||
[{:keys [::sto/storage ::output]}]
|
||||
(let [sids (-> bfc/*state* deref :sids)
|
||||
storage (media/configure-assets-storage storage)]
|
||||
|
||||
(l/dbg :hint "found sobjects"
|
||||
:items (count sids)
|
||||
::l/sync? true)
|
||||
|
||||
;; Write all collected storage objects
|
||||
(write-obj! output sids)
|
||||
|
||||
(doseq [id sids]
|
||||
(let [{:keys [size] :as obj} (sto/get-object storage id)]
|
||||
(l/dbg :hint "write sobject" :id (str id) ::l/sync? true)
|
||||
|
||||
(doto output
|
||||
(write-uuid! id)
|
||||
(write-obj! (meta obj)))
|
||||
|
||||
(pu/with-open [stream (sto/get-object-data storage obj)]
|
||||
(let [written (write-stream! output stream size)]
|
||||
(when (not= written size)
|
||||
(ex/raise :type :validation
|
||||
:code :mismatch-readed-size
|
||||
:hint (str/ffmt "found unexpected object size; size=% written=%" size written)))))))))
|
||||
|
||||
;; --- EXPORT READER
|
||||
|
||||
(defmulti read-import ::version)
|
||||
(defmulti read-section ::section)
|
||||
|
||||
(s/def ::profile-id ::us/uuid)
|
||||
(s/def ::project-id ::us/uuid)
|
||||
(s/def ::input io/input-stream?)
|
||||
(s/def ::overwrite? (s/nilable ::us/boolean))
|
||||
(s/def ::ignore-index-errors? (s/nilable ::us/boolean))
|
||||
|
||||
;; FIXME: replace with schema
|
||||
(s/def ::read-import-options
|
||||
(s/keys :req [::db/pool ::sto/storage ::project-id ::profile-id ::input]
|
||||
:opt [::overwrite? ::ignore-index-errors?]))
|
||||
|
||||
(defn read-import!
|
||||
"Do the importation of the specified resource in penpot custom binary
|
||||
format. There are some options for customize the importation
|
||||
behavior:
|
||||
|
||||
`::bfc/overwrite`: if true, instead of creating new files and remapping id references,
|
||||
it reuses all ids and updates existing objects; defaults to `false`."
|
||||
[{:keys [::input ::bfc/timestamp] :or {timestamp (dt/now)} :as options}]
|
||||
|
||||
(dm/assert!
|
||||
"expected input stream"
|
||||
(io/input-stream? input))
|
||||
|
||||
(dm/assert!
|
||||
"expected valid instant"
|
||||
(dt/instant? timestamp))
|
||||
|
||||
(let [version (read-header! input)]
|
||||
(read-import (assoc options ::version version ::bfc/timestamp timestamp))))
|
||||
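;; A hedged usage sketch; the pool/storage/profile/project/input values
;; are placeholders that must come from the running system (see
;; ::read-import-options above):
(comment
  (read-import!
   {::db/pool pool
    ::sto/storage storage
    ::project-id project-id
    ::profile-id profile-id
    ::input input}))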
|
||||
(defn- read-import-v1
|
||||
[{:keys [::db/conn ::project-id ::profile-id ::input] :as cfg}]
|
||||
(db/exec-one! conn ["SET LOCAL idle_in_transaction_session_timeout = 0"])
|
||||
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
|
||||
|
||||
(pu/with-open [input (zstd-input-stream input)
|
||||
input (io/data-input-stream input)]
|
||||
(binding [bfc/*state* (volatile! {:media [] :index {}})]
|
||||
(let [team (teams/get-team conn
|
||||
:profile-id profile-id
|
||||
:project-id project-id)
|
||||
|
||||
features (cfeat/get-team-enabled-features cf/flags team)]
|
||||
|
||||
;; Process all sections
|
||||
(run! (fn [section]
|
||||
(l/dbg :hint "reading section" :section section ::l/sync? true)
|
||||
(assert-read-label! input section)
|
||||
(let [options (-> cfg
|
||||
(assoc ::bfc/features features)
|
||||
(assoc ::section section)
|
||||
(assoc ::input input))]
|
||||
(binding [bfc/*options* options]
|
||||
(events/tap :progress {:op :import :section section})
|
||||
(read-section options))))
|
||||
[:v1/metadata :v1/files :v1/rels :v1/sobjects])
|
||||
|
||||
(bfc/apply-pending-migrations! cfg)
|
||||
|
||||
;; Knowing that the ids of the created files are in index,
|
||||
;; just lookup them and return it as a set
|
||||
(let [files (-> bfc/*state* deref :files)]
|
||||
(into #{} (keep #(get-in @bfc/*state* [:index %])) files))))))
|
||||
|
||||
(defmethod read-import :v1
|
||||
[options]
|
||||
(db/tx-run! options read-import-v1))
|
||||
|
||||
(defmethod read-section :v1/metadata
|
||||
[{:keys [::input]}]
|
||||
(let [{:keys [version files]} (read-obj! input)]
|
||||
(l/dbg :hint "metadata readed"
|
||||
:version (:full version)
|
||||
:files (mapv str files)
|
||||
::l/sync? true)
|
||||
(vswap! bfc/*state* update :index bfc/update-index files)
|
||||
(vswap! bfc/*state* assoc :version version :files files)))
|
||||
|
||||
(defn- remap-thumbnails
|
||||
[thumbnails file-id]
|
||||
(mapv (fn [thumbnail]
|
||||
(-> thumbnail
|
||||
(assoc :file-id file-id)
|
||||
(update :object-id #(str/replace-first % #"^(.*?)/" (str file-id "/")))))
|
||||
thumbnails))
|
||||
|
||||
(defn- clean-features
|
||||
[file]
|
||||
(update file :features (fn [features]
|
||||
(if (set? features)
|
||||
(-> features
|
||||
(cfeat/migrate-legacy-features)
|
||||
(set/difference cfeat/backend-only-features))
|
||||
#{}))))
|
||||
|
||||
(defmethod read-section :v1/files
|
||||
[{:keys [::db/conn ::input ::project-id ::bfc/overwrite ::name] :as system}]
|
||||
|
||||
(doseq [[idx expected-file-id] (d/enumerate (-> bfc/*state* deref :files))]
|
||||
(let [file (read-obj! input)
|
||||
media (read-obj! input)
|
||||
|
||||
file-id (:id file)
|
||||
file-id' (bfc/lookup-index file-id)
|
||||
|
||||
file (clean-features file)
|
||||
thumbnails (:thumbnails file)]
|
||||
|
||||
(when (not= file-id expected-file-id)
|
||||
(ex/raise :type :validation
|
||||
:code :inconsistent-penpot-file
|
||||
:found-id file-id
|
||||
:expected-id expected-file-id
|
||||
:hint "the penpot file seems corrupt, found unexpected uuid (file-id)"))
|
||||
|
||||
(l/dbg :hint "processing file"
|
||||
:id (str file-id)
|
||||
:features (:features file)
|
||||
:version (-> file :data :version)
|
||||
:media (count media)
|
||||
:thumbnails (count thumbnails)
|
||||
::l/sync? true)
|
||||
|
||||
(when (seq thumbnails)
|
||||
(let [thumbnails (remap-thumbnails thumbnails file-id')]
|
||||
(l/dbg :hint "updated index with thumbnails" :total (count thumbnails) ::l/sync? true)
|
||||
(vswap! bfc/*state* update :thumbnails bfc/into-vec thumbnails)))
|
||||
|
||||
(when (seq media)
|
||||
;; Update index with media
|
||||
(l/dbg :hint "update index with media" :total (count media) ::l/sync? true)
|
||||
(vswap! bfc/*state* update :index bfc/update-index (map :id media))
|
||||
|
||||
;; Store file media for later insertion
|
||||
(l/dbg :hint "update media references" ::l/sync? true)
|
||||
(vswap! bfc/*state* update :media into (map #(update % :id bfc/lookup-index)) media))
|
||||
|
||||
(let [file (-> file
|
||||
(assoc :id file-id')
|
||||
(cond-> (and (= idx 0) (some? name))
|
||||
(assoc :name name))
|
||||
(assoc :project-id project-id)
|
||||
(dissoc :thumbnails)
|
||||
(bfc/process-file))]
|
||||
|
||||
;; All features that are enabled and require explicit migration are
;; added to the state for a later migration step.
(doseq [feature (-> (::bfc/features system)
|
||||
(set/difference cfeat/no-migration-features)
|
||||
(set/difference (:features file)))]
|
||||
(vswap! bfc/*state* update :pending-to-migrate (fnil conj []) [feature file-id']))
|
||||
|
||||
(l/dbg :hint "create file" :id (str file-id') ::l/sync? true)
|
||||
(bfc/persist-file! system file)
|
||||
|
||||
(when overwrite
|
||||
(db/delete! conn :file-thumbnail {:file-id file-id'}))
|
||||
|
||||
file-id'))))
|
||||
|
||||
(defmethod read-section :v1/rels
|
||||
[{:keys [::db/conn ::input ::bfc/timestamp]}]
|
||||
(let [rels (read-obj! input)
|
||||
ids (into #{} (-> bfc/*state* deref :files))]
|
||||
;; Insert all file relations
|
||||
(doseq [{:keys [library-file-id] :as rel} rels]
|
||||
(let [rel (-> rel
|
||||
(assoc :synced-at timestamp)
|
||||
(update :file-id bfc/lookup-index)
|
||||
(update :library-file-id bfc/lookup-index))]
|
||||
|
||||
(if (contains? ids library-file-id)
|
||||
(do
|
||||
(l/dbg :hint "create file library link"
|
||||
:file-id (:file-id rel)
|
||||
:lib-id (:library-file-id rel)
|
||||
::l/sync? true)
|
||||
(db/insert! conn :file-library-rel rel))
|
||||
|
||||
(l/warn :hint "ignoring file library link"
|
||||
:file-id (:file-id rel)
|
||||
:lib-id (:library-file-id rel)
|
||||
::l/sync? true))))))
|
||||
|
||||
(defmethod read-section :v1/sobjects
|
||||
[{:keys [::sto/storage ::db/conn ::input ::bfc/overwrite ::bfc/timestamp]}]
|
||||
(let [storage (media/configure-assets-storage storage)
|
||||
ids (read-obj! input)
|
||||
thumb? (into #{} (map :media-id) (:thumbnails @bfc/*state*))]
|
||||
|
||||
(doseq [expected-storage-id ids]
|
||||
(let [id (read-uuid! input)
|
||||
mdata (read-obj! input)]
|
||||
|
||||
(when (not= id expected-storage-id)
|
||||
(ex/raise :type :validation
|
||||
:code :inconsistent-penpot-file
|
||||
:hint "the penpot file seems corrupt, found unexpected uuid (storage-object-id)"))
|
||||
|
||||
(l/dbg :hint "readed storage object" :id (str id) ::l/sync? true)
|
||||
|
||||
(let [[size resource] (read-stream! input)
|
||||
hash (sto/calculate-hash resource)
|
||||
content (-> (sto/content resource size)
|
||||
(sto/wrap-with-hash hash))
|
||||
|
||||
params (-> mdata
|
||||
(assoc ::sto/content content)
|
||||
(assoc ::sto/deduplicate? true)
|
||||
(assoc ::sto/touched-at timestamp))
|
||||
|
||||
params (if (thumb? id)
|
||||
(assoc params :bucket "file-object-thumbnail")
|
||||
(assoc params :bucket "file-media-object"))
|
||||
|
||||
sobject (sto/put-object! storage params)]
|
||||
|
||||
(l/dbg :hint "persisted storage object"
|
||||
:old-id (str id)
|
||||
:new-id (str (:id sobject))
|
||||
:is-thumbnail (boolean (thumb? id))
|
||||
::l/sync? true)
|
||||
|
||||
(vswap! bfc/*state* update :index assoc id (:id sobject)))))
|
||||
|
||||
(doseq [item (:media @bfc/*state*)]
|
||||
(l/dbg :hint "inserting file media object"
|
||||
:id (str (:id item))
|
||||
:file-id (str (:file-id item))
|
||||
::l/sync? true)
|
||||
|
||||
(let [file-id (bfc/lookup-index (:file-id item))]
|
||||
(if (= file-id (:file-id item))
|
||||
(l/warn :hint "ignoring file media object" :file-id (str file-id) ::l/sync? true)
|
||||
(db/insert! conn :file-media-object
|
||||
(-> item
|
||||
(assoc :file-id file-id)
|
||||
(d/update-when :media-id bfc/lookup-index)
|
||||
(d/update-when :thumbnail-id bfc/lookup-index))
|
||||
{::db/on-conflict-do-nothing? overwrite}))))
|
||||
|
||||
(doseq [item (:thumbnails @bfc/*state*)]
|
||||
(let [item (update item :media-id bfc/lookup-index)]
|
||||
(l/dbg :hint "inserting file object thumbnail"
|
||||
:file-id (str (:file-id item))
|
||||
:media-id (str (:media-id item))
|
||||
:object-id (:object-id item)
|
||||
::l/sync? true)
|
||||
(db/insert! conn :file-tagged-object-thumbnail item
|
||||
{::db/on-conflict-do-nothing? overwrite})))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; HIGH LEVEL API
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn export-files!
|
||||
"Do the exportation of a specified file in custom penpot binary
|
||||
format. There are some options available for customize the output:
|
||||
|
||||
`::include-libraries`: additionally to the specified file, all the
|
||||
linked libraries also will be included (including transitive
|
||||
dependencies).
|
||||
|
||||
`::embed-assets`: instead of including the libraries, embed in the
|
||||
same file library all assets used from external libraries."
|
||||
|
||||
[{:keys [::ids] :as cfg} output]
|
||||
|
||||
(dm/assert!
|
||||
"expected a set of uuid's for `::ids` parameter"
|
||||
(and (set? ids)
|
||||
(every? uuid? ids)))
|
||||
|
||||
(dm/assert!
|
||||
"expected instance of jio/IOFactory for `input`"
|
||||
(satisfies? jio/IOFactory output))
|
||||
|
||||
(let [id (uuid/next)
|
||||
tp (dt/tpoint)
|
||||
ab (volatile! false)
|
||||
cs (volatile! nil)]
|
||||
(try
|
||||
(l/info :hint "start exportation" :export-id (str id))
|
||||
(pu/with-open [output (io/output-stream output)]
|
||||
(binding [*position* (atom 0)]
|
||||
(write-export! (assoc cfg ::output output))))
|
||||
|
||||
(catch java.io.IOException _cause
|
||||
;; Do nothing, EOF means client closes connection abruptly
|
||||
(vreset! ab true)
|
||||
nil)
|
||||
|
||||
(catch Throwable cause
|
||||
(vreset! cs cause)
|
||||
(vreset! ab true)
|
||||
(throw cause))
|
||||
|
||||
(finally
|
||||
(l/info :hint "exportation finished" :export-id (str id)
|
||||
:elapsed (str (inst-ms (tp)) "ms")
|
||||
:aborted @ab
|
||||
:cause @cs)))))
|
||||
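;; A hedged usage sketch; `cfg` and `file-id` are placeholders for the
;; system map and an existing file id (a string path satisfies
;; jio/IOFactory, matching the assertion above):
(comment
  (export-files!
   (assoc cfg ::ids #{file-id} ::include-libraries true)
   "/tmp/export.penpot"))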
|
||||
(defn import-files!
|
||||
[cfg input]
|
||||
|
||||
(dm/assert!
|
||||
"expected valid profile-id and project-id on `cfg`"
|
||||
(and (uuid? (::profile-id cfg))
|
||||
(uuid? (::project-id cfg))))
|
||||
|
||||
(dm/assert!
|
||||
"expected instance of jio/IOFactory for `input`"
|
||||
(satisfies? jio/IOFactory input))
|
||||
|
||||
(let [id (uuid/next)
|
||||
tp (dt/tpoint)
|
||||
cs (volatile! nil)]
|
||||
|
||||
(l/info :hint "import: started" :id (str id))
|
||||
(try
|
||||
(binding [*position* (atom 0)]
|
||||
(pu/with-open [input (io/input-stream input)]
|
||||
(read-import! (assoc cfg ::input input))))
|
||||
|
||||
(catch ZstdIOException cause
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-penpot-file
|
||||
:hint "invalid penpot file received: probably truncated"
|
||||
:cause cause))
|
||||
|
||||
(catch Throwable cause
|
||||
(vreset! cs cause)
|
||||
(throw cause))
|
||||
|
||||
(finally
|
||||
(l/info :hint "import: terminated"
|
||||
:id (str id)
|
||||
:elapsed (dt/format-duration (tp))
|
||||
:error? (some? @cs))))))
|
||||
|
||||
@@ -1,442 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.binfile.v2
|
||||
"A sqlite3 based binary file exportation with support for exportation
|
||||
of entire team (or multiple teams) at once."
|
||||
(:refer-clojure :exclude [read])
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.data :as d]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.logging :as l]
|
||||
[app.common.transit :as t]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.media :as media]
|
||||
[app.storage :as sto]
|
||||
[app.storage.tmp :as tmp]
|
||||
[app.util.events :as events]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.set :as set]
|
||||
[cuerdas.core :as str]
|
||||
[datoteka.io :as io]
|
||||
[promesa.util :as pu])
|
||||
(:import
|
||||
java.sql.DriverManager))
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; LOW LEVEL API
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn- create-database
|
||||
([cfg]
|
||||
(let [path (tmp/tempfile :prefix "penpot.binfile." :suffix ".sqlite")]
|
||||
(create-database cfg path)))
|
||||
([cfg path]
|
||||
(let [db (DriverManager/getConnection (str "jdbc:sqlite:" path))]
|
||||
(assoc cfg ::db db ::path path))))
|
||||
|
||||
(def ^:private
|
||||
sql:create-kvdata-table
|
||||
"CREATE TABLE kvdata (
|
||||
tag text NOT NULL,
|
||||
key text NOT NULL,
|
||||
val text NOT NULL,
|
||||
dat blob NULL
|
||||
)")
|
||||
|
||||
(def ^:private
|
||||
sql:create-kvdata-index
|
||||
"CREATE INDEX kvdata__tag_key__idx
|
||||
ON kvdata (tag, key)")
|
||||
|
||||
(defn- setup-schema!
|
||||
[{:keys [::db]}]
|
||||
(db/exec-one! db [sql:create-kvdata-table])
|
||||
(db/exec-one! db [sql:create-kvdata-index]))
|
||||
|
||||
(defn- write!
|
||||
[{:keys [::db]} tag k v & [data]]
|
||||
(db/insert! db :kvdata
|
||||
{:tag (d/name tag)
|
||||
:key (str k)
|
||||
:val (t/encode-str v {:type :json-verbose})
|
||||
:dat data}
|
||||
{::db/return-keys false}))
|
||||
|
||||
(defn- read-blob
|
||||
[{:keys [::db]} tag k]
|
||||
(let [obj (db/get db :kvdata
|
||||
{:tag (d/name tag)
|
||||
:key (str k)}
|
||||
{::sql/columns [:dat]})]
|
||||
(:dat obj)))
|
||||
|
||||
(defn- read-seq
|
||||
([{:keys [::db]} tag]
|
||||
(->> (db/query db :kvdata
|
||||
{:tag (d/name tag)}
|
||||
{::sql/columns [::val]})
|
||||
(map :val)
|
||||
(map t/decode-str)))
|
||||
([{:keys [::db]} tag k]
|
||||
(->> (db/query db :kvdata
|
||||
{:tag (d/name tag)
|
||||
:key (str k)}
|
||||
{::sql/columns [::val]})
|
||||
(map :val)
|
||||
(map t/decode-str))))
|
||||
|
||||
(defn- read-obj
|
||||
[{:keys [::db]} tag k]
|
||||
(let [obj (db/get db :kvdata
|
||||
{:tag (d/name tag)
|
||||
:key (str k)}
|
||||
{::sql/columns [:val]})]
|
||||
(-> obj :val t/decode-str)))
|
||||
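;; A small sketch (hypothetical values) of the kvdata round trip used by
;; the import/export code below: every object is stored transit-encoded
;; under a (tag, key) pair, with an optional binary payload in `dat`.
(comment
  (let [cfg     (create-database {})
        team-id (uuid/next)]
    (setup-schema! cfg)
    (write! cfg :team team-id {:id team-id :name "demo team"})
    (read-obj cfg :team team-id)))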
|

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; IMPORT/EXPORT IMPL
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(declare ^:private write-project!)
(declare ^:private write-file!)

(defn- write-team!
  [cfg team-id]
  (let [team  (bfc/get-team cfg team-id)
        fonts (bfc/get-fonts cfg team-id)]

    (events/tap :progress
                {:op :export
                 :section :write-team
                 :id team-id
                 :name (:name team)})

    (l/trc :hint "write" :obj "team"
           :id (str team-id)
           :fonts (count fonts))

    (when-let [photo-id (:photo-id team)]
      (vswap! bfc/*state* update :storage-objects conj photo-id))

    (vswap! bfc/*state* update :teams conj team-id)
    (vswap! bfc/*state* bfc/collect-storage-objects fonts)

    (write! cfg :team team-id team)

    (doseq [{:keys [id] :as font} fonts]
      (vswap! bfc/*state* update :team-font-variants conj id)
      (write! cfg :team-font-variant id font))))

(defn- write-project!
  [cfg project-id]
  (let [project (bfc/get-project cfg project-id)]
    (events/tap :progress
                {:op :export
                 :section :write-project
                 :id project-id
                 :name (:name project)})
    (l/trc :hint "write" :obj "project" :id (str project-id))
    (write! cfg :project (str project-id) project)
    (vswap! bfc/*state* update :projects conj project-id)))

(defn- write-file!
  [cfg file-id]
  (let [file   (bfc/get-file cfg file-id)
        thumbs (bfc/get-file-object-thumbnails cfg file-id)
        media  (bfc/get-file-media cfg file)
        rels   (bfc/get-files-rels cfg #{file-id})]

    (events/tap :progress
                {:op :export
                 :section :write-file
                 :id file-id
                 :name (:name file)})

    (vswap! bfc/*state* (fn [state]
                          (-> state
                              (update :files conj file-id)
                              (update :file-media-objects into bfc/xf-map-id media)
                              (bfc/collect-storage-objects thumbs)
                              (bfc/collect-storage-objects media))))

    (write! cfg :file file-id file)
    (write! cfg :file-rels file-id rels)

    (run! (partial write! cfg :file-media-object file-id) media)
    (run! (partial write! cfg :file-object-thumbnail file-id) thumbs)

    (when-let [thumb (bfc/get-file-thumbnail cfg file)]
      (vswap! bfc/*state* bfc/collect-storage-objects [thumb])
      (write! cfg :file-thumbnail file-id thumb))

    (l/trc :hint "write" :obj "file"
           :thumbnails (count thumbs)
           :rels (count rels)
           :media (count media))))

(defn- write-storage-object!
  [{:keys [::sto/storage] :as cfg} id]
  (let [sobj (sto/get-object storage id)
        data (with-open [input (sto/get-object-data storage sobj)]
               (io/read-as-bytes input))]

    (l/trc :hint "write" :obj "storage-object" :id (str id) :size (:size sobj))
    (write! cfg :storage-object id (meta sobj) data)))

(defn- read-storage-object!
  [{:keys [::sto/storage ::bfc/timestamp] :as cfg} id]
  (let [mdata   (read-obj cfg :storage-object id)
        data    (read-blob cfg :storage-object id)
        hash    (sto/calculate-hash data)

        content (-> (sto/content data)
                    (sto/wrap-with-hash hash))

        params  (-> mdata
                    (assoc ::sto/content content)
                    (assoc ::sto/deduplicate? true)
                    (assoc ::sto/touched-at timestamp))

        sobject (sto/put-object! storage params)]

    (vswap! bfc/*state* update :index assoc id (:id sobject))

    (l/trc :hint "read" :obj "storage-object"
           :id (str id)
           :new-id (str (:id sobject))
           :size (:size sobject))))

(defn read-team!
  [{:keys [::db/conn ::bfc/timestamp] :as cfg} team-id]
  (l/trc :hint "read" :obj "team" :id (str team-id))

  (let [team (read-obj cfg :team team-id)
        team (-> team
                 (update :id bfc/lookup-index)
                 (update :photo-id bfc/lookup-index)
                 (assoc :created-at timestamp)
                 (assoc :modified-at timestamp))]

    (events/tap :progress
                {:op :import
                 :section :read-team
                 :id team-id
                 :name (:name team)})

    (db/insert! conn :team
                (update team :features db/encode-pgarray conn "text")
                ::db/return-keys false)

    (doseq [font (->> (read-seq cfg :team-font-variant)
                      (filter #(= team-id (:team-id %))))]
      (let [font (-> font
                     (update :id bfc/lookup-index)
                     (update :team-id bfc/lookup-index)
                     (update :woff1-file-id bfc/lookup-index)
                     (update :woff2-file-id bfc/lookup-index)
                     (update :ttf-file-id bfc/lookup-index)
                     (update :otf-file-id bfc/lookup-index)
                     (assoc :created-at timestamp)
                     (assoc :modified-at timestamp))]
        (db/insert! conn :team-font-variant font
                    ::db/return-keys false)))

    team))

(defn read-project!
  [{:keys [::db/conn ::bfc/timestamp] :as cfg} project-id]
  (l/trc :hint "read" :obj "project" :id (str project-id))

  (let [project (read-obj cfg :project project-id)
        project (-> project
                    (update :id bfc/lookup-index)
                    (update :team-id bfc/lookup-index)
                    (assoc :created-at timestamp)
                    (assoc :modified-at timestamp))]

    (events/tap :progress
                {:op :import
                 :section :read-project
                 :id project-id
                 :name (:name project)})

    (db/insert! conn :project project
                ::db/return-keys false)))

(defn read-file!
  [{:keys [::db/conn ::bfc/timestamp] :as cfg} file-id]
  (l/trc :hint "read" :obj "file" :id (str file-id))

  (let [file (-> (read-obj cfg :file file-id)
                 (update :id bfc/lookup-index)
                 (update :project-id bfc/lookup-index)
                 (bfc/process-file))]

    (events/tap :progress
                {:op :import
                 :section :read-file
                 :id file-id
                 :name (:name file)})

    ;; All features that are enabled and require explicit migration are
    ;; added to the state for a later migration step.
    (doseq [feature (-> (::bfc/features cfg)
                        (set/difference cfeat/no-migration-features)
                        (set/difference (:features file)))]
      (vswap! bfc/*state* update :pending-to-migrate (fnil conj []) [feature (:id file)]))

    (bfc/persist-file! cfg file))

  (doseq [thumbnail (read-seq cfg :file-object-thumbnail file-id)]
    (let [thumbnail (-> thumbnail
                        (update :file-id bfc/lookup-index)
                        (update :media-id bfc/lookup-index))
          file-id   (:file-id thumbnail)

          thumbnail (update thumbnail :object-id
                            #(str/replace-first % #"^(.*?)/" (str file-id "/")))]

      (db/insert! conn :file-tagged-object-thumbnail thumbnail
                  {::db/return-keys false})))

  (doseq [rel (read-obj cfg :file-rels file-id)]
    (let [rel (-> rel
                  (update :file-id bfc/lookup-index)
                  (update :library-file-id bfc/lookup-index)
                  (assoc :synced-at timestamp))]
      (db/insert! conn :file-library-rel rel
                  ::db/return-keys false)))

  (doseq [media (read-seq cfg :file-media-object file-id)]
    (let [media (-> media
                    (update :id bfc/lookup-index)
                    (update :file-id bfc/lookup-index)
                    (update :media-id bfc/lookup-index)
                    (update :thumbnail-id bfc/lookup-index))]
      (db/insert! conn :file-media-object media
                  ::db/return-keys false
                  ::sql/on-conflict-do-nothing true))))

(def ^:private empty-summary
  {:teams #{}
   :files #{}
   :projects #{}
   :file-media-objects #{}
   :team-font-variants #{}
   :storage-objects #{}})
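
;; Note: during import every read-* helper above remaps ids through
;; `bfc/lookup-index`, which presumably consults the :index map
;; accumulated in `bfc/*state*` (old id -> freshly generated id; storage
;; objects add their entries in read-storage-object!, the rest via
;; `bfc/update-index`). A minimal sketch of the idea, using a plain map
;; instead of the real bfc state (illustrative only):
(comment
  (let [index {#uuid "00000000-0000-0000-0000-000000000001"
               #uuid "11111111-1111-1111-1111-111111111111"}
        team  {:id #uuid "00000000-0000-0000-0000-000000000001"}]
    ;; the imported row references the new id, not the exported one
    (update team :id #(get index % %))))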

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; PUBLIC API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn export-team!
  [cfg team-id]
  (let [id  (uuid/next)
        tp  (dt/tpoint)

        cfg (-> (create-database cfg)
                (update ::sto/storage media/configure-assets-storage))]

    (l/inf :hint "start"
           :operation "export"
           :id (str id)
           :path (str (::path cfg)))

    (try
      (db/tx-run! cfg (fn [cfg]
                        (setup-schema! cfg)
                        (binding [bfc/*state* (volatile! empty-summary)]
                          (write-team! cfg team-id)

                          (run! (partial write-project! cfg)
                                (bfc/get-team-projects cfg team-id))

                          (run! (partial write-file! cfg)
                                (bfc/get-team-files cfg team-id))

                          (run! (partial write-storage-object! cfg)
                                (-> bfc/*state* deref :storage-objects))

                          (write! cfg :manifest "team-id" team-id)
                          (write! cfg :manifest "objects" (deref bfc/*state*))

                          (::path cfg))))
      (finally
        (pu/close! (::db cfg))

        (let [elapsed (tp)]
          (l/inf :hint "end"
                 :operation "export"
                 :id (str id)
                 :elapsed (dt/format-duration elapsed)))))))

(defn import-team!
  [cfg path]
  (let [id  (uuid/next)
        tp  (dt/tpoint)

        cfg (-> (create-database cfg path)
                (update ::sto/storage media/configure-assets-storage)
                (assoc ::bfc/timestamp (dt/now)))]

    (l/inf :hint "start"
           :operation "import"
           :id (str id)
           :path (str (::path cfg)))

    (try
      (db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
                        (db/exec-one! conn ["SET idle_in_transaction_session_timeout = 0"])
                        (db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])

                        (binding [bfc/*state* (volatile! {:index {}})]
                          (let [objects (read-obj cfg :manifest "objects")]

                            ;; We first process all storage objects; they are
                            ;; deduplicated, so we can't rely on a simple reindex.
                            ;; This operation populates the index for all storage
                            ;; objects.
                            (run! (partial read-storage-object! cfg) (:storage-objects objects))

                            ;; Populate the index with all the incoming objects
                            (vswap! bfc/*state* update :index
                                    (fn [index]
                                      (-> index
                                          (bfc/update-index (:teams objects))
                                          (bfc/update-index (:projects objects))
                                          (bfc/update-index (:files objects))
                                          (bfc/update-index (:file-media-objects objects))
                                          (bfc/update-index (:team-font-variants objects)))))

                            (let [team-id  (read-obj cfg :manifest "team-id")
                                  team     (read-team! cfg team-id)
                                  features (cfeat/get-team-enabled-features cf/flags team)
                                  cfg      (assoc cfg ::bfc/features features)]

                              (run! (partial read-project! cfg) (:projects objects))
                              (run! (partial read-file! cfg) (:files objects))

                              ;; (run-pending-migrations! cfg)

                              team)))))
      (finally
        (pu/close! (::db cfg))

        (let [elapsed (tp)]
          (l/inf :hint "end"
                 :operation "import"
                 :id (str id)
                 :elapsed (dt/format-duration elapsed)))))))
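
;; A minimal round-trip sketch for the public API above (illustrative
;; only; how `cfg` is assembled -- storage, pool and the other system
;; keys -- depends on the surrounding system and is assumed here):
(comment
  ;; export a whole team into a temporary sqlite file and import it back
  (let [path (export-team! cfg team-id)]
    (import-team! cfg path)))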
||||
@@ -79,8 +79,6 @@
|
||||
|
||||
:telemetry-uri "https://telemetry.penpot.app/"
|
||||
|
||||
:media-max-file-size (* 1024 1024 30) ; 30MiB
|
||||
|
||||
:ldap-user-query "(|(uid=:username)(mail=:username))"
|
||||
:ldap-attrs-username "uid"
|
||||
:ldap-attrs-email "mail"
|
||||
@@ -101,8 +99,6 @@
|
||||
(s/def ::audit-log-archive-uri ::us/string)
|
||||
(s/def ::audit-log-http-handler-concurrency ::us/integer)
|
||||
|
||||
(s/def ::deletion-delay ::dt/duration)
|
||||
|
||||
(s/def ::admins ::us/set-of-valid-emails)
|
||||
(s/def ::file-change-snapshot-every ::us/integer)
|
||||
(s/def ::file-change-snapshot-timeout ::dt/duration)
|
||||
@@ -207,7 +203,6 @@
|
||||
(s/def ::storage-assets-s3-bucket ::us/string)
|
||||
(s/def ::storage-assets-s3-region ::us/keyword)
|
||||
(s/def ::storage-assets-s3-endpoint ::us/string)
|
||||
(s/def ::storage-assets-s3-io-threads ::us/integer)
|
||||
(s/def ::telemetry-uri ::us/string)
|
||||
(s/def ::telemetry-with-taiga ::us/boolean)
|
||||
(s/def ::tenant ::us/string)
|
||||
@@ -216,7 +211,6 @@
|
||||
(s/keys :opt-un [::secret-key
|
||||
::flags
|
||||
::admins
|
||||
::deletion-delay
|
||||
::allow-demo-users
|
||||
::audit-log-archive-uri
|
||||
::audit-log-http-handler-concurrency
|
||||
@@ -300,7 +294,6 @@
|
||||
::redis-uri
|
||||
::registration-domain-whitelist
|
||||
::rpc-rlimit-config
|
||||
::rpc-climit-config
|
||||
|
||||
::semaphore-process-font
|
||||
::semaphore-process-image
|
||||
@@ -326,7 +319,6 @@
|
||||
::storage-assets-s3-bucket
|
||||
::storage-assets-s3-region
|
||||
::storage-assets-s3-endpoint
|
||||
::storage-assets-s3-io-threads
|
||||
::telemetry-enabled
|
||||
::telemetry-uri
|
||||
::telemetry-referer
|
||||
@@ -338,8 +330,7 @@
|
||||
:enable-backend-openapi-doc
|
||||
:enable-backend-worker
|
||||
:enable-secure-session-cookies
|
||||
:enable-email-verification
|
||||
:enable-v2-migration])
|
||||
:enable-email-verification])
|
||||
|
||||
(defn- parse-flags
|
||||
[config]
|
||||
@@ -384,8 +375,7 @@
|
||||
(defonce ^:dynamic flags (parse-flags config))
|
||||
|
||||
(def deletion-delay
|
||||
(or (c/get config :deletion-delay)
|
||||
(dt/duration {:days 7})))
|
||||
(dt/duration {:days 7}))
|
||||
|
||||
(defn get
|
||||
"A configuration getter. Helps code be more testable."
|
||||
|
||||
@@ -19,7 +19,6 @@
|
||||
[app.util.json :as json]
|
||||
[app.util.time :as dt]
|
||||
[clojure.java.io :as io]
|
||||
[clojure.set :as set]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[next.jdbc :as jdbc]
|
||||
@@ -98,7 +97,7 @@
|
||||
:with-credentials (and (contains? cfg ::username)
|
||||
(contains? cfg ::password))
|
||||
:min-size (::min-size cfg)
|
||||
:max-size (::max-size cfg))
|
||||
:max-size (::max-size cfg))
|
||||
(create-pool cfg)))
|
||||
|
||||
(defmethod ig/halt-key! ::pool
|
||||
@@ -232,155 +231,62 @@
|
||||
`(jdbc/with-transaction ~@args)))
|
||||
|
||||
(defn open
|
||||
[system-or-pool]
|
||||
(if (pool? system-or-pool)
|
||||
(jdbc/get-connection system-or-pool)
|
||||
(if (map? system-or-pool)
|
||||
(open (::pool system-or-pool))
|
||||
(throw (IllegalArgumentException. "unable to resolve connection pool")))))
|
||||
[pool]
|
||||
(jdbc/get-connection pool))
|
||||
|
||||
(defn get-update-count
|
||||
[result]
|
||||
(:next.jdbc/update-count result))
|
||||
|
||||
(defn get-connection
|
||||
[cfg-or-conn]
|
||||
(if (connection? cfg-or-conn)
|
||||
cfg-or-conn
|
||||
(if (map? cfg-or-conn)
|
||||
(get-connection (::conn cfg-or-conn))
|
||||
(throw (IllegalArgumentException. "unable to resolve connection")))))
|
||||
|
||||
(defn connection-map?
|
||||
"Check if the provided value is a map like data structure that
|
||||
contains a database connection."
|
||||
(defn- resolve-connectable
|
||||
[o]
|
||||
(and (map? o) (connection? (::conn o))))
|
||||
(if (connection? o)
|
||||
o
|
||||
(if (pool? o)
|
||||
o
|
||||
(or (::conn o) (::pool o)))))
|
||||
|
||||
(defn get-connectable
|
||||
"Resolve to a connection or connection pool instance; if it is not
|
||||
possible, raises an exception"
|
||||
[o]
|
||||
(cond
|
||||
(connection? o) o
|
||||
(pool? o) o
|
||||
(map? o) (get-connectable (or (::conn o) (::pool o)))
|
||||
:else (throw (IllegalArgumentException. "unable to resolve connectable"))))
|
||||
|
||||
(def ^:private params-mapping
|
||||
{::return-keys? :return-keys
|
||||
::return-keys :return-keys})
|
||||
|
||||
(defn rename-opts
|
||||
[opts]
|
||||
(set/rename-keys opts params-mapping))
|
||||
|
||||
(def ^:private default-insert-opts
|
||||
{:builder-fn sql/as-kebab-maps
|
||||
:return-keys true})
|
||||
|
||||
(def ^:private default-opts
|
||||
{:builder-fn sql/as-kebab-maps})
|
||||
|
||||
(defn exec!
|
||||
([ds sv] (exec! ds sv nil))
|
||||
([ds sv]
|
||||
(-> (resolve-connectable ds)
|
||||
(jdbc/execute! sv default-opts)))
|
||||
([ds sv opts]
|
||||
(let [conn (get-connectable ds)
|
||||
opts (if (empty? opts)
|
||||
default-opts
|
||||
(into default-opts (rename-opts opts)))]
|
||||
(jdbc/execute! conn sv opts))))
|
||||
(-> (resolve-connectable ds)
|
||||
(jdbc/execute! sv (merge default-opts opts)))))
|
||||
|
||||
(defn exec-one!
|
||||
([ds sv] (exec-one! ds sv nil))
|
||||
([ds sv]
|
||||
(-> (resolve-connectable ds)
|
||||
(jdbc/execute-one! sv default-opts)))
|
||||
([ds sv opts]
|
||||
(let [conn (get-connectable ds)
|
||||
opts (if (empty? opts)
|
||||
default-opts
|
||||
(into default-opts (rename-opts opts)))]
|
||||
(jdbc/execute-one! conn sv opts))))
|
||||
(-> (resolve-connectable ds)
|
||||
(jdbc/execute-one! sv
|
||||
(-> (merge default-opts opts)
|
||||
(assoc :return-keys (::return-keys? opts false)))))))
|
||||
|
||||
(defn insert!
|
||||
"A helper that builds an insert sql statement and executes it. By
|
||||
default returns the inserted row with all the fields; you can limit
|
||||
the returned columns with the `::columns` option."
|
||||
[ds table params & {:as opts}]
|
||||
(let [conn (get-connectable ds)
|
||||
sql (sql/insert table params opts)
|
||||
opts (if (empty? opts)
|
||||
default-insert-opts
|
||||
(into default-insert-opts (rename-opts opts)))]
|
||||
(jdbc/execute-one! conn sql opts)))
|
||||
(-> (resolve-connectable ds)
|
||||
(exec-one! (sql/insert table params opts)
|
||||
(merge {::return-keys? true} opts))))
|
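
;; Hedged example of the insert! helper described above (table name and
;; columns are hypothetical); with the default ::return-keys? true the
;; inserted row comes back as a kebab-case map:
(comment
  (insert! pool :audit-entry {:id (random-uuid) :name "example"}))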
||||
|
||||
(defn insert-many!
|
||||
"An optimized version of `insert!` that perform insertion of multiple
|
||||
values at once.
|
||||
|
||||
This expands to a single SQL statement with placeholders for every
|
||||
value being inserted. For large data sets, this may exceed the limit
|
||||
of sql string size and/or number of parameters."
|
||||
(defn insert-multi!
|
||||
[ds table cols rows & {:as opts}]
|
||||
(let [conn (get-connectable ds)
|
||||
sql (sql/insert-many table cols rows opts)
|
||||
opts (if (empty? opts)
|
||||
default-insert-opts
|
||||
(into default-insert-opts (rename-opts opts)))
|
||||
opts (update opts :return-keys boolean)]
|
||||
(jdbc/execute! conn sql opts)))
|
||||
(-> (resolve-connectable ds)
|
||||
(exec! (sql/insert-multi table cols rows opts)
|
||||
(merge {::return-keys? true} opts))))
|
||||
|
||||
(defn update!
|
||||
"A helper that build an UPDATE SQL statement and executes it.
|
||||
|
||||
Given a connectable object, a table name, a hash map of columns and
|
||||
values to set, and either a hash map of columns and values to search
|
||||
on or a vector of a SQL where clause and parameters, perform an
|
||||
update on the table.
|
||||
|
||||
By default returns an object with the number of affected rows; a
|
||||
complete row can be returned if you pass `::return-keys` with `true`
|
||||
or with a vector of columns.
|
||||
|
||||
Also it can be combined with the `::many` option if you perform an
|
||||
update to multiple rows and you want all the affected rows to be
|
||||
returned."
|
||||
[ds table params where & {:as opts}]
|
||||
(let [conn (get-connectable ds)
|
||||
sql (sql/update table params where opts)
|
||||
opts (if (empty? opts)
|
||||
default-opts
|
||||
(into default-opts (rename-opts opts)))
|
||||
opts (update opts :return-keys boolean)]
|
||||
(if (::many opts)
|
||||
(jdbc/execute! conn sql opts)
|
||||
(jdbc/execute-one! conn sql opts))))
|
||||
(-> (resolve-connectable ds)
|
||||
(exec-one! (sql/update table params where opts)
|
||||
(merge {::return-keys? true} opts))))
|
||||
|
||||
(defn delete!
|
||||
"A helper that builds an DELETE SQL statement and executes it.
|
||||
|
||||
Given a connectable object, a table name, and either a hash map of columns
|
||||
and values to search on or a vector of a SQL where clause and parameters,
|
||||
perform a delete on the table.
|
||||
|
||||
By default returns an object with the number of affected rows; a
|
||||
complete row can be returned if you pass `::return-keys` with `true`
|
||||
or with a vector of columns.
|
||||
|
||||
Also it can be combined with the `::many` option if you perform an
|
||||
update to multiple rows and you want all the affected rows to be
|
||||
returned."
|
||||
[ds table params & {:as opts}]
|
||||
(let [conn (get-connectable ds)
|
||||
sql (sql/delete table params opts)
|
||||
opts (if (empty? opts)
|
||||
default-opts
|
||||
(into default-opts (rename-opts opts)))]
|
||||
(if (::many opts)
|
||||
(jdbc/execute! conn sql opts)
|
||||
(jdbc/execute-one! conn sql opts))))
|
||||
|
||||
(defn query
|
||||
[ds table params & {:as opts}]
|
||||
(exec! ds (sql/select table params opts) opts))
|
||||
(-> (resolve-connectable ds)
|
||||
(exec-one! (sql/delete table params opts)
|
||||
(merge {::return-keys? true} opts))))
|
||||
|
||||
(defn is-row-deleted?
|
||||
[{:keys [deleted-at]}]
|
||||
@@ -394,7 +300,7 @@
|
||||
[ds table params & {:as opts}]
|
||||
(let [rows (exec! ds (sql/select table params opts))
|
||||
rows (cond->> rows
|
||||
(::remove-deleted opts true)
|
||||
(::remove-deleted? opts true)
|
||||
(remove is-row-deleted?))]
|
||||
(first rows)))
|
||||
|
||||
@@ -403,7 +309,7 @@
|
||||
filters. Raises :not-found exception if no object is found."
|
||||
[ds table params & {:as opts}]
|
||||
(let [row (get* ds table params opts)]
|
||||
(when (and (not row) (::check-deleted opts true))
|
||||
(when (and (not row) (::check-deleted? opts true))
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found
|
||||
:table table
|
||||
@@ -412,32 +318,17 @@
|
||||
|
||||
(defn plan
|
||||
[ds sql]
|
||||
(-> (get-connectable ds)
|
||||
(-> (resolve-connectable ds)
|
||||
(jdbc/plan sql sql/default-opts)))
|
||||
|
||||
(defn cursor
|
||||
"Return a lazy seq of rows using server side cursors"
|
||||
[conn query & {:keys [chunk-size] :or {chunk-size 25}}]
|
||||
(let [cname (str (gensym "cursor_"))
|
||||
fquery [(str "FETCH " chunk-size " FROM " cname)]]
|
||||
|
||||
;; declare cursor
|
||||
(exec-one! conn
|
||||
(if (vector? query)
|
||||
(into [(str "DECLARE " cname " CURSOR FOR " (nth query 0))]
|
||||
(rest query))
|
||||
[(str "DECLARE " cname " CURSOR FOR " query)]))
|
||||
|
||||
;; return a lazy seq
|
||||
((fn fetch-more []
|
||||
(lazy-seq
|
||||
(when-let [chunk (seq (exec! conn fquery))]
|
||||
(concat chunk (fetch-more))))))))
|
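
;; Hedged usage sketch for the `cursor` helper above: it declares a
;; server-side cursor for the given query and lazily fetches rows in
;; chunks of :chunk-size, so it typically needs to run inside an open
;; transaction (the query below is illustrative):
(comment
  (with-open [conn (open pool)]
    (doseq [row (cursor conn ["select id from file"] :chunk-size 100)]
      (println (:id row)))))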
||||
|
||||
(defn get-by-id
|
||||
[ds table id & {:as opts}]
|
||||
(get ds table {:id id} opts))
|
||||
|
||||
(defn query
|
||||
[ds table params & {:as opts}]
|
||||
(exec! ds (sql/select table params opts)))
|
||||
|
||||
(defn pgobject?
|
||||
([v]
|
||||
(instance? PGobject v))
|
||||
@@ -490,10 +381,6 @@
|
||||
(.createArrayOf conn ^String type (into-array Object objects))
|
||||
(.createArrayOf conn ^String type objects))))
|
||||
|
||||
(defn encode-pgarray
|
||||
[data conn type]
|
||||
(create-array conn type data))
|
||||
|
||||
(defn decode-pgpoint
|
||||
[^PGpoint v]
|
||||
(gpt/point (.-x v) (.-y v)))
|
||||
@@ -509,74 +396,57 @@
|
||||
(.setSavepoint conn (name label))))
|
||||
|
||||
(defn release!
|
||||
[^Connection conn ^Savepoint sp]
|
||||
[^Connection conn ^Savepoint sp ]
|
||||
(.releaseSavepoint conn sp))
|
||||
|
||||
(defn rollback!
|
||||
([conn]
|
||||
(if (and (map? conn) (::savepoint conn))
|
||||
(rollback! conn (::savepoint conn))
|
||||
(let [^Connection conn (get-connection conn)]
|
||||
(l/trc :hint "explicit rollback requested")
|
||||
(.rollback conn))))
|
||||
([conn ^Savepoint sp]
|
||||
(let [^Connection conn (get-connection conn)]
|
||||
(l/trc :hint "explicit rollback requested (savepoint)")
|
||||
(.rollback conn sp))))
|
||||
([^Connection conn]
|
||||
(.rollback conn))
|
||||
([^Connection conn ^Savepoint sp]
|
||||
(.rollback conn sp)))
|
||||
|
||||
(defn tx-run!
|
||||
[system f & params]
|
||||
[cfg f]
|
||||
(cond
|
||||
(connection? system)
|
||||
(tx-run! {::conn system} f)
|
||||
(connection? cfg)
|
||||
(tx-run! {::conn cfg} f)
|
||||
|
||||
(pool? system)
|
||||
(tx-run! {::pool system} f)
|
||||
(pool? cfg)
|
||||
(tx-run! {::pool cfg} f)
|
||||
|
||||
(::conn system)
|
||||
(let [conn (::conn system)
|
||||
(::conn cfg)
|
||||
(let [conn (::conn cfg)
|
||||
sp (savepoint conn)]
|
||||
(try
|
||||
(let [system' (-> system
|
||||
(assoc ::savepoint sp)
|
||||
(dissoc ::rollback))
|
||||
result (apply f system' params)]
|
||||
(if (::rollback system)
|
||||
(rollback! conn sp)
|
||||
(release! conn sp))
|
||||
(let [result (f cfg)]
|
||||
(release! conn sp)
|
||||
result)
|
||||
(catch Throwable cause
|
||||
(.rollback ^Connection conn ^Savepoint sp)
|
||||
(rollback! sp)
|
||||
(throw cause))))
|
||||
|
||||
(::pool system)
|
||||
(with-atomic [conn (::pool system)]
|
||||
(let [system' (-> system
|
||||
(assoc ::conn conn)
|
||||
(dissoc ::rollback))
|
||||
result (apply f system' params)]
|
||||
(when (::rollback system)
|
||||
(rollback! conn))
|
||||
result))
|
||||
(::pool cfg)
|
||||
(with-atomic [conn (::pool cfg)]
|
||||
(f (assoc cfg ::conn conn)))
|
||||
|
||||
:else
|
||||
(throw (IllegalArgumentException. "invalid system/cfg provided"))))
|
||||
(throw (IllegalArgumentException. "invalid arguments"))))
|
||||
|
||||
(defn run!
|
||||
[system f & params]
|
||||
[cfg f]
|
||||
(cond
|
||||
(connection? system)
|
||||
(run! {::conn system} f)
|
||||
(connection? cfg)
|
||||
(run! {::conn cfg} f)
|
||||
|
||||
(pool? system)
|
||||
(run! {::pool system} f)
|
||||
(pool? cfg)
|
||||
(run! {::pool cfg} f)
|
||||
|
||||
(::conn system)
|
||||
(apply f system params)
|
||||
(::conn cfg)
|
||||
(f cfg)
|
||||
|
||||
(::pool system)
|
||||
(with-open [^Connection conn (open (::pool system))]
|
||||
(apply f (assoc system ::conn conn) params))
|
||||
(::pool cfg)
|
||||
(with-open [^Connection conn (open (::pool cfg))]
|
||||
(f (assoc cfg ::conn conn)))
|
||||
|
||||
:else
|
||||
(throw (IllegalArgumentException. "invalid arguments"))))
|
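
;; Hedged usage sketch of the (new, two-argument) tx-run!/run! helpers
;; in this hunk: both accept a cfg map holding ::pool or ::conn and a
;; function that receives the cfg with an open ::conn assoc'ed in (the
;; table and query below are hypothetical):
(comment
  (tx-run! {::pool pool}
           (fn [cfg]
             (insert! cfg :my-table {:id 1})))
  (run! {::pool pool}
        (fn [cfg]
          (exec! cfg ["select 1"]))))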
||||
@@ -650,6 +520,11 @@
|
||||
(.setType "jsonb")
|
||||
(.setValue (json/encode-str data)))))
|
||||
|
||||
(defn get-update-count
|
||||
[result]
|
||||
(:next.jdbc/update-count result))
|
||||
|
||||
|
||||
;; --- Locks
|
||||
|
||||
(def ^:private siphash-state
|
||||
|
||||
@@ -29,14 +29,11 @@
|
||||
([table key-map opts]
|
||||
(let [opts (merge default-opts opts)
|
||||
opts (cond-> opts
|
||||
(::db/on-conflict-do-nothing? opts)
|
||||
(assoc :suffix "ON CONFLICT DO NOTHING")
|
||||
|
||||
(::on-conflict-do-nothing opts)
|
||||
(:on-conflict-do-nothing opts)
|
||||
(assoc :suffix "ON CONFLICT DO NOTHING"))]
|
||||
(sql/for-insert table key-map opts))))
|
||||
|
||||
(defn insert-many
|
||||
(defn insert-multi
|
||||
[table cols rows opts]
|
||||
(let [opts (merge default-opts opts)]
|
||||
(sql/for-insert-multi table cols rows opts)))
|
||||
@@ -47,30 +44,22 @@
|
||||
([table where-params opts]
|
||||
(let [opts (merge default-opts opts)
|
||||
opts (cond-> opts
|
||||
(::order-by opts) (assoc :order-by (::order-by opts))
|
||||
(::columns opts) (assoc :columns (::columns opts))
|
||||
(::for-update opts) (assoc :suffix "FOR UPDATE")
|
||||
(::for-share opts) (assoc :suffix "FOR SHARE"))]
|
||||
(::db/for-update? opts) (assoc :suffix "FOR UPDATE")
|
||||
(::db/for-share? opts) (assoc :suffix "FOR KEY SHARE")
|
||||
(:for-update opts) (assoc :suffix "FOR UPDATE")
|
||||
(:for-key-share opts) (assoc :suffix "FOR KEY SHARE"))]
|
||||
(sql/for-query table where-params opts))))
|
||||
|
||||
(defn update
|
||||
([table key-map where-params]
|
||||
(update table key-map where-params nil))
|
||||
([table key-map where-params opts]
|
||||
(let [opts (into default-opts opts)
|
||||
keys (::db/return-keys opts)
|
||||
opts (if (vector? keys)
|
||||
(assoc opts :suffix (str "RETURNING " (sql/as-cols keys opts)))
|
||||
opts)]
|
||||
(let [opts (merge default-opts opts)]
|
||||
(sql/for-update table key-map where-params opts))))
|
||||
|
||||
(defn delete
|
||||
([table where-params]
|
||||
(delete table where-params nil))
|
||||
([table where-params opts]
|
||||
(let [opts (merge default-opts opts)
|
||||
keys (::db/return-keys opts)
|
||||
opts (if (vector? keys)
|
||||
(assoc opts :suffix (str "RETURNING " (sql/as-cols keys opts)))
|
||||
opts)]
|
||||
(let [opts (merge default-opts opts)]
|
||||
(sql/for-delete table where-params opts))))
|
||||
|
||||
@@ -311,12 +311,6 @@
|
||||
^String (::password cfg))
|
||||
|
||||
(let [^MimeMessage message (create-smtp-message cfg session params)]
|
||||
(l/dbg :hint "sendmail"
|
||||
:id (:id params)
|
||||
:to (:to params)
|
||||
:subject (str/trim (:subject params))
|
||||
:body (str/join "," (map :type (:body params))))
|
||||
|
||||
(.sendMessage ^Transport transport
|
||||
^MimeMessage message
|
||||
(.getAllRecipients message))))))
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,122 +0,0 @@

;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC

(ns app.features.fdata
  "Helpers for `fdata/*` related feature migrations"
  (:require
   [app.common.data :as d]
   [app.common.exceptions :as ex]
   [app.common.logging :as l]
   [app.db :as db]
   [app.db.sql :as-alias sql]
   [app.util.blob :as blob]
   [app.util.objects-map :as omap]
   [app.util.pointer-map :as pmap]))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; OBJECTS-MAP
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn enable-objects-map
  [file]
  (let [update-page
        (fn [page]
          (if (and (pmap/pointer-map? page)
                   (not (pmap/loaded? page)))
            page
            (update page :objects omap/wrap)))

        update-data
        (fn [fdata]
          (update fdata :pages-index d/update-vals update-page))]

    (-> file
        (update :data update-data)
        (update :features conj "fdata/objects-map"))))

(defn process-objects
  "Apply a function to all objects-maps on the file. Usually used to
  convert the objects-map instances to plain maps."
  [fdata update-fn]
  (if (contains? fdata :pages-index)
    (update fdata :pages-index d/update-vals
            (fn [page]
              (update page :objects
                      (fn [objects]
                        (if (omap/objects-map? objects)
                          (update-fn objects)
                          objects)))))
    fdata))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; POINTER-MAP
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn load-pointer
  "A database pointer loader helper"
  [system file-id id]
  (let [{:keys [content]} (db/get system :file-data-fragment
                                  {:id id :file-id file-id}
                                  {::sql/columns [:content]
                                   ::db/check-deleted false})]

    (l/trc :hint "load pointer"
           :file-id (str file-id)
           :id (str id)
           :found (some? content))

    (when-not content
      (ex/raise :type :internal
                :code :fragment-not-found
                :hint "fragment not found"
                :file-id file-id
                :fragment-id id))

    (blob/decode content)))

(defn persist-pointers!
  "Given a database connection and the final file-id, persist all
  pointers to the underlying storage (the database)."
  [system file-id]
  (let [conn (db/get-connection system)]
    (doseq [[id item] @pmap/*tracked*]
      (when (pmap/modified? item)
        (l/trc :hint "persist pointer" :file-id (str file-id) :id (str id))
        (let [content (-> item deref blob/encode)]
          (db/insert! conn :file-data-fragment
                      {:id id
                       :file-id file-id
                       :content content}))))))

(defn process-pointers
  "Apply a function to all pointers on the file. Usually used to
  dereference the pointer to a plain value before some processing."
  [fdata update-fn]
  (let [update-fn' (fn [val]
                     (if (pmap/pointer-map? val)
                       (update-fn val)
                       val))]
    (-> fdata
        (d/update-vals update-fn')
        (update :pages-index d/update-vals update-fn'))))

(defn get-used-pointer-ids
  "Given a file, return all pointer ids used in the data."
  [fdata]
  (->> (concat (vals fdata)
               (vals (:pages-index fdata)))
       (into #{} (comp (filter pmap/pointer-map?)
                       (map pmap/get-id)))))

(defn enable-pointer-map
  "Enable the fdata/pointer-map feature on the file."
  [file]
  (-> file
      (update :data (fn [fdata]
                      (-> fdata
                          (update :pages-index d/update-vals pmap/wrap)
                          (d/update-when :components pmap/wrap))))
      (update :features conj "fdata/pointer-map")))
@@ -23,15 +23,15 @@
|
||||
[app.metrics :as mtx]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.doc :as-alias rpc.doc]
|
||||
[app.setup :as-alias setup]
|
||||
[app.worker :as wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[promesa.exec :as px]
|
||||
[reitit.core :as r]
|
||||
[reitit.middleware :as rr]
|
||||
[ring.request :as rreq]
|
||||
[ring.response :as-alias rres]
|
||||
[yetti.adapter :as yt]))
|
||||
[yetti.adapter :as yt]
|
||||
[yetti.request :as yrq]
|
||||
[yetti.response :as-alias yrs]))
|
||||
|
||||
(declare router-handler)
|
||||
|
||||
@@ -53,8 +53,8 @@
|
||||
[_ cfg]
|
||||
(merge {::port 6060
|
||||
::host "0.0.0.0"
|
||||
::max-body-size (* 1024 1024 30) ; default 30 MiB
|
||||
::max-multipart-body-size (* 1024 1024 120)} ; default 120 MiB
|
||||
::max-body-size (* 1024 1024 30) ; 30 MiB
|
||||
::max-multipart-body-size (* 1024 1024 120)} ; 120 MiB
|
||||
(d/without-nils cfg)))
|
||||
|
||||
(defmethod ig/pre-init-spec ::server [_]
|
||||
@@ -63,7 +63,8 @@
|
||||
::max-multipart-body-size
|
||||
::router
|
||||
::handler
|
||||
::io-threads]))
|
||||
::io-threads
|
||||
::wrk/executor]))
|
||||
|
||||
(defmethod ig/init-key ::server
|
||||
[_ {:keys [::handler ::router ::host ::port] :as cfg}]
|
||||
@@ -74,9 +75,11 @@
|
||||
:http/max-multipart-body-size (::max-multipart-body-size cfg)
|
||||
:xnio/io-threads (or (::io-threads cfg)
|
||||
(max 3 (px/get-available-processors)))
|
||||
:xnio/dispatch :virtual
|
||||
:ring/compat :ring2
|
||||
:socket/backlog 4069}
|
||||
:xnio/worker-threads (or (::worker-threads cfg)
|
||||
(max 6 (px/get-available-processors)))
|
||||
:xnio/dispatch true
|
||||
:socket/backlog 4069
|
||||
:ring/async true}
|
||||
|
||||
handler (cond
|
||||
(some? router)
|
||||
@@ -99,13 +102,13 @@
|
||||
(yt/stop! server))
|
||||
|
||||
(defn- not-found-handler
|
||||
[_]
|
||||
{::rres/status 404})
|
||||
[_ respond _]
|
||||
(respond {::yrs/status 404}))
|
||||
|
||||
(defn- router-handler
|
||||
[router]
|
||||
(letfn [(resolve-handler [request]
|
||||
(if-let [match (r/match-by-path router (rreq/path request))]
|
||||
(if-let [match (r/match-by-path router (yrq/path request))]
|
||||
(let [params (:path-params match)
|
||||
result (:result match)
|
||||
handler (or (:handler result) not-found-handler)
|
||||
@@ -117,15 +120,18 @@
|
||||
(let [{:keys [body] :as response} (errors/handle cause request)]
|
||||
(cond-> response
|
||||
(map? body)
|
||||
(-> (update ::rres/headers assoc "content-type" "application/transit+json")
|
||||
(assoc ::rres/body (t/encode-str body {:type :json-verbose}))))))]
|
||||
(-> (update ::yrs/headers assoc "content-type" "application/transit+json")
|
||||
(assoc ::yrs/body (t/encode-str body {:type :json-verbose}))))))]
|
||||
|
||||
(fn [request]
|
||||
(let [handler (resolve-handler request)]
|
||||
(try
|
||||
(handler)
|
||||
(catch Throwable cause
|
||||
(on-error cause request)))))))
|
||||
(fn [request respond _]
|
||||
(let [handler (resolve-handler request)
|
||||
exchange (yrq/exchange request)]
|
||||
(handler
|
||||
(fn [response]
|
||||
(yt/dispatch! exchange (partial respond response)))
|
||||
(fn [cause]
|
||||
(let [response (on-error cause request)]
|
||||
(yt/dispatch! exchange (partial respond response)))))))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; HTTP ROUTER
|
||||
@@ -137,7 +143,7 @@
|
||||
::rpc/routes
|
||||
::rpc.doc/routes
|
||||
::oidc/routes
|
||||
::setup/props
|
||||
::main/props
|
||||
::assets/routes
|
||||
::debug/routes
|
||||
::db/pool
|
||||
@@ -154,7 +160,8 @@
|
||||
[session/soft-auth cfg]
|
||||
[actoken/soft-auth cfg]
|
||||
[mw/errors errors/handle]
|
||||
[mw/restrict-methods]]}
|
||||
[mw/restrict-methods]
|
||||
[mw/with-dispatch :vthread]]}
|
||||
|
||||
(::mtx/routes cfg)
|
||||
(::assets/routes cfg)
|
||||
|
||||
@@ -10,15 +10,14 @@
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.main :as-alias main]
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[ring.request :as rreq]))
|
||||
[yetti.request :as yrq]))
|
||||
|
||||
(def header-re #"^Token\s+(.*)")
|
||||
|
||||
(defn- get-token
|
||||
[request]
|
||||
(some->> (rreq/get-header request "authorization")
|
||||
(some->> (yrq/get-header request "authorization")
|
||||
(re-matches header-re)
|
||||
(second)))
|
||||
|
||||
@@ -31,7 +30,7 @@
|
||||
"SELECT perms, profile_id, expires_at
|
||||
FROM access_token
|
||||
WHERE id = ?
|
||||
AND (expires_at IS NULL
|
||||
AND (expires_at IS NULL
|
||||
OR (expires_at > now()));")
|
||||
|
||||
(defn- get-token-data
|
||||
@@ -43,7 +42,7 @@
|
||||
(defn- wrap-soft-auth
|
||||
"Soft Authentication, will be executed synchronously on the undertow
|
||||
worker thread."
|
||||
[handler {:keys [::setup/props]}]
|
||||
[handler {:keys [::main/props]}]
|
||||
(letfn [(handle-request [request]
|
||||
(try
|
||||
(let [token (get-token request)
|
||||
@@ -55,8 +54,9 @@
|
||||
(l/trace :hint "exception on decoding malformed token" :cause cause)
|
||||
request)))]
|
||||
|
||||
(fn [request]
|
||||
(handler (handle-request request)))))
|
||||
(fn [request respond raise]
|
||||
(let [request (handle-request request)]
|
||||
(handler request respond raise)))))
|
||||
|
||||
(defn- wrap-authz
|
||||
"Authorization middleware, will be executed synchronously on vthread."
|
||||
|
||||
@@ -16,7 +16,7 @@
|
||||
[app.util.time :as dt]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[ring.response :as-alias rres]))
|
||||
[yetti.response :as-alias yrs]))
|
||||
|
||||
(def ^:private cache-max-age
|
||||
(dt/duration {:hours 24}))
|
||||
@@ -37,11 +37,11 @@
|
||||
(defn- serve-object-from-s3
|
||||
[{:keys [::sto/storage] :as cfg} obj]
|
||||
(let [{:keys [host port] :as url} (sto/get-object-url storage obj {:max-age signature-max-age})]
|
||||
{::rres/status 307
|
||||
::rres/headers {"location" (str url)
|
||||
"x-host" (cond-> host port (str ":" port))
|
||||
"x-mtype" (-> obj meta :content-type)
|
||||
"cache-control" (str "max-age=" (inst-ms cache-max-age))}}))
|
||||
{::yrs/status 307
|
||||
::yrs/headers {"location" (str url)
|
||||
"x-host" (cond-> host port (str ":" port))
|
||||
"x-mtype" (-> obj meta :content-type)
|
||||
"cache-control" (str "max-age=" (inst-ms cache-max-age))}}))
|
||||
|
||||
(defn- serve-object-from-fs
|
||||
[{:keys [::path]} obj]
|
||||
@@ -51,8 +51,8 @@
|
||||
headers {"x-accel-redirect" (:path purl)
|
||||
"content-type" (:content-type mdata)
|
||||
"cache-control" (str "max-age=" (inst-ms cache-max-age))}]
|
||||
{::rres/status 204
|
||||
::rres/headers headers}))
|
||||
{::yrs/status 204
|
||||
::yrs/headers headers}))
|
||||
|
||||
(defn- serve-object
|
||||
"Helper function that returns the appropriate response depending on
|
||||
@@ -70,7 +70,7 @@
|
||||
obj (sto/get-object storage id)]
|
||||
(if obj
|
||||
(serve-object cfg obj)
|
||||
{::rres/status 404})))
|
||||
{::yrs/status 404})))
|
||||
|
||||
(defn- generic-handler
|
||||
"A generic handler helper/common code for file-media based handlers."
|
||||
@@ -81,7 +81,7 @@
|
||||
sobj (sto/get-object storage (kf mobj))]
|
||||
(if sobj
|
||||
(serve-object cfg sobj)
|
||||
{::rres/status 404})))
|
||||
{::yrs/status 404})))
|
||||
|
||||
(defn file-objects-handler
|
||||
"Handler that serves storage objects by file media id."
|
||||
|
||||
@@ -13,7 +13,6 @@
|
||||
[app.db.sql :as sql]
|
||||
[app.http.client :as http]
|
||||
[app.main :as-alias main]
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
@@ -21,8 +20,8 @@
|
||||
[integrant.core :as ig]
|
||||
[jsonista.core :as j]
|
||||
[promesa.exec :as px]
|
||||
[ring.request :as rreq]
|
||||
[ring.response :as-alias rres]))
|
||||
[yetti.request :as yrq]
|
||||
[yetti.response :as-alias yrs]))
|
||||
|
||||
(declare parse-json)
|
||||
(declare handle-request)
|
||||
@@ -31,15 +30,16 @@
|
||||
|
||||
(defmethod ig/pre-init-spec ::routes [_]
|
||||
(s/keys :req [::http/client
|
||||
::setup/props
|
||||
::db/pool]))
|
||||
::main/props
|
||||
::db/pool
|
||||
::wrk/executor]))
|
||||
|
||||
(defmethod ig/init-key ::routes
|
||||
[_ cfg]
|
||||
[_ {:keys [::wrk/executor] :as cfg}]
|
||||
(letfn [(handler [request]
|
||||
(let [data (-> request rreq/body slurp)]
|
||||
(px/run! :vthread (partial handle-request cfg data)))
|
||||
{::rres/status 200})]
|
||||
(let [data (-> request yrq/body slurp)]
|
||||
(px/run! executor #(handle-request cfg data)))
|
||||
{::yrs/status 200})]
|
||||
["/sns" {:handler handler
|
||||
:allowed-methods #{:post}}]))
|
||||
|
||||
@@ -107,7 +107,7 @@
|
||||
[cfg headers]
|
||||
(let [tdata (get headers "x-penpot-data")]
|
||||
(when-not (str/empty? tdata)
|
||||
(let [result (tokens/verify (::setup/props cfg) {:token tdata :iss :profile-identity})]
|
||||
(let [result (tokens/verify (::main/props cfg) {:token tdata :iss :profile-identity})]
|
||||
(:profile-id result)))))
|
||||
|
||||
(defn- parse-notification
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"Http client abstraction layer."
|
||||
(:require
|
||||
[app.common.spec :as us]
|
||||
[app.worker :as wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[java-http-clj.core :as http]
|
||||
@@ -20,11 +21,12 @@
|
||||
(s/keys :req [::client]))
|
||||
|
||||
(defmethod ig/pre-init-spec ::client [_]
|
||||
(s/keys :req []))
|
||||
(s/keys :req [::wrk/executor]))
|
||||
|
||||
(defmethod ig/init-key ::client
|
||||
[_ _]
|
||||
(http/build-client {:connect-timeout 30000 ;; 10s
|
||||
[_ {:keys [::wrk/executor] :as cfg}]
|
||||
(http/build-client {:executor executor
|
||||
:connect-timeout 30000 ;; 10s
|
||||
:follow-redirects :always}))
|
||||
|
||||
(defn send!
|
||||
@@ -55,8 +57,8 @@
|
||||
convention."
|
||||
([cfg-or-client request]
|
||||
(let [client (resolve-client cfg-or-client)]
|
||||
(send! client request {:sync? true})))
|
||||
(send! client request {})))
|
||||
([cfg-or-client request options]
|
||||
(let [client (resolve-client cfg-or-client)]
|
||||
(send! client request (merge {:sync? true} options)))))
|
||||
(send! client request options))))
|
||||
|
||||
|
||||
@@ -7,8 +7,6 @@
|
||||
(ns app.http.debug
|
||||
(:refer-clojure :exclude [error-handler])
|
||||
(:require
|
||||
[app.binfile.v1 :as bf.v1]
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.pprint :as pp]
|
||||
@@ -16,14 +14,10 @@
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.http.session :as session]
|
||||
[app.main :as-alias main]
|
||||
[app.rpc.commands.auth :as auth]
|
||||
[app.rpc.commands.binfile :as binf]
|
||||
[app.rpc.commands.files-create :refer [create-file]]
|
||||
[app.rpc.commands.profile :as profile]
|
||||
[app.setup :as-alias setup]
|
||||
[app.srepl.helpers :as srepl]
|
||||
[app.storage :as-alias sto]
|
||||
[app.storage.tmp :as tmp]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.template :as tmpl]
|
||||
[app.util.time :as dt]
|
||||
@@ -34,41 +28,55 @@
|
||||
[integrant.core :as ig]
|
||||
[markdown.core :as md]
|
||||
[markdown.transformers :as mdt]
|
||||
[ring.request :as rreq]
|
||||
[ring.response :as rres]))
|
||||
[yetti.request :as yrq]
|
||||
[yetti.response :as yrs]))
|
||||
|
||||
;; (selmer.parser/cache-off!)
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; HELPERS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn authorized?
|
||||
[pool {:keys [::session/profile-id]}]
|
||||
(or (= "devenv" (cf/get :host))
|
||||
(let [profile (ex/ignoring (profile/get-profile pool profile-id))
|
||||
admins (or (cf/get :admins) #{})]
|
||||
(contains? admins (:email profile)))))
|
||||
|
||||
(defn prepare-response
|
||||
[body]
|
||||
(let [headers {"content-type" "application/transit+json"}]
|
||||
{::yrs/status 200
|
||||
::yrs/body body
|
||||
::yrs/headers headers}))
|
||||
|
||||
(defn prepare-download-response
|
||||
[body filename]
|
||||
(let [headers {"content-disposition" (str "attachment; filename=" filename)
|
||||
"content-type" "application/octet-stream"}]
|
||||
{::yrs/status 200
|
||||
::yrs/body body
|
||||
::yrs/headers headers}))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; INDEX
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn index-handler
|
||||
[_cfg _request]
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "text/html"}
|
||||
::rres/body (-> (io/resource "app/templates/debug.tmpl")
|
||||
(tmpl/render {}))})
|
||||
[{:keys [::db/pool]} request]
|
||||
(when-not (authorized? pool request)
|
||||
(ex/raise :type :authentication
|
||||
:code :only-admins-allowed))
|
||||
{::yrs/status 200
|
||||
::yrs/headers {"content-type" "text/html"}
|
||||
::yrs/body (-> (io/resource "app/templates/debug.tmpl")
|
||||
(tmpl/render {}))})
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; FILE CHANGES
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn prepare-response
|
||||
[body]
|
||||
(let [headers {"content-type" "application/transit+json"}]
|
||||
{::rres/status 200
|
||||
::rres/body body
|
||||
::rres/headers headers}))
|
||||
|
||||
(defn prepare-download-response
|
||||
[body filename]
|
||||
(let [headers {"content-disposition" (str "attachment; filename=" filename)
|
||||
"content-type" "application/octet-stream"}]
|
||||
{::rres/status 200
|
||||
::rres/body body
|
||||
::rres/headers headers}))
|
||||
|
||||
(def sql:retrieve-range-of-changes
|
||||
"select revn, changes from file_change where file_id=? and revn >= ? and revn <= ? order by revn")
|
||||
|
||||
@@ -77,6 +85,10 @@
|
||||
|
||||
(defn- retrieve-file-data
|
||||
[{:keys [::db/pool]} {:keys [params ::session/profile-id] :as request}]
|
||||
(when-not (authorized? pool request)
|
||||
(ex/raise :type :authentication
|
||||
:code :only-admins-allowed))
|
||||
|
||||
(let [file-id (some-> params :file-id parse-uuid)
|
||||
revn (some-> params :revn parse-long)
|
||||
filename (str file-id)]
|
||||
@@ -101,16 +113,16 @@
|
||||
(let [profile (profile/get-profile pool profile-id)
|
||||
project-id (:default-project-id profile)]
|
||||
|
||||
(db/run! pool (fn [{:keys [::db/conn] :as cfg}]
|
||||
(create-file cfg {:id file-id
|
||||
:name (str "Cloned file: " filename)
|
||||
:project-id project-id
|
||||
:profile-id profile-id})
|
||||
(db/run! pool (fn [{:keys [::db/conn]}]
|
||||
(create-file conn {:id file-id
|
||||
:name (str "Cloned file: " filename)
|
||||
:project-id project-id
|
||||
:profile-id profile-id})
|
||||
(db/update! conn :file
|
||||
{:data data}
|
||||
{:id file-id})
|
||||
{::rres/status 201
|
||||
::rres/body "OK CREATED"})))
|
||||
{::yrs/status 201
|
||||
::yrs/body "OK CREATED"})))
|
||||
|
||||
:else
|
||||
(prepare-response (blob/decode data))))))
|
||||
@@ -139,26 +151,26 @@
|
||||
{:data data
|
||||
:deleted-at nil}
|
||||
{:id file-id})
|
||||
{::rres/status 200
|
||||
::rres/body "OK UPDATED"})
|
||||
{::yrs/status 200
|
||||
::yrs/body "OK UPDATED"})
|
||||
|
||||
(db/run! pool (fn [{:keys [::db/conn] :as cfg}]
|
||||
(create-file cfg {:id file-id
|
||||
:name fname
|
||||
:project-id project-id
|
||||
:profile-id profile-id})
|
||||
(db/run! pool (fn [{:keys [::db/conn]}]
|
||||
(create-file conn {:id file-id
|
||||
:name fname
|
||||
:project-id project-id
|
||||
:profile-id profile-id})
|
||||
(db/update! conn :file
|
||||
{:data data}
|
||||
{:id file-id})
|
||||
{::rres/status 201
|
||||
::rres/body "OK CREATED"}))))
|
||||
{::yrs/status 201
|
||||
::yrs/body "OK CREATED"}))))
|
||||
|
||||
{::rres/status 500
|
||||
::rres/body "ERROR"})))
|
||||
{::yrs/status 500
|
||||
::yrs/body "ERROR"})))
|
||||
|
||||
(defn file-data-handler
|
||||
[cfg request]
|
||||
(case (rreq/method request)
|
||||
(case (yrq/method request)
|
||||
:get (retrieve-file-data cfg request)
|
||||
:post (upload-file-data cfg request)
|
||||
(ex/raise :type :http
|
||||
@@ -166,6 +178,10 @@
|
||||
|
||||
(defn file-changes-handler
|
||||
[{:keys [::db/pool]} {:keys [params] :as request}]
|
||||
(when-not (authorized? pool request)
|
||||
(ex/raise :type :authentication
|
||||
:code :only-admins-allowed))
|
||||
|
||||
(letfn [(retrieve-changes [file-id revn]
|
||||
(if (str/includes? revn ":")
|
||||
(let [[start end] (->> (str/split revn #":")
|
||||
@@ -232,19 +248,24 @@
|
||||
(-> (io/resource "app/templates/error-report.v3.tmpl")
|
||||
(tmpl/render (-> content
|
||||
(assoc :id id)
|
||||
(assoc :created-at (dt/format-instant created-at :rfc1123))))))]
|
||||
(assoc :created-at (dt/format-instant created-at :rfc1123))))))
|
||||
]
|
||||
|
||||
(when-not (authorized? pool request)
|
||||
(ex/raise :type :authentication
|
||||
:code :only-admins-allowed))
|
||||
|
||||
(if-let [report (get-report request)]
|
||||
(let [result (case (:version report)
|
||||
1 (render-template-v1 report)
|
||||
2 (render-template-v2 report)
|
||||
3 (render-template-v3 report))]
|
||||
{::rres/status 200
|
||||
::rres/body result
|
||||
::rres/headers {"content-type" "text/html; charset=utf-8"
|
||||
"x-robots-tag" "noindex"}})
|
||||
{::rres/status 404
|
||||
::rres/body "not found"})))
|
||||
{::yrs/status 200
|
||||
::yrs/body result
|
||||
::yrs/headers {"content-type" "text/html; charset=utf-8"
|
||||
"x-robots-tag" "noindex"}})
|
||||
{::yrs/status 404
|
||||
::yrs/body "not found"})))
|
||||
|
||||
(def sql:error-reports
|
||||
"SELECT id, created_at,
|
||||
@@ -254,14 +275,17 @@
|
||||
LIMIT 200")
|
||||
|
||||
(defn error-list-handler
|
||||
[{:keys [::db/pool]} _request]
|
||||
[{:keys [::db/pool]} request]
|
||||
(when-not (authorized? pool request)
|
||||
(ex/raise :type :authentication
|
||||
:code :only-admins-allowed))
|
||||
(let [items (->> (db/exec! pool [sql:error-reports])
|
||||
(map #(update % :created-at dt/format-instant :rfc1123)))]
|
||||
{::rres/status 200
|
||||
::rres/body (-> (io/resource "app/templates/error-list.tmpl")
|
||||
(tmpl/render {:items items}))
|
||||
::rres/headers {"content-type" "text/html; charset=utf-8"
|
||||
"x-robots-tag" "noindex"}}))
|
||||
{::yrs/status 200
|
||||
::yrs/body (-> (io/resource "app/templates/error-list.tmpl")
|
||||
(tmpl/render {:items items}))
|
||||
::yrs/headers {"content-type" "text/html; charset=utf-8"
|
||||
"x-robots-tag" "noindex"}}))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; EXPORT/IMPORT
|
||||
@@ -270,10 +294,9 @@
|
||||
(defn export-handler
|
||||
[{:keys [::db/pool] :as cfg} {:keys [params ::session/profile-id] :as request}]
|
||||
|
||||
(let [file-ids (into #{}
|
||||
(comp (remove empty?)
|
||||
(map parse-uuid))
|
||||
(:file-ids params))
|
||||
(let [file-ids (->> (:file-ids params)
|
||||
(remove empty?)
|
||||
(mapv parse-uuid))
|
||||
libs? (contains? params :includelibs)
|
||||
clone? (contains? params :clone)
|
||||
embed? (contains? params :embedassets)]
|
||||
@@ -282,30 +305,31 @@
|
||||
(ex/raise :type :validation
|
||||
:code :missing-arguments))
|
||||
|
||||
(let [path (tmp/tempfile :prefix "penpot.export.")]
|
||||
(with-open [output (io/output-stream path)]
|
||||
(-> cfg
|
||||
(assoc ::bf.v1/ids file-ids)
|
||||
(assoc ::bf.v1/embed-assets embed?)
|
||||
(assoc ::bf.v1/include-libraries libs?)
|
||||
(bf.v1/export-files! output)))
|
||||
|
||||
(let [path (-> cfg
|
||||
(assoc ::binf/file-ids file-ids)
|
||||
(assoc ::binf/embed-assets? embed?)
|
||||
(assoc ::binf/include-libraries? libs?)
|
||||
(binf/export-to-tmpfile!))]
|
||||
(if clone?
|
||||
(let [profile (profile/get-profile pool profile-id)
|
||||
project-id (:default-project-id profile)
|
||||
cfg (assoc cfg
|
||||
::bf.v1/overwrite false
|
||||
::bf.v1/profile-id profile-id
|
||||
::bf.v1/project-id project-id)]
|
||||
(bf.v1/import-files! cfg path)
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "text/plain"}
|
||||
::rres/body "OK CLONED"})
|
||||
project-id (:default-project-id profile)]
|
||||
(binf/import!
|
||||
(assoc cfg
|
||||
::binf/input path
|
||||
::binf/overwrite? false
|
||||
::binf/ignore-index-errors? true
|
||||
::binf/profile-id profile-id
|
||||
::binf/project-id project-id))
|
||||
|
||||
{::yrs/status 200
|
||||
::yrs/headers {"content-type" "text/plain"}
|
||||
::yrs/body "OK CLONED"})
|
||||
|
||||
{::yrs/status 200
|
||||
::yrs/body (io/input-stream path)
|
||||
::yrs/headers {"content-type" "application/octet-stream"
|
||||
"content-disposition" (str "attachmen; filename=" (first file-ids) ".penpot")}}))))
|
||||
|
||||
{::rres/status 200
|
||||
::rres/body (io/input-stream path)
|
||||
::rres/headers {"content-type" "application/octet-stream"
|
||||
"content-disposition" (str "attachmen; filename=" (first file-ids) ".penpot")}}))))
|
||||
|
||||
|
||||
(defn import-handler
|
||||
@@ -318,108 +342,26 @@
|
||||
(let [profile (profile/get-profile pool profile-id)
|
||||
project-id (:default-project-id profile)
|
||||
overwrite? (contains? params :overwrite)
|
||||
migrate? (contains? params :migrate)]
|
||||
migrate? (contains? params :migrate)
|
||||
ignore-index-errors? (contains? params :ignore-index-errors)]
|
||||
|
||||
(when-not project-id
|
||||
(ex/raise :type :validation
|
||||
:code :missing-project
|
||||
:hint "project not found"))
|
||||
|
||||
(let [path (-> params :file :path)
|
||||
cfg (assoc cfg
|
||||
::bf.v1/overwrite overwrite?
|
||||
::bf.v1/migrate migrate?
|
||||
::bf.v1/profile-id profile-id
|
||||
::bf.v1/project-id project-id)]
|
||||
(bf.v1/import-files! cfg path)
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "text/plain"}
|
||||
::rres/body "OK"})))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; ACTIONS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn- resend-email-notification
|
||||
[{:keys [::db/pool ::setup/props] :as cfg} {:keys [params] :as request}]
|
||||
|
||||
(when-not (contains? params :force)
|
||||
(ex/raise :type :validation
|
||||
:code :missing-force
|
||||
:hint "missing force checkbox"))
|
||||
|
||||
(let [profile (some->> params
|
||||
:email
|
||||
(profile/clean-email)
|
||||
(profile/get-profile-by-email pool))]
|
||||
|
||||
(when-not profile
|
||||
(ex/raise :type :validation
|
||||
:code :missing-profile
|
||||
:hint "unable to find profile by email"))
|
||||
|
||||
(cond
|
||||
(contains? params :block)
|
||||
(do
|
||||
(db/update! pool :profile {:is-blocked true} {:id (:id profile)})
|
||||
(db/delete! pool :http-session {:profile-id (:id profile)})
|
||||
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "text/plain"}
|
||||
::rres/body (str/ffmt "PROFILE '%' BLOCKED" (:email profile))})
|
||||
|
||||
(contains? params :unblock)
|
||||
(do
|
||||
(db/update! pool :profile {:is-blocked false} {:id (:id profile)})
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "text/plain"}
|
||||
::rres/body (str/ffmt "PROFILE '%' UNBLOCKED" (:email profile))})
|
||||
|
||||
(contains? params :resend)
|
||||
(if (:is-blocked profile)
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "text/plain"}
|
||||
::rres/body "PROFILE ALREADY BLOCKED"}
|
||||
(do
|
||||
(auth/send-email-verification! pool props profile)
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "text/plain"}
|
||||
::rres/body (str/ffmt "RESENDED FOR '%'" (:email profile))}))
|
||||
|
||||
:else
|
||||
(do
|
||||
(db/update! pool :profile {:is-active true} {:id (:id profile)})
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "text/plain"}
|
||||
::rres/body (str/ffmt "PROFILE '%' ACTIVATED" (:email profile))}))))
|
||||
|
||||
|
||||
(defn- reset-file-version
|
||||
[cfg {:keys [params] :as request}]
|
||||
(let [file-id (some-> params :file-id d/parse-uuid)
|
||||
version (some-> params :version d/parse-integer)]
|
||||
|
||||
(when-not (contains? params :force)
|
||||
(ex/raise :type :validation
|
||||
:code :missing-force
|
||||
:hint "missing force checkbox"))
|
||||
|
||||
(when (nil? file-id)
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-file-id
|
||||
:hint "provided invalid file id"))
|
||||
|
||||
(when (nil? version)
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-version
|
||||
:hint "provided invalid version"))
|
||||
|
||||
(db/tx-run! cfg srepl/process-file! file-id #(assoc % :version version))
|
||||
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "text/plain"}
|
||||
::rres/body "OK"}))
|
||||
(binf/import!
|
||||
(assoc cfg
|
||||
::binf/input (-> params :file :path)
|
||||
::binf/overwrite? overwrite?
|
||||
::binf/migrate? migrate?
|
||||
::binf/ignore-index-errors? ignore-index-errors?
|
||||
::binf/profile-id profile-id
|
||||
::binf/project-id project-id))
|
||||
|
||||
{::yrs/status 200
|
||||
::yrs/headers {"content-type" "text/plain"}
|
||||
::yrs/body "OK"}))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; OTHER SMALL VIEWS/HANDLERS
@@ -430,13 +372,13 @@
  [{:keys [::db/pool]} _]
  (try
    (db/exec-one! pool ["select count(*) as count from server_prop;"])
    {::rres/status 200
     ::rres/body "OK"}
    {::yrs/status 200
     ::yrs/body "OK"}
    (catch Throwable cause
      (l/warn :hint "unable to execute query on health handler"
              :cause cause)
      {::rres/status 503
       ::rres/body "KO"})))
      {::yrs/status 503
       ::yrs/body "KO"})))

(defn changelog-handler
  [_ _]
@@ -445,23 +387,16 @@
          (md->html [text]
            (md/md-to-html-string text :replacement-transformers (into [transform-emoji] mdt/transformer-vector)))]
    (if-let [clog (io/resource "changelog.md")]
      {::rres/status 200
       ::rres/headers {"content-type" "text/html; charset=utf-8"}
       ::rres/body (-> clog slurp md->html)}
      {::rres/status 404
       ::rres/body "NOT FOUND"})))
      {::yrs/status 200
       ::yrs/headers {"content-type" "text/html; charset=utf-8"}
       ::yrs/body (-> clog slurp md->html)}
      {::yrs/status 404
       ::yrs/body "NOT FOUND"})))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; INIT
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn authorized?
  [pool {:keys [::session/profile-id]}]
  (or (= "devenv" (cf/get :host))
      (let [profile (ex/ignoring (profile/get-profile pool profile-id))
            admins  (or (cf/get :admins) #{})]
        (contains? admins (:email profile)))))

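;; A standalone sketch of the allow-list check performed by `authorized?`
;; above: access is granted on the devenv host, or when the profile email
;; is contained in the configured admins set. The host, admins set and
;; profile values below are illustrative, not the real configuration.
(defn authorized?*
  [host admins profile]
  (or (= "devenv" host)
      (contains? (or admins #{}) (:email profile))))

;; (authorized?* "prod" #{"admin@example.com"} {:email "admin@example.com"}) ;; => true
;; (authorized?* "prod" #{} {:email "user@example.com"})                     ;; => false
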
(def with-authorization
  {:compile
   (fn [& _]
@@ -485,10 +420,6 @@
     ["/changelog" {:handler (partial changelog-handler cfg)}]
     ["/error/:id" {:handler (partial error-handler cfg)}]
     ["/error" {:handler (partial error-list-handler cfg)}]
     ["/actions/resend-email-verification"
      {:handler (partial resend-email-notification cfg)}]
     ["/actions/reset-file-version"
      {:handler (partial reset-file-version cfg)}]
     ["/file/export" {:handler (partial export-handler cfg)}]
     ["/file/import" {:handler (partial import-handler cfg)}]
     ["/file/data" {:handler (partial file-data-handler cfg)}]

@@ -9,21 +9,21 @@
  (:require
   [app.common.exceptions :as ex]
   [app.common.logging :as l]
   [app.common.schema :as-alias sm]
   [app.common.schema :as sm]
   [app.config :as cf]
   [app.http :as-alias http]
   [app.http.access-token :as-alias actoken]
   [app.http.session :as-alias session]
   [clojure.spec.alpha :as s]
   [cuerdas.core :as str]
   [ring.request :as rreq]
   [ring.response :as rres]))
   [yetti.request :as yrq]
   [yetti.response :as yrs]))

(defn- parse-client-ip
  [request]
  (or (some-> (rreq/get-header request "x-forwarded-for") (str/split ",") first)
      (rreq/get-header request "x-real-ip")
      (rreq/remote-addr request)))
  (or (some-> (yrq/get-header request "x-forwarded-for") (str/split ",") first)
      (yrq/get-header request "x-real-ip")
      (yrq/remote-addr request)))

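;; A standalone sketch of the header precedence implemented by
;; `parse-client-ip`: first hop of x-forwarded-for, then x-real-ip, then
;; the socket remote address. It works on a plain headers map instead of
;; the ring/yetti request object, so the shapes below are assumptions.
(require '[clojure.string :as cstr])

(defn client-ip
  [{:keys [headers remote-addr]}]
  (or (some-> (get headers "x-forwarded-for") (cstr/split #",") first cstr/trim)
      (get headers "x-real-ip")
      remote-addr))

;; (client-ip {:headers {"x-forwarded-for" "203.0.113.7, 10.0.0.1"}
;;             :remote-addr "10.0.0.2"})
;; => "203.0.113.7"
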
(defn request->context
  "Extracts error report relevant context data from request."
@@ -34,218 +34,184 @@
  {:request/path (:path request)
   :request/method (:method request)
   :request/params (:params request)
   :request/user-agent (rreq/get-header request "user-agent")
   :request/user-agent (yrq/get-header request "user-agent")
   :request/ip-addr (parse-client-ip request)
   :request/profile-id (:uid claims)
   :version/frontend (or (rreq/get-header request "x-frontend-version") "unknown")
   :version/frontend (or (yrq/get-header request "x-frontend-version") "unknown")
   :version/backend (:full cf/version)}))

(defmulti handle-error
  (fn [cause _ _]
    (-> cause ex-data :type)))

(defmulti handle-exception
  (fn [cause _ _]
    (class cause)))
  (fn [err & _rest]
    (let [edata (ex-data err)]
      (or (:type edata)
          (class err)))))

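;; A standalone sketch of the combined dispatch used by the
;; `handle-exception` variant above: controlled errors dispatch on the
;; :type key of their ex-data, everything else falls back to the
;; exception class. The methods and response shapes are illustrative.
(defmulti render-error
  (fn [err] (or (:type (ex-data err)) (class err))))

(defmethod render-error :validation
  [err]
  {:status 400 :body (ex-data err)})

(defmethod render-error java.io.IOException
  [err]
  {:status 500 :body {:hint (ex-message err)}})

(defmethod render-error :default
  [err]
  {:status 500 :body {:hint (ex-message err)}})

;; (render-error (ex-info "bad params" {:type :validation}))
;; => {:status 400, :body {:type :validation}}
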
(defmethod handle-error :authentication
  [err _ _]
  {::rres/status 401
   ::rres/body (ex-data err)})
(defmethod handle-exception :authentication
  [err _]
  {::yrs/status 401
   ::yrs/body (ex-data err)})

(defmethod handle-error :authorization
  [err _ _]
  {::rres/status 403
   ::rres/body (ex-data err)})
(defmethod handle-exception :authorization
  [err _]
  {::yrs/status 403
   ::yrs/body (ex-data err)})

(defmethod handle-error :restriction
  [err _ _]
  (let [{:keys [code] :as data} (ex-data err)]
    (if (= code :method-not-allowed)
      {::rres/status 405
       ::rres/body data}
      {::rres/status 400
       ::rres/body data})))
(defmethod handle-exception :restriction
  [err _]
  {::yrs/status 400
   ::yrs/body (ex-data err)})

(defmethod handle-error :rate-limit
  [err _ _]
(defmethod handle-exception :rate-limit
  [err _]
  (let [headers (-> err ex-data ::http/headers)]
    {::rres/status 429
     ::rres/headers headers}))
    {::yrs/status 429
     ::yrs/headers headers}))

(defmethod handle-error :concurrency-limit
  [err _ _]
(defmethod handle-exception :concurrency-limit
  [err _]
  (let [headers (-> err ex-data ::http/headers)]
    {::rres/status 429
     ::rres/headers headers}))
    {::yrs/status 429
     ::yrs/headers headers}))

(defmethod handle-error :validation
|
||||
[err request parent-cause]
|
||||
(defmethod handle-exception :validation
|
||||
[err request]
|
||||
(let [{:keys [code] :as data} (ex-data err)]
|
||||
(cond
|
||||
(or (= code :spec-validation)
|
||||
(= code :params-validation)
|
||||
(= code :schema-validation)
|
||||
(= code :data-validation))
|
||||
(= code :spec-validation)
|
||||
(let [explain (ex/explain data)]
|
||||
{::rres/status 400
|
||||
::rres/body (-> data
|
||||
(dissoc ::s/problems ::s/value ::s/spec ::sm/explain)
|
||||
(cond-> explain (assoc :explain explain)))})
|
||||
{::yrs/status 400
|
||||
::yrs/body (-> data
|
||||
(dissoc ::s/problems ::s/value ::s/spec)
|
||||
(cond-> explain (assoc :explain explain)))})
|
||||
|
||||
(= code :params-validation)
|
||||
(let [explain (::sm/explain data)
|
||||
payload (sm/humanize-data explain)]
|
||||
{::yrs/status 400
|
||||
::yrs/body (-> data
|
||||
(dissoc ::sm/explain)
|
||||
(assoc :data payload))})
|
||||
|
||||
(= code :request-body-too-large)
|
||||
{::rres/status 413 ::rres/body data}
|
||||
{::yrs/status 413 ::yrs/body data}
|
||||
|
||||
(= code :invalid-image)
|
||||
(binding [l/*context* (request->context request)]
|
||||
(let [cause (or parent-cause err)]
|
||||
(l/warn :hint "unexpected error on processing image" :cause cause)
|
||||
{::rres/status 400 ::rres/body data}))
|
||||
(l/error :hint "unexpected error on processing image" :cause err)
|
||||
{::yrs/status 400 ::yrs/body data})
|
||||
|
||||
:else
|
||||
{::rres/status 400 ::rres/body data})))
|
||||
{::yrs/status 400 ::yrs/body data})))
|
||||
|
||||
(defmethod handle-error :assertion
|
||||
[error request parent-cause]
|
||||
(defmethod handle-exception :assertion
|
||||
[error request]
|
||||
(binding [l/*context* (request->context request)]
|
||||
(let [{:keys [code] :as data} (ex-data error)
|
||||
cause (or parent-cause error)]
|
||||
(let [{:keys [code] :as data} (ex-data error)]
|
||||
(cond
|
||||
(= code :data-validation)
|
||||
(let [explain (ex/explain data)]
|
||||
(l/error :hint "data assertion error" :cause cause)
|
||||
{::rres/status 500
|
||||
::rres/body {:type :server-error
|
||||
:code :assertion
|
||||
:data (-> data
|
||||
(dissoc ::sm/explain)
|
||||
(cond-> explain (assoc :explain explain)))}})
|
||||
(let [explain (::sm/explain data)
|
||||
payload (sm/humanize-data explain)]
|
||||
(l/error :hint "Data assertion error" :message (ex-message error) :cause error)
|
||||
{::yrs/status 500
|
||||
::yrs/body {:type :server-error
|
||||
:code :assertion
|
||||
:data (-> data
|
||||
(dissoc ::sm/explain)
|
||||
(assoc :data payload))}})
|
||||
|
||||
(= code :spec-validation)
|
||||
(let [explain (ex/explain data)]
|
||||
(l/error :hint "spec assertion error" :cause cause)
|
||||
{::rres/status 500
|
||||
::rres/body {:type :server-error
|
||||
:code :assertion
|
||||
:data (-> data
|
||||
(dissoc ::s/problems ::s/value ::s/spec)
|
||||
(cond-> explain (assoc :explain explain)))}})
|
||||
(l/error :hint "Spec assertion error" :message (ex-message error) :cause error)
|
||||
{::yrs/status 500
|
||||
::yrs/body {:type :server-error
|
||||
:code :assertion
|
||||
:data (-> data
|
||||
(dissoc ::s/problems ::s/value ::s/spec)
|
||||
(cond-> explain (assoc :explain explain)))}})
|
||||
|
||||
:else
|
||||
(do
|
||||
(l/error :hint "assertion error" :cause cause)
|
||||
{::rres/status 500
|
||||
::rres/body {:type :server-error
|
||||
:code :assertion
|
||||
:data data}})))))
|
||||
(l/error :hint "Assertion error" :message (ex-message error) :cause error)
|
||||
{::yrs/status 500
|
||||
::yrs/body {:type :server-error
|
||||
:code :assertion
|
||||
:data data}})))))
|
||||
|
||||
(defmethod handle-error :not-found
|
||||
[err _ _]
|
||||
{::rres/status 404
|
||||
::rres/body (ex-data err)})
|
||||
|
||||
(defmethod handle-error :internal
|
||||
[error request parent-cause]
|
||||
(defmethod handle-exception :not-found
|
||||
[err _]
|
||||
{::yrs/status 404
|
||||
::yrs/body (ex-data err)})
|
||||
|
||||
(defmethod handle-exception :internal
|
||||
[error request]
|
||||
(binding [l/*context* (request->context request)]
|
||||
(let [cause (or parent-cause error)]
|
||||
(l/error :hint "internal error" :cause cause)
|
||||
{::rres/status 500
|
||||
::rres/body {:type :server-error
|
||||
:code :unhandled
|
||||
:hint (ex-message error)
|
||||
:data (ex-data error)}})))
|
||||
|
||||
(defmethod handle-error :default
|
||||
[error request parent-cause]
|
||||
(let [edata (ex-data error)]
|
||||
;; This is a special case for the idle-in-transaction error;
|
||||
;; when it happens, the connection is automatically closed and
|
||||
;; next-jdbc combines the two errors in a single ex-info. We
|
||||
;; only need the :handling error, because the :rollback error
|
||||
;; will be always "connection closed".
|
||||
(if (and (ex/exception? (:rollback edata))
|
||||
(ex/exception? (:handling edata)))
|
||||
(handle-exception (:handling edata) request error)
|
||||
(handle-exception error request parent-cause))))
|
||||
(l/error :hint "Internal error" :message (ex-message error) :cause error)
|
||||
{::yrs/status 500
|
||||
::yrs/body {:type :server-error
|
||||
:code :unhandled
|
||||
:hint (ex-message error)
|
||||
:data (ex-data error)}}))
|
||||
|
||||
(defmethod handle-exception org.postgresql.util.PSQLException
|
||||
[error request parent-cause]
|
||||
(let [state (.getSQLState ^java.sql.SQLException error)
|
||||
cause (or parent-cause error)]
|
||||
[error request]
|
||||
(let [state (.getSQLState ^java.sql.SQLException error)]
|
||||
(binding [l/*context* (request->context request)]
|
||||
(l/error :hint "PSQL error"
|
||||
:cause cause)
|
||||
(l/error :hint "PSQL error" :message (ex-message error) :cause error)
|
||||
(cond
|
||||
(= state "57014")
|
||||
{::rres/status 504
|
||||
::rres/body {:type :server-error
|
||||
:code :statement-timeout
|
||||
:hint (ex-message error)}}
|
||||
{::yrs/status 504
|
||||
::yrs/body {:type :server-error
|
||||
:code :statement-timeout
|
||||
:hint (ex-message error)}}
|
||||
|
||||
(= state "25P03")
|
||||
{::rres/status 504
|
||||
::rres/body {:type :server-error
|
||||
:code :idle-in-transaction-timeout
|
||||
:hint (ex-message error)}}
|
||||
{::yrs/status 504
|
||||
::yrs/body {:type :server-error
|
||||
:code :idle-in-transaction-timeout
|
||||
:hint (ex-message error)}}
|
||||
|
||||
:else
|
||||
{::rres/status 500
|
||||
::rres/body {:type :server-error
|
||||
:code :unexpected
|
||||
:hint (ex-message error)
|
||||
:state state}}))))
|
||||
{::yrs/status 500
|
||||
::yrs/body {:type :server-error
|
||||
:code :unexpected
|
||||
:hint (ex-message error)
|
||||
:state state}}))))
|
||||
|
||||
(defmethod handle-exception :default
|
||||
[error request parent-cause]
|
||||
(let [edata (ex-data error)
|
||||
cause (or parent-cause error)]
|
||||
[error request]
|
||||
(let [edata (ex-data error)]
|
||||
(cond
|
||||
;; This means that exception is not a controlled exception.
|
||||
(nil? edata)
|
||||
(binding [l/*context* (request->context request)]
|
||||
(l/error :hint "unexpected error" :cause cause)
|
||||
{::rres/status 500
|
||||
::rres/body {:type :server-error
|
||||
:code :unexpected
|
||||
:hint (ex-message error)}})
|
||||
(l/error :hint "Unexpected error" :message (ex-message error) :cause error)
|
||||
{::yrs/status 500
|
||||
::yrs/body {:type :server-error
|
||||
:code :unexpected
|
||||
:hint (ex-message error)}})
|
||||
|
||||
;; This is a special case for the idle-in-transaction error;
|
||||
;; when it happens, the connection is automatically closed and
|
||||
;; next-jdbc combines the two errors in a single ex-info. We
|
||||
;; only need the :handling error, because the :rollback error
|
||||
;; will be always "connection closed".
|
||||
(and (ex/exception? (:rollback edata))
|
||||
(ex/exception? (:handling edata)))
|
||||
(handle-exception (:handling edata) request)
|
||||
|
||||
:else
|
||||
(binding [l/*context* (request->context request)]
|
||||
(l/error :hint "unhandled error" :cause cause)
|
||||
{::rres/status 500
|
||||
::rres/body {:type :server-error
|
||||
:code :unhandled
|
||||
:hint (ex-message error)
|
||||
:data edata}}))))
|
||||
|
||||
(defmethod handle-exception java.io.IOException
|
||||
[cause _ _]
|
||||
(l/wrn :hint "io exception" :cause cause)
|
||||
{::rres/status 500
|
||||
::rres/body {:type :server-error
|
||||
:code :io-exception
|
||||
:hint (ex-message cause)}})
|
||||
|
||||
(defmethod handle-exception java.util.concurrent.CompletionException
|
||||
[cause request _]
|
||||
(let [cause' (ex-cause cause)]
|
||||
(if (ex/error? cause')
|
||||
(handle-error cause' request cause)
|
||||
(handle-exception cause' request cause))))
|
||||
|
||||
(defmethod handle-exception java.util.concurrent.ExecutionException
|
||||
[cause request _]
|
||||
(let [cause' (ex-cause cause)]
|
||||
(if (ex/error? cause')
|
||||
(handle-error cause' request cause)
|
||||
(handle-exception cause' request cause))))
|
||||
(l/error :hint "Unhandled error" :message (ex-message error) :cause error)
|
||||
{::yrs/status 500
|
||||
::yrs/body {:type :server-error
|
||||
:code :unhandled
|
||||
:hint (ex-message error)
|
||||
:data edata}}))))
|
||||
|
||||
(defn handle
  [cause request]
  (if (ex/error? cause)
    (handle-error cause request nil)
    (handle-exception cause request nil)))

(defn handle'
  [cause request]
  (::rres/body (handle cause request)))
  (if (or (instance? java.util.concurrent.CompletionException cause)
          (instance? java.util.concurrent.ExecutionException cause))
    (handle-exception (ex-cause cause) request)
    (handle-exception cause request)))

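;; A standalone sketch of the unwrapping step shown above: executor
;; wrappers such as CompletionException and ExecutionException carry the
;; real failure in their cause, so the entry point dispatches on
;; `ex-cause` for those classes. The function name is illustrative.
(defn unwrap-cause
  [^Throwable cause]
  (if (or (instance? java.util.concurrent.CompletionException cause)
          (instance? java.util.concurrent.ExecutionException cause))
    (or (ex-cause cause) cause)
    cause))

;; (ex-message
;;  (unwrap-cause
;;   (java.util.concurrent.CompletionException.
;;    (ex-info "boom" {:type :internal}))))
;; => "boom"
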
@@ -12,10 +12,13 @@
|
||||
[app.config :as cf]
|
||||
[app.util.json :as json]
|
||||
[cuerdas.core :as str]
|
||||
[ring.request :as rreq]
|
||||
[ring.response :as rres]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]
|
||||
[promesa.util :as pu]
|
||||
[yetti.adapter :as yt]
|
||||
[yetti.middleware :as ymw])
|
||||
[yetti.middleware :as ymw]
|
||||
[yetti.request :as yrq]
|
||||
[yetti.response :as yrs])
|
||||
(:import
|
||||
com.fasterxml.jackson.core.JsonParseException
|
||||
com.fasterxml.jackson.core.io.JsonEOFException
|
||||
@@ -43,17 +46,17 @@
|
||||
(defn wrap-parse-request
|
||||
[handler]
|
||||
(letfn [(process-request [request]
|
||||
(let [header (rreq/get-header request "content-type")]
|
||||
(let [header (yrq/get-header request "content-type")]
|
||||
(cond
|
||||
(str/starts-with? header "application/transit+json")
|
||||
(with-open [^InputStream is (rreq/body request)]
|
||||
(with-open [^InputStream is (yrq/body request)]
|
||||
(let [params (t/read! (t/reader is))]
|
||||
(-> request
|
||||
(assoc :body-params params)
|
||||
(update :params merge params))))
|
||||
|
||||
(str/starts-with? header "application/json")
|
||||
(with-open [^InputStream is (rreq/body request)]
|
||||
(with-open [^InputStream is (yrq/body request)]
|
||||
(let [params (json/decode is json-mapper)]
|
||||
(-> request
|
||||
(assoc :body-params params)
|
||||
@@ -62,36 +65,37 @@
              :else
              request)))

          (handle-error [cause]
          (handle-error [raise cause]
            (cond
              (instance? RuntimeException cause)
              (if-let [cause (ex-cause cause)]
                (handle-error cause)
                (throw cause))
                (handle-error raise cause)
                (raise cause))

              (instance? RequestTooBigException cause)
              (ex/raise :type :validation
                        :code :request-body-too-large
                        :hint (ex-message cause))
              (raise (ex/error :type :validation
                               :code :request-body-too-large
                               :hint (ex-message cause)))


              (or (instance? JsonEOFException cause)
                  (instance? JsonParseException cause)
                  (instance? MismatchedInputException cause))
              (ex/raise :type :validation
                        :code :malformed-json
                        :hint (ex-message cause)
                        :cause cause)
              (raise (ex/error :type :validation
                               :code :malformed-json
                               :hint (ex-message cause)
                               :cause cause))

              :else
              (throw cause)))]
              (raise cause)))]

    (fn [request]
      (if (= (rreq/method request) :post)
    (fn [request respond raise]
      (if (= (yrq/method request) :post)
        (let [request (ex/try! (process-request request))]
          (if (ex/exception? request)
            (handle-error request)
            (handler request)))
        (handler request)))))
            (handle-error raise request)
            (handler request respond raise)))
        (handler request respond raise)))))

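;; A standalone sketch of the content-type dispatch performed by
;; `process-request` above: transit and JSON bodies are decoded, anything
;; else passes through untouched. The keywords returned here are
;; illustrative; the real middleware decodes the body stream in place.
(require '[clojure.string :as cstr])

(defn body-format
  [content-type]
  (cond
    (and content-type (cstr/starts-with? content-type "application/transit+json")) :transit
    (and content-type (cstr/starts-with? content-type "application/json"))         :json
    :else                                                                           :raw))

;; (body-format "application/transit+json") ;; => :transit
;; (body-format "text/plain")               ;; => :raw
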
(def parse-request
|
||||
{:name ::parse-request
|
||||
@@ -109,7 +113,7 @@
|
||||
(defn wrap-format-response
|
||||
[handler]
|
||||
(letfn [(transit-streamable-body [data opts]
|
||||
(reify rres/StreamableResponseBody
|
||||
(reify yrs/StreamableResponseBody
|
||||
(-write-body-to-stream [_ _ output-stream]
|
||||
(try
|
||||
(with-open [^OutputStream bos (buffered-output-stream output-stream buffer-size)]
|
||||
@@ -124,7 +128,7 @@
|
||||
(.close ^OutputStream output-stream))))))
|
||||
|
||||
(json-streamable-body [data]
|
||||
(reify rres/StreamableResponseBody
|
||||
(reify yrs/StreamableResponseBody
|
||||
(-write-body-to-stream [_ _ output-stream]
|
||||
(try
|
||||
(with-open [^OutputStream bos (buffered-output-stream output-stream buffer-size)]
|
||||
@@ -139,24 +143,24 @@
|
||||
(.close ^OutputStream output-stream))))))
|
||||
|
||||
(format-response-with-json [response _]
|
||||
(let [body (::rres/body response)]
|
||||
(let [body (::yrs/body response)]
|
||||
(if (or (boolean? body) (coll? body))
|
||||
(-> response
|
||||
(update ::rres/headers assoc "content-type" "application/json")
|
||||
(assoc ::rres/body (json-streamable-body body)))
|
||||
(update ::yrs/headers assoc "content-type" "application/json")
|
||||
(assoc ::yrs/body (json-streamable-body body)))
|
||||
response)))
|
||||
|
||||
(format-response-with-transit [response request]
|
||||
(let [body (::rres/body response)]
|
||||
(let [body (::yrs/body response)]
|
||||
(if (or (boolean? body) (coll? body))
|
||||
(let [qs (rreq/query request)
|
||||
(let [qs (yrq/query request)
|
||||
opts (if (or (contains? cf/flags :transit-readable-response)
|
||||
(str/includes? qs "transit_verbose"))
|
||||
{:type :json-verbose}
|
||||
{:type :json})]
|
||||
(-> response
|
||||
(update ::rres/headers assoc "content-type" "application/transit+json")
|
||||
(assoc ::rres/body (transit-streamable-body body opts))))
|
||||
(update ::yrs/headers assoc "content-type" "application/transit+json")
|
||||
(assoc ::yrs/body (transit-streamable-body body opts))))
|
||||
response)))
|
||||
|
||||
(format-from-params [{:keys [query-params] :as request}]
|
||||
@@ -165,7 +169,7 @@
|
||||
|
||||
(format-response [response request]
|
||||
(let [accept (or (format-from-params request)
|
||||
(rreq/get-header request "accept"))]
|
||||
(yrq/get-header request "accept"))]
|
||||
(cond
|
||||
(or (= accept "application/transit+json")
|
||||
(str/includes? accept "application/transit+json"))
|
||||
@@ -182,9 +186,11 @@
|
||||
(cond-> response
|
||||
(map? response) (format-response request)))]
|
||||
|
||||
(fn [request]
|
||||
(let [response (handler request)]
|
||||
(process-response response request)))))
|
||||
(fn [request respond raise]
|
||||
(handler request
|
||||
(fn [response]
|
||||
(respond (process-response response request)))
|
||||
raise))))
|
||||
|
||||
(def format-response
|
||||
{:name ::format-response
|
||||
@@ -192,11 +198,12 @@
|
||||
|
||||
(defn wrap-errors
  [handler on-error]
  (fn [request]
    (try
      (handler request)
      (catch Throwable cause
        (on-error cause request)))))
  (fn [request respond raise]
    (handler request respond (fn [cause]
                               (try
                                 (respond (on-error cause request))
                                 (catch Throwable cause
                                   (raise cause)))))))

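;; A standalone usage sketch of the synchronous `wrap-errors` variant
;; above: any throwable escaping the handler is converted into a response
;; by the supplied on-error function. Handler and response shapes are
;; illustrative.
(defn example-on-error
  [cause _request]
  {:status 500 :body {:hint (ex-message cause)}})

(defn wrap-errors*
  [handler on-error]
  (fn [request]
    (try
      (handler request)
      (catch Throwable cause
        (on-error cause request)))))

(def safe-handler
  (wrap-errors* (fn [_] (throw (ex-info "boom" {}))) example-on-error))

;; (safe-handler {}) ;; => {:status 500, :body {:hint "boom"}}
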
(def errors
|
||||
{:name ::errors
|
||||
@@ -214,11 +221,11 @@
|
||||
(defn wrap-cors
|
||||
[handler]
|
||||
(fn [request]
|
||||
(let [response (if (= (rreq/method request) :options)
|
||||
{::rres/status 200}
|
||||
(let [response (if (= (yrq/method request) :options)
|
||||
{::yrs/status 200}
|
||||
(handler request))
|
||||
origin (rreq/get-header request "origin")]
|
||||
(update response ::rres/headers with-cors-headers origin))))
|
||||
origin (yrq/get-header request "origin")]
|
||||
(update response ::yrs/headers with-cors-headers origin))))
|
||||
|
||||
(def cors
|
||||
{:name ::cors
|
||||
@@ -232,8 +239,18 @@
|
||||
(fn [data _]
|
||||
(when-let [allowed (:allowed-methods data)]
|
||||
(fn [handler]
|
||||
(fn [request]
|
||||
(let [method (rreq/method request)]
|
||||
(fn [request respond raise]
|
||||
(let [method (yrq/method request)]
|
||||
(if (contains? allowed method)
|
||||
(handler request)
|
||||
{::rres/status 405}))))))})
|
||||
(handler request respond raise)
|
||||
(respond {::yrs/status 405})))))))})
|
||||
|
||||
(def with-dispatch
|
||||
{:name ::with-dispatch
|
||||
:compile
|
||||
(fn [& _]
|
||||
(fn [handler executor]
|
||||
(let [executor (px/resolve-executor executor)]
|
||||
(fn [request respond raise]
|
||||
(->> (px/submit! executor (partial handler request))
|
||||
(p/fnly (pu/handler respond raise)))))))})
|
||||
|
||||
@@ -15,13 +15,11 @@
|
||||
[app.db.sql :as sql]
|
||||
[app.http.session.tasks :as-alias tasks]
|
||||
[app.main :as-alias main]
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.time :as dt]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]
|
||||
[ring.request :as rreq]
|
||||
[yetti.request :as yrq]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
@@ -139,12 +137,12 @@
|
||||
(declare ^:private gen-token)
|
||||
|
||||
(defn create-fn
|
||||
[{:keys [::manager ::setup/props]} profile-id]
|
||||
[{:keys [::manager ::main/props]} profile-id]
|
||||
(us/assert! ::manager manager)
|
||||
(us/assert! ::us/uuid profile-id)
|
||||
|
||||
(fn [request response]
|
||||
(let [uagent (rreq/get-header request "user-agent")
|
||||
(let [uagent (yrq/get-header request "user-agent")
|
||||
params {:profile-id profile-id
|
||||
:user-agent uagent
|
||||
:created-at (dt/now)}
|
||||
@@ -197,7 +195,7 @@
|
||||
(neg? (compare default-renewal-max-age elapsed)))))
|
||||
|
||||
(defn- wrap-soft-auth
|
||||
[handler {:keys [::manager ::setup/props]}]
|
||||
[handler {:keys [::manager ::main/props]}]
|
||||
(us/assert! ::manager manager)
|
||||
(letfn [(handle-request [request]
|
||||
(try
|
||||
@@ -211,8 +209,9 @@
|
||||
(l/trace :hint "exception on decoding malformed token" :cause cause)
|
||||
request)))]
|
||||
|
||||
(fn [request]
|
||||
(handler (handle-request request)))))
|
||||
(fn [request respond raise]
|
||||
(let [request (handle-request request)]
|
||||
(handler request respond raise)))))
|
||||
|
||||
(defn- wrap-authz
|
||||
[handler {:keys [::manager]}]
|
||||
@@ -222,15 +221,12 @@
|
||||
request (cond-> request
|
||||
(some? session)
|
||||
(assoc ::profile-id (:profile-id session)
|
||||
::id (:id session)))
|
||||
response (handler request)]
|
||||
::id (:id session)))]
|
||||
|
||||
(if (renew-session? session)
|
||||
(let [session (update! manager session)]
|
||||
(-> response
|
||||
(assign-auth-token-cookie session)
|
||||
(assign-authenticated-cookie session)))
|
||||
response))))
|
||||
(cond-> (handler request)
|
||||
(renew-session? session)
|
||||
(-> (assign-auth-token-cookie session)
|
||||
(assign-authenticated-cookie session))))))
|
||||
|
||||
(def soft-auth
|
||||
{:name ::soft-auth
|
||||
@@ -249,7 +245,6 @@
|
||||
renewal (dt/plus created-at default-renewal-max-age)
|
||||
expires (dt/plus created-at max-age)
|
||||
secure? (contains? cf/flags :secure-session-cookies)
|
||||
strict? (contains? cf/flags :strict-session-cookies)
|
||||
cors? (contains? cf/flags :cors)
|
||||
name (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
|
||||
comment (str "Renewal at: " (dt/format-instant renewal :rfc1123))
|
||||
@@ -258,7 +253,7 @@
|
||||
:expires expires
|
||||
:value token
|
||||
:comment comment
|
||||
:same-site (if cors? :none (if strict? :strict :lax))
|
||||
:same-site (if cors? :none :lax)
|
||||
:secure secure?}]
|
||||
(update response :cookies assoc name cookie)))
|
||||
|
||||
|
||||
@@ -1,67 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC

(ns app.http.sse
  "SSE (server sent events) helpers"
  (:refer-clojure :exclude [tap])
  (:require
   [app.common.data :as d]
   [app.common.logging :as l]
   [app.common.transit :as t]
   [app.http.errors :as errors]
   [app.util.events :as events]
   [promesa.exec :as px]
   [promesa.exec.csp :as sp]
   [promesa.util :as pu]
   [ring.response :as rres])
  (:import
   java.io.OutputStream))

(defn- write!
  [^OutputStream output ^bytes data]
  (l/trc :hint "writting data" :data data :length (alength data))
  (.write output data)
  (.flush output))

(defn- encode
  [[name data]]
  (try
    (let [data (with-out-str
                 (println "event:" (d/name name))
                 (println "data:" (t/encode-str data {:type :json-verbose}))
                 (println))]
      (.getBytes data "UTF-8"))
    (catch Throwable cause
      (l/err :hint "unexpected error on encoding value on sse stream"
             :cause cause)
      nil)))

;; ---- PUBLIC API

(def default-headers
  {"Content-Type" "text/event-stream;charset=UTF-8"
   "Cache-Control" "no-cache, no-store, max-age=0, must-revalidate"
   "Pragma" "no-cache"})

(defn response
  [handler & {:keys [buf] :or {buf 32} :as opts}]
  (fn [request]
    {::rres/headers default-headers
     ::rres/status 200
     ::rres/body (reify rres/StreamableResponseBody
                   (-write-body-to-stream [_ _ output]
                     (binding [events/*channel* (sp/chan :buf buf :xf (keep encode))]
                       (let [listener (events/start-listener
                                       (partial write! output)
                                       (partial pu/close! output))]
                         (try
                           (let [result (handler)]
                             (events/tap :end result))
                           (catch Throwable cause
                             (events/tap :error (errors/handle' cause request)))
                           (finally
                             (sp/close! events/*channel*)
                             (px/await! listener)))))))}))
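
;; A standalone sketch of the SSE framing produced by `encode` above:
;; each event is written as an "event:" line, a "data:" line and a blank
;; line. The real code serializes data with transit; `pr-str` is used
;; here only to keep the sketch dependency-free.
(defn sse-frame
  [event-name data]
  (str "event: " (name event-name) "\n"
       "data: " (pr-str data) "\n\n"))

;; (sse-frame :progress {:done 10 :total 42})
;; => "event: progress\ndata: {:done 10, :total 42}\n\n"
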
@@ -10,7 +10,7 @@
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.pprint :as pp]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.spec :as us]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
[app.http.session :as session]
|
||||
@@ -21,7 +21,6 @@
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[promesa.exec.csp :as sp]
|
||||
[ring.websocket :as rws]
|
||||
[yetti.websocket :as yws]))
|
||||
|
||||
(def recv-labels
|
||||
@@ -117,21 +116,21 @@
|
||||
:profile-id profile-id
|
||||
:session-id session-id}]
|
||||
|
||||
;; Close profile subscription if exists
|
||||
(when-let [ch (:channel psub)]
|
||||
(sp/close! ch)
|
||||
(mbus/purge! msgbus [ch]))
|
||||
;; Close profile subscription if exists
|
||||
(when-let [ch (:channel psub)]
|
||||
(sp/close! ch)
|
||||
(mbus/purge! msgbus [ch]))
|
||||
|
||||
;; Close team subscription if exists
|
||||
(when-let [ch (:channel tsub)]
|
||||
(sp/close! ch)
|
||||
(mbus/purge! msgbus [ch]))
|
||||
;; Close team subscription if exists
|
||||
(when-let [ch (:channel tsub)]
|
||||
(sp/close! ch)
|
||||
(mbus/purge! msgbus [ch]))
|
||||
|
||||
;; Close file subscription if exists
|
||||
(when-let [{:keys [topic channel]} fsub]
|
||||
(sp/close! channel)
|
||||
(mbus/purge! msgbus [channel])
|
||||
(mbus/pub! msgbus :topic topic :message msg))))
|
||||
;; Close file subscription if exists
|
||||
(when-let [{:keys [topic channel]} fsub]
|
||||
(sp/close! channel)
|
||||
(mbus/purge! msgbus [channel])
|
||||
(mbus/pub! msgbus :topic topic :message msg))))
|
||||
|
||||
(defmethod handle-message :subscribe-team
|
||||
[{:keys [::mbus/msgbus]} {:keys [::ws/id ::ws/state ::ws/output-ch ::session-id]} {:keys [team-id] :as params}]
|
||||
@@ -179,7 +178,7 @@
|
||||
(let [message {:type :presence
|
||||
:file-id file-id
|
||||
:session-id session-id
|
||||
:profile-id profile-id}]
|
||||
:profile-id profile-id}]
|
||||
(mbus/pub! msgbus
|
||||
:topic file-id
|
||||
:message message)))
|
||||
@@ -278,23 +277,19 @@
|
||||
:inc 1)
|
||||
message)
|
||||
|
||||
(def ^:private schema:params
|
||||
(sm/define
|
||||
[:map {:title "params"}
|
||||
[:session-id ::sm/uuid]]))
|
||||
|
||||
(s/def ::session-id ::us/uuid)
|
||||
(s/def ::handler-params
|
||||
(s/keys :req-un [::session-id]))
|
||||
|
||||
(defn- http-handler
|
||||
[cfg {:keys [params ::session/profile-id] :as request}]
|
||||
(let [{:keys [session-id]} (sm/conform! schema:params params)]
|
||||
(let [{:keys [session-id]} (us/conform ::handler-params params)]
|
||||
(cond
|
||||
(not profile-id)
|
||||
(ex/raise :type :authentication
|
||||
:hint "Authentication required.")
|
||||
|
||||
;; WORKAROUND: we use the adapter specific predicate for
|
||||
;; performance reasons; for now, the ring default impl for
|
||||
;; `upgrade-request?` parses all requests headers before perform
|
||||
;; any checking.
|
||||
(not (yws/upgrade-request? request))
|
||||
(ex/raise :type :validation
|
||||
:code :websocket-request-expected
|
||||
@@ -303,13 +298,14 @@
|
||||
:else
|
||||
(do
|
||||
(l/trace :hint "websocket request" :profile-id profile-id :session-id session-id)
|
||||
{::rws/listener (ws/listener request
|
||||
::ws/on-rcv-message (partial on-rcv-message cfg)
|
||||
::ws/on-snd-message (partial on-snd-message cfg)
|
||||
::ws/on-connect (partial on-connect cfg)
|
||||
::ws/handler (partial handle-message cfg)
|
||||
::profile-id profile-id
|
||||
::session-id session-id)}))))
|
||||
(->> (ws/handler
|
||||
::ws/on-rcv-message (partial on-rcv-message cfg)
|
||||
::ws/on-snd-message (partial on-snd-message cfg)
|
||||
::ws/on-connect (partial on-connect cfg)
|
||||
::ws/handler (partial handle-message cfg)
|
||||
::profile-id profile-id
|
||||
::session-id session-id)
|
||||
(yws/upgrade request))))))
|
||||
|
||||
(defmethod ig/pre-init-spec ::routes [_]
|
||||
(s/keys :req [::mbus/msgbus
|
||||
@@ -322,4 +318,5 @@
|
||||
(defmethod ig/init-key ::routes
|
||||
[_ cfg]
|
||||
["/ws/notifications" {:middleware [[session/authz cfg]]
|
||||
:handler (partial http-handler cfg)}])
|
||||
:handler (partial http-handler cfg)
|
||||
:allowed-methods #{:get}}])
|
||||
|
||||
@@ -9,25 +9,31 @@
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.transit :as t]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.http :as-alias http]
|
||||
[app.http.access-token :as-alias actoken]
|
||||
[app.http.client :as http.client]
|
||||
[app.loggers.audit.tasks :as-alias tasks]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.main :as-alias main]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.retry :as rtry]
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.services :as-alias sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]
|
||||
[ring.request :as rreq]))
|
||||
[lambdaisland.uri :as u]
|
||||
[promesa.exec :as px]
|
||||
[yetti.request :as yrq]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; HELPERS
|
||||
@@ -35,9 +41,9 @@
|
||||
|
||||
(defn parse-client-ip
|
||||
[request]
|
||||
(or (some-> (rreq/get-header request "x-forwarded-for") (str/split ",") first)
|
||||
(rreq/get-header request "x-real-ip")
|
||||
(some-> (rreq/remote-addr request) str)))
|
||||
(or (some-> (yrq/get-header request "x-forwarded-for") (str/split ",") first)
|
||||
(yrq/get-header request "x-real-ip")
|
||||
(some-> (yrq/remote-addr request) str)))
|
||||
|
||||
(defn extract-utm-params
|
||||
"Extracts additional data from params and namespace them under
|
||||
@@ -127,7 +133,7 @@
|
||||
[_ {:keys [::db/pool] :as cfg}]
|
||||
(cond
|
||||
(db/read-only? pool)
|
||||
(l/warn :hint "audit disabled (db is read-only)")
|
||||
(l/warn :hint "audit: disabled (db is read-only)")
|
||||
|
||||
:else
|
||||
cfg))
|
||||
@@ -181,45 +187,33 @@
|
||||
false)}))
|
||||
|
||||
(defn- handle-event!
|
||||
[cfg event]
|
||||
[conn-or-pool event]
|
||||
(us/verify! ::event event)
|
||||
(let [params {:id (uuid/next)
|
||||
:name (::name event)
|
||||
:type (::type event)
|
||||
:profile-id (::profile-id event)
|
||||
:ip-addr (::ip-addr event)
|
||||
:context (::context event)
|
||||
:props (::props event)}
|
||||
tnow (dt/now)]
|
||||
:props (::props event)}]
|
||||
|
||||
(when (contains? cf/flags :audit-log)
|
||||
;; NOTE: this operation may cause primary key conflicts on inserts
|
||||
;; because of the timestamp precission (two concurrent requests), in
|
||||
;; this case we just retry the operation.
|
||||
(let [params (-> params
|
||||
(assoc :created-at tnow)
|
||||
(assoc :tracked-at tnow)
|
||||
(update :props db/tjson)
|
||||
(update :context db/tjson)
|
||||
(update :ip-addr db/inet)
|
||||
(assoc :source "backend"))]
|
||||
(db/insert! cfg :audit-log params)))
|
||||
|
||||
(when (and (or (contains? cf/flags :telemetry)
|
||||
(cf/get :telemetry-enabled))
|
||||
(not (contains? cf/flags :audit-log)))
|
||||
;; NOTE: this operation may cause primary key conflicts on inserts
|
||||
;; because of the timestamp precission (two concurrent requests), in
|
||||
;; this case we just retry the operation.
|
||||
;;
|
||||
;; NOTE: this is only executed when general audit log is disabled
|
||||
(let [params (-> params
|
||||
(assoc :created-at tnow)
|
||||
(assoc :tracked-at tnow)
|
||||
(assoc :props (db/tjson {}))
|
||||
(assoc :context (db/tjson {}))
|
||||
(assoc :ip-addr (db/inet "0.0.0.0"))
|
||||
(assoc :source "backend"))]
|
||||
(db/insert! cfg :audit-log params)))
|
||||
(rtry/with-retry {::rtry/when rtry/conflict-exception?
|
||||
::rtry/max-retries 6
|
||||
::rtry/label "persist-audit-log"
|
||||
::db/conn (dm/check db/connection? conn-or-pool)}
|
||||
(let [now (dt/now)]
|
||||
(db/insert! conn-or-pool :audit-log
|
||||
(-> params
|
||||
(update :props db/tjson)
|
||||
(update :context db/tjson)
|
||||
(update :ip-addr db/inet)
|
||||
(assoc :created-at now)
|
||||
(assoc :tracked-at now)
|
||||
(assoc :source "backend"))))))
|
||||
|
||||
(when (and (contains? cf/flags :webhooks)
|
||||
(::webhooks/event? event))
|
||||
@@ -232,7 +226,7 @@
|
||||
:else label)
|
||||
dedupe? (boolean (and batch-key batch-timeout))]
|
||||
|
||||
(wrk/submit! ::wrk/conn (::db/conn cfg)
|
||||
(wrk/submit! ::wrk/conn conn-or-pool
|
||||
::wrk/task :process-webhook-event
|
||||
::wrk/queue :webhooks
|
||||
::wrk/max-retries 0
|
||||
@@ -249,13 +243,144 @@
|
||||
(defn submit!
  "Submit audit event to the collector."
  [cfg params]
  (try
    (let [event (d/without-nils params)
          cfg   (-> cfg
                    (assoc ::rtry/when rtry/conflict-exception?)
                    (assoc ::rtry/max-retries 6)
                    (assoc ::rtry/label "persist-audit-log"))]
      (us/verify! ::event event)
      (rtry/invoke! cfg db/tx-run! handle-event! event))
    (catch Throwable cause
      (l/error :hint "unexpected error processing event" :cause cause))))
  (let [conn (or (::db/conn cfg) (::db/pool cfg))]
    (us/assert! ::db/pool-or-conn conn)
    (try
      (handle-event! conn (d/without-nils params))
      (catch Throwable cause
        (l/error :hint "audit: unexpected error processing event" :cause cause)))))

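;; A standalone sketch of the retry pattern used around audit-log
;; inserts above: the operation is retried while a predicate recognizes
;; the failure (here, a conflict marker in ex-data) up to a maximum
;; number of attempts. The predicate and operation are illustrative.
(defn retry-on
  [retryable? max-retries f]
  (loop [attempt 1]
    (let [result (try {:ok (f)} (catch Throwable cause {:error cause}))]
      (if-let [cause (:error result)]
        (if (and (retryable? cause) (< attempt max-retries))
          (recur (inc attempt))
          (throw cause))
        (:ok result)))))

;; (retry-on #(= :conflict (:code (ex-data %))) 6
;;           (fn [] (insert-audit-log! row))) ;; insert-audit-log! is hypothetical
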
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; TASK: ARCHIVE
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
;; This is a task responsible to send the accumulated events to
|
||||
;; external service for archival.
|
||||
|
||||
(declare archive-events)
|
||||
|
||||
(s/def ::tasks/uri ::us/string)
|
||||
|
||||
(defmethod ig/pre-init-spec ::tasks/archive-task [_]
|
||||
(s/keys :req [::db/pool ::main/props ::http.client/client]))
|
||||
|
||||
(defmethod ig/init-key ::tasks/archive
|
||||
[_ cfg]
|
||||
(fn [params]
|
||||
;; NOTE: this let allows overwrite default configured values from
|
||||
;; the repl, when manually invoking the task.
|
||||
(let [enabled (or (contains? cf/flags :audit-log-archive)
|
||||
(:enabled params false))
|
||||
uri (cf/get :audit-log-archive-uri)
|
||||
uri (or uri (:uri params))
|
||||
cfg (assoc cfg ::uri uri)]
|
||||
|
||||
(when (and enabled (not uri))
|
||||
(ex/raise :type :internal
|
||||
:code :task-not-configured
|
||||
:hint "archive task not configured, missing uri"))
|
||||
|
||||
(when enabled
|
||||
(loop [total 0]
|
||||
(let [n (archive-events cfg)]
|
||||
(if n
|
||||
(do
|
||||
(px/sleep 100)
|
||||
(recur (+ total ^long n)))
|
||||
(when (pos? total)
|
||||
(l/debug :hint "events archived" :total total)))))))))
|
||||
|
||||
(def ^:private sql:retrieve-batch-of-audit-log
|
||||
"select *
|
||||
from audit_log
|
||||
where archived_at is null
|
||||
order by created_at asc
|
||||
limit 128
|
||||
for update skip locked;")
|
||||
|
||||
(defn archive-events
|
||||
[{:keys [::db/pool ::uri] :as cfg}]
|
||||
(letfn [(decode-row [{:keys [props ip-addr context] :as row}]
|
||||
(cond-> row
|
||||
(db/pgobject? props)
|
||||
(assoc :props (db/decode-transit-pgobject props))
|
||||
|
||||
(db/pgobject? context)
|
||||
(assoc :context (db/decode-transit-pgobject context))
|
||||
|
||||
(db/pgobject? ip-addr "inet")
|
||||
(assoc :ip-addr (db/decode-inet ip-addr))))
|
||||
|
||||
(row->event [row]
|
||||
(select-keys row [:type
|
||||
:name
|
||||
:source
|
||||
:created-at
|
||||
:tracked-at
|
||||
:profile-id
|
||||
:ip-addr
|
||||
:props
|
||||
:context]))
|
||||
|
||||
(send [events]
|
||||
(let [token (tokens/generate (::main/props cfg)
|
||||
{:iss "authentication"
|
||||
:iat (dt/now)
|
||||
:uid uuid/zero})
|
||||
body (t/encode {:events events})
|
||||
headers {"content-type" "application/transit+json"
|
||||
"origin" (cf/get :public-uri)
|
||||
"cookie" (u/map->query-string {:auth-token token})}
|
||||
params {:uri uri
|
||||
:timeout 6000
|
||||
:method :post
|
||||
:headers headers
|
||||
:body body}
|
||||
resp (http.client/req! cfg params {:sync? true})]
|
||||
(if (= (:status resp) 204)
|
||||
true
|
||||
(do
|
||||
(l/error :hint "unable to archive events"
|
||||
:resp-status (:status resp)
|
||||
:resp-body (:body resp))
|
||||
false))))
|
||||
|
||||
(mark-as-archived [conn rows]
|
||||
(db/exec-one! conn ["update audit_log set archived_at=now() where id = ANY(?)"
|
||||
(->> (map :id rows)
|
||||
(into-array java.util.UUID)
|
||||
(db/create-array conn "uuid"))]))]
|
||||
|
||||
(db/with-atomic [conn pool]
|
||||
(let [rows (db/exec! conn [sql:retrieve-batch-of-audit-log])
|
||||
xform (comp (map decode-row)
|
||||
(map row->event))
|
||||
events (into [] xform rows)]
|
||||
(when-not (empty? events)
|
||||
(l/trace :hint "archive events chunk" :uri uri :events (count events))
|
||||
(when (send events)
|
||||
(mark-as-archived conn rows)
|
||||
(count events)))))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; GC Task
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def ^:private sql:clean-archived
|
||||
"delete from audit_log
|
||||
where archived_at is not null")
|
||||
|
||||
(defn- clean-archived
|
||||
[{:keys [::db/pool]}]
|
||||
(let [result (db/exec-one! pool [sql:clean-archived])
|
||||
result (:next.jdbc/update-count result)]
|
||||
(l/debug :hint "delete archived audit log entries" :deleted result)
|
||||
result))
|
||||
|
||||
(defmethod ig/pre-init-spec ::tasks/gc [_]
|
||||
(s/keys :req [::db/pool]))
|
||||
|
||||
(defmethod ig/init-key ::tasks/gc
|
||||
[_ cfg]
|
||||
(fn [_]
|
||||
(clean-archived cfg)))
|
||||
|
||||
@@ -1,140 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.loggers.audit.archive-task
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.transit :as t]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.http.client :as http]
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.time :as dt]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[lambdaisland.uri :as u]
|
||||
[promesa.exec :as px]))
|
||||
|
||||
;; This is a task responsible to send the accumulated events to
|
||||
;; external service for archival.
|
||||
|
||||
(defn- decode-row
|
||||
[{:keys [props ip-addr context] :as row}]
|
||||
(cond-> row
|
||||
(db/pgobject? props)
|
||||
(assoc :props (db/decode-transit-pgobject props))
|
||||
|
||||
(db/pgobject? context)
|
||||
(assoc :context (db/decode-transit-pgobject context))
|
||||
|
||||
(db/pgobject? ip-addr "inet")
|
||||
(assoc :ip-addr (db/decode-inet ip-addr))))
|
||||
|
||||
(def ^:private event-keys
|
||||
[:type
|
||||
:name
|
||||
:source
|
||||
:created-at
|
||||
:tracked-at
|
||||
:profile-id
|
||||
:ip-addr
|
||||
:props
|
||||
:context])
|
||||
|
||||
(defn- row->event
|
||||
[row]
|
||||
(select-keys row event-keys))
|
||||
|
||||
(defn- send!
|
||||
[{:keys [::uri] :as cfg} events]
|
||||
(let [token (tokens/generate (::setup/props cfg)
|
||||
{:iss "authentication"
|
||||
:iat (dt/now)
|
||||
:uid uuid/zero})
|
||||
body (t/encode {:events events})
|
||||
headers {"content-type" "application/transit+json"
|
||||
"origin" (cf/get :public-uri)
|
||||
"cookie" (u/map->query-string {:auth-token token})}
|
||||
params {:uri uri
|
||||
:timeout 12000
|
||||
:method :post
|
||||
:headers headers
|
||||
:body body}
|
||||
resp (http/req! cfg params)]
|
||||
(if (= (:status resp) 204)
|
||||
true
|
||||
(do
|
||||
(l/error :hint "unable to archive events"
|
||||
:resp-status (:status resp)
|
||||
:resp-body (:body resp))
|
||||
false))))
|
||||
|
||||
(defn- mark-archived!
|
||||
[{:keys [::db/conn]} rows]
|
||||
(let [ids (db/create-array conn "uuid" (map :id rows))]
|
||||
(db/exec-one! conn ["update audit_log set archived_at=now() where id = ANY(?)" ids])))
|
||||
|
||||
(def ^:private xf:create-event
|
||||
(comp (map decode-row)
|
||||
(map row->event)))
|
||||
|
||||
(def ^:private sql:get-audit-log-chunk
|
||||
"SELECT *
|
||||
FROM audit_log
|
||||
WHERE archived_at is null
|
||||
ORDER BY created_at ASC
|
||||
LIMIT 128
|
||||
FOR UPDATE
|
||||
SKIP LOCKED")
|
||||
|
||||
(defn- get-event-rows
|
||||
[{:keys [::db/conn] :as cfg}]
|
||||
(->> (db/exec! conn [sql:get-audit-log-chunk])
|
||||
(not-empty)))
|
||||
|
||||
(defn- archive-events!
|
||||
[{:keys [::uri] :as cfg}]
|
||||
(db/tx-run! cfg (fn [cfg]
|
||||
(when-let [rows (get-event-rows cfg)]
|
||||
(let [events (into [] xf:create-event rows)]
|
||||
(l/trc :hint "archive events chunk" :uri uri :events (count events))
|
||||
(when (send! cfg events)
|
||||
(mark-archived! cfg rows)
|
||||
(count events)))))))
|
||||
|
||||
(defmethod ig/pre-init-spec ::handler [_]
|
||||
(s/keys :req [::db/pool ::setup/props ::http/client]))
|
||||
|
||||
(defmethod ig/init-key ::handler
  [_ cfg]
  (fn [params]
    ;; NOTE: this let allows overwrite default configured values from
    ;; the repl, when manually invoking the task.
    (let [enabled (or (contains? cf/flags :audit-log-archive)
                      (:enabled params false))

          uri     (cf/get :audit-log-archive-uri)
          uri     (or uri (:uri params))
          cfg     (assoc cfg ::uri uri)]

      (when (and enabled (not uri))
        (ex/raise :type :internal
                  :code :task-not-configured
                  :hint "archive task not configured, missing uri"))

      (when enabled
        (loop [total 0]
          (if-let [n (archive-events! cfg)]
            (do
              (px/sleep 100)
              (recur (+ total ^long n)))

            (when (pos? total)
              (l/dbg :hint "events archived" :total total))))))))

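;; A standalone sketch of the chunked loop driving the archive task
;; above: a batch function returns the number of rows it handled, or nil
;; when nothing is left, and the loop accumulates the total with a short
;; pause between chunks. The batch function below is illustrative.
(defn drain-batches!
  [batch-fn]
  (loop [total 0]
    (if-let [n (batch-fn)]
      (do
        (Thread/sleep 100)   ;; the task uses px/sleep for the same pause
        (recur (+ total (long n))))
      total)))

;; (let [batches (atom [128 128 40])]
;;   (drain-batches! (fn [] (when-let [n (first @batches)]
;;                            (swap! batches rest)
;;                            n))))
;; => 296
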
@@ -1,31 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.loggers.audit.gc-task
|
||||
(:require
|
||||
[app.common.logging :as l]
|
||||
[app.db :as db]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]))
|
||||
|
||||
(def ^:private sql:clean-archived
|
||||
"DELETE FROM audit_log
|
||||
WHERE archived_at IS NOT NULL")
|
||||
|
||||
(defn- clean-archived!
|
||||
[{:keys [::db/pool]}]
|
||||
(let [result (db/exec-one! pool [sql:clean-archived])
|
||||
result (db/get-update-count result)]
|
||||
(l/debug :hint "delete archived audit log entries" :deleted result)
|
||||
result))
|
||||
|
||||
(defmethod ig/pre-init-spec ::handler [_]
|
||||
(s/keys :req [::db/pool]))
|
||||
|
||||
(defmethod ig/init-key ::handler
|
||||
[_ cfg]
|
||||
(fn [_]
|
||||
(clean-archived! cfg)))
|
||||
@@ -39,40 +39,33 @@
|
||||
(defn record->report
|
||||
[{:keys [::l/context ::l/message ::l/props ::l/logger ::l/level ::l/cause] :as record}]
|
||||
(us/assert! ::l/record record)
|
||||
(if (or (instance? java.util.concurrent.CompletionException cause)
|
||||
(instance? java.util.concurrent.ExecutionException cause))
|
||||
(-> record
|
||||
(assoc ::trace (ex/format-throwable cause :data? false :explain? false :header? false :summary? false))
|
||||
(assoc ::l/cause (ex-cause cause))
|
||||
(record->report))
|
||||
|
||||
(let [data (ex-data cause)
|
||||
ctx (-> context
|
||||
(assoc :tenant (cf/get :tenant))
|
||||
(assoc :host (cf/get :host))
|
||||
(assoc :public-uri (cf/get :public-uri))
|
||||
(assoc :logger/name logger)
|
||||
(assoc :logger/level level)
|
||||
(dissoc :request/params :value :params :data))]
|
||||
(merge
|
||||
{:context (-> (into (sorted-map) ctx)
|
||||
(pp/pprint-str :length 50))
|
||||
:props (pp/pprint-str props :length 50)
|
||||
:hint (or (ex-message cause) @message)
|
||||
:trace (or (::trace record)
|
||||
(ex/format-throwable cause :data? false :explain? false :header? false :summary? false))}
|
||||
(let [data (ex-data cause)
|
||||
ctx (-> context
|
||||
(assoc :tenant (cf/get :tenant))
|
||||
(assoc :host (cf/get :host))
|
||||
(assoc :public-uri (cf/get :public-uri))
|
||||
(assoc :logger/name logger)
|
||||
(assoc :logger/level level)
|
||||
(dissoc :request/params :value :params :data))]
|
||||
(merge
|
||||
{:context (-> (into (sorted-map) ctx)
|
||||
(pp/pprint-str :width 200 :length 50 :level 10))
|
||||
:props (pp/pprint-str props :width 200 :length 50)
|
||||
:hint (or (ex-message cause) @message)
|
||||
:trace (ex/format-throwable cause :data? false :explain? false :header? false :summary? false)}
|
||||
|
||||
(when-let [params (or (:request/params context) (:params context))]
|
||||
{:params (pp/pprint-str params :length 30 :level 12)})
|
||||
(when-let [params (or (:request/params context) (:params context))]
|
||||
{:params (pp/pprint-str params :width 200 :length 50 :level 10)})
|
||||
|
||||
(when-let [value (:value context)]
|
||||
{:value (pp/pprint-str value :length 30 :level 12)})
|
||||
(when-let [value (:value context)]
|
||||
{:value (pp/pprint-str value :width 200 :length 50 :level 10)})
|
||||
|
||||
(when-let [data (some-> data (dissoc ::s/problems ::s/value ::s/spec ::sm/explain :hint))]
|
||||
{:data (pp/pprint-str data :length 30 :level 12)})
|
||||
(when-let [data (some-> data (dissoc ::s/problems ::s/value ::s/spec ::sm/explain :hint))]
|
||||
{:data (pp/pprint-str data :width 200)})
|
||||
|
||||
(when-let [explain (ex/explain data :length 30 :level 12)]
|
||||
{:explain explain})))))
|
||||
(when-let [explain (ex/explain data {:level 10 :length 50})]
|
||||
{:explain explain}))))
|
||||
|
||||
(defn error-record?
|
||||
[{:keys [::l/level ::l/cause]}]
|
||||
@@ -96,11 +89,11 @@
|
||||
|
||||
(defmethod ig/init-key ::reporter
|
||||
[_ cfg]
|
||||
(let [input (sp/chan :buf (sp/sliding-buffer 64)
|
||||
(let [input (sp/chan :buf (sp/sliding-buffer 32)
|
||||
:xf (filter error-record?))]
|
||||
(add-watch l/log-record ::reporter #(sp/put! input %4))
|
||||
|
||||
(px/thread {:name "penpot/database-reporter"}
|
||||
(px/thread {:name "penpot/database-reporter" :virtual true}
|
||||
(l/info :hint "initializing database error persistence")
|
||||
(try
|
||||
(loop []
|
||||
|
||||
@@ -23,20 +23,17 @@
|
||||
|
||||
(defn- send-mattermost-notification!
|
||||
[cfg {:keys [id public-uri] :as report}]
|
||||
|
||||
|
||||
(let [text (str "Exception: " public-uri "/dbg/error/" id " "
|
||||
(when-let [pid (:profile-id report)]
|
||||
(str "(pid: #uuid-" pid ")"))
|
||||
"\n"
|
||||
"- host: #" (:host report) "\n"
|
||||
"- tenant: #" (:tenant report) "\n"
|
||||
"- logger: #" (:logger report) "\n"
|
||||
"```\n"
|
||||
"- host: `" (:host report) "`\n"
|
||||
"- tenant: `" (:tenant report) "`\n"
|
||||
"- request-path: `" (:request-path report) "`\n"
|
||||
"- frontend-version: `" (:frontend-version report) "`\n"
|
||||
"- backend-version: `" (:backend-version report) "`\n"
|
||||
"\n"
|
||||
"```\n"
|
||||
"Trace:\n"
|
||||
(:trace report)
|
||||
"```")
|
||||
@@ -63,7 +60,6 @@
|
||||
:frontend-version (:version/frontend context)
|
||||
:profile-id (:request/profile-id context)
|
||||
:request-path (:request/path context)
|
||||
:logger (::l/logger record)
|
||||
:trace (ex/format-throwable cause :detail? false :header? false)})
|
||||
|
||||
(defn handle-event
|
||||
|
||||
@@ -15,9 +15,9 @@
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.http.client :as http]
|
||||
[app.util.json :as json]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.data.json :as json]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]))
|
||||
@@ -67,10 +67,12 @@
|
||||
[_ {:keys [::db/pool] :as cfg}]
|
||||
(fn [{:keys [props] :as task}]
|
||||
(let [event (::event props)]
|
||||
(l/dbg :hint "process webhook event" :name (:name event))
|
||||
|
||||
(l/debug :hint "process webhook event"
|
||||
:name (:name event))
|
||||
|
||||
(when-let [items (lookup-webhooks cfg event)]
|
||||
(l/trc :hint "webhooks found for event" :total (count items))
|
||||
(l/trace :hint "webhooks found for event" :total (count items))
|
||||
|
||||
(db/with-atomic [conn pool]
|
||||
(doseq [item items]
|
||||
@@ -86,9 +88,11 @@
|
||||
(declare interpret-exception)
(declare interpret-response)

(def json-write-opts
  {:key-fn str/camel
   :indent true})
(def ^:private json-mapper
  (json/mapper
   {:encode-key-fn str/camel
    :decode-key-fn (comp keyword str/kebab)
    :pretty true}))

(defmethod ig/pre-init-spec ::run-webhook-handler [_]
  (s/keys :req [::http/client ::db/pool]))
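
;; A standalone sketch of the camelCase key convention configured above
;; for webhook payloads, using clojure.data.json (required on one side of
;; this diff) and a hand-rolled key function instead of cuerdas.
(require '[clojure.data.json :as json]
         '[clojure.string :as cstr])

(defn camel-key
  [k]
  (let [[head & tail] (cstr/split (name k) #"-")]
    (apply str head (map cstr/capitalize tail))))

;; (json/write-str {:profile-id "42" :event-name "create-file"} :key-fn camel-key)
;; => "{\"profileId\":\"42\",\"eventName\":\"create-file\"}"
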
@@ -107,11 +111,9 @@
|
||||
" where id=?")
|
||||
err
|
||||
(:id whook)]
|
||||
res (db/exec-one! pool sql {::db/return-keys true})]
|
||||
res (db/exec-one! pool sql {::db/return-keys? true})]
|
||||
(when (>= (:error-count res) max-errors)
|
||||
(db/update! pool :webhook
|
||||
{:is-active false}
|
||||
{:id (:id whook)})))
|
||||
(db/update! pool :webhook {:is-active false} {:id (:id whook)})))
|
||||
|
||||
(db/update! pool :webhook
|
||||
{:updated-at (dt/now)
|
||||
@@ -132,15 +134,15 @@
|
||||
whook (::config props)
|
||||
|
||||
body (case (:mtype whook)
|
||||
"application/json" (json/write-str event json-write-opts)
|
||||
"application/json" (json/encode-str event json-mapper)
|
||||
"application/transit+json" (t/encode-str event)
|
||||
"application/x-www-form-urlencoded" (uri/map->query-string event))]
|
||||
|
||||
(l/dbg :hint "run webhook"
|
||||
:event-name (:name event)
|
||||
:webhook-id (:id whook)
|
||||
:webhook-uri (:uri whook)
|
||||
:webhook-mtype (:mtype whook))
|
||||
(l/debug :hint "run webhook"
|
||||
:event-name (:name event)
|
||||
:webhook-id (:id whook)
|
||||
:webhook-uri (:uri whook)
|
||||
:webhook-mtype (:mtype whook))
|
||||
|
||||
(let [req {:uri (:uri whook)
|
||||
:headers {"content-type" (:mtype whook)
|
||||
@@ -158,8 +160,8 @@
|
||||
(report-delivery! whook req nil err)
|
||||
(update-webhook! whook err)
|
||||
(when (= err "unknown")
|
||||
(l/err :hint "unknown error on webhook request"
|
||||
:cause cause))))))))))
|
||||
(l/error :hint "unknown error on webhook request"
|
||||
:cause cause))))))))))
|
||||
|
||||
(defn interpret-response
|
||||
[{:keys [status] :as response}]
|
||||
@@ -180,4 +182,5 @@
|
||||
"invalid-uri"
|
||||
|
||||
(instance? java.net.http.HttpConnectTimeoutException cause)
|
||||
"timeout"))
|
||||
"timeout"
|
||||
))
|
||||
|
||||
@@ -21,10 +21,10 @@
|
||||
[app.http.session :as-alias session]
|
||||
[app.http.session.tasks :as-alias session.tasks]
|
||||
[app.http.websocket :as http.ws]
|
||||
[app.loggers.audit.tasks :as-alias audit.tasks]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.metrics :as-alias mtx]
|
||||
[app.metrics.definition :as-alias mdef]
|
||||
[app.migrations.v2 :as migrations.v2]
|
||||
[app.msgbus :as-alias mbus]
|
||||
[app.redis :as-alias rds]
|
||||
[app.rpc :as-alias rpc]
|
||||
@@ -33,18 +33,11 @@
|
||||
[app.srepl :as-alias srepl]
|
||||
[app.storage :as-alias sto]
|
||||
[app.storage.fs :as-alias sto.fs]
|
||||
[app.storage.gc-deleted :as-alias sto.gc-deleted]
|
||||
[app.storage.gc-touched :as-alias sto.gc-touched]
|
||||
[app.storage.s3 :as-alias sto.s3]
|
||||
[app.svgo :as-alias svgo]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[cider.nrepl :refer [cider-nrepl-handler]]
|
||||
[clojure.test :as test]
|
||||
[clojure.tools.namespace.repl :as repl]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]
|
||||
[nrepl.server :as nrepl]
|
||||
[promesa.exec :as px])
|
||||
(:gen-class))
|
||||
|
||||
@@ -162,6 +155,12 @@
|
||||
{::mdef/name "penpot_executors_running_threads"
|
||||
::mdef/help "Current number of threads with state RUNNING."
|
||||
::mdef/labels ["name"]
|
||||
::mdef/type :gauge}
|
||||
|
||||
:executors-queued-submissions
|
||||
{::mdef/name "penpot_executors_queued_submissions"
|
||||
::mdef/help "Current number of queued submissions."
|
||||
::mdef/labels ["name"]
|
||||
::mdef/type :gauge}})
|
||||
|
||||
(def system-config
|
||||
@@ -176,12 +175,13 @@
|
||||
|
||||
;; Default thread pool for IO operations
|
||||
::wrk/executor
|
||||
{}
|
||||
{::wrk/parallelism (cf/get :default-executor-parallelism
|
||||
(+ 3 (* (px/get-available-processors) 3)))}
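;; Illustrative note (not part of the patch): with the default above, a host
;; reporting 4 available processors gets (+ 3 (* 4 3)) => 15 permits for the
;; default IO executor, unless :default-executor-parallelism overrides it.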
|
||||
|
||||
::wrk/monitor
|
||||
{::mtx/metrics (ig/ref ::mtx/metrics)
|
||||
::wrk/executor (ig/ref ::wrk/executor)
|
||||
::wrk/name "default"}
|
||||
::wrk/name "default"
|
||||
::wrk/executor (ig/ref ::wrk/executor)}
|
||||
|
||||
:app.migrations/migrations
|
||||
{::db/pool (ig/ref ::db/pool)}
|
||||
@@ -204,15 +204,15 @@
|
||||
:app.storage.tmp/cleaner
|
||||
{::wrk/executor (ig/ref ::wrk/executor)}
|
||||
|
||||
::sto.gc-deleted/handler
|
||||
::sto/gc-deleted-task
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
::sto/storage (ig/ref ::sto/storage)}
|
||||
|
||||
::sto.gc-touched/handler
|
||||
::sto/gc-touched-task
|
||||
{::db/pool (ig/ref ::db/pool)}
|
||||
|
||||
::http.client/client
|
||||
{}
|
||||
{::wrk/executor (ig/ref ::wrk/executor)}
|
||||
|
||||
::session/manager
|
||||
{::db/pool (ig/ref ::db/pool)}
|
||||
@@ -221,14 +221,16 @@
|
||||
{::db/pool (ig/ref ::db/pool)}
|
||||
|
||||
::http.awsns/routes
|
||||
{::setup/props (ig/ref ::setup/props)
|
||||
{::props (ig/ref ::setup/props)
|
||||
::db/pool (ig/ref ::db/pool)
|
||||
::http.client/client (ig/ref ::http.client/client)}
|
||||
::http.client/client (ig/ref ::http.client/client)
|
||||
::wrk/executor (ig/ref ::wrk/executor)}
|
||||
|
||||
::http/server
|
||||
{::http/port (cf/get :http-server-port)
|
||||
::http/host (cf/get :http-server-host)
|
||||
::http/router (ig/ref ::http/router)
|
||||
::wrk/executor (ig/ref ::wrk/executor)
|
||||
::http/io-threads (cf/get :http-server-io-threads)
|
||||
::http/max-body-size (cf/get :http-server-max-body-size)
|
||||
::http/max-multipart-body-size (cf/get :http-server-max-multipart-body-size)}
|
||||
@@ -262,7 +264,7 @@
|
||||
::oidc/routes
|
||||
{::http.client/client (ig/ref ::http.client/client)
|
||||
::db/pool (ig/ref ::db/pool)
|
||||
::setup/props (ig/ref ::setup/props)
|
||||
::props (ig/ref ::setup/props)
|
||||
::oidc/providers {:google (ig/ref ::oidc.providers/google)
|
||||
:github (ig/ref ::oidc.providers/github)
|
||||
:gitlab (ig/ref ::oidc.providers/gitlab)
|
||||
@@ -274,7 +276,7 @@
|
||||
::db/pool (ig/ref ::db/pool)
|
||||
::rpc/routes (ig/ref ::rpc/routes)
|
||||
::rpc.doc/routes (ig/ref ::rpc.doc/routes)
|
||||
::setup/props (ig/ref ::setup/props)
|
||||
::props (ig/ref ::setup/props)
|
||||
::mtx/routes (ig/ref ::mtx/routes)
|
||||
::oidc/routes (ig/ref ::oidc/routes)
|
||||
::http.debug/routes (ig/ref ::http.debug/routes)
|
||||
@@ -282,11 +284,11 @@
|
||||
::http.ws/routes (ig/ref ::http.ws/routes)
|
||||
::http.awsns/routes (ig/ref ::http.awsns/routes)}
|
||||
|
||||
::http.debug/routes
|
||||
:app.http.debug/routes
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
::wrk/executor (ig/ref ::wrk/executor)
|
||||
::session/manager (ig/ref ::session/manager)
|
||||
::sto/storage (ig/ref ::sto/storage)
|
||||
::setup/props (ig/ref ::setup/props)}
|
||||
::sto/storage (ig/ref ::sto/storage)}
|
||||
|
||||
::http.ws/routes
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
@@ -298,7 +300,8 @@
|
||||
{::http.assets/path (cf/get :assets-path)
|
||||
::http.assets/cache-max-age (dt/duration {:hours 24})
|
||||
::http.assets/signature-max-age (dt/duration {:hours 24 :minutes 5})
|
||||
::sto/storage (ig/ref ::sto/storage)}
|
||||
::sto/storage (ig/ref ::sto/storage)
|
||||
::wrk/executor (ig/ref ::wrk/executor)}
|
||||
|
||||
:app.rpc/climit
|
||||
{::mtx/metrics (ig/ref ::mtx/metrics)
|
||||
@@ -317,12 +320,14 @@
|
||||
::mtx/metrics (ig/ref ::mtx/metrics)
|
||||
::mbus/msgbus (ig/ref ::mbus/msgbus)
|
||||
::rds/redis (ig/ref ::rds/redis)
|
||||
::svgo/optimizer (ig/ref ::svgo/optimizer)
|
||||
|
||||
::rpc/climit (ig/ref ::rpc/climit)
|
||||
::rpc/rlimit (ig/ref ::rpc/rlimit)
|
||||
::setup/templates (ig/ref ::setup/templates)
|
||||
::setup/props (ig/ref ::setup/props)}
|
||||
::props (ig/ref ::setup/props)
|
||||
|
||||
:pool (ig/ref ::db/pool)
|
||||
}
|
||||
|
||||
:app.rpc.doc/routes
|
||||
{:methods (ig/ref :app.rpc/methods)}
|
||||
@@ -330,24 +335,24 @@
|
||||
:app.rpc/routes
|
||||
{::rpc/methods (ig/ref :app.rpc/methods)
|
||||
::db/pool (ig/ref ::db/pool)
|
||||
::wrk/executor (ig/ref ::wrk/executor)
|
||||
::session/manager (ig/ref ::session/manager)
|
||||
::setup/props (ig/ref ::setup/props)}
|
||||
::props (ig/ref ::setup/props)}
|
||||
|
||||
::wrk/registry
|
||||
{::mtx/metrics (ig/ref ::mtx/metrics)
|
||||
::wrk/tasks
|
||||
{:sendmail (ig/ref ::email/handler)
|
||||
:objects-gc (ig/ref :app.tasks.objects-gc/handler)
|
||||
:orphan-teams-gc (ig/ref :app.tasks.orphan-teams-gc/handler)
|
||||
:file-gc (ig/ref :app.tasks.file-gc/handler)
|
||||
:file-xlog-gc (ig/ref :app.tasks.file-xlog-gc/handler)
|
||||
:storage-gc-deleted (ig/ref ::sto/gc-deleted-task)
|
||||
:storage-gc-touched (ig/ref ::sto/gc-touched-task)
|
||||
:tasks-gc (ig/ref :app.tasks.tasks-gc/handler)
|
||||
:telemetry (ig/ref :app.tasks.telemetry/handler)
|
||||
:storage-gc-deleted (ig/ref ::sto.gc-deleted/handler)
|
||||
:storage-gc-touched (ig/ref ::sto.gc-touched/handler)
|
||||
:session-gc (ig/ref ::session.tasks/gc)
|
||||
:audit-log-archive (ig/ref :app.loggers.audit.archive-task/handler)
|
||||
:audit-log-gc (ig/ref :app.loggers.audit.gc-task/handler)
|
||||
:audit-log-archive (ig/ref ::audit.tasks/archive)
|
||||
:audit-log-gc (ig/ref ::audit.tasks/gc)
|
||||
|
||||
:process-webhook-event
|
||||
(ig/ref ::webhooks/process-event-handler)
|
||||
@@ -375,9 +380,6 @@
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
::sto/storage (ig/ref ::sto/storage)}
|
||||
|
||||
:app.tasks.orphan-teams-gc/handler
|
||||
{::db/pool (ig/ref ::db/pool)}
|
||||
|
||||
:app.tasks.file-gc/handler
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
::sto/storage (ig/ref ::sto/storage)}
|
||||
@@ -388,7 +390,7 @@
|
||||
:app.tasks.telemetry/handler
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
::http.client/client (ig/ref ::http.client/client)
|
||||
::setup/props (ig/ref ::setup/props)}
|
||||
::props (ig/ref ::setup/props)}
|
||||
|
||||
[::srepl/urepl ::srepl/server]
|
||||
{::srepl/port (cf/get :urepl-port 6062)
|
||||
@@ -402,21 +404,18 @@
|
||||
|
||||
::setup/props
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
::setup/key (cf/get :secret-key)
|
||||
::key (cf/get :secret-key)
|
||||
|
||||
;; NOTE: this dependency is only necessary for proper initialization ordering; the
;; props module requires the migrations to run before it initializes.
|
||||
::migrations (ig/ref :app.migrations/migrations)}
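;; Hypothetical sketch (not from this patch, keys are made up): integrant
;; derives init order from ig/ref edges, so a component that only needs
;; ordering can still declare the dependency and ignore the injected value.
(comment
  (require '[integrant.core :as ig])
  (defmethod ig/init-key :example/migrations [_ _] :migrated)
  (defmethod ig/init-key :example/props [_ deps] deps)
  ;; :example/migrations is initialized first because :example/props refs it
  (ig/init {:example/migrations {}
            :example/props      {:migrations (ig/ref :example/migrations)}}))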
|
||||
|
||||
::svgo/optimizer
|
||||
{}
|
||||
|
||||
:app.loggers.audit.archive-task/handler
|
||||
{::setup/props (ig/ref ::setup/props)
|
||||
::audit.tasks/archive
|
||||
{::props (ig/ref ::setup/props)
|
||||
::db/pool (ig/ref ::db/pool)
|
||||
::http.client/client (ig/ref ::http.client/client)}
|
||||
|
||||
:app.loggers.audit.gc-task/handler
|
||||
::audit.tasks/gc
|
||||
{::db/pool (ig/ref ::db/pool)}
|
||||
|
||||
::webhooks/process-event-handler
|
||||
@@ -435,18 +434,20 @@
|
||||
|
||||
::sto/storage
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
::wrk/executor (ig/ref ::wrk/executor)
|
||||
::sto/backends
|
||||
{:assets-s3 (ig/ref [::assets :app.storage.s3/backend])
|
||||
:assets-fs (ig/ref [::assets :app.storage.fs/backend])}}
|
||||
|
||||
[::assets :app.storage.s3/backend]
|
||||
{::sto.s3/region (cf/get :storage-assets-s3-region)
|
||||
::sto.s3/endpoint (cf/get :storage-assets-s3-endpoint)
|
||||
::sto.s3/bucket (cf/get :storage-assets-s3-bucket)
|
||||
::sto.s3/io-threads (cf/get :storage-assets-s3-io-threads)}
|
||||
{::sto.s3/region (cf/get :storage-assets-s3-region)
|
||||
::sto.s3/endpoint (cf/get :storage-assets-s3-endpoint)
|
||||
::sto.s3/bucket (cf/get :storage-assets-s3-bucket)
|
||||
::wrk/executor (ig/ref ::wrk/executor)}
|
||||
|
||||
[::assets :app.storage.fs/backend]
|
||||
{::sto.fs/directory (cf/get :storage-assets-fs-directory)}})
|
||||
{::sto.fs/directory (cf/get :storage-assets-fs-directory)}
|
||||
})
|
||||
|
||||
|
||||
(def worker-config
|
||||
@@ -463,9 +464,6 @@
|
||||
{:cron #app/cron "0 0 0 * * ?" ;; daily
|
||||
:task :objects-gc}
|
||||
|
||||
{:cron #app/cron "0 0 0 * * ?" ;; daily
|
||||
:task :orphan-teams-gc}
|
||||
|
||||
{:cron #app/cron "0 0 0 * * ?" ;; daily
|
||||
:task :storage-gc-deleted}
|
||||
|
||||
@@ -494,7 +492,7 @@
|
||||
::mtx/metrics (ig/ref ::mtx/metrics)
|
||||
::db/pool (ig/ref ::db/pool)}
|
||||
|
||||
[::default ::wrk/runner]
|
||||
[::default ::wrk/worker]
|
||||
{::wrk/parallelism (cf/get ::worker-default-parallelism 1)
|
||||
::wrk/queue :default
|
||||
::rds/redis (ig/ref ::rds/redis)
|
||||
@@ -502,7 +500,7 @@
|
||||
::mtx/metrics (ig/ref ::mtx/metrics)
|
||||
::db/pool (ig/ref ::db/pool)}
|
||||
|
||||
[::webhook ::wrk/runner]
|
||||
[::webhook ::wrk/worker]
|
||||
{::wrk/parallelism (cf/get ::worker-webhook-parallelism 1)
|
||||
::wrk/queue :webhooks
|
||||
::rds/redis (ig/ref ::rds/redis)
|
||||
@@ -523,79 +521,22 @@
|
||||
(merge worker-config))
|
||||
(ig/prep)
|
||||
(ig/init))))
|
||||
(l/inf :hint "welcome to penpot"
|
||||
:flags (str/join "," (map name cf/flags))
|
||||
:worker? (contains? cf/flags :backend-worker)
|
||||
:version (:full cf/version)))
|
||||
|
||||
(defn start-custom
|
||||
[config]
|
||||
(ig/load-namespaces config)
|
||||
(alter-var-root #'system (fn [sys]
|
||||
(when sys (ig/halt! sys))
|
||||
(-> config
|
||||
(ig/prep)
|
||||
(ig/init)))))
|
||||
(l/info :hint "welcome to penpot"
|
||||
:flags (str/join "," (map name cf/flags))
|
||||
:worker? (contains? cf/flags :backend-worker)
|
||||
:version (:full cf/version)))
|
||||
|
||||
(defn stop
|
||||
[]
|
||||
(alter-var-root #'system (fn [sys]
|
||||
(when sys (ig/halt! sys))
|
||||
nil)))
|
||||
(defn restart
|
||||
[]
|
||||
(stop)
|
||||
(repl/refresh :after 'app.main/start))
|
||||
|
||||
(defn restart-all
|
||||
[]
|
||||
(stop)
|
||||
(repl/refresh-all :after 'app.main/start))
|
||||
|
||||
(defmacro run-bench
|
||||
[& exprs]
|
||||
`(do
|
||||
(require 'criterium.core)
|
||||
(criterium.core/with-progress-reporting (criterium.core/quick-bench (do ~@exprs) :verbose))))
|
||||
|
||||
(defn run-tests
|
||||
([] (run-tests #"^backend-tests.*-test$"))
|
||||
([o]
|
||||
(repl/refresh)
|
||||
(cond
|
||||
(instance? java.util.regex.Pattern o)
|
||||
(test/run-all-tests o)
|
||||
|
||||
(symbol? o)
|
||||
(if-let [sns (namespace o)]
|
||||
(do (require (symbol sns))
|
||||
(test/test-vars [(resolve o)]))
|
||||
(test/test-ns o)))))
|
||||
|
||||
(repl/disable-reload! (find-ns 'integrant.core))
|
||||
|
||||
(defn -main
|
||||
[& _args]
|
||||
(try
|
||||
(let [p (promise)]
|
||||
(when (contains? cf/flags :nrepl-server)
|
||||
(l/inf :hint "start nrepl server" :port 6064)
|
||||
(nrepl/start-server :bind "0.0.0.0" :port 6064 :handler cider-nrepl-handler))
|
||||
|
||||
(start)
|
||||
|
||||
(when (contains? cf/flags :v2-migration)
|
||||
(px/sleep 5000)
|
||||
(migrations.v2/migrate app.main/system))
|
||||
|
||||
(deref p))
|
||||
(start)
|
||||
(catch Throwable cause
|
||||
(binding [*out* *err*]
|
||||
(println "==== ERROR ===="))
|
||||
(.printStackTrace cause)
|
||||
(when-let [cause' (ex-cause cause)]
|
||||
(binding [*out* *err*]
|
||||
(println "==== CAUSE ===="))
|
||||
(.printStackTrace cause'))
|
||||
(px/sleep 500)
|
||||
(l/error :hint (ex-message cause)
|
||||
:cause cause)
|
||||
(System/exit -1))))
|
||||
|
||||
@@ -14,11 +14,11 @@
|
||||
[app.common.schema.generators :as sg]
|
||||
[app.common.schema.openapi :as-alias oapi]
|
||||
[app.common.spec :as us]
|
||||
[app.common.svg :as csvg]
|
||||
[app.config :as cf]
|
||||
[app.db :as-alias db]
|
||||
[app.storage :as-alias sto]
|
||||
[app.storage.tmp :as tmp]
|
||||
[app.util.svg :as svg]
|
||||
[app.util.time :as dt]
|
||||
[buddy.core.bytes :as bb]
|
||||
[buddy.core.codecs :as bc]
|
||||
@@ -32,6 +32,9 @@
|
||||
org.im4java.core.IMOperation
|
||||
org.im4java.core.Info))
|
||||
|
||||
(def default-max-file-size
|
||||
(* 1024 1024 30)) ; 30 MiB
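;; Illustrative note (not part of the patch): (* 1024 1024 30) evaluates to
;; 31457280 bytes, i.e. 30 MiB.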
|
||||
|
||||
(s/def ::path fs/path?)
|
||||
(s/def ::filename string?)
|
||||
(s/def ::size integer?)
|
||||
@@ -80,14 +83,13 @@
|
||||
|
||||
(defn validate-media-size!
|
||||
[upload]
|
||||
(let [max-size (cf/get :media-max-file-size)]
|
||||
(when (> (:size upload) max-size)
|
||||
(ex/raise :type :restriction
|
||||
:code :media-max-file-size-reached
|
||||
:hint (str/ffmt "the uploaded file size % is greater than the maximum %"
|
||||
(:size upload)
|
||||
max-size)))
|
||||
upload))
|
||||
(when (> (:size upload) (cf/get :media-max-file-size default-max-file-size))
|
||||
(ex/raise :type :restriction
|
||||
:code :media-max-file-size-reached
|
||||
:hint (str/ffmt "the uploaded file size % is greater than the maximum %"
|
||||
(:size upload)
|
||||
default-max-file-size)))
|
||||
upload)
|
||||
|
||||
(defmulti process :cmd)
|
||||
(defmulti process-error class)
|
||||
@@ -199,7 +201,7 @@
|
||||
(us/assert ::input input)
|
||||
(let [{:keys [path mtype]} input]
|
||||
(if (= mtype "image/svg+xml")
|
||||
(let [info (some-> path slurp csvg/parse get-basic-info-from-svg)]
|
||||
(let [info (some-> path slurp svg/pre-process svg/parse get-basic-info-from-svg)]
|
||||
(when-not info
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-svg-file
|
||||
|
||||
@@ -94,7 +94,7 @@
|
||||
writer (StringWriter.)]
|
||||
(TextFormat/write004 writer samples)
|
||||
{:headers {"content-type" TextFormat/CONTENT_TYPE_004}
|
||||
:body (.toString writer)}))
|
||||
:body (.toString writer)}))
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -330,56 +330,7 @@
|
||||
{:name "0105-mod-server-error-report-table"
|
||||
:fn (mg/resource "app/migrations/sql/0105-mod-server-error-report-table.sql")}
|
||||
|
||||
{:name "0106-add-file-tagged-object-thumbnail-table"
|
||||
:fn (mg/resource "app/migrations/sql/0106-add-file-tagged-object-thumbnail-table.sql")}
|
||||
|
||||
{:name "0106-mod-team-table"
|
||||
:fn (mg/resource "app/migrations/sql/0106-mod-team-table.sql")}
|
||||
|
||||
{:name "0107-mod-file-tagged-object-thumbnail-table"
|
||||
:fn (mg/resource "app/migrations/sql/0107-mod-file-tagged-object-thumbnail-table.sql")}
|
||||
|
||||
{:name "0107-add-deletion-protection-trigger-function"
|
||||
:fn (mg/resource "app/migrations/sql/0107-add-deletion-protection-trigger-function.sql")}
|
||||
|
||||
{:name "0108-mod-file-thumbnail-table"
|
||||
:fn (mg/resource "app/migrations/sql/0108-mod-file-thumbnail-table.sql")}
|
||||
|
||||
{:name "0109-mod-file-tagged-object-thumbnail-table"
|
||||
:fn (mg/resource "app/migrations/sql/0109-mod-file-tagged-object-thumbnail-table.sql")}
|
||||
|
||||
{:name "0110-mod-file-media-object-table"
|
||||
:fn (mg/resource "app/migrations/sql/0110-mod-file-media-object-table.sql")}
|
||||
|
||||
{:name "0111-mod-file-data-fragment-table"
|
||||
:fn (mg/resource "app/migrations/sql/0111-mod-file-data-fragment-table.sql")}
|
||||
|
||||
{:name "0112-mod-profile-table"
|
||||
:fn (mg/resource "app/migrations/sql/0112-mod-profile-table.sql")}
|
||||
|
||||
{:name "0113-mod-team-font-variant-table"
|
||||
:fn (mg/resource "app/migrations/sql/0113-mod-team-font-variant-table.sql")}
|
||||
|
||||
{:name "0114-mod-team-table"
|
||||
:fn (mg/resource "app/migrations/sql/0114-mod-team-table.sql")}
|
||||
|
||||
{:name "0115-mod-project-table"
|
||||
:fn (mg/resource "app/migrations/sql/0115-mod-project-table.sql")}
|
||||
|
||||
{:name "0116-mod-file-table"
|
||||
:fn (mg/resource "app/migrations/sql/0116-mod-file-table.sql")}
|
||||
|
||||
{:name "0117-mod-file-object-thumbnail-table"
|
||||
:fn (mg/resource "app/migrations/sql/0117-mod-file-object-thumbnail-table.sql")}
|
||||
|
||||
{:name "0118-mod-task-table"
|
||||
:fn (mg/resource "app/migrations/sql/0118-mod-task-table.sql")}
|
||||
|
||||
{:name "0119-mod-file-table"
|
||||
:fn (mg/resource "app/migrations/sql/0119-mod-file-table.sql")}
|
||||
|
||||
{:name "0120-mod-audit-log-table"
|
||||
:fn (mg/resource "app/migrations/sql/0120-mod-audit-log-table.sql")}])
|
||||
])
|
||||
|
||||
(defn apply-migrations!
|
||||
[pool name migrations]
|
||||
|
||||
@@ -1,10 +0,0 @@
|
||||
CREATE TABLE file_tagged_object_thumbnail (
|
||||
file_id uuid NOT NULL REFERENCES file(id) ON DELETE CASCADE DEFERRABLE,
|
||||
tag text DEFAULT 'frame',
|
||||
object_id text NOT NULL,
|
||||
|
||||
media_id uuid NOT NULL REFERENCES storage_object(id) ON DELETE CASCADE DEFERRABLE,
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
|
||||
PRIMARY KEY(file_id, tag, object_id)
|
||||
);
|
||||
@@ -1 +0,0 @@
|
||||
ALTER TABLE team ADD COLUMN features text[] NULL DEFAULT null;
|
||||
@@ -1,8 +0,0 @@
CREATE OR REPLACE FUNCTION raise_deletion_protection()
  RETURNS TRIGGER AS $$
  BEGIN
    RAISE EXCEPTION 'unable to proceed to delete row on "%"', TG_TABLE_NAME
      USING HINT = 'disable deletion protection with "SET rules.deletion_protection TO off"';
    RETURN NULL;
  END;
$$ LANGUAGE plpgsql;
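--- Illustrative usage (not part of this migration, identifiers are examples):
--- a maintenance session has to opt out explicitly before deleting protected
--- rows, roughly as the HINT above suggests:
---   BEGIN;
---   SET rules.deletion_protection TO off;
---   DELETE FROM team WHERE id = '<team-id>';
---   COMMIT;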
|
||||
@@ -1,2 +0,0 @@
|
||||
CREATE INDEX file_tagged_object_thumbnail__media_id__idx
|
||||
ON file_tagged_object_thumbnail (media_id);
|
||||
@@ -1,25 +0,0 @@
|
||||
--- Add missing index for the deleted_at column; we include all related
--- columns because we expect the index to be small and expect to use
--- index-only scans.
|
||||
CREATE INDEX IF NOT EXISTS file_thumbnail__deleted_at__idx
|
||||
ON file_thumbnail (deleted_at, file_id, revn, media_id)
|
||||
WHERE deleted_at IS NOT NULL;
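--- Illustrative query (not part of this migration) that this partial covering
--- index can answer with an index-only scan, assuming the visibility map is
--- reasonably fresh:
---   SELECT file_id, revn, media_id
---     FROM file_thumbnail
---    WHERE deleted_at IS NOT NULL;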
|
||||
|
||||
--- Add missing index for the media_id column, used mainly for refs checking
|
||||
CREATE INDEX IF NOT EXISTS file_thumbnail__media_id__idx ON file_thumbnail (media_id);
|
||||
|
||||
--- Remove CASCADE from media_id and file_id foreign constraint
|
||||
ALTER TABLE file_thumbnail
|
||||
DROP CONSTRAINT file_thumbnail_file_id_fkey,
|
||||
ADD FOREIGN KEY (file_id) REFERENCES file(id) DEFERRABLE;
|
||||
|
||||
ALTER TABLE file_thumbnail
|
||||
DROP CONSTRAINT file_thumbnail_media_id_fkey,
|
||||
ADD FOREIGN KEY (media_id) REFERENCES storage_object(id) DEFERRABLE;
|
||||
|
||||
--- Add deletion protection
|
||||
CREATE OR REPLACE TRIGGER deletion_protection__tgr
|
||||
BEFORE DELETE ON file_thumbnail FOR EACH STATEMENT
|
||||
WHEN ((current_setting('rules.deletion_protection', true) IN ('on', '')) OR
|
||||
(current_setting('rules.deletion_protection', true) IS NULL))
|
||||
EXECUTE PROCEDURE raise_deletion_protection();
|
||||
@@ -1,26 +0,0 @@
|
||||
ALTER TABLE file_tagged_object_thumbnail
|
||||
ADD COLUMN updated_at timestamptz NULL,
|
||||
ADD COLUMN deleted_at timestamptz NULL;
|
||||
|
||||
--- Add index for the deleted_at column; we include all related columns
--- because we expect the index to be small and expect to use index-only
--- scans.
|
||||
CREATE INDEX IF NOT EXISTS file_tagged_object_thumbnail__deleted_at__idx
|
||||
ON file_tagged_object_thumbnail (deleted_at, file_id, object_id, media_id)
|
||||
WHERE deleted_at IS NOT NULL;
|
||||
|
||||
--- Remove CASCADE from media_id and file_id foreign constraint
|
||||
ALTER TABLE file_tagged_object_thumbnail
|
||||
DROP CONSTRAINT file_tagged_object_thumbnail_media_id_fkey,
|
||||
ADD FOREIGN KEY (media_id) REFERENCES storage_object(id) DEFERRABLE;
|
||||
|
||||
ALTER TABLE file_tagged_object_thumbnail
|
||||
DROP CONSTRAINT file_tagged_object_thumbnail_file_id_fkey,
|
||||
ADD FOREIGN KEY (file_id) REFERENCES file(id) DEFERRABLE;
|
||||
|
||||
--- Add deletion protection
|
||||
CREATE OR REPLACE TRIGGER deletion_protection__tgr
|
||||
BEFORE DELETE ON file_tagged_object_thumbnail FOR EACH STATEMENT
|
||||
WHEN ((current_setting('rules.deletion_protection', true) IN ('on', '')) OR
|
||||
(current_setting('rules.deletion_protection', true) IS NULL))
|
||||
EXECUTE PROCEDURE raise_deletion_protection();
|
||||
@@ -1,27 +0,0 @@
|
||||
--- Fix legacy naming
|
||||
ALTER INDEX media_object_pkey RENAME TO file_media_object_pkey;
|
||||
ALTER INDEX media_object__file_id__idx RENAME TO file_media_object__file_id__idx;
|
||||
|
||||
--- Create index for the deleted_at column
|
||||
CREATE INDEX IF NOT EXISTS file_media_object__deleted_at__idx
|
||||
ON file_media_object (deleted_at, id, media_id)
|
||||
WHERE deleted_at IS NOT NULL;
|
||||
|
||||
--- Drop now unnecessary trigger because this will be handled by the
|
||||
--- application code
|
||||
DROP TRIGGER file_media_object__on_delete__tgr ON file_media_object;
|
||||
DROP FUNCTION on_delete_file_media_object ( ) CASCADE;
|
||||
DROP TRIGGER file_media_object__on_insert__tgr ON file_media_object;
|
||||
DROP FUNCTION on_media_object_insert () CASCADE;
|
||||
|
||||
--- Remove CASCADE from file FOREIGN KEY
|
||||
ALTER TABLE file_media_object
|
||||
DROP CONSTRAINT file_media_object_file_id_fkey,
|
||||
ADD FOREIGN KEY (file_id) REFERENCES file(id) DEFERRABLE;
|
||||
|
||||
--- Add deletion protection
|
||||
CREATE OR REPLACE TRIGGER deletion_protection__tgr
|
||||
BEFORE DELETE ON file_media_object FOR EACH STATEMENT
|
||||
WHEN ((current_setting('rules.deletion_protection', true) IN ('on', '')) OR
|
||||
(current_setting('rules.deletion_protection', true) IS NULL))
|
||||
EXECUTE PROCEDURE raise_deletion_protection();
|
||||
@@ -1,9 +0,0 @@
|
||||
ALTER TABLE file_data_fragment
|
||||
ADD COLUMN deleted_at timestamptz NULL;
|
||||
|
||||
--- Add index for the deleted_at column; we include all related columns
--- because we expect the index to be small and expect to use index-only
--- scans.
|
||||
CREATE INDEX IF NOT EXISTS file_data_fragment__deleted_at__idx
|
||||
ON file_data_fragment (deleted_at, file_id, id)
|
||||
WHERE deleted_at IS NOT NULL;
|
||||
@@ -1,15 +0,0 @@
|
||||
ALTER TABLE profile
|
||||
DROP CONSTRAINT profile_photo_id_fkey,
|
||||
ADD FOREIGN KEY (photo_id) REFERENCES storage_object(id) DEFERRABLE,
|
||||
DROP CONSTRAINT profile_default_project_id_fkey,
|
||||
ADD FOREIGN KEY (default_project_id) REFERENCES project(id) DEFERRABLE,
|
||||
DROP CONSTRAINT profile_default_team_id_fkey,
|
||||
ADD FOREIGN KEY (default_team_id) REFERENCES team(id) DEFERRABLE;
|
||||
|
||||
--- Add deletion protection
|
||||
CREATE OR REPLACE TRIGGER deletion_protection__tgr
|
||||
BEFORE DELETE ON profile FOR EACH STATEMENT
|
||||
WHEN ((current_setting('rules.deletion_protection', true) IN ('on', '')) OR
|
||||
(current_setting('rules.deletion_protection', true) IS NULL))
|
||||
EXECUTE PROCEDURE raise_deletion_protection();
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
--- Remove ON DELETE SET NULL from foreign constraint on
|
||||
--- storage_object table
|
||||
ALTER TABLE team_font_variant
|
||||
DROP CONSTRAINT team_font_variant_otf_file_id_fkey,
|
||||
ADD FOREIGN KEY (otf_file_id) REFERENCES storage_object(id) DEFERRABLE,
|
||||
DROP CONSTRAINT team_font_variant_ttf_file_id_fkey,
|
||||
ADD FOREIGN KEY (ttf_file_id) REFERENCES storage_object(id) DEFERRABLE,
|
||||
DROP CONSTRAINT team_font_variant_woff1_file_id_fkey,
|
||||
ADD FOREIGN KEY (woff1_file_id) REFERENCES storage_object(id) DEFERRABLE,
|
||||
DROP CONSTRAINT team_font_variant_woff2_file_id_fkey,
|
||||
ADD FOREIGN KEY (woff2_file_id) REFERENCES storage_object(id) DEFERRABLE,
|
||||
DROP CONSTRAINT team_font_variant_team_id_fkey,
|
||||
ADD FOREIGN KEY (team_id) REFERENCES team(id) DEFERRABLE;
|
||||
|
||||
--- Add deletion protection
|
||||
CREATE OR REPLACE TRIGGER deletion_protection__tgr
|
||||
BEFORE DELETE ON team_font_variant FOR EACH STATEMENT
|
||||
WHEN ((current_setting('rules.deletion_protection', true) IN ('on', '')) OR
|
||||
(current_setting('rules.deletion_protection', true) IS NULL))
|
||||
EXECUTE PROCEDURE raise_deletion_protection();
|
||||
@@ -1,10 +0,0 @@
|
||||
--- Add deletion protection
|
||||
CREATE OR REPLACE TRIGGER deletion_protection__tgr
|
||||
BEFORE DELETE ON team FOR EACH STATEMENT
|
||||
WHEN ((current_setting('rules.deletion_protection', true) IN ('on', '')) OR
|
||||
(current_setting('rules.deletion_protection', true) IS NULL))
|
||||
EXECUTE PROCEDURE raise_deletion_protection();
|
||||
|
||||
ALTER TABLE team
|
||||
DROP CONSTRAINT team_photo_id_fkey,
|
||||
ADD FOREIGN KEY (photo_id) REFERENCES storage_object(id) DEFERRABLE;
|
||||
@@ -1,3 +0,0 @@
|
||||
ALTER TABLE project
|
||||
DROP CONSTRAINT project_team_id_fkey,
|
||||
ADD FOREIGN KEY (team_id) REFERENCES team(id) DEFERRABLE;
|
||||
@@ -1,3 +0,0 @@
|
||||
ALTER TABLE file
|
||||
DROP CONSTRAINT file_project_id_fkey,
|
||||
ADD FOREIGN KEY (project_id) REFERENCES project(id) DEFERRABLE;
|
||||
@@ -1,12 +0,0 @@
|
||||
ALTER TABLE file_object_thumbnail
|
||||
DROP CONSTRAINT file_object_thumbnail_file_id_fkey,
|
||||
ADD FOREIGN KEY (file_id) REFERENCES file(id) DEFERRABLE,
|
||||
DROP CONSTRAINT file_object_thumbnail_media_id_fkey,
|
||||
ADD FOREIGN KEY (media_id) REFERENCES storage_object(id) DEFERRABLE;
|
||||
|
||||
--- Mark all related storage_object row as touched
|
||||
-- UPDATE storage_object SET touched_at = now()
|
||||
-- WHERE id IN (SELECT DISTINCT media_id
|
||||
-- FROM file_object_thumbnail
|
||||
-- WHERE media_id IS NOT NULL)
|
||||
-- AND touched_at IS NULL;
|
||||
@@ -1,12 +0,0 @@
|
||||
-- Removes the partitioning.
|
||||
CREATE TABLE new_task (LIKE task INCLUDING ALL);
|
||||
INSERT INTO new_task SELECT * FROM task;
|
||||
ALTER TABLE task RENAME TO old_task;
|
||||
ALTER TABLE new_task RENAME TO task;
|
||||
DROP TABLE old_task;
|
||||
ALTER INDEX new_task_label_name_queue_idx RENAME TO task__label_name_queue__idx;
|
||||
ALTER INDEX new_task_scheduled_at_queue_idx RENAME TO task__scheduled_at_queue__idx;
|
||||
ALTER TABLE task DROP CONSTRAINT new_task_pkey;
|
||||
ALTER TABLE task ADD PRIMARY KEY (id);
|
||||
ALTER TABLE task ALTER COLUMN created_at SET DEFAULT now();
|
||||
ALTER TABLE task ALTER COLUMN modified_at SET DEFAULT now();
|
||||
@@ -1,2 +0,0 @@
|
||||
ALTER TABLE file
|
||||
ADD COLUMN version integer NULL;
|
||||
@@ -1,11 +0,0 @@
|
||||
CREATE TABLE new_audit_log (LIKE audit_log INCLUDING ALL);
|
||||
INSERT INTO new_audit_log SELECT * FROM audit_log;
|
||||
ALTER TABLE audit_log RENAME TO old_audit_log;
|
||||
ALTER TABLE new_audit_log RENAME TO audit_log;
|
||||
DROP TABLE old_audit_log;
|
||||
|
||||
DROP INDEX new_audit_log_id_archived_at_idx;
|
||||
ALTER TABLE audit_log DROP CONSTRAINT new_audit_log_pkey;
|
||||
ALTER TABLE audit_log ADD PRIMARY KEY (id);
|
||||
ALTER TABLE audit_log ALTER COLUMN created_at SET DEFAULT now();
|
||||
ALTER TABLE audit_log ALTER COLUMN tracked_at SET DEFAULT now();
|
||||
@@ -1,103 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.migrations.v2
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.db :as db]
|
||||
[app.features.components-v2 :as feat]
|
||||
[app.setup :as setup]
|
||||
[app.util.time :as dt]))
|
||||
|
||||
(def ^:private sql:get-teams
|
||||
"SELECT id, features,
|
||||
row_number() OVER (ORDER BY created_at DESC) AS rown
|
||||
FROM team
|
||||
WHERE deleted_at IS NULL
|
||||
AND (not (features @> '{components/v2}') OR features IS NULL)
|
||||
ORDER BY created_at DESC")
|
||||
|
||||
(defn- get-teams
|
||||
[conn]
|
||||
(->> (db/cursor conn [sql:get-teams] {:chunk-size 1})
|
||||
(map feat/decode-row)))
|
||||
|
||||
(defn- migrate-teams
|
||||
[{:keys [::db/conn] :as system}]
|
||||
;; Allow long running transaction for this connection
|
||||
(db/exec-one! conn ["SET LOCAL idle_in_transaction_session_timeout = 0"])
|
||||
|
||||
;; Do not allow other migration running in the same time
|
||||
(db/xact-lock! conn 0)
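;; Illustrative note (assumption, not stated in this patch): xact-lock! is
;; expected to take a transaction-scoped Postgres advisory lock, roughly
;; "SELECT pg_advisory_xact_lock(0)", so a concurrent migration run blocks
;; here until the first transaction commits or rolls back.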
|
||||
|
||||
;; Run teams migration
|
||||
(run! (fn [{:keys [id rown]}]
|
||||
(try
|
||||
(-> (assoc system ::db/rollback false)
|
||||
(feat/migrate-team! id
|
||||
:rown rown
|
||||
:label "v2-migration"
|
||||
:validate? false
|
||||
:skip-on-graphics-error? true))
|
||||
(catch Throwable _
|
||||
(swap! feat/*stats* update :errors (fnil inc 0))
|
||||
(l/wrn :hint "error on migrating team (skiping)"))))
|
||||
(get-teams conn))
|
||||
|
||||
(setup/set-prop! system :v2-migrated true))
|
||||
|
||||
(defn migrate
|
||||
[system]
|
||||
(let [tpoint (dt/tpoint)
|
||||
stats (atom {})
|
||||
migrated? (setup/get-prop system :v2-migrated false)]
|
||||
|
||||
(when-not migrated?
|
||||
(l/inf :hint "v2 migration started")
|
||||
(try
|
||||
(binding [feat/*stats* stats]
|
||||
(db/tx-run! system migrate-teams))
|
||||
|
||||
(let [stats (deref stats)
|
||||
elapsed (dt/format-duration (tpoint))]
|
||||
(l/inf :hint "v2 migration finished"
|
||||
:files (:processed-files stats)
|
||||
:teams (:processed-teams stats)
|
||||
:errors (:errors stats)
|
||||
:elapsed elapsed))
|
||||
|
||||
(catch Throwable cause
|
||||
(l/err :hint "error on aplying v2 migration" :cause cause))))))
|
||||
|
||||
(def ^:private required-services
|
||||
[[:app.main/assets :app.storage.s3/backend]
|
||||
[:app.main/assets :app.storage.fs/backend]
|
||||
:app.storage/storage
|
||||
:app.db/pool
|
||||
:app.setup/props
|
||||
:app.svgo/optimizer
|
||||
:app.metrics/metrics
|
||||
:app.migrations/migrations
|
||||
:app.http.client/client])
|
||||
|
||||
(defn -main
|
||||
[& _args]
|
||||
(try
|
||||
(let [config-var (requiring-resolve 'app.main/system-config)
|
||||
start-var (requiring-resolve 'app.main/start-custom)
|
||||
stop-var (requiring-resolve 'app.main/stop)
|
||||
system-var (requiring-resolve 'app.main/system)
|
||||
config (select-keys @config-var required-services)]
|
||||
|
||||
(start-var config)
|
||||
(migrate @system-var)
|
||||
(stop-var)
|
||||
(System/exit 0))
|
||||
(catch Throwable cause
|
||||
(ex/print-throwable cause)
|
||||
(flush)
|
||||
(System/exit -1))))
|
||||
@@ -91,7 +91,7 @@
|
||||
(s/def ::connect? ::us/boolean)
|
||||
(s/def ::io-threads ::us/integer)
|
||||
(s/def ::worker-threads ::us/integer)
|
||||
(s/def ::cache cache/cache?)
|
||||
(s/def ::cache some?)
|
||||
|
||||
(s/def ::redis
|
||||
(s/keys :req [::resources
|
||||
@@ -168,7 +168,7 @@
|
||||
|
||||
(defn- shutdown-resources
|
||||
[{:keys [::resources ::cache ::timer]}]
|
||||
(cache/invalidate! cache)
|
||||
(cache/invalidate-all! cache)
|
||||
|
||||
(when resources
|
||||
(.shutdown ^ClientResources resources))
|
||||
@@ -211,8 +211,7 @@
|
||||
(defn get-or-connect
|
||||
[{:keys [::cache] :as state} key options]
|
||||
(us/assert! ::redis state)
|
||||
(let [create (fn [_] (connect* state options))
|
||||
connection (cache/get cache key create)]
|
||||
(let [connection (cache/get cache key (fn [_] (connect* state options)))]
|
||||
(-> state
|
||||
(dissoc ::cache)
|
||||
(assoc ::connection connection))))
|
||||
|
||||
@@ -27,16 +27,15 @@
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.rpc.retry :as retry]
|
||||
[app.rpc.rlimit :as rlimit]
|
||||
[app.setup :as-alias setup]
|
||||
[app.storage :as-alias sto]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[ring.request :as rreq]
|
||||
[ring.response :as rres]))
|
||||
[yetti.request :as yrq]
|
||||
[yetti.response :as yrs]))
|
||||
|
||||
(s/def ::profile-id ::us/uuid)
|
||||
|
||||
@@ -59,45 +58,36 @@
|
||||
|
||||
(defn- handle-response
|
||||
[request result]
|
||||
(let [mdata (meta result)
|
||||
response (if (fn? result)
|
||||
(result request)
|
||||
(let [result (rph/unwrap result)]
|
||||
{::rres/status (::http/status mdata 200)
|
||||
::rres/headers (::http/headers mdata {})
|
||||
::rres/body result}))]
|
||||
(-> response
|
||||
(handle-response-transformation request mdata)
|
||||
(handle-before-comple-hook mdata))))
|
||||
(if (fn? result)
|
||||
(result request)
|
||||
(let [mdata (meta result)]
|
||||
(-> {::yrs/status (::http/status mdata 200)
|
||||
::yrs/headers (::http/headers mdata {})
|
||||
::yrs/body (rph/unwrap result)}
|
||||
(handle-response-transformation request mdata)
|
||||
(handle-before-comple-hook mdata)))))
|
||||
|
||||
(defn- rpc-handler
|
||||
"Ring handler that dispatches cmd requests and convert between
|
||||
internal async flow into ring async flow."
|
||||
[methods {:keys [params path-params method] :as request}]
|
||||
(let [handler-name (:type path-params)
|
||||
etag (rreq/get-header request "if-none-match")
|
||||
profile-id (or (::session/profile-id request)
|
||||
(::actoken/profile-id request))
|
||||
[methods {:keys [params path-params] :as request}]
|
||||
(let [type (keyword (:type path-params))
|
||||
etag (yrq/get-header request "if-none-match")
|
||||
profile-id (or (::session/profile-id request)
|
||||
(::actoken/profile-id request))
|
||||
|
||||
data (-> params
|
||||
(assoc ::request-at (dt/now))
|
||||
(assoc ::session/id (::session/id request))
|
||||
(assoc ::cond/key etag)
|
||||
(cond-> (uuid? profile-id)
|
||||
(assoc ::profile-id profile-id)))
|
||||
data (-> params
|
||||
(assoc ::request-at (dt/now))
|
||||
(assoc ::session/id (::session/id request))
|
||||
(assoc ::cond/key etag)
|
||||
(cond-> (uuid? profile-id)
|
||||
(assoc ::profile-id profile-id)))
|
||||
|
||||
data (vary-meta data assoc ::http/request request)
|
||||
handler-fn (get methods (keyword handler-name) default-handler)]
|
||||
|
||||
(when (and (or (= method :get)
|
||||
(= method :head))
|
||||
(not (str/starts-with? handler-name "get-")))
|
||||
(ex/raise :type :restriction
|
||||
:code :method-not-allowed
|
||||
:hint "method not allowed for this request"))
|
||||
data (vary-meta data assoc ::http/request request)
|
||||
method (get methods type default-handler)]
|
||||
|
||||
(binding [cond/*enabled* true]
|
||||
(let [response (handler-fn data)]
|
||||
(let [response (method data)]
|
||||
(handle-response request response)))))
|
||||
|
||||
(defn- wrap-metrics
|
||||
@@ -151,33 +141,31 @@
|
||||
(defn- wrap-params-validation
|
||||
[_ f mdata]
|
||||
(if-let [schema (::sm/params mdata)]
|
||||
(let [validate (sm/validator schema)
|
||||
explain (sm/explainer schema)
|
||||
decode (sm/decoder schema)]
|
||||
(let [schema (sm/schema schema)
|
||||
valid? (sm/validator schema)
|
||||
explain (sm/explainer schema)
|
||||
decode (sm/decoder schema sm/default-transformer)]
|
||||
|
||||
(fn [cfg params]
|
||||
(let [params (decode params)]
|
||||
(if (validate params)
|
||||
(if (valid? params)
|
||||
(f cfg params)
|
||||
|
||||
(let [params (d/without-qualified params)]
|
||||
(ex/raise :type :validation
|
||||
:code :params-validation
|
||||
::sm/explain (explain params)))))))
|
||||
(ex/raise :type :validation
|
||||
:code :params-validation
|
||||
::sm/explain (explain params))))))
|
||||
f))
|
||||
|
||||
(defn- wrap-output-validation
|
||||
[_ f mdata]
|
||||
(if (contains? cf/flags :rpc-output-validation)
|
||||
(or (when-let [schema (::sm/result mdata)]
|
||||
(let [schema (if (sm/lazy-schema? schema)
|
||||
schema
|
||||
(sm/define schema))
|
||||
validate (sm/validator schema)
|
||||
explain (sm/explainer schema)]
|
||||
(let [schema (sm/schema schema)
|
||||
valid? (sm/validator schema)
|
||||
explain (sm/explainer schema)]
|
||||
(fn [cfg params]
|
||||
(let [response (f cfg params)]
|
||||
(when (map? response)
|
||||
(when-not (validate response)
|
||||
(when-not (valid? response)
|
||||
(ex/raise :type :validation
|
||||
:code :data-validation
|
||||
::sm/explain (explain response))))
|
||||
@@ -201,7 +189,7 @@
|
||||
|
||||
(defn- wrap
|
||||
[cfg f mdata]
|
||||
(l/trc :hint "register method" :name (::sv/name mdata))
|
||||
(l/debug :hint "register method" :name (::sv/name mdata))
|
||||
(let [f (wrap-all cfg f mdata)]
|
||||
(partial f cfg)))
|
||||
|
||||
@@ -249,9 +237,11 @@
|
||||
::ldap/provider
|
||||
::sto/storage
|
||||
::mtx/metrics
|
||||
::setup/props]
|
||||
::main/props
|
||||
::wrk/executor]
|
||||
:opt [::climit
|
||||
::rlimit]))
|
||||
::rlimit]
|
||||
:req-un [::db/pool]))
|
||||
|
||||
(defmethod ig/init-key ::methods
|
||||
[_ cfg]
|
||||
@@ -266,7 +256,8 @@
|
||||
(defmethod ig/pre-init-spec ::routes [_]
|
||||
(s/keys :req [::methods
|
||||
::db/pool
|
||||
::setup/props
|
||||
::main/props
|
||||
::wrk/executor
|
||||
::session/manager]))
|
||||
|
||||
(defmethod ig/init-key ::routes
|
||||
|
||||
@@ -20,35 +20,35 @@
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.edn :as edn]
|
||||
[clojure.set :as set]
|
||||
[clojure.spec.alpha :as s]
|
||||
[datoteka.fs :as fs]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]
|
||||
[promesa.exec.bulkhead :as pbh])
|
||||
(:import
|
||||
clojure.lang.ExceptionInfo
|
||||
java.util.concurrent.atomic.AtomicLong))
|
||||
clojure.lang.ExceptionInfo))
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
(defn- id->str
  ([id]
   (-> (str id)
       (subs 1)))
  ([id key]
   (if key
     (str (-> (str id) (subs 1)) "/" key)
(id->str id))))
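;; Illustrative examples (derived from id->str above, not in the patch):
;;   (id->str :root/by-profile)        => "root/by-profile"
;;   (id->str :root/by-profile "1234") => "root/by-profile/1234"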
|
||||
(defn- create-bulkhead-cache
|
||||
[{:keys [::wrk/executor]} config]
|
||||
(letfn [(load-fn [key]
|
||||
(let [config (get config (nth key 0))]
|
||||
(l/trace :hint "insert into cache" :key key)
|
||||
(pbh/create :permits (or (:permits config) (:concurrency config))
|
||||
:queue (or (:queue config) (:queue-size config))
|
||||
:timeout (:timeout config)
|
||||
:executor executor
|
||||
:type (:type config :semaphore))))
|
||||
|
||||
(defn- create-cache
|
||||
[{:keys [::wrk/executor]}]
|
||||
(letfn [(on-remove [key _ cause]
|
||||
(let [[id skey] key]
|
||||
(l/trc :hint "disposed" :id (id->str id skey) :reason (str cause))))]
|
||||
(cache/create :executor executor
|
||||
(on-remove [_ _ cause]
|
||||
(l/trace :hint "evict from cache" :key key :reason (str cause)))]
|
||||
|
||||
(cache/create :executor :same-thread
|
||||
:on-remove on-remove
|
||||
:keepalive "5m")))
|
||||
:keepalive "5m"
|
||||
:load-fn load-fn)))
|
||||
|
||||
(s/def ::config/permits ::us/integer)
|
||||
(s/def ::config/queue ::us/integer)
|
||||
@@ -65,210 +65,160 @@
|
||||
|
||||
(s/def ::path ::fs/path)
|
||||
(defmethod ig/pre-init-spec ::rpc/climit [_]
|
||||
(s/keys :req [::mtx/metrics ::wrk/executor ::path]))
|
||||
(s/keys :req [::wrk/executor ::mtx/metrics ::path]))
|
||||
|
||||
(defmethod ig/init-key ::rpc/climit
|
||||
[_ {:keys [::path ::mtx/metrics] :as cfg}]
|
||||
[_ {:keys [::path ::mtx/metrics ::wrk/executor] :as cfg}]
|
||||
(when (contains? cf/flags :rpc-climit)
|
||||
(when-let [params (some->> path slurp edn/read-string)]
|
||||
(l/inf :hint "initializing concurrency limit" :config (str path))
|
||||
(l/info :hint "initializing concurrency limit" :config (str path))
|
||||
(us/verify! ::config params)
|
||||
{::cache (create-cache cfg)
|
||||
{::cache (create-bulkhead-cache cfg params)
|
||||
::config params
|
||||
::wrk/executor executor
|
||||
::mtx/metrics metrics})))
|
||||
|
||||
(s/def ::cache cache/cache?)
|
||||
(s/def ::instance
|
||||
(s/keys :req [::cache ::config]))
|
||||
(s/keys :req [::cache ::config ::wrk/executor]))
|
||||
|
||||
(s/def ::rpc/climit
|
||||
(s/nilable ::instance))
|
||||
|
||||
(defn- create-limiter
|
||||
[config [id skey]]
|
||||
(l/trc :hint "created" :id (id->str id skey))
|
||||
(pbh/create :permits (or (:permits config) (:concurrency config))
|
||||
:queue (or (:queue config) (:queue-size config))
|
||||
:timeout (:timeout config)
|
||||
:type :semaphore))
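;; Hypothetical config entry (values are illustrative, not from the patch) as
;; read from the EDN file at ::path; create-limiter accepts either the new
;; :permits/:queue keys or the legacy :concurrency/:queue-size ones:
;;   {:root/global     {:permits 30 :queue 100}
;;    :root/by-profile {:permits 4  :queue 16 :timeout 30000}}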
|
||||
|
||||
|
||||
(defn measure!
|
||||
[metrics mlabels stats elapsed]
|
||||
(let [mpermits (:max-permits stats)
|
||||
permits (:permits stats)
|
||||
queue (:queue stats)
|
||||
queue (- queue mpermits)
|
||||
queue (if (neg? queue) 0 queue)]
|
||||
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-queue
|
||||
:val queue
|
||||
:labels mlabels)
|
||||
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-permits
|
||||
:val permits
|
||||
:labels mlabels)
|
||||
|
||||
(when elapsed
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-timing
|
||||
:val (inst-ms elapsed)
|
||||
:labels mlabels))))
|
||||
|
||||
(defn log!
|
||||
[action req-id stats limit-id limit-label params elapsed]
|
||||
(let [mpermits (:max-permits stats)
|
||||
queue (:queue stats)
|
||||
queue (- queue mpermits)
|
||||
queue (if (neg? queue) 0 queue)
|
||||
level (if (pos? queue) :warn :trace)]
|
||||
|
||||
(l/log level
|
||||
:hint action
|
||||
:req req-id
|
||||
:id limit-id
|
||||
:label limit-label
|
||||
:queue queue
|
||||
:elapsed (some-> elapsed dt/format-duration)
|
||||
:params (-> (select-keys params [::rpc/profile-id :file-id :profile-id])
|
||||
(set/rename-keys {::rpc/profile-id :profile-id})
|
||||
(update-vals str)))))
|
||||
|
||||
(def ^:private idseq (AtomicLong. 0))
|
||||
|
||||
(defn- invoke
|
||||
[limiter metrics limit-id limit-key limit-label handler params]
|
||||
(let [tpoint (dt/tpoint)
|
||||
mlabels (into-array String [(id->str limit-id)])
|
||||
limit-id (id->str limit-id limit-key)
|
||||
stats (pbh/get-stats limiter)
|
||||
req-id (.incrementAndGet ^AtomicLong idseq)]
|
||||
|
||||
(try
|
||||
(measure! metrics mlabels stats nil)
|
||||
(log! "enqueued" req-id stats limit-id limit-label params nil)
|
||||
(px/invoke! limiter (fn []
|
||||
(let [elapsed (tpoint)
|
||||
stats (pbh/get-stats limiter)]
|
||||
|
||||
(measure! metrics mlabels stats elapsed)
|
||||
(log! "acquired" req-id stats limit-id limit-label params elapsed)
|
||||
|
||||
(handler params))))
|
||||
|
||||
(catch ExceptionInfo cause
|
||||
(let [{:keys [type code]} (ex-data cause)]
|
||||
(if (= :bulkhead-error type)
|
||||
(let [elapsed (tpoint)]
|
||||
(log! "rejected" req-id stats limit-id limit-label params elapsed)
|
||||
(ex/raise :type :concurrency-limit
|
||||
:code code
|
||||
:hint "concurrency limit reached"
|
||||
:cause cause))
|
||||
(throw cause))))
|
||||
|
||||
(finally
|
||||
(let [elapsed (tpoint)
|
||||
stats (pbh/get-stats limiter)]
|
||||
|
||||
(measure! metrics mlabels stats nil)
|
||||
(log! "finished" req-id stats limit-id limit-label params elapsed))))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; MIDDLEWARE
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def ^:private noop-fn (constantly nil))
|
||||
(def ^:private global-limits
|
||||
[[:root/global noop-fn]
|
||||
[:root/by-profile ::rpc/profile-id]])
|
||||
|
||||
(defn- get-limits
|
||||
[cfg]
|
||||
(when-let [ref (get cfg ::id)]
|
||||
(cond
|
||||
(keyword? ref)
|
||||
[[ref]]
|
||||
|
||||
(and (vector? ref)
|
||||
(keyword? (first ref)))
|
||||
[ref]
|
||||
|
||||
(and (vector? ref)
|
||||
(vector? (first ref)))
|
||||
(rseq ref)
|
||||
|
||||
:else
|
||||
(throw (IllegalArgumentException. "unable to normalize limit")))))
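;; Illustrative normalization (derived from get-limits above, not in the patch):
;;   ::id :process-font            => [[:process-font]]
;;   ::id [:process-font :file-id] => [[:process-font :file-id]]
;;   ::id [[:a] [:b]]              => ([:b] [:a])   ; rseq reverses the order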
|
||||
|
||||
(defn wrap
|
||||
[{:keys [::rpc/climit ::mtx/metrics]} handler mdata]
|
||||
(let [cache (::cache climit)
|
||||
config (::config climit)
|
||||
label (::sv/name mdata)]
|
||||
|
||||
(if climit
|
||||
(reduce (fn [handler [limit-id key-fn]]
|
||||
(if-let [config (get config limit-id)]
|
||||
(let [key-fn (or key-fn noop-fn)]
|
||||
(l/trc :hint "instrumenting method"
|
||||
:method label
|
||||
:limit (id->str limit-id)
|
||||
:timeout (:timeout config)
|
||||
:permits (:permits config)
|
||||
:queue (:queue config)
|
||||
:keyed (not= key-fn noop-fn))
|
||||
|
||||
(if (and (= key-fn ::rpc/profile-id)
|
||||
(false? (::rpc/auth mdata true)))
|
||||
|
||||
;; We don't enforce by-profile limit on methods that does
|
||||
;; not require authentication
|
||||
handler
|
||||
|
||||
(fn [cfg params]
|
||||
(let [limit-key (key-fn params)
|
||||
cache-key [limit-id limit-key]
|
||||
limiter (cache/get cache cache-key (partial create-limiter config))
|
||||
handler (partial handler cfg)]
|
||||
(invoke limiter metrics limit-id limit-key label handler params)))))
|
||||
|
||||
(do
|
||||
(l/wrn :hint "no config found for specified queue" :id (id->str limit-id))
|
||||
handler)))
|
||||
|
||||
handler
|
||||
(concat global-limits (get-limits mdata)))
|
||||
handler)))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; PUBLIC API
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn- build-exec-chain
|
||||
[{:keys [::label ::rpc/climit ::mtx/metrics] :as cfg} f]
|
||||
(let [config (get climit ::config)
|
||||
cache (get climit ::cache)]
|
||||
(reduce (fn [handler [limit-id limit-key :as ckey]]
|
||||
(if-let [config (get config limit-id)]
|
||||
(fn [cfg params]
|
||||
(let [limiter (cache/get cache ckey (partial create-limiter config))
|
||||
handler (partial handler cfg)]
|
||||
(invoke limiter metrics limit-id limit-key label handler params)))
|
||||
(do
|
||||
(l/wrn :hint "config not found" :label label :id limit-id)
|
||||
f)))
|
||||
f
|
||||
(get-limits cfg))))
|
||||
|
||||
(defn invoke!
|
||||
"Run a function in context of climit.
|
||||
Intended to be used in virtual threads."
|
||||
[{:keys [::executor] :as cfg} f params]
|
||||
(let [f (if (some? executor)
|
||||
(fn [cfg params] (px/await! (px/submit! executor (fn [] (f cfg params)))))
|
||||
f)
|
||||
f (build-exec-chain cfg f)]
|
||||
(f cfg params)))
|
||||
[cache metrics id key f]
|
||||
(let [limiter (cache/get cache [id key])
|
||||
tpoint (dt/tpoint)
|
||||
labels (into-array String [(name id)])
|
||||
|
||||
wrapped
|
||||
(fn []
|
||||
(let [elapsed (tpoint)
|
||||
stats (pbh/get-stats limiter)]
|
||||
(l/trace :hint "executed"
|
||||
:id (name id)
|
||||
:key key
|
||||
:fnh (hash f)
|
||||
:permits (:permits stats)
|
||||
:queue (:queue stats)
|
||||
:max-permits (:max-permits stats)
|
||||
:max-queue (:max-queue stats)
|
||||
:elapsed (dt/format-duration elapsed))
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-timing
|
||||
:val (inst-ms elapsed)
|
||||
:labels labels)
|
||||
(try
|
||||
(f)
|
||||
(finally
|
||||
(let [elapsed (tpoint)]
|
||||
(l/trace :hint "finished"
|
||||
:id (name id)
|
||||
:key key
|
||||
:fnh (hash f)
|
||||
:permits (:permits stats)
|
||||
:queue (:queue stats)
|
||||
:max-permits (:max-permits stats)
|
||||
:max-queue (:max-queue stats)
|
||||
:elapsed (dt/format-duration elapsed)))))))
|
||||
measure!
|
||||
(fn [stats]
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-queue
|
||||
:val (:queue stats)
|
||||
:labels labels)
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-permits
|
||||
:val (:permits stats)
|
||||
:labels labels))]
|
||||
|
||||
(try
|
||||
(let [stats (pbh/get-stats limiter)]
|
||||
(measure! stats)
|
||||
(l/trace :hint "enqueued"
|
||||
:id (name id)
|
||||
:key key
|
||||
:fnh (hash f)
|
||||
:permits (:permits stats)
|
||||
:queue (:queue stats)
|
||||
:max-permits (:max-permits stats)
|
||||
:max-queue (:max-queue stats))
|
||||
(pbh/invoke! limiter wrapped))
|
||||
(catch ExceptionInfo cause
|
||||
(let [{:keys [type code]} (ex-data cause)]
|
||||
(if (= :bulkhead-error type)
|
||||
(ex/raise :type :concurrency-limit
|
||||
:code code
|
||||
:hint "concurrency limit reached")
|
||||
(throw cause))))
|
||||
|
||||
(finally
|
||||
(measure! (pbh/get-stats limiter))))))
|
||||
|
||||
|
||||
(defn run!
|
||||
[{:keys [::id ::cache ::mtx/metrics]} f]
|
||||
(if (and cache id)
|
||||
(invoke! cache metrics id nil f)
|
||||
(f)))
|
||||
|
||||
(defn submit!
|
||||
[{:keys [::id ::cache ::wrk/executor ::mtx/metrics]} f]
|
||||
(let [f (partial px/submit! executor (px/wrap-bindings f))]
|
||||
(if (and cache id)
|
||||
(p/await! (invoke! cache metrics id nil f))
|
||||
(p/await! (f)))))
|
||||
|
||||
(defn configure
|
||||
([{:keys [::rpc/climit]} id]
|
||||
(us/assert! ::rpc/climit climit)
|
||||
(assoc climit ::id id))
|
||||
([{:keys [::rpc/climit]} id executor]
|
||||
(us/assert! ::rpc/climit climit)
|
||||
(-> climit
|
||||
(assoc ::id id)
|
||||
(assoc ::wrk/executor executor))))
|
||||
|
||||
(defmacro with-dispatch!
|
||||
"Dispatch blocking operation to a separated thread protected with the
|
||||
specified concurrency limiter. If climit is not active, the function
|
||||
will be scheduled to execute without concurrency monitoring."
|
||||
[instance & body]
|
||||
(if (vector? instance)
|
||||
`(-> (app.rpc.climit/configure ~@instance)
|
||||
(app.rpc.climit/run! (^:once fn* [] ~@body)))
|
||||
`(run! ~instance (^:once fn* [] ~@body))))
|
||||
|
||||
(defmacro with-dispatch
|
||||
"Dispatch blocking operation to a separated thread protected with
|
||||
the specified semaphore.
|
||||
DEPRECATED"
|
||||
[& params]
|
||||
`(with-dispatch! ~@params))
|
||||
|
||||
(def noop-fn (constantly nil))
|
||||
|
||||
(defn wrap
|
||||
[{:keys [::rpc/climit ::mtx/metrics]} f {:keys [::id ::key-fn] :or {key-fn noop-fn} :as mdata}]
|
||||
(if (and (some? climit) (some? id))
|
||||
(if-let [config (get-in climit [::config id])]
|
||||
(let [cache (::cache climit)]
|
||||
(l/debug :hint "wrap: instrumenting method"
|
||||
:limit (name id)
|
||||
:service-name (::sv/name mdata)
|
||||
:timeout (:timeout config)
|
||||
:permits (:permits config)
|
||||
:queue (:queue config)
|
||||
:keyed? (some? key-fn))
|
||||
(fn [cfg params]
|
||||
(invoke! cache metrics id (key-fn params) (partial f cfg params))))
|
||||
|
||||
(do
|
||||
(l/warn :hint "no config found for specified queue" :id id)
|
||||
f))
|
||||
|
||||
f))
|
||||
|
||||
@@ -13,7 +13,6 @@
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.quotes :as quotes]
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
@@ -24,7 +23,7 @@
|
||||
(dissoc row :perms))
|
||||
|
||||
(defn create-access-token
|
||||
[{:keys [::db/conn ::setup/props]} profile-id name expiration]
|
||||
[{:keys [::db/conn ::main/props]} profile-id name expiration]
|
||||
(let [created-at (dt/now)
|
||||
token-id (uuid/next)
|
||||
token (tokens/generate props {:iss "access-token"
|
||||
@@ -48,7 +47,7 @@
|
||||
[{:keys [::db/pool] :as system} profile-id name expiration]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [props (:app.setup/props system)]
|
||||
(create-access-token {::db/conn conn ::setup/props props}
|
||||
(create-access-token {::db/conn conn ::main/props props}
|
||||
profile-id
|
||||
name
|
||||
expiration))))
|
||||
|
||||
@@ -19,20 +19,7 @@
|
||||
[app.rpc.climit :as-alias climit]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]))
|
||||
|
||||
(def ^:private event-columns
|
||||
[:id
|
||||
:name
|
||||
:source
|
||||
:type
|
||||
:tracked-at
|
||||
:created-at
|
||||
:profile-id
|
||||
:ip-addr
|
||||
:props
|
||||
:context])
|
||||
[app.util.services :as sv]))
|
||||
|
||||
(defn- event->row [event]
|
||||
[(uuid/next)
|
||||
@@ -40,42 +27,28 @@
|
||||
(:source event)
|
||||
(:type event)
|
||||
(:timestamp event)
|
||||
(:created-at event)
|
||||
(:profile-id event)
|
||||
(db/inet (:ip-addr event))
|
||||
(db/tjson (:props event))
|
||||
(db/tjson (d/without-nils (:context event)))])
|
||||
|
||||
(defn- adjust-timestamp
  [{:keys [timestamp created-at] :as event}]
  (let [margin (inst-ms (dt/diff timestamp created-at))]
    (if (or (neg? margin)
            (> margin 3600000))
      ;; If the event is in the future or lags more than 1 hour, we reassign the
      ;; timestamp to the server creation date
      (-> event
          (assoc :timestamp created-at)
          (update :context assoc :original-timestamp timestamp))
event)))
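;; Illustrative behaviour (derived from adjust-timestamp above, not in the
;; patch): an event whose client :timestamp is 2 hours ahead of :created-at
;; exceeds the 3600000 ms margin, so it is stored with :timestamp set to
;; :created-at and the client value kept under [:context :original-timestamp].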
|
||||
(def ^:private event-columns
|
||||
[:id :name :source :type :tracked-at
|
||||
:profile-id :ip-addr :props :context])
|
||||
|
||||
(defn- handle-events
|
||||
[{:keys [::db/pool]} {:keys [::rpc/profile-id events] :as params}]
|
||||
(let [request (-> params meta ::http/request)
|
||||
ip-addr (audit/parse-client-ip request)
|
||||
tnow (dt/now)
|
||||
xform (comp
|
||||
(map (fn [event]
|
||||
(-> event
|
||||
(assoc :created-at tnow)
|
||||
(assoc :profile-id profile-id)
|
||||
(assoc :ip-addr ip-addr)
|
||||
(assoc :source "frontend"))))
|
||||
(map #(assoc % :profile-id profile-id))
|
||||
(map #(assoc % :ip-addr ip-addr))
|
||||
(map #(assoc % :source "frontend"))
|
||||
(filter :profile-id)
|
||||
(map adjust-timestamp)
|
||||
(map event->row))
|
||||
events (sequence xform events)]
|
||||
(when (seq events)
|
||||
(db/insert-many! pool :audit-log event-columns events))))
|
||||
(db/insert-multi! pool :audit-log event-columns events))))
|
||||
|
||||
(def schema:event
|
||||
[:map {:title "Event"}
|
||||
@@ -91,7 +64,7 @@
|
||||
[:events [:vector schema:event]]])
|
||||
|
||||
(sv/defmethod ::push-audit-events
|
||||
{::climit/id :submit-audit-events/by-profile
|
||||
{::climit/id :submit-audit-events-by-profile
|
||||
::climit/key-fn ::rpc/profile-id
|
||||
::sm/params schema:push-audit-events
|
||||
::audit/skip true
|
||||
|
||||
@@ -10,7 +10,6 @@
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.uuid :as uuid]
|
||||
@@ -21,12 +20,10 @@
|
||||
[app.loggers.audit :as audit]
|
||||
[app.main :as-alias main]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.climit :as-alias climit]
|
||||
[app.rpc.commands.profile :as profile]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
@@ -41,7 +38,7 @@
|
||||
;; ---- COMMAND: login with password
|
||||
|
||||
(defn login-with-password
|
||||
[cfg {:keys [email password] :as params}]
|
||||
[{:keys [::db/pool] :as cfg} {:keys [email password] :as params}]
|
||||
|
||||
(when-not (or (contains? cf/flags :login)
|
||||
(contains? cf/flags :login-with-password))
|
||||
@@ -49,20 +46,18 @@
|
||||
:code :login-disabled
|
||||
:hint "login is disabled in this instance"))
|
||||
|
||||
(letfn [(check-password [cfg profile password]
|
||||
(letfn [(check-password [conn profile password]
|
||||
(if (= (:password profile) "!")
|
||||
(ex/raise :type :validation
|
||||
:code :account-without-password
|
||||
:hint "the current account does not have password")
|
||||
(let [result (profile/verify-password cfg password (:password profile))]
|
||||
(when (:update result)
|
||||
(l/trc :hint "updating profile password"
|
||||
:id (str (:id profile))
|
||||
:email (:email profile))
|
||||
(profile/update-profile-password! cfg (assoc profile :password password)))
|
||||
(l/trace :hint "updating profile password" :id (:id profile) :email (:email profile))
|
||||
(profile/update-profile-password! conn (assoc profile :password password)))
|
||||
(:valid result))))
|
||||
|
||||
(validate-profile [cfg profile]
|
||||
(validate-profile [conn profile]
|
||||
(when-not profile
|
||||
(ex/raise :type :validation
|
||||
:code :wrong-credentials))
|
||||
@@ -72,7 +67,7 @@
|
||||
(when (:is-blocked profile)
|
||||
(ex/raise :type :restriction
|
||||
:code :profile-blocked))
|
||||
(when-not (check-password cfg profile password)
|
||||
(when-not (check-password conn profile password)
|
||||
(ex/raise :type :validation
|
||||
:code :wrong-credentials))
|
||||
(when-let [deleted-at (:deleted-at profile)]
|
||||
@@ -80,30 +75,27 @@
|
||||
(ex/raise :type :validation
|
||||
:code :wrong-credentials)))
|
||||
|
||||
profile)
|
||||
profile)]
|
||||
|
||||
(login [{:keys [::db/conn] :as cfg}]
|
||||
(let [profile (->> (profile/clean-email email)
|
||||
(profile/get-profile-by-email conn)
|
||||
(validate-profile cfg)
|
||||
(profile/strip-private-attrs))
|
||||
(db/with-atomic [conn pool]
|
||||
(let [profile (->> (profile/get-profile-by-email conn email)
|
||||
(validate-profile conn)
|
||||
(profile/strip-private-attrs))
|
||||
|
||||
invitation (when-let [token (:invitation-token params)]
|
||||
(tokens/verify (::setup/props cfg) {:token token :iss :team-invitation}))
|
||||
invitation (when-let [token (:invitation-token params)]
|
||||
(tokens/verify (::main/props cfg) {:token token :iss :team-invitation}))
|
||||
|
||||
;; If the invitation member-id does not match the profile-id, we just proceed to ignore the
;; invitation because invitations match exactly; a user can't log in with one email and
;; accept an invitation issued for another email
|
||||
response (if (and (some? invitation) (= (:id profile) (:member-id invitation)))
|
||||
{:invitation-token (:invitation-token params)}
|
||||
(assoc profile :is-admin (let [admins (cf/get :admins)]
|
||||
(contains? admins (:email profile)))))]
|
||||
(-> response
|
||||
(rph/with-transform (session/create-fn cfg (:id profile)))
|
||||
(rph/with-meta {::audit/props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)}))))]
|
||||
|
||||
(db/tx-run! cfg login)))
|
||||
;; If invitation member-id does not matches the profile-id, we just proceed to ignore the
|
||||
;; invitation because invitations matches exactly; and user can't login with other email and
|
||||
;; accept invitation with other email
|
||||
response (if (and (some? invitation) (= (:id profile) (:member-id invitation)))
|
||||
{:invitation-token (:invitation-token params)}
|
||||
(assoc profile :is-admin (let [admins (cf/get :admins)]
|
||||
(contains? admins (:email profile)))))]
|
||||
(-> response
|
||||
(rph/with-transform (session/create-fn cfg (:id profile)))
|
||||
(rph/with-meta {::audit/props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)}))))))
|
||||
|
||||
(def schema:login-with-password
|
||||
[:map {:title "login-with-password"}
|
||||
@@ -115,7 +107,6 @@
|
||||
"Performs authentication using penpot password."
|
||||
{::rpc/auth false
|
||||
::doc/added "1.15"
|
||||
::climit/id :auth/global
|
||||
::sm/params schema:login-with-password}
|
||||
[cfg params]
|
||||
(login-with-password cfg params))
|
||||
@@ -134,13 +125,12 @@
|
||||
(defn recover-profile
|
||||
[{:keys [::db/pool] :as cfg} {:keys [token password]}]
|
||||
(letfn [(validate-token [token]
|
||||
(let [tdata (tokens/verify (::setup/props cfg) {:token token :iss :password-recovery})]
|
||||
(let [tdata (tokens/verify (::main/props cfg) {:token token :iss :password-recovery})]
|
||||
(:profile-id tdata)))
|
||||
|
||||
(update-password [conn profile-id]
|
||||
(let [pwd (profile/derive-password cfg password)]
|
||||
(db/update! conn :profile {:password pwd} {:id profile-id})
|
||||
nil))]
|
||||
(db/update! conn :profile {:password pwd} {:id profile-id})))]
|
||||
|
||||
(db/with-atomic [conn pool]
|
||||
(->> (validate-token token)
|
||||
@@ -155,8 +145,7 @@
|
||||
(sv/defmethod ::recover-profile
|
||||
{::rpc/auth false
|
||||
::doc/added "1.15"
|
||||
::sm/params schema:recover-profile
|
||||
::climit/id :auth/global}
|
||||
::sm/params schema:recover-profile}
|
||||
[cfg params]
|
||||
(recover-profile cfg params))
|
||||
|
||||
@@ -171,7 +160,7 @@
|
||||
:code :registration-disabled)))
|
||||
|
||||
(when (contains? params :invitation-token)
|
||||
(let [invitation (tokens/verify (::setup/props cfg) {:token (:invitation-token params) :iss :team-invitation})]
|
||||
(let [invitation (tokens/verify (::main/props cfg) {:token (:invitation-token params) :iss :team-invitation})]
|
||||
(when-not (= (:email params) (:member-email invitation))
|
||||
(ex/raise :type :restriction
|
||||
:code :email-does-not-match-invitation
|
||||
@@ -204,12 +193,11 @@
|
||||
(pos? (compare elapsed register-retry-threshold))))
|
||||
|
||||
(defn prepare-register
|
||||
[{:keys [::db/pool] :as cfg} {:keys [email] :as params}]
|
||||
[{:keys [::db/pool] :as cfg} params]
|
||||
|
||||
(validate-register-attempt! cfg params)
|
||||
|
||||
(let [email (profile/clean-email email)
|
||||
profile (when-let [profile (profile/get-profile-by-email pool email)]
|
||||
(let [profile (when-let [profile (profile/get-profile-by-email pool (:email params))]
|
||||
(cond
|
||||
(:is-blocked profile)
|
||||
(ex/raise :type :restriction
|
||||
@@ -224,7 +212,7 @@
|
||||
:code :email-already-exists
|
||||
:hint "profile already exists")))
|
||||
|
||||
params {:email email
|
||||
params {:email (:email params)
|
||||
:password (:password params)
|
||||
:invitation-token (:invitation-token params)
|
||||
:backend "penpot"
|
||||
@@ -234,7 +222,7 @@
|
||||
|
||||
params (d/without-nils params)
|
||||
|
||||
token (tokens/generate (::setup/props cfg) params)]
|
||||
token (tokens/generate (::main/props cfg) params)]
|
||||
(with-meta {:token token}
|
||||
{::audit/profile-id uuid/zero})))
|
||||
|
||||
@@ -263,8 +251,7 @@
|
||||
(merge (:props params))
|
||||
(merge {:viewed-tutorial? false
|
||||
:viewed-walkthrough? false
|
||||
:nudge {:big 10 :small 1}
|
||||
:v2-info-shown true})
|
||||
:nudge {:big 10 :small 1}})
|
||||
(db/tjson))
|
||||
|
||||
password (or (:password params) "!")
|
||||
@@ -304,17 +291,13 @@
|
||||
|
||||
(defn create-profile-rels!
|
||||
[conn {:keys [id] :as profile}]
|
||||
(let [features (cfeat/get-enabled-features cf/flags)
|
||||
team (teams/create-team conn
|
||||
{:profile-id id
|
||||
:name "Default"
|
||||
:features features
|
||||
:is-default true})]
|
||||
(let [team (teams/create-team conn {:profile-id id
|
||||
:name "Default"
|
||||
:is-default true})]
|
||||
(-> (db/update! conn :profile
|
||||
{:default-team-id (:id team)
|
||||
:default-project-id (:default-project-id team)}
|
||||
{:id id}
|
||||
{::db/return-keys true})
|
||||
{:id id})
|
||||
(profile/decode-row))))
|
||||
|
||||
|
||||
@@ -341,10 +324,8 @@
|
||||
|
||||
(defn register-profile
|
||||
[{:keys [::db/conn] :as cfg} {:keys [token fullname] :as params}]
|
||||
(let [claims (tokens/verify (::setup/props cfg) {:token token :iss :prepared-register})
|
||||
params (-> claims
|
||||
(into params)
|
||||
(assoc :fullname fullname))
|
||||
(let [claims (tokens/verify (::main/props cfg) {:token token :iss :prepared-register})
|
||||
params (assoc claims :fullname fullname)
|
||||
|
||||
is-active (or (:is-active params)
|
||||
(not (contains? cf/flags :email-verification)))
|
||||
@@ -358,7 +339,7 @@
|
||||
(create-profile-rels! conn))))
|
||||
|
||||
invitation (when-let [token (:invitation-token params)]
|
||||
(tokens/verify (::setup/props cfg) {:token token :iss :team-invitation}))]
|
||||
(tokens/verify (::main/props cfg) {:token token :iss :team-invitation}))]
|
||||
|
||||
;; If profile is filled in claims, means it tries to register
|
||||
;; again, so we proceed to update the modified-at attr
|
||||
@@ -369,6 +350,7 @@
|
||||
{::audit/type "fact"
|
||||
::audit/name "register-profile-retry"
|
||||
::audit/profile-id id}))
|
||||
|
||||
(cond
|
||||
;; If invitation token comes in params, this is because the
|
||||
;; user comes from team-invitation process; in this case,
|
||||
@@ -378,7 +360,7 @@
|
||||
;; email.
|
||||
(and (some? invitation) (= (:email profile) (:member-email invitation)))
|
||||
(let [claims (assoc invitation :member-id (:id profile))
|
||||
token (tokens/generate (::setup/props cfg) claims)
|
||||
token (tokens/generate (::main/props cfg) claims)
|
||||
resp {:invitation-token token}]
|
||||
(-> resp
|
||||
(rph/with-transform (session/create-fn cfg (:id profile)))
|
||||
@@ -405,11 +387,12 @@
|
||||
;; In all other cases, send a verification email.
|
||||
:else
|
||||
(do
|
||||
(send-email-verification! conn (::setup/props cfg) profile)
|
||||
(send-email-verification! conn (::main/props cfg) profile)
|
||||
(rph/with-meta profile
|
||||
{::audit/replace-props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)})))))
|
||||
|
||||
|
||||
(def schema:register-profile
|
||||
[:map {:title "register-profile"}
|
||||
[:token schema:token]
|
||||
@@ -418,8 +401,7 @@
|
||||
(sv/defmethod ::register-profile
|
||||
{::rpc/auth false
|
||||
::doc/added "1.15"
|
||||
::sm/params schema:register-profile
|
||||
::climit/id :auth/global}
|
||||
::sm/params schema:register-profile}
|
||||
[{:keys [::db/pool] :as cfg} params]
|
||||
(db/with-atomic [conn pool]
|
||||
(-> (assoc cfg ::db/conn conn)
|
||||
@@ -430,14 +412,14 @@
|
||||
(defn request-profile-recovery
|
||||
[{:keys [::db/pool] :as cfg} {:keys [email] :as params}]
|
||||
(letfn [(create-recovery-token [{:keys [id] :as profile}]
|
||||
(let [token (tokens/generate (::setup/props cfg)
|
||||
(let [token (tokens/generate (::main/props cfg)
|
||||
{:iss :password-recovery
|
||||
:exp (dt/in-future "15m")
|
||||
:profile-id id})]
|
||||
(assoc profile :token token)))
|
||||
|
||||
(send-email-notification [conn profile]
|
||||
(let [ptoken (tokens/generate (::setup/props cfg)
|
||||
(let [ptoken (tokens/generate (::main/props cfg)
|
||||
{:iss :profile-identity
|
||||
:profile-id (:id profile)
|
||||
:exp (dt/in-future {:days 30})})]
|
||||
@@ -451,8 +433,7 @@
|
||||
nil))]
|
||||
|
||||
(db/with-atomic [conn pool]
|
||||
(when-let [profile (->> (profile/clean-email email)
|
||||
(profile/get-profile-by-email conn))]
|
||||
(when-let [profile (profile/get-profile-by-email conn email)]
|
||||
(when-not (eml/allow-send-emails? conn profile)
|
||||
(ex/raise :type :validation
|
||||
:code :profile-is-muted
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -9,11 +9,9 @@
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.geom.point :as gpt]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.spec :as us]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.rpc :as-alias rpc]
|
||||
@@ -24,21 +22,18 @@
|
||||
[app.rpc.retry :as rtry]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]))
|
||||
[app.util.time :as dt]
|
||||
[clojure.spec.alpha :as s]))
|
||||
|
||||
;; --- GENERAL PURPOSE INTERNAL HELPERS
|
||||
|
||||
(defn- decode-row
|
||||
(defn decode-row
|
||||
[{:keys [participants position] :as row}]
|
||||
(cond-> row
|
||||
(db/pgpoint? position) (assoc :position (db/decode-pgpoint position))
|
||||
(db/pgobject? participants) (assoc :participants (db/decode-transit-pgobject participants))))
|
||||
|
||||
(def xf-decode-row
|
||||
(map decode-row))
|
||||
|
||||
(def ^:privateqpage-name
|
||||
sql:get-file
|
||||
(def sql:get-file
|
||||
"select f.id, f.modified_at, f.revn, f.features,
|
||||
f.project_id, p.team_id, f.data
|
||||
from file as f
|
||||
@@ -48,19 +43,15 @@
|
||||
|
||||
(defn- get-file
|
||||
"A specialized version of get-file for comments module."
|
||||
[cfg file-id page-id]
|
||||
(let [file (db/exec-one! cfg [sql:get-file file-id])]
|
||||
(when-not file
|
||||
[conn file-id page-id]
|
||||
(binding [pmap/*load-fn* (partial files/load-pointer conn file-id)]
|
||||
(if-let [{:keys [data] :as file} (some-> (db/exec-one! conn [sql:get-file file-id]) (files/decode-row))]
|
||||
(-> file
|
||||
(assoc :page-name (dm/get-in data [:pages-index page-id :name]))
|
||||
(assoc :page-id page-id))
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found
|
||||
:hint "file not found"))
|
||||
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)]
|
||||
(let [{:keys [data] :as file} (files/decode-row file)]
|
||||
(-> file
|
||||
(assoc :page-name (dm/get-in data [:pages-index page-id :name]))
|
||||
(assoc :page-id page-id)
|
||||
(dissoc :data))))))
|
||||
:hint "file not found"))))
|
||||
|
||||
(defn- get-comment-thread
|
||||
[conn thread-id & {:as opts}]
|
||||
@@ -68,8 +59,8 @@
|
||||
(decode-row)))
|
||||
|
||||
(defn- get-comment
|
||||
[conn comment-id & {:as opts}]
|
||||
(db/get-by-id conn :comment comment-id opts))
|
||||
[conn comment-id & {:keys [for-update?]}]
|
||||
(db/get-by-id conn :comment comment-id {:for-update for-update?}))
|
||||
|
||||
(defn- get-next-seqn
|
||||
[conn file-id]
|
||||
@@ -98,25 +89,23 @@
|
||||
|
||||
(declare ^:private get-comment-threads)
|
||||
|
||||
(def ^:private
|
||||
schema:get-comment-threads
|
||||
[:and
|
||||
[:map {:title "get-comment-threads"}
|
||||
[:file-id {:optional true} ::sm/uuid]
|
||||
[:team-id {:optional true} ::sm/uuid]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]]
|
||||
[::sm/contains-any #{:file-id :team-id}]])
|
||||
(s/def ::team-id ::us/uuid)
|
||||
(s/def ::file-id ::us/uuid)
|
||||
(s/def ::share-id (s/nilable ::us/uuid))
|
||||
|
||||
(s/def ::get-comment-threads
|
||||
(s/and (s/keys :req [::rpc/profile-id]
|
||||
:opt-un [::file-id ::share-id ::team-id])
|
||||
#(or (:file-id %) (:team-id %))))
|
||||
|
||||
(sv/defmethod ::get-comment-threads
|
||||
{::doc/added "1.15"
|
||||
::sm/params schema:get-comment-threads}
|
||||
[cfg {:keys [::rpc/profile-id file-id share-id] :as params}]
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id share-id] :as params}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(get-comment-threads conn profile-id file-id)))
|
||||
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(get-comment-threads conn profile-id file-id))))
|
||||
|
||||
(def ^:private sql:comment-threads
|
||||
(def sql:comment-threads
|
||||
"select distinct on (ct.id)
|
||||
ct.*,
|
||||
f.name as file_name,
|
||||
@@ -141,24 +130,23 @@
|
||||
(defn- get-comment-threads
|
||||
[conn profile-id file-id]
|
||||
(->> (db/exec! conn [sql:comment-threads profile-id file-id])
|
||||
(into [] xf-decode-row)))
|
||||
(into [] (map decode-row))))
|
||||
|
||||
;; --- COMMAND: Get Unread Comment Threads
|
||||
|
||||
(declare ^:private get-unread-comment-threads)
|
||||
|
||||
(def ^:private
|
||||
schema:get-unread-comment-threads
|
||||
[:map {:title "get-unread-comment-threads"}
|
||||
[:team-id ::sm/uuid]])
|
||||
(s/def ::team-id ::us/uuid)
|
||||
(s/def ::get-unread-comment-threads
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::team-id]))
|
||||
|
||||
(sv/defmethod ::get-unread-comment-threads
|
||||
{::doc/added "1.15"
|
||||
::sm/params schema:get-unread-comment-threads}
|
||||
[cfg {:keys [::rpc/profile-id team-id] :as params}]
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(teams/check-read-permissions! conn profile-id team-id)
|
||||
(get-unread-comment-threads conn profile-id team-id))))
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id] :as params}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(teams/check-read-permissions! conn profile-id team-id)
|
||||
(get-unread-comment-threads conn profile-id team-id)))
|
||||
|
||||
(def sql:comment-threads-by-team
|
||||
"select distinct on (ct.id)
|
||||
@@ -190,60 +178,62 @@
|
||||
(defn- get-unread-comment-threads
|
||||
[conn profile-id team-id]
|
||||
(->> (db/exec! conn [sql:unread-comment-threads-by-team profile-id team-id])
|
||||
(into [] xf-decode-row)))
|
||||
(into [] (map decode-row))))
|
||||
|
||||
|
||||
;; --- COMMAND: Get Single Comment Thread
|
||||
|
||||
(def ^:private
|
||||
schema:get-comment-thread
|
||||
[:map {:title "get-comment-thread"}
|
||||
[:file-id ::sm/uuid]
|
||||
[:id ::sm/uuid]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
(s/def ::get-comment-thread
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::file-id ::us/id]
|
||||
:opt-un [::share-id]))
|
||||
|
||||
(sv/defmethod ::get-comment-thread
|
||||
{::doc/added "1.15"
|
||||
::sm/params schema:get-comment-thread}
|
||||
[cfg {:keys [::rpc/profile-id file-id id share-id] :as params}]
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(let [sql (str "with threads as (" sql:comment-threads ")"
|
||||
"select * from threads where id = ?")]
|
||||
(-> (db/exec-one! conn [sql profile-id file-id id])
|
||||
(decode-row))))))
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id id share-id] :as params}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(let [sql (str "with threads as (" sql:comment-threads ")"
|
||||
"select * from threads where id = ?")]
|
||||
(-> (db/exec-one! conn [sql profile-id file-id id])
|
||||
(decode-row)))))
|
||||
|
||||
;; --- COMMAND: Retrieve Comments
|
||||
|
||||
(declare ^:private get-comments)
|
||||
|
||||
(def ^:private
|
||||
schema:get-comments
|
||||
[:map {:title "get-comments"}
|
||||
[:thread-id ::sm/uuid]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
(s/def ::thread-id ::us/uuid)
|
||||
(s/def ::get-comments
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::thread-id]
|
||||
:opt-un [::share-id]))
|
||||
|
||||
(sv/defmethod ::get-comments
|
||||
{::doc/added "1.15"
|
||||
::sm/params schema:get-comments}
|
||||
[cfg {:keys [::rpc/profile-id thread-id share-id]}]
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [{:keys [file-id] :as thread} (get-comment-thread conn thread-id)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(get-comments conn thread-id)))))
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id thread-id share-id] :as params}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(let [{:keys [file-id] :as thread} (get-comment-thread conn thread-id)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(get-comments conn thread-id))))
|
||||
|
||||
(def sql:comments
|
||||
"select c.* from comment as c
|
||||
where c.thread_id = ?
|
||||
order by c.created_at asc")
|
||||
|
||||
(defn- get-comments
|
||||
[conn thread-id]
|
||||
(->> (db/query conn :comment
|
||||
{:thread-id thread-id}
|
||||
{:order-by [[:created-at :asc]]})
|
||||
(into [] xf-decode-row)))
|
||||
(into [] (map decode-row))))
|
||||
|
||||
;; --- COMMAND: Get file comments users
|
||||
|
||||
;; All the profiles that had comment the file, plus the current
|
||||
;; profile.
|
||||
|
||||
(def ^:private sql:file-comment-users
|
||||
(def sql:file-comment-users
|
||||
"WITH available_profiles AS (
|
||||
SELECT DISTINCT owner_id AS id
|
||||
FROM comment
|
||||
@@ -262,22 +252,20 @@
|
||||
[conn file-id profile-id]
|
||||
(db/exec! conn [sql:file-comment-users file-id profile-id]))
|
||||
|
||||
(def ^:private
|
||||
schema:get-profiles-for-file-comments
|
||||
[:map {:title "get-profiles-for-file-comments"}
|
||||
[:file-id ::sm/uuid]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
(s/def ::get-profiles-for-file-comments
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::file-id]
|
||||
:opt-un [::share-id]))
|
||||
|
||||
(sv/defmethod ::get-profiles-for-file-comments
|
||||
"Retrieves a list of profiles with limited set of properties of all
|
||||
participants on comment threads of the file."
|
||||
{::doc/added "1.15"
|
||||
::doc/changes ["1.15" "Imported from queries and renamed."]
|
||||
::sm/params schema:get-profiles-for-file-comments}
|
||||
[cfg {:keys [::rpc/profile-id file-id share-id]}]
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(get-file-comments-users conn file-id profile-id))))
|
||||
::doc/changes ["1.15" "Imported from queries and renamed."]}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id share-id]}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(get-file-comments-users conn file-id profile-id)))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; MUTATION COMMANDS
|
||||
@@ -287,52 +275,55 @@
|
||||
|
||||
;; --- COMMAND: Create Comment Thread
|
||||
|
||||
(def ^:private
|
||||
schema:create-comment-thread
|
||||
[:map {:title "create-comment-thread"}
|
||||
[:file-id ::sm/uuid]
|
||||
[:position ::gpt/point]
|
||||
[:content :string]
|
||||
[:page-id ::sm/uuid]
|
||||
[:frame-id ::sm/uuid]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
(s/def ::page-id ::us/uuid)
|
||||
(s/def ::position ::gpt/point)
|
||||
(s/def ::content ::us/string)
|
||||
(s/def ::frame-id ::us/uuid)
|
||||
|
||||
(s/def ::create-comment-thread
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::file-id ::position ::content ::page-id ::frame-id]
|
||||
:opt-un [::share-id]))
|
||||
|
||||
(sv/defmethod ::create-comment-thread
|
||||
{::doc/added "1.15"
|
||||
::webhooks/event? true
|
||||
::rtry/enabled true
|
||||
::rtry/when rtry/conflict-exception?
|
||||
::sm/params schema:create-comment-thread}
|
||||
[cfg {:keys [::rpc/profile-id ::rpc/request-at file-id page-id share-id position content frame-id]}]
|
||||
::webhooks/event? true}
|
||||
[{:keys [::db/pool] :as cfg}
|
||||
{:keys [::rpc/profile-id ::rpc/request-at file-id page-id share-id position content frame-id]}]
|
||||
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(files/check-comment-permissions! cfg profile-id file-id share-id)
|
||||
(let [{:keys [team-id project-id page-name]} (get-file conn file-id page-id)]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [{:keys [team-id project-id page-name] :as file} (get-file conn file-id page-id)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
|
||||
(run! (partial quotes/check-quote! cfg)
|
||||
(list {::quotes/id ::quotes/comment-threads-per-file
|
||||
::quotes/profile-id profile-id
|
||||
::quotes/team-id team-id
|
||||
::quotes/project-id project-id
|
||||
::quotes/file-id file-id}
|
||||
{::quotes/id ::quotes/comments-per-file
|
||||
::quotes/profile-id profile-id
|
||||
::quotes/team-id team-id
|
||||
::quotes/project-id project-id
|
||||
::quotes/file-id file-id}))
|
||||
(run! (partial quotes/check-quote! conn)
|
||||
(list {::quotes/id ::quotes/comment-threads-per-file
|
||||
::quotes/profile-id profile-id
|
||||
::quotes/team-id team-id
|
||||
::quotes/project-id project-id
|
||||
::quotes/file-id file-id}
|
||||
{::quotes/id ::quotes/comments-per-file
|
||||
::quotes/profile-id profile-id
|
||||
::quotes/team-id team-id
|
||||
::quotes/project-id project-id
|
||||
::quotes/file-id file-id}))
|
||||
|
||||
(rtry/with-retry {::rtry/when rtry/conflict-exception?
|
||||
::rtry/max-retries 3
|
||||
::rtry/label "create-comment-thread"
|
||||
::db/conn conn}
|
||||
(create-comment-thread conn
|
||||
{:created-at request-at
|
||||
:profile-id profile-id
|
||||
:file-id file-id
|
||||
:page-id page-id
|
||||
:page-name page-name
|
||||
:position position
|
||||
:content content
|
||||
:frame-id frame-id})))))
|
||||
|
||||
(create-comment-thread conn {:created-at request-at
|
||||
:profile-id profile-id
|
||||
:file-id file-id
|
||||
:page-id page-id
|
||||
:page-name page-name
|
||||
:position position
|
||||
:content content
|
||||
:frame-id frame-id})))))
|
||||
|
||||
(defn- create-comment-thread
|
||||
[conn {:keys [profile-id file-id page-id page-name created-at position content frame-id]}]
|
||||
|
||||
(let [;; NOTE: we take the next seq number from a separate query because the whole
|
||||
;; operation can be retried on conflict, and in this case the new seq shold be
|
||||
;; retrieved from the database.
|
||||
@@ -372,228 +363,208 @@
|
||||
|
||||
;; --- COMMAND: Update Comment Thread Status
|
||||
|
||||
(def ^:private
|
||||
schema:update-comment-thread-status
|
||||
[:map {:title "update-comment-thread-status"}
|
||||
[:id ::sm/uuid]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
(s/def ::id ::us/uuid)
|
||||
(s/def ::share-id (s/nilable ::us/uuid))
|
||||
|
||||
(s/def ::update-comment-thread-status
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::id]
|
||||
:opt-un [::share-id]))
|
||||
|
||||
(sv/defmethod ::update-comment-thread-status
|
||||
{::doc/added "1.15"
|
||||
::sm/params schema:update-comment-thread-status}
|
||||
[cfg {:keys [::rpc/profile-id id share-id]}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [{:keys [file-id] :as thread} (get-comment-thread conn id ::sql/for-update true)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(upsert-comment-thread-status! conn profile-id id)))))
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id share-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [{:keys [file-id] :as thread} (get-comment-thread conn id ::db/for-update? true)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(upsert-comment-thread-status! conn profile-id id))))
|
||||
|
||||
|
||||
;; --- COMMAND: Update Comment Thread
|
||||
|
||||
(def ^:private
|
||||
schema:update-comment-thread
|
||||
[:map {:title "update-comment-thread"}
|
||||
[:id ::sm/uuid]
|
||||
[:is-resolved :boolean]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
(s/def ::is-resolved ::us/boolean)
|
||||
(s/def ::update-comment-thread
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::id ::is-resolved]
|
||||
:opt-un [::share-id]))
|
||||
|
||||
(sv/defmethod ::update-comment-thread
|
||||
{::doc/added "1.15"
|
||||
::sm/params schema:update-comment-thread}
|
||||
[cfg {:keys [::rpc/profile-id id is-resolved share-id]}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [{:keys [file-id] :as thread} (get-comment-thread conn id ::sql/for-update true)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(db/update! conn :comment-thread
|
||||
{:is-resolved is-resolved}
|
||||
{:id id})
|
||||
nil))))
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id is-resolved share-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [{:keys [file-id] :as thread} (get-comment-thread conn id ::db/for-update? true)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(db/update! conn :comment-thread
|
||||
{:is-resolved is-resolved}
|
||||
{:id id})
|
||||
nil)))
|
||||
|
||||
|
||||
;; --- COMMAND: Add Comment
|
||||
|
||||
(declare ^:private get-comment-thread)
|
||||
(declare get-comment-thread)
|
||||
(declare create-comment)
|
||||
|
||||
(def ^:private
|
||||
schema:create-comment
|
||||
[:map {:title "create-comment"}
|
||||
[:thread-id ::sm/uuid]
|
||||
[:content :string]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
(s/def ::create-comment
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::thread-id ::content]
|
||||
:opt-un [::share-id]))
|
||||
|
||||
(sv/defmethod ::create-comment
|
||||
{::doc/added "1.15"
|
||||
::webhooks/event? true
|
||||
::sm/params schema:create-comment}
|
||||
[cfg {:keys [::rpc/profile-id ::rpc/request-at thread-id share-id content]}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn] :as cfg}]
|
||||
(let [{:keys [file-id page-id] :as thread} (get-comment-thread conn thread-id ::sql/for-update true)
|
||||
{:keys [team-id project-id page-name] :as file} (get-file cfg file-id page-id)]
|
||||
::webhooks/event? true}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id ::rpc/request-at thread-id share-id content] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [{:keys [file-id page-id] :as thread} (get-comment-thread conn thread-id ::db/for-update? true)
|
||||
{:keys [team-id project-id page-name] :as file} (get-file conn file-id page-id)]
|
||||
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(quotes/check-quote! conn
|
||||
{::quotes/id ::quotes/comments-per-file
|
||||
::quotes/profile-id profile-id
|
||||
::quotes/team-id team-id
|
||||
::quotes/project-id project-id
|
||||
::quotes/file-id file-id})
|
||||
(files/check-comment-permissions! conn profile-id (:id file) share-id)
|
||||
(quotes/check-quote! conn
|
||||
{::quotes/id ::quotes/comments-per-file
|
||||
::quotes/profile-id profile-id
|
||||
::quotes/team-id team-id
|
||||
::quotes/project-id project-id
|
||||
::quotes/file-id (:id file)})
|
||||
|
||||
;; Update the page-name cached attribute on comment thread table.
|
||||
(when (not= page-name (:page-name thread))
|
||||
(db/update! conn :comment-thread
|
||||
{:page-name page-name}
|
||||
{:id thread-id}))
|
||||
;; Update the page-name cached attribute on comment thread table.
|
||||
(when (not= page-name (:page-name thread))
|
||||
(db/update! conn :comment-thread
|
||||
{:page-name page-name}
|
||||
{:id thread-id}))
|
||||
|
||||
(let [comment (db/insert! conn :comment
|
||||
{:id (uuid/next)
|
||||
:created-at request-at
|
||||
:modified-at request-at
|
||||
:thread-id thread-id
|
||||
:owner-id profile-id
|
||||
:content content})
|
||||
props {:file-id file-id
|
||||
:share-id nil}]
|
||||
(let [comment (db/insert! conn :comment
|
||||
{:id (uuid/next)
|
||||
:created-at request-at
|
||||
:modified-at request-at
|
||||
:thread-id thread-id
|
||||
:owner-id profile-id
|
||||
:content content})
|
||||
props {:file-id file-id
|
||||
:share-id nil}]
|
||||
|
||||
;; Update thread modified-at attribute and assoc the current
|
||||
;; profile to the participant set.
|
||||
(db/update! conn :comment-thread
|
||||
{:modified-at request-at
|
||||
:participants (-> (:participants thread #{})
|
||||
(conj profile-id)
|
||||
(db/tjson))}
|
||||
{:id thread-id})
|
||||
;; Update thread modified-at attribute and assoc the current
|
||||
;; profile to the participant set.
|
||||
(db/update! conn :comment-thread
|
||||
{:modified-at request-at
|
||||
:participants (-> (:participants thread #{})
|
||||
(conj profile-id)
|
||||
(db/tjson))}
|
||||
{:id thread-id})
|
||||
|
||||
;; Update the current profile status in relation to the
|
||||
;; current thread.
|
||||
(upsert-comment-thread-status! conn profile-id thread-id request-at)
|
||||
|
||||
(vary-meta comment assoc ::audit/props props))))))
|
||||
;; Update the current profile status in relation to the
|
||||
;; current thread.
|
||||
(upsert-comment-thread-status! conn profile-id thread-id request-at)
|
||||
|
||||
(vary-meta comment assoc ::audit/props props)))))
|
||||
|
||||
;; --- COMMAND: Update Comment
|
||||
|
||||
(def ^:private
|
||||
schema:update-comment
|
||||
[:map {:title "update-comment"}
|
||||
[:id ::sm/uuid]
|
||||
[:content :string]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
(s/def ::update-comment
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::id ::content]
|
||||
:opt-un [::share-id]))
|
||||
|
||||
(sv/defmethod ::update-comment
|
||||
{::doc/added "1.15"
|
||||
::sm/params schema:update-comment}
|
||||
[cfg {:keys [::rpc/profile-id ::rpc/request-at id share-id content]}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn] :as cfg}]
|
||||
(let [{:keys [thread-id owner-id] :as comment} (get-comment conn id ::sql/for-update true)
|
||||
{:keys [file-id page-id] :as thread} (get-comment-thread conn thread-id ::sql/for-update true)]
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id ::rpc/request-at id share-id content] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [{:keys [thread-id owner-id] :as comment} (get-comment conn id ::db/for-update? true)
|
||||
{:keys [file-id page-id] :as thread} (get-comment-thread conn thread-id ::db/for-update? true)]
|
||||
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
|
||||
;; Don't allow edit comments to not owners
|
||||
(when-not (= owner-id profile-id)
|
||||
(ex/raise :type :validation
|
||||
:code :not-allowed))
|
||||
;; Don't allow edit comments to not owners
|
||||
(when-not (= owner-id profile-id)
|
||||
(ex/raise :type :validation
|
||||
:code :not-allowed))
|
||||
|
||||
(let [{:keys [page-name]} (get-file cfg file-id page-id)]
|
||||
(db/update! conn :comment
|
||||
{:content content
|
||||
:modified-at request-at}
|
||||
{:id id})
|
||||
(let [{:keys [page-name] :as file} (get-file conn file-id page-id)]
|
||||
(db/update! conn :comment
|
||||
{:content content
|
||||
:modified-at request-at}
|
||||
{:id id})
|
||||
|
||||
(db/update! conn :comment-thread
|
||||
{:modified-at request-at
|
||||
:page-name page-name}
|
||||
{:id thread-id})
|
||||
nil)))))
|
||||
(db/update! conn :comment-thread
|
||||
{:modified-at request-at
|
||||
:page-name page-name}
|
||||
{:id thread-id})
|
||||
nil))))
|
||||
|
||||
;; --- COMMAND: Delete Comment Thread
|
||||
|
||||
(def ^:private
|
||||
schema:delete-comment-thread
|
||||
[:map {:title "delete-comment-thread"}
|
||||
[:id ::sm/uuid]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
(s/def ::delete-comment-thread
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::id]
|
||||
:opt-un [::share-id]))
|
||||
|
||||
(sv/defmethod ::delete-comment-thread
|
||||
{::doc/added "1.15"
|
||||
::sm/params schema:delete-comment-thread}
|
||||
[cfg {:keys [::rpc/profile-id id share-id]}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [{:keys [owner-id file-id] :as thread} (get-comment-thread conn id ::sql/for-update true)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(when-not (= owner-id profile-id)
|
||||
(ex/raise :type :validation
|
||||
:code :not-allowed))
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id share-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [{:keys [owner-id file-id] :as thread} (get-comment-thread conn id ::db/for-update? true)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(when-not (= owner-id profile-id)
|
||||
(ex/raise :type :validation
|
||||
:code :not-allowed))
|
||||
|
||||
(db/delete! conn :comment-thread {:id id})
|
||||
nil))))
|
||||
(db/delete! conn :comment-thread {:id id})
|
||||
nil)))
|
||||
|
||||
;; --- COMMAND: Delete comment
|
||||
|
||||
(def ^:private
|
||||
schema:delete-comment
|
||||
[:map {:title "delete-comment"}
|
||||
[:id ::sm/uuid]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
(s/def ::delete-comment
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::id]
|
||||
:opt-un [::share-id]))
|
||||
|
||||
(sv/defmethod ::delete-comment
|
||||
{::doc/added "1.15"
|
||||
::sm/params schema:delete-comment}
|
||||
[cfg {:keys [::rpc/profile-id id share-id]}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [{:keys [owner-id thread-id] :as comment} (get-comment conn id ::sql/for-update true)
|
||||
{:keys [file-id] :as thread} (get-comment-thread conn thread-id)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(when-not (= owner-id profile-id)
|
||||
(ex/raise :type :validation
|
||||
:code :not-allowed))
|
||||
(db/delete! conn :comment {:id id})
|
||||
nil))))
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id share-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [{:keys [owner-id thread-id] :as comment} (get-comment conn id ::db/for-update? true)
|
||||
{:keys [file-id] :as thread} (get-comment-thread conn thread-id)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(when-not (= owner-id profile-id)
|
||||
(ex/raise :type :validation
|
||||
:code :not-allowed))
|
||||
(db/delete! conn :comment {:id id}))))
|
||||
|
||||
|
||||
;; --- COMMAND: Update comment thread position
|
||||
|
||||
(def ^:private
|
||||
schema:update-comment-thread-position
|
||||
[:map {:title "update-comment-thread-position"}
|
||||
[:id ::sm/uuid]
|
||||
[:position ::gpt/point]
|
||||
[:frame-id ::sm/uuid]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
(s/def ::update-comment-thread-position
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::id ::position ::frame-id]
|
||||
:opt-un [::share-id]))
|
||||
|
||||
(sv/defmethod ::update-comment-thread-position
|
||||
{::doc/added "1.15"
|
||||
::sm/params schema:update-comment-thread-position}
|
||||
[cfg {:keys [::rpc/profile-id ::rpc/request-at id position frame-id share-id]}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [{:keys [file-id] :as thread} (get-comment-thread conn id ::sql/for-update true)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(db/update! conn :comment-thread
|
||||
{:modified-at request-at
|
||||
:position (db/pgpoint position)
|
||||
:frame-id frame-id}
|
||||
{:id (:id thread)})
|
||||
nil))))
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id position frame-id share-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [{:keys [file-id] :as thread} (get-comment-thread conn id ::db/for-update? true)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(db/update! conn :comment-thread
|
||||
{:modified-at (::rpc/request-at params)
|
||||
:position (db/pgpoint position)
|
||||
:frame-id frame-id}
|
||||
{:id (:id thread)})
|
||||
nil)))
|
||||
|
||||
;; --- COMMAND: Update comment frame
|
||||
|
||||
(def ^:private
|
||||
schema:update-comment-thread-frame
|
||||
[:map {:title "update-comment-thread-frame"}
|
||||
[:id ::sm/uuid]
|
||||
[:frame-id ::sm/uuid]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
(s/def ::update-comment-thread-frame
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::id ::frame-id]
|
||||
:opt-un [::share-id]))
|
||||
|
||||
(sv/defmethod ::update-comment-thread-frame
|
||||
{::doc/added "1.15"
|
||||
::sm/params schema:update-comment-thread-frame}
|
||||
[cfg {:keys [::rpc/profile-id ::rpc/request-at id frame-id share-id]}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [{:keys [file-id] :as thread} (get-comment-thread conn id ::sql/for-update true)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(db/update! conn :comment-thread
|
||||
{:modified-at request-at
|
||||
:frame-id frame-id}
|
||||
{:id id})
|
||||
nil))))
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id frame-id share-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [{:keys [file-id] :as thread} (get-comment-thread conn id ::db/for-update? true)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(db/update! conn :comment-thread
|
||||
{:modified-at (::rpc/request-at params)
|
||||
:frame-id frame-id}
|
||||
{:id id})
|
||||
nil)))
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -7,28 +7,24 @@
|
||||
(ns app.rpc.commands.files-create
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.files.defaults :refer [version]]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.files.features :as ffeat]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.projects :as projects]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.permissions :as perms]
|
||||
[app.rpc.quotes :as quotes]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.objects-map :as omap]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[clojure.set :as set]))
|
||||
[clojure.spec.alpha :as s]))
|
||||
|
||||
(defn create-file-role!
|
||||
[conn {:keys [file-id profile-id role]}]
|
||||
@@ -38,117 +34,75 @@
|
||||
(db/insert! conn :file-profile-rel))))
|
||||
|
||||
(defn create-file
|
||||
[{:keys [::db/conn] :as cfg}
|
||||
{:keys [id name project-id is-shared revn
|
||||
modified-at deleted-at create-page
|
||||
ignore-sync-until features]
|
||||
:or {is-shared false revn 0 create-page true}
|
||||
:as params}]
|
||||
[conn {:keys [id name project-id is-shared revn
|
||||
modified-at deleted-at create-page
|
||||
ignore-sync-until features]
|
||||
:or {is-shared false revn 0 create-page true}
|
||||
:as params}]
|
||||
|
||||
(dm/assert!
|
||||
"expected a valid connection"
|
||||
(db/connection? conn))
|
||||
(let [id (or id (uuid/next))
|
||||
features (->> features
|
||||
(into (files/get-default-features))
|
||||
(files/check-features-compatibility!))
|
||||
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)
|
||||
cfeat/*current* features]
|
||||
(let [id (or id (uuid/next))
|
||||
|
||||
data (if create-page
|
||||
pointers (atom {})
|
||||
data (binding [pmap/*tracked* pointers
|
||||
ffeat/*current* features
|
||||
ffeat/*wrap-with-objects-map-fn* (if (features "storate/objects-map") omap/wrap identity)
|
||||
ffeat/*wrap-with-pointer-map-fn* (if (features "storage/pointer-map") pmap/wrap identity)]
|
||||
(if create-page
|
||||
(ctf/make-file-data id)
|
||||
(ctf/make-file-data id nil))
|
||||
(ctf/make-file-data id nil)))
|
||||
|
||||
file {:id id
|
||||
:project-id project-id
|
||||
:name name
|
||||
:revn revn
|
||||
:is-shared is-shared
|
||||
:version version
|
||||
:data data
|
||||
:features features
|
||||
:ignore-sync-until ignore-sync-until
|
||||
:modified-at modified-at
|
||||
:deleted-at deleted-at}
|
||||
features (db/create-array conn "text" features)
|
||||
file (db/insert! conn :file
|
||||
(d/without-nils
|
||||
{:id id
|
||||
:project-id project-id
|
||||
:name name
|
||||
:revn revn
|
||||
:is-shared is-shared
|
||||
:data (blob/encode data)
|
||||
:features features
|
||||
:ignore-sync-until ignore-sync-until
|
||||
:modified-at modified-at
|
||||
:deleted-at deleted-at}))]
|
||||
|
||||
file (if (contains? features "fdata/objects-map")
|
||||
(feat.fdata/enable-objects-map file)
|
||||
file)
|
||||
(binding [pmap/*tracked* pointers]
|
||||
(files/persist-pointers! conn id))
|
||||
|
||||
file (if (contains? features "fdata/pointer-map")
|
||||
(feat.fdata/enable-pointer-map file)
|
||||
file)
|
||||
(->> (assoc params :file-id id :role :owner)
|
||||
(create-file-role! conn))
|
||||
|
||||
file (d/without-nils file)]
|
||||
(db/update! conn :project
|
||||
{:modified-at (dt/now)}
|
||||
{:id project-id})
|
||||
|
||||
(db/insert! conn :file
|
||||
(-> file
|
||||
(update :data blob/encode)
|
||||
(update :features db/encode-pgarray conn "text"))
|
||||
{::db/return-keys false})
|
||||
(files/decode-row file)))
|
||||
|
||||
(when (contains? features "fdata/pointer-map")
|
||||
(feat.fdata/persist-pointers! cfg id))
|
||||
|
||||
(->> (assoc params :file-id id :role :owner)
|
||||
(create-file-role! conn))
|
||||
|
||||
(db/update! conn :project
|
||||
{:modified-at (dt/now)}
|
||||
{:id project-id})
|
||||
|
||||
file)))
|
||||
|
||||
(def ^:private schema:create-file
|
||||
[:map {:title "create-file"}
|
||||
[:name :string]
|
||||
[:project-id ::sm/uuid]
|
||||
[:id {:optional true} ::sm/uuid]
|
||||
[:is-shared {:optional true} :boolean]
|
||||
[:features {:optional true} ::cfeat/features]])
|
||||
(s/def ::create-file
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::files/name
|
||||
::files/project-id]
|
||||
:opt-un [::files/id
|
||||
::files/is-shared
|
||||
::files/features]))
|
||||
|
||||
(sv/defmethod ::create-file
|
||||
{::doc/added "1.17"
|
||||
::doc/module :files
|
||||
::webhooks/event? true
|
||||
::sm/params schema:create-file}
|
||||
[cfg {:keys [::rpc/profile-id project-id] :as params}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn] :as cfg}]
|
||||
(projects/check-edition-permissions! conn profile-id project-id)
|
||||
(let [team (teams/get-team conn
|
||||
:profile-id profile-id
|
||||
:project-id project-id)
|
||||
team-id (:id team)
|
||||
::webhooks/event? true}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id project-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(projects/check-edition-permissions! conn profile-id project-id)
|
||||
(let [team-id (files/get-team-id conn project-id)
|
||||
params (assoc params :profile-id profile-id)]
|
||||
|
||||
;; When we create files, we only need to respect the team
|
||||
;; features, because some features can be enabled
|
||||
;; globally, but the team is still not migrated properly.
|
||||
features (-> (cfeat/get-team-enabled-features cf/flags team)
|
||||
(cfeat/check-client-features! (:features params)))
|
||||
(run! (partial quotes/check-quote! conn)
|
||||
(list {::quotes/id ::quotes/files-per-project
|
||||
::quotes/team-id team-id
|
||||
::quotes/profile-id profile-id
|
||||
::quotes/project-id project-id}))
|
||||
|
||||
;; We also include all no migration features declared by
|
||||
;; client; that enables the ability to enable a runtime
|
||||
;; feature on frontend and make it permanent on file
|
||||
features (-> (:features params #{})
|
||||
(set/intersection cfeat/no-migration-features)
|
||||
(set/union features))
|
||||
|
||||
params (-> params
|
||||
(assoc :profile-id profile-id)
|
||||
(assoc :features features))]
|
||||
|
||||
(run! (partial quotes/check-quote! conn)
|
||||
(list {::quotes/id ::quotes/files-per-project
|
||||
::quotes/team-id team-id
|
||||
::quotes/profile-id profile-id
|
||||
::quotes/project-id project-id}))
|
||||
|
||||
;; When newly computed features does not match exactly with
|
||||
;; the features defined on team row, we update it.
|
||||
(when (not= features (:features team))
|
||||
(let [features (db/create-array conn "text" features)]
|
||||
(db/update! conn :team
|
||||
{:features features}
|
||||
{:id team-id})))
|
||||
|
||||
(-> (create-file cfg params)
|
||||
(vary-meta assoc ::audit/props {:team-id team-id}))))))
|
||||
(-> (create-file conn params)
|
||||
(vary-meta assoc ::audit/props {:team-id team-id})))))
|
||||
|
||||
@@ -12,17 +12,14 @@
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.main :as-alias main]
|
||||
[app.media :as media]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.profile :as profile]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.storage :as sto]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[cuerdas.core :as str]))
|
||||
[app.util.time :as dt]))
|
||||
|
||||
(defn check-authorized!
|
||||
[{:keys [::db/pool]} profile-id]
|
||||
@@ -60,120 +57,70 @@
|
||||
::sm/params schema:get-file-snapshots}
|
||||
[cfg {:keys [::rpc/profile-id] :as params}]
|
||||
(check-authorized! cfg profile-id)
|
||||
(db/run! cfg get-file-snapshots params))
|
||||
(db/run! cfg #(get-file-snapshots % params)))
|
||||
|
||||
(defn restore-file-snapshot!
|
||||
[{:keys [::db/conn ::sto/storage] :as cfg} {:keys [file-id id]}]
|
||||
(let [storage (media/configure-assets-storage storage conn)
|
||||
file (files/get-minimal-file conn file-id {::db/for-update true})
|
||||
snapshot (db/get* conn :file-change
|
||||
{:file-id file-id
|
||||
:id id}
|
||||
{::db/for-share true})]
|
||||
params {:id id :file-id file-id}
|
||||
options {:columns [:id :data :revn]}
|
||||
snapshot (db/get* conn :file-change params options)]
|
||||
|
||||
(when-not snapshot
|
||||
(ex/raise :type :not-found
|
||||
:code :snapshot-not-found
|
||||
:hint "unable to find snapshot with the provided label"
|
||||
:id id
|
||||
:file-id file-id))
|
||||
(when (and (some? snapshot)
|
||||
(some? (:data snapshot)))
|
||||
|
||||
(when-not (:data snapshot)
|
||||
(ex/raise :type :precondition
|
||||
:code :snapshot-without-data
|
||||
:hint "snapshot has no data"
|
||||
:label (:label snapshot)
|
||||
:file-id file-id))
|
||||
(l/debug :hint "snapshot found"
|
||||
:snapshot-id (:id snapshot)
|
||||
:file-id file-id)
|
||||
|
||||
(l/dbg :hint "restoring snapshot"
|
||||
:file-id (str file-id)
|
||||
:label (:label snapshot)
|
||||
:snapshot-id (str (:id snapshot)))
|
||||
(db/update! conn :file
|
||||
{:data (:data snapshot)}
|
||||
{:id file-id})
|
||||
|
||||
(db/update! conn :file
|
||||
{:data (:data snapshot)
|
||||
:revn (inc (:revn file))
|
||||
:features (:features snapshot)}
|
||||
{:id file-id})
|
||||
;; clean object thumbnails
|
||||
(let [sql (str "delete from file_object_thumbnail "
|
||||
" where file_id=? returning media_id")
|
||||
res (db/exec! conn [sql file-id])]
|
||||
|
||||
;; clean object thumbnails
|
||||
(let [sql (str "update file_tagged_object_thumbnail "
|
||||
" set deleted_at = now() "
|
||||
" where file_id=? returning media_id")
|
||||
res (db/exec! conn [sql file-id])]
|
||||
(doseq [media-id (into #{} (keep :media-id) res)]
|
||||
(sto/del-object! storage media-id)))
|
||||
|
||||
(doseq [media-id (into #{} (keep :media-id) res)]
|
||||
(sto/touch-object! storage media-id)))
|
||||
;; clean object thumbnails
|
||||
(let [sql (str "delete from file_thumbnail "
|
||||
" where file_id=? returning media_id")
|
||||
res (db/exec! conn [sql file-id])]
|
||||
(doseq [media-id (into #{} (keep :media-id) res)]
|
||||
(sto/del-object! storage media-id)))
|
||||
|
||||
;; clean object thumbnails
|
||||
(let [sql (str "update file_thumbnail "
|
||||
" set deleted_at = now() "
|
||||
" where file_id=? returning media_id")
|
||||
res (db/exec! conn [sql file-id])]
|
||||
(doseq [media-id (into #{} (keep :media-id) res)]
|
||||
(sto/touch-object! storage media-id)))
|
||||
{:id (:id snapshot)})))
|
||||
|
||||
{:id (:id snapshot)
|
||||
:label (:label snapshot)}))
|
||||
|
||||
(defn- resolve-snapshot-by-label
|
||||
[conn file-id label]
|
||||
(->> (db/query conn :file-change
|
||||
{:file-id file-id
|
||||
:label label}
|
||||
{::sql/order-by [[:created-at :desc]]
|
||||
::sql/columns [:file-id :id :label]})
|
||||
(first)))
|
||||
|
||||
(def ^:private
|
||||
schema:restore-file-snapshot
|
||||
[:and
|
||||
[:map
|
||||
[:file-id ::sm/uuid]
|
||||
[:id {:optional true} ::sm/uuid]
|
||||
[:label {:optional true} :string]]
|
||||
[::sm/contains-any #{:id :label}]])
|
||||
(def ^:private schema:restore-file-snapshot
|
||||
[:map
|
||||
[:file-id ::sm/uuid]
|
||||
[:id ::sm/uuid]])
|
||||
|
||||
(sv/defmethod ::restore-file-snapshot
|
||||
{::doc/added "1.20"
|
||||
::doc/skip true
|
||||
::sm/params schema:restore-file-snapshot}
|
||||
[cfg {:keys [::rpc/profile-id file-id id label] :as params}]
|
||||
[cfg {:keys [::rpc/profile-id] :as params}]
|
||||
(check-authorized! cfg profile-id)
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(let [params (cond-> params
|
||||
(and (not id) (string? label))
|
||||
(merge (resolve-snapshot-by-label conn file-id label)))]
|
||||
(restore-file-snapshot! cfg params)))))
|
||||
(db/tx-run! cfg #(restore-file-snapshot! % params)))
|
||||
|
||||
(defn take-file-snapshot!
|
||||
[cfg {:keys [file-id label]}]
|
||||
(let [conn (db/get-connection cfg)
|
||||
file (db/get conn :file {:id file-id})
|
||||
id (uuid/next)]
|
||||
|
||||
(l/debug :hint "creating file snapshot"
|
||||
:file-id (str file-id)
|
||||
:label label)
|
||||
|
||||
(db/insert! conn :file-change
|
||||
{:id id
|
||||
:revn (:revn file)
|
||||
:data (:data file)
|
||||
:features (:features file)
|
||||
:file-id (:id file)
|
||||
:label label}
|
||||
{::db/return-keys false})
|
||||
|
||||
{:id id :label label}))
|
||||
|
||||
(defn generate-snapshot-label
|
||||
[]
|
||||
(let [ts (-> (dt/now)
|
||||
(dt/format-instant)
|
||||
(str/replace #"[T:\.]" "-")
|
||||
(str/rtrim "Z"))]
|
||||
(str "snapshot-" ts)))
|
||||
[{:keys [::db/conn]} {:keys [file-id label]}]
|
||||
(when-let [file (db/get* conn :file {:id file-id})]
|
||||
(let [id (uuid/next)
|
||||
label (or label (str "Snapshot at " (dt/format-instant (dt/now) :rfc1123)))]
|
||||
(l/debug :hint "persisting file snapshot" :file-id file-id :label label)
|
||||
(db/insert! conn :file-change
|
||||
{:id id
|
||||
:revn (:revn file)
|
||||
:data (:data file)
|
||||
:features (:features file)
|
||||
:file-id (:id file)
|
||||
:label label})
|
||||
{:id id})))
|
||||
|
||||
(def ^:private schema:take-file-snapshot
|
||||
[:map [:file-id ::sm/uuid]])
|
||||
@@ -184,8 +131,5 @@
|
||||
::sm/params schema:take-file-snapshot}
|
||||
[cfg {:keys [::rpc/profile-id] :as params}]
|
||||
(check-authorized! cfg profile-id)
|
||||
(db/tx-run! cfg (fn [cfg]
|
||||
(let [params (update params :label (fn [label]
|
||||
(or label (generate-snapshot-label))))]
|
||||
(take-file-snapshot! cfg params)))))
|
||||
(db/tx-run! cfg #(take-file-snapshot! % params)))
|
||||
|
||||
|
||||
@@ -7,165 +7,105 @@
|
||||
(ns app.rpc.commands.files-temp
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.files.changes :as cpc]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.pages :as cp]
|
||||
[app.common.spec :as us]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.features.components-v2 :as feat.compv2]
|
||||
[app.features.fdata :as fdata]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.files-create :as files.create]
|
||||
[app.rpc.commands.files-create :refer [create-file]]
|
||||
[app.rpc.commands.files-update :as-alias files.update]
|
||||
[app.rpc.commands.projects :as projects]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[clojure.set :as set]))
|
||||
[clojure.spec.alpha :as s]))
|
||||
|
||||
;; --- MUTATION COMMAND: create-temp-file
|
||||
|
||||
(def ^:private schema:create-temp-file
|
||||
[:map {:title "create-temp-file"}
|
||||
[:name :string]
|
||||
[:project-id ::sm/uuid]
|
||||
[:id {:optional true} ::sm/uuid]
|
||||
[:is-shared :boolean]
|
||||
[:features ::cfeat/features]
|
||||
[:create-page :boolean]])
|
||||
(s/def ::create-page ::us/boolean)
|
||||
|
||||
(s/def ::create-temp-file
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::files/name
|
||||
::files/project-id]
|
||||
:opt-un [::files/id
|
||||
::files/is-shared
|
||||
::files/features
|
||||
::create-page]))
|
||||
|
||||
(sv/defmethod ::create-temp-file
|
||||
{::doc/added "1.17"
|
||||
::doc/module :files
|
||||
::sm/params schema:create-temp-file}
|
||||
[cfg {:keys [::rpc/profile-id project-id] :as params}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(projects/check-edition-permissions! conn profile-id project-id)
|
||||
(let [team (teams/get-team conn :profile-id profile-id :project-id project-id)
|
||||
|
||||
;; When we create files, we only need to respect the team
|
||||
;; features, because some features can be enabled
|
||||
;; globally, but the team is still not migrated properly.
|
||||
input-features (:features params #{})
|
||||
|
||||
;; If the imported project doesn't contain v2 we need to remove it
|
||||
team-features
|
||||
(cond-> (cfeat/get-team-enabled-features cf/flags team)
|
||||
(not (contains? input-features "components/v2"))
|
||||
(disj "components/v2"))
|
||||
|
||||
|
||||
;; We also include all no migration features declared by
|
||||
;; client; that enables the ability to enable a runtime
|
||||
;; feature on frontend and make it permanent on file
|
||||
features (-> input-features
|
||||
(set/intersection cfeat/no-migration-features)
|
||||
(set/union team-features))
|
||||
|
||||
params (-> params
|
||||
(assoc :profile-id profile-id)
|
||||
(assoc :deleted-at (dt/in-future {:days 1}))
|
||||
(assoc :features features))]
|
||||
|
||||
(files.create/create-file cfg params)))))
|
||||
::doc/module :files}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id project-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(projects/check-edition-permissions! conn profile-id project-id)
|
||||
(create-file conn (assoc params :profile-id profile-id :deleted-at (dt/in-future {:days 1})))))
|
||||
|
||||
;; --- MUTATION COMMAND: update-temp-file

(defn update-temp-file
[conn {:keys [profile-id session-id id revn changes] :as params}]
(db/insert! conn :file-change
{:id (uuid/next)
:session-id session-id
:profile-id profile-id
:created-at (dt/now)
:file-id id
:revn revn
:data nil
:changes (blob/encode changes)}))

(def ^:private schema:update-temp-file
[:map {:title "update-temp-file"}
[:changes [:vector ::cpc/change]]
[:revn {:min 0} :int]
[:session-id ::sm/uuid]
[:id ::sm/uuid]])
(s/def ::update-temp-file
(s/keys :req [::rpc/profile-id]
:req-un [::files.update/changes
::files.update/revn
::files.update/session-id
::files/id]))

(sv/defmethod ::update-temp-file
{::doc/added "1.17"
::doc/module :files
::sm/params schema:update-temp-file}
[cfg {:keys [::rpc/profile-id session-id id revn changes] :as params}]
(db/tx-run! cfg (fn [{:keys [::db/conn]}]
(db/insert! conn :file-change
{:id (uuid/next)
:session-id session-id
:profile-id profile-id
:created-at (dt/now)
:file-id id
:revn revn
:data nil
:changes (blob/encode changes)})
nil)))
::doc/module :files}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id] :as params}]
(db/with-atomic [conn pool]
(update-temp-file conn (assoc params :profile-id profile-id))
nil))
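
;; Sketch of the round trip these temp-file commands rely on (assumed from
;; the surrounding code, not a definitive API): ::update-temp-file above
;; appends blob-encoded change vectors as file-change rows, and
;; ::persist-temp-file below decodes and replays them in :revn order.
(comment
  (let [changes [{:type :add-obj} {:type :mod-obj}]]
    ;; encoding and then decoding a change vector should round-trip unchanged
    (= changes (-> changes blob/encode blob/decode))))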
;; --- MUTATION COMMAND: persist-temp-file
|
||||
|
||||
(defn persist-temp-file
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id ::rpc/profile-id] :as params}]
|
||||
(let [file (files/get-file cfg id
|
||||
:migrate? false
|
||||
:lock-for-update? true)]
|
||||
[conn {:keys [id] :as params}]
|
||||
(let [file (db/get-by-id conn :file id)
|
||||
revs (db/query conn :file-change
|
||||
{:file-id id}
|
||||
{:order-by [[:revn :asc]]})
|
||||
revn (count revs)]
|
||||
|
||||
(when (nil? (:deleted-at file))
|
||||
(ex/raise :type :validation
|
||||
:code :cant-persist-already-persisted-file))
|
||||
|
||||
|
||||
(let [changes (->> (db/cursor conn
|
||||
(sql/select :file-change {:file-id id}
|
||||
{:order-by [[:revn :asc]]})
|
||||
{:chunk-size 10})
|
||||
(sequence (mapcat (comp blob/decode :changes))))
|
||||
|
||||
file (update file :data cpc/process-changes changes)
|
||||
|
||||
file (if (contains? (:features file) "fdata/objects-map")
|
||||
(fdata/enable-objects-map file)
|
||||
file)
|
||||
|
||||
file (if (contains? (:features file) "fdata/pointer-map")
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)]
|
||||
(let [file (fdata/enable-pointer-map file)]
|
||||
(fdata/persist-pointers! cfg id)
|
||||
file))
|
||||
file)]
|
||||
|
||||
;; Delete changes from the changes history
|
||||
(db/delete! conn :file-change {:file-id id})
|
||||
|
||||
(let [data
|
||||
(->> revs
|
||||
(mapcat #(->> % :changes blob/decode))
|
||||
(cp/process-changes (blob/decode (:data file))))]
|
||||
(db/update! conn :file
|
||||
{:deleted-at nil
|
||||
:revn 1
|
||||
:data (blob/encode (:data file))}
|
||||
{:id id})
|
||||
:revn revn
|
||||
:data (blob/encode data)}
|
||||
{:id id}))
|
||||
nil))
|
||||
|
||||
(let [team (teams/get-team conn :profile-id profile-id :project-id (:project-id file))
|
||||
file-features (:features file)
|
||||
team-features (cfeat/get-team-enabled-features cf/flags team)]
|
||||
(when (and (contains? team-features "components/v2")
|
||||
(not (contains? file-features "components/v2")))
|
||||
;; Migrate components v2
|
||||
(feat.compv2/migrate-file! cfg
|
||||
(:id file)
|
||||
:max-procs 2
|
||||
:validate? true
|
||||
:throw-on-validate? true)))
|
||||
|
||||
nil)))
|
||||
|
||||
(def ^:private schema:persist-temp-file
|
||||
[:map {:title "persist-temp-file"}
|
||||
[:id ::sm/uuid]])
|
||||
(s/def ::persist-temp-file
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::files/id]))
|
||||
|
||||
(sv/defmethod ::persist-temp-file
|
||||
{::doc/added "1.17"
|
||||
::doc/module :files
|
||||
::sm/params schema:persist-temp-file}
|
||||
[cfg {:keys [::rpc/profile-id id] :as params}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(files/check-edition-permissions! conn profile-id id)
|
||||
(persist-temp-file cfg params))))
|
||||
::doc/module :files}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(files/check-edition-permissions! conn profile-id id)
|
||||
(persist-temp-file conn params)))
|
||||
|
||||
@@ -8,27 +8,22 @@
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.files.helpers :as cfh]
|
||||
[app.common.files.migrations :as fmg]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.geom.shapes :as gsh]
|
||||
[app.common.pages.helpers :as cph]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.thumbnails :as thc]
|
||||
[app.common.spec :as us]
|
||||
[app.common.types.shape-tree :as ctt]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.db.sql :as sql]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.media :as media]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.climit :as-alias climit]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
[app.rpc.cond :as-alias cond]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.retry :as rtry]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.storage :as sto]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
@@ -43,60 +38,88 @@
|
||||
|
||||
;; --- COMMAND QUERY: get-file-object-thumbnails
|
||||
|
||||
(defn- get-object-thumbnails-by-tag
|
||||
[conn file-id tag]
|
||||
(let [sql (str/concat
|
||||
"select object_id, media_id, tag "
|
||||
" from file_tagged_object_thumbnail"
|
||||
" where file_id=? and tag=? and deleted_at is null")
|
||||
res (db/exec! conn [sql file-id tag])]
|
||||
(->> res
|
||||
(d/index-by :object-id (fn [row]
|
||||
(files/resolve-public-uri (:media-id row))))
|
||||
(d/without-nils))))
|
||||
|
||||
(defn- get-object-thumbnails
|
||||
([conn file-id]
|
||||
(let [sql (str/concat
|
||||
"select object_id, media_id, tag "
|
||||
" from file_tagged_object_thumbnail"
|
||||
" where file_id=? and deleted_at is null")
|
||||
"select object_id, data, media_id "
|
||||
" from file_object_thumbnail"
|
||||
" where file_id=?")
|
||||
res (db/exec! conn [sql file-id])]
|
||||
(->> res
|
||||
(d/index-by :object-id (fn [row]
|
||||
(files/resolve-public-uri (:media-id row))))
|
||||
(or (some-> row :media-id files/resolve-public-uri)
|
||||
(:data row))))
|
||||
(d/without-nils))))
|
||||
|
||||
([conn file-id object-ids]
|
||||
(let [sql (str/concat
|
||||
"select object_id, media_id, tag "
|
||||
" from file_tagged_object_thumbnail"
|
||||
" where file_id=? and object_id = ANY(?) and deleted_at is null")
|
||||
"select object_id, data, media_id "
|
||||
" from file_object_thumbnail"
|
||||
" where file_id=? and object_id = ANY(?)")
|
||||
ids (db/create-array conn "text" (seq object-ids))
|
||||
res (db/exec! conn [sql file-id ids])]
|
||||
|
||||
(->> res
|
||||
(d/index-by :object-id (fn [row]
|
||||
(files/resolve-public-uri (:media-id row))))
|
||||
(d/without-nils)))))
|
||||
(d/index-by :object-id
|
||||
(fn [row]
|
||||
(or (some-> row :media-id files/resolve-public-uri)
|
||||
(:data row)))
|
||||
res))))
|
||||
|
||||
(sv/defmethod ::get-file-object-thumbnails
|
||||
"Retrieve a file object thumbnails."
|
||||
{::doc/added "1.17"
|
||||
::doc/module :files
|
||||
::sm/params [:map {:title "get-file-object-thumbnails"}
|
||||
[:file-id ::sm/uuid]
|
||||
[:tag {:optional true} :string]]
|
||||
[:file-id ::sm/uuid]]
|
||||
::sm/result [:map-of :string :string]
|
||||
::cond/get-object #(files/get-minimal-file %1 (:file-id %2))
|
||||
::cond/reuse-key? true
|
||||
::cond/key-fn files/get-file-etag}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id tag] :as params}]
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id] :as params}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(files/check-read-permissions! conn profile-id file-id)
|
||||
(if tag
|
||||
(get-object-thumbnails-by-tag conn file-id tag)
|
||||
(get-object-thumbnails conn file-id))))
|
||||
(get-object-thumbnails conn file-id)))
|
||||
|
||||
;; --- COMMAND QUERY: get-file-thumbnail
|
||||
|
||||
(defn get-file-thumbnail
|
||||
[conn file-id revn]
|
||||
(let [sql (sql/select :file-thumbnail
|
||||
(cond-> {:file-id file-id}
|
||||
revn (assoc :revn revn))
|
||||
{:limit 1
|
||||
:order-by [[:revn :desc]]})
|
||||
row (db/exec-one! conn sql)]
|
||||
|
||||
(when-not row
|
||||
(ex/raise :type :not-found
|
||||
:code :file-thumbnail-not-found))
|
||||
|
||||
(when-not (:data row)
|
||||
(ex/raise :type :not-found
|
||||
:code :file-thumbnail-not-found))
|
||||
|
||||
{:data (:data row)
|
||||
:props (some-> (:props row) db/decode-transit-pgobject)
|
||||
:revn (:revn row)
|
||||
:file-id (:file-id row)}))
|
||||
|
||||
(s/def ::revn ::us/integer)
|
||||
(s/def ::file-id ::us/uuid)
|
||||
|
||||
(s/def ::get-file-thumbnail
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::file-id]
|
||||
:opt-un [::revn]))
|
||||
|
||||
(sv/defmethod ::get-file-thumbnail
|
||||
{::doc/added "1.17"
|
||||
::doc/module :files
|
||||
::doc/deprecated "1.19"}
|
||||
[{:keys [::db/pool]} {:keys [::rpc/profile-id file-id revn]}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(files/check-read-permissions! conn profile-id file-id)
|
||||
(-> (get-file-thumbnail conn file-id revn)
|
||||
(rph/with-http-cache long-cache-duration))))
|
||||
|
||||
;; --- COMMAND QUERY: get-file-data-for-thumbnail
|
||||
|
||||
@@ -104,11 +127,23 @@
|
||||
;; loading all pages into memory to find the frame set for the thumbnail.
|
||||
|
||||
(defn get-file-data-for-thumbnail
[{:keys [::db/conn] :as cfg} {:keys [data id] :as file}]
[conn {:keys [data id] :as file}]
(letfn [;; function responsible for finding the frame marked to be
;; used as thumbnail; the returned frame always has
;; :page-id set to the page it belongs to.
(get-thumbnail-frame [{:keys [data]}]

(get-thumbnail-frame [data]
;; NOTE: this is a hack to avoid performing a blocking
;; operation inside the for loop; Clojure lazy-seqs use
;; synchronized blocks that do not play well with
;; virtual threads, so we need to perform the load
;; operation first. This operation forces all pointer maps
;; to be loaded into memory.
(->> (-> data :pages-index vals)
(filter pmap/pointer-map?)
(run! pmap/load!))

;; Then proceed to find the frame set for the thumbnail
(d/seek #(or (:use-for-thumbnail %)
(:use-for-thumbnail? %)) ; NOTE: backward comp (remove on v1.21)
(for [page (-> data :pages-index vals)
@@ -119,24 +154,24 @@
|
||||
;; all unneeded shapes if a concrete frame is provided. If no
|
||||
;; frame, the objects is returned untouched.
|
||||
(filter-objects [objects frame-id]
|
||||
(d/index-by :id (cfh/get-children-with-self objects frame-id)))
|
||||
(d/index-by :id (cph/get-children-with-self objects frame-id)))
|
||||
|
||||
;; function responsible of assoc available thumbnails
|
||||
;; to frames and remove all children shapes from objects if
|
||||
;; thumbnails is available
|
||||
(assoc-thumbnails [objects page-id thumbnails]
|
||||
(loop [objects objects
|
||||
frames (filter cfh/frame-shape? (vals objects))]
|
||||
frames (filter cph/frame-shape? (vals objects))]
|
||||
|
||||
(if-let [frame (-> frames first)]
|
||||
(let [frame-id (:id frame)
|
||||
object-id (thc/fmt-object-id (:id file) page-id frame-id "frame")
|
||||
object-id (str page-id frame-id)
|
||||
frame (if-let [thumb (get thumbnails object-id)]
|
||||
(assoc frame :thumbnail thumb :shapes [])
|
||||
(dissoc frame :thumbnail))
|
||||
|
||||
children-ids
|
||||
(cfh/get-children-ids objects frame-id)
|
||||
(cph/get-children-ids objects frame-id)
|
||||
|
||||
bounds
|
||||
(when (:show-content frame)
|
||||
@@ -157,44 +192,41 @@
|
||||
|
||||
objects)))]
|
||||
|
||||
(let [frame (get-thumbnail-frame file)
|
||||
frame-id (:id frame)
|
||||
page-id (or (:page-id frame)
|
||||
(-> data :pages first))
|
||||
(binding [pmap/*load-fn* (partial files/load-pointer conn id)]
|
||||
(let [frame (get-thumbnail-frame data)
|
||||
frame-id (:id frame)
|
||||
page-id (or (:page-id frame)
|
||||
(-> data :pages first))
|
||||
|
||||
page (dm/get-in data [:pages-index page-id])
|
||||
page (cond-> page (pmap/pointer-map? page) deref)
|
||||
frame-ids (if (some? frame) (list frame-id) (map :id (ctt/get-frames (:objects page))))
|
||||
page (dm/get-in data [:pages-index page-id])
|
||||
page (cond-> page (pmap/pointer-map? page) deref)
|
||||
frame-ids (if (some? frame) (list frame-id) (map :id (ctt/get-frames (:objects page))))
|
||||
|
||||
obj-ids (map #(thc/fmt-object-id (:id file) page-id % "frame") frame-ids)
|
||||
thumbs (get-object-thumbnails conn id obj-ids)]
|
||||
obj-ids (map #(str page-id %) frame-ids)
|
||||
thumbs (get-object-thumbnails conn id obj-ids)]
|
||||
|
||||
(cond-> page
;; If we have a frame, we need to specify it at the page level
;; and remove all other unrelated objects.
(some? frame-id)
(-> (assoc :thumbnail-frame-id frame-id)
(update :objects filter-objects frame-id))
(cond-> page
;; If we have a frame, we need to specify it at the page level
;; and remove all other unrelated objects.
(some? frame-id)
(-> (assoc :thumbnail-frame-id frame-id)
(update :objects filter-objects frame-id))

;; Assoc the available thumbnails and prune non-visible shapes
;; to avoid transferring unnecessary data.
:always
(update :objects assoc-thumbnails page-id thumbs)))))
;; Assoc the available thumbnails and prune non-visible shapes
;; to avoid transferring unnecessary data.
:always
(update :objects assoc-thumbnails page-id thumbs))))))
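
;; The thumbnail object-id scheme changes in this function: the old code
;; keyed thumbnails by (str page-id frame-id), while the new code uses
;; thc/fmt-object-id with the file id and a "frame" tag. Illustrative
;; comparison (symbols are the bindings from the code above):
(comment
  ;; old key
  (str page-id frame-id)
  ;; new key
  (thc/fmt-object-id (:id file) page-id frame-id "frame"))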
|
||||
|
||||
(def ^:private
|
||||
schema:get-file-data-for-thumbnail
|
||||
(sm/define
|
||||
[:map {:title "get-file-data-for-thumbnail"}
|
||||
[:file-id ::sm/uuid]
|
||||
[:features {:optional true} ::cfeat/features]]))
|
||||
(def ^:private schema:get-file-data-for-thumbnail
|
||||
[:map {:title "get-file-data-for-thumbnail"}
|
||||
[:file-id ::sm/uuid]
|
||||
[:features {:optional true} files/schema:features]])
|
||||
|
||||
(def ^:private
|
||||
schema:partial-file
|
||||
(sm/define
|
||||
[:map {:title "PartialFile"}
|
||||
[:id ::sm/uuid]
|
||||
[:revn {:min 0} :int]
|
||||
[:page :any]]))
|
||||
(def ^:private schema:partial-file
|
||||
[:map {:title "PartialFile"}
|
||||
[:id ::sm/uuid]
|
||||
[:revn {:min 0} :int]
|
||||
[:page :any]])
|
||||
|
||||
(sv/defmethod ::get-file-data-for-thumbnail
|
||||
"Retrieves the data for generate the thumbnail of the file. Used
|
||||
@@ -203,142 +235,188 @@
|
||||
::doc/module :files
|
||||
::sm/params schema:get-file-data-for-thumbnail
|
||||
::sm/result schema:partial-file}
|
||||
[cfg {:keys [::rpc/profile-id file-id] :as params}]
|
||||
(db/run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(files/check-read-permissions! conn profile-id file-id)
|
||||
|
||||
(let [team (teams/get-team conn
|
||||
:profile-id profile-id
|
||||
:file-id file-id)
|
||||
|
||||
file (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)]
|
||||
(-> (files/get-file cfg file-id :migrate? false)
|
||||
(update :data feat.fdata/process-pointers deref)
|
||||
(fmg/migrate-file)))]
|
||||
|
||||
(-> (cfeat/get-team-enabled-features cf/flags team)
|
||||
(cfeat/check-client-features! (:features params))
|
||||
(cfeat/check-file-features! (:features file) (:features params)))
|
||||
|
||||
{:file-id file-id
|
||||
:revn (:revn file)
|
||||
:page (get-file-data-for-thumbnail cfg file)}))))
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id features] :as props}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(files/check-read-permissions! conn profile-id file-id)
|
||||
;; NOTE: we force the "storage/pointer-map" feature here, because it is
;; used only internally and is independent of whether the user supports
;; it or not.
|
||||
(let [feat (into #{"storage/pointer-map"} features)
|
||||
file (files/get-file conn file-id feat)]
|
||||
{:file-id file-id
|
||||
:revn (:revn file)
|
||||
:page (get-file-data-for-thumbnail conn file)})))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; MUTATION COMMANDS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def sql:get-file-object-thumbnail
|
||||
"SELECT * FROM file_tagged_object_thumbnail
|
||||
WHERE file_id = ? AND object_id = ? AND tag = ?
|
||||
FOR UPDATE")
|
||||
;; --- MUTATION COMMAND: upsert-file-object-thumbnail
|
||||
|
||||
(def sql:create-file-object-thumbnail
|
||||
"INSERT INTO file_tagged_object_thumbnail (file_id, object_id, tag, media_id)
|
||||
VALUES (?, ?, ?, ?)
|
||||
ON CONFLICT (file_id, object_id, tag)
|
||||
DO UPDATE SET updated_at=?, media_id=?, deleted_at=null
|
||||
RETURNING *")
|
||||
(def sql:upsert-object-thumbnail
|
||||
"insert into file_object_thumbnail(file_id, object_id, data)
|
||||
values (?, ?, ?)
|
||||
on conflict(file_id, object_id) do
|
||||
update set data = ?;")
|
||||
|
||||
(defn upsert-file-object-thumbnail!
|
||||
[conn {:keys [file-id object-id data]}]
|
||||
(if data
|
||||
(db/exec-one! conn [sql:upsert-object-thumbnail file-id object-id data data])
|
||||
(db/delete! conn :file-object-thumbnail {:file-id file-id :object-id object-id})))
|
||||
|
||||
(s/def ::data (s/nilable ::us/string))
|
||||
(s/def ::object-id ::us/string)
|
||||
|
||||
(s/def ::upsert-file-object-thumbnail
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::file-id ::object-id]
|
||||
:opt-un [::data]))
|
||||
|
||||
(sv/defmethod ::upsert-file-object-thumbnail
|
||||
{::doc/added "1.17"
|
||||
::doc/module :files
|
||||
::doc/deprecated "1.19"
|
||||
::audit/skip true}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
|
||||
(when-not (db/read-only? conn)
|
||||
(upsert-file-object-thumbnail! conn params)
|
||||
nil)))
|
||||
|
||||
|
||||
;; --- MUTATION COMMAND: create-file-object-thumbnail
|
||||
|
||||
(def ^:private sql:create-object-thumbnail
|
||||
"insert into file_object_thumbnail(file_id, object_id, media_id)
|
||||
values (?, ?, ?)
|
||||
on conflict(file_id, object_id) do
|
||||
update set media_id = ?;")
|
||||
|
||||
(defn- create-file-object-thumbnail!
|
||||
[{:keys [::db/conn ::sto/storage]} file-id object-id media]
|
||||
|
||||
(defn- persist-thumbnail!
|
||||
[storage media created-at]
|
||||
(let [path (:path media)
|
||||
mtype (:mtype media)
|
||||
hash (sto/calculate-hash path)
|
||||
data (-> (sto/content path)
|
||||
(sto/wrap-with-hash hash))]
|
||||
(sto/wrap-with-hash hash))
|
||||
media (sto/put-object! storage
|
||||
{::sto/content data
|
||||
::sto/deduplicate? false
|
||||
:content-type mtype
|
||||
:bucket "file-object-thumbnail"})]
|
||||
|
||||
(sto/put-object! storage
|
||||
{::sto/content data
|
||||
::sto/deduplicate? true
|
||||
::sto/touched-at created-at
|
||||
:content-type mtype
|
||||
:bucket "file-object-thumbnail"})))
|
||||
(db/exec-one! conn [sql:create-object-thumbnail file-id object-id
|
||||
(:id media) (:id media)])))
|
||||
|
||||
|
||||
|
||||
(defn- create-file-object-thumbnail!
|
||||
[{:keys [::sto/storage] :as cfg} file-id object-id media tag]
|
||||
(let [tsnow (dt/now)
|
||||
media (persist-thumbnail! storage media tsnow)
|
||||
[th1 th2] (db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [th1 (db/exec-one! conn [sql:get-file-object-thumbnail file-id object-id tag])
|
||||
th2 (db/exec-one! conn [sql:create-file-object-thumbnail
|
||||
file-id object-id tag (:id media)
|
||||
tsnow (:id media)])]
|
||||
[th1 th2])))]
|
||||
|
||||
(when (and (some? th1)
|
||||
(not= (:media-id th1)
|
||||
(:media-id th2)))
|
||||
(sto/touch-object! storage (:media-id th1)))
|
||||
|
||||
th2))
|
||||
|
||||
(def ^:private
|
||||
schema:create-file-object-thumbnail
|
||||
(def schema:create-file-object-thumbnail
|
||||
[:map {:title "create-file-object-thumbnail"}
|
||||
[:file-id ::sm/uuid]
|
||||
[:object-id :string]
|
||||
[:media ::media/upload]
|
||||
[:tag {:optional true} :string]])
|
||||
[:media ::media/upload]])
|
||||
|
||||
(sv/defmethod ::create-file-object-thumbnail
|
||||
{::doc/added "1.19"
|
||||
{:doc/added "1.19"
|
||||
::doc/module :files
|
||||
::climit/id [[:file-thumbnail-ops/by-profile ::rpc/profile-id]
|
||||
[:file-thumbnail-ops/global]]
|
||||
::rtry/enabled true
|
||||
::rtry/when rtry/conflict-exception?
|
||||
::audit/skip true
|
||||
::sm/params schema:create-file-object-thumbnail}
|
||||
|
||||
[cfg {:keys [::rpc/profile-id file-id object-id media tag]}]
|
||||
(media/validate-media-type! media)
|
||||
(media/validate-media-size! media)
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id object-id media]}]
|
||||
(db/with-atomic [conn pool]
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
(media/validate-media-type! media)
|
||||
(media/validate-media-size! media)
|
||||
|
||||
(db/run! cfg files/check-edition-permissions! profile-id file-id)
|
||||
|
||||
(let [cfg (update cfg ::sto/storage media/configure-assets-storage)]
|
||||
(create-file-object-thumbnail! cfg file-id object-id media (or tag "frame"))))
|
||||
(when-not (db/read-only? conn)
|
||||
(-> cfg
|
||||
(update ::sto/storage media/configure-assets-storage)
|
||||
(assoc ::db/conn conn)
|
||||
(create-file-object-thumbnail! file-id object-id media))
|
||||
nil)))
|
||||
|
||||
;; --- MUTATION COMMAND: delete-file-object-thumbnail
|
||||
|
||||
(defn- delete-file-object-thumbnail!
|
||||
[{:keys [::db/conn ::sto/storage]} file-id object-id]
|
||||
(when-let [{:keys [media-id tag]} (db/get* conn :file-tagged-object-thumbnail
|
||||
{:file-id file-id
|
||||
:object-id object-id}
|
||||
{::sql/for-update true})]
|
||||
(sto/touch-object! storage media-id)
|
||||
(db/update! conn :file-tagged-object-thumbnail
|
||||
{:deleted-at (dt/now)}
|
||||
(when-let [{:keys [media-id]} (db/get* conn :file-object-thumbnail
|
||||
{:file-id file-id
|
||||
:object-id object-id}
|
||||
{::db/for-update? true})]
|
||||
(when media-id
|
||||
(sto/del-object! storage media-id))
|
||||
|
||||
(db/delete! conn :file-object-thumbnail
|
||||
{:file-id file-id
|
||||
:object-id object-id
|
||||
:tag tag})))
|
||||
:object-id object-id})
|
||||
nil))
|
||||
|
||||
(s/def ::delete-file-object-thumbnail
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::file-id ::object-id]))
|
||||
|
||||
(sv/defmethod ::delete-file-object-thumbnail
|
||||
{::doc/added "1.19"
|
||||
{:doc/added "1.19"
|
||||
::doc/module :files
|
||||
::doc/deprecated "1.20"
|
||||
::climit/id [[:file-thumbnail-ops/by-profile ::rpc/profile-id]
|
||||
[:file-thumbnail-ops/global]]
|
||||
::audit/skip true}
|
||||
[cfg {:keys [::rpc/profile-id file-id object-id]}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
(when-not (db/read-only? conn)
|
||||
(-> cfg
|
||||
(update ::sto/storage media/configure-assets-storage conn)
|
||||
(delete-file-object-thumbnail! file-id object-id))
|
||||
nil))))
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id object-id]}]
|
||||
|
||||
(db/with-atomic [conn pool]
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
|
||||
(when-not (db/read-only? conn)
|
||||
(-> cfg
|
||||
(update ::sto/storage media/configure-assets-storage)
|
||||
(assoc ::db/conn conn)
|
||||
(delete-file-object-thumbnail! file-id object-id))
|
||||
nil)))
|
||||
|
||||
;; --- MUTATION COMMAND: upsert-file-thumbnail
|
||||
|
||||
(def ^:private sql:upsert-file-thumbnail
|
||||
"insert into file_thumbnail (file_id, revn, data, props)
|
||||
values (?, ?, ?, ?::jsonb)
|
||||
on conflict(file_id, revn) do
|
||||
update set data = ?, props=?, updated_at=now();")
|
||||
|
||||
(defn- upsert-file-thumbnail!
|
||||
[conn {:keys [file-id revn data props]}]
|
||||
(let [props (db/tjson (or props {}))]
|
||||
(db/exec-one! conn [sql:upsert-file-thumbnail
|
||||
file-id revn data props data props])))
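
;; Note on the argument vector above: `data` and `props` appear twice
;; because the SQL uses them once in the INSERT values and once in the
;; ON CONFLICT ... UPDATE clause. A minimal usage sketch (the uuid and
;; data values are placeholders, not taken from this changeset):
(comment
  (upsert-file-thumbnail! conn {:file-id #uuid "00000000-0000-0000-0000-000000000000"
                                :revn 1
                                :data "<svg>...</svg>"
                                :props {}}))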
|
||||
|
||||
(s/def ::revn ::us/integer)
|
||||
(s/def ::props map?)
|
||||
|
||||
(s/def ::upsert-file-thumbnail
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::file-id ::revn ::props ::data]))
|
||||
|
||||
(sv/defmethod ::upsert-file-thumbnail
"Creates or updates the file thumbnail. Mainly used for painting the
grid thumbnails."
|
||||
{::doc/added "1.17"
|
||||
::doc/module :files
|
||||
::doc/deprecated "1.19"
|
||||
::audit/skip true}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
(when-not (db/read-only? conn)
|
||||
(upsert-file-thumbnail! conn params)
|
||||
nil)))
|
||||
|
||||
;; --- MUTATION COMMAND: create-file-thumbnail
|
||||
|
||||
(def ^:private sql:create-file-thumbnail
|
||||
"insert into file_thumbnail (file_id, revn, media_id, props)
|
||||
values (?, ?, ?, ?::jsonb)
|
||||
on conflict(file_id, revn) do
|
||||
update set media_id=?, props=?, updated_at=now();")
|
||||
|
||||
(defn- create-file-thumbnail!
|
||||
[{:keys [::db/conn ::sto/storage]} {:keys [file-id revn props media] :as params}]
|
||||
(media/validate-media-type! media)
|
||||
@@ -350,67 +428,35 @@
|
||||
hash (sto/calculate-hash path)
|
||||
data (-> (sto/content path)
|
||||
(sto/wrap-with-hash hash))
|
||||
tnow (dt/now)
|
||||
media (sto/put-object! storage
|
||||
{::sto/content data
|
||||
::sto/deduplicate? true
|
||||
::sto/touched-at tnow
|
||||
::sto/deduplicate? false
|
||||
:content-type mtype
|
||||
:bucket "file-thumbnail"})
|
||||
|
||||
thumb (db/get* conn :file-thumbnail
|
||||
{:file-id file-id
|
||||
:revn revn}
|
||||
{::db/remove-deleted false
|
||||
::sql/for-update true})]
|
||||
|
||||
(if (some? thumb)
|
||||
(do
|
||||
;; We mark the old media id as touched if it does not match
|
||||
(when (not= (:id media) (:media-id thumb))
|
||||
(sto/touch-object! storage (:media-id thumb)))
|
||||
|
||||
(db/update! conn :file-thumbnail
|
||||
{:media-id (:id media)
|
||||
:deleted-at nil
|
||||
:updated-at tnow
|
||||
:props props}
|
||||
{:file-id file-id
|
||||
:revn revn}))
|
||||
|
||||
(db/insert! conn :file-thumbnail
|
||||
{:file-id file-id
|
||||
:revn revn
|
||||
:created-at tnow
|
||||
:updated-at tnow
|
||||
:props props
|
||||
:media-id (:id media)}))
|
||||
|
||||
:bucket "file-thumbnail"})]
|
||||
(db/exec-one! conn [sql:create-file-thumbnail file-id revn
|
||||
(:id media) props
|
||||
(:id media) props])
|
||||
media))
|
||||
|
||||
(def ^:private
|
||||
schema:create-file-thumbnail
|
||||
[:map {:title "create-file-thumbnail"}
|
||||
[:file-id ::sm/uuid]
|
||||
[:revn :int]
|
||||
[:media ::media/upload]])
|
||||
|
||||
(sv/defmethod ::create-file-thumbnail
"Creates or updates the file thumbnail. Mainly used for painting the
grid thumbnails."
|
||||
{::doc/added "1.19"
|
||||
::doc/module :files
|
||||
::audit/skip true
|
||||
::climit/id [[:file-thumbnail-ops/by-profile ::rpc/profile-id]
|
||||
[:file-thumbnail-ops/global]]
|
||||
::rtry/enabled true
|
||||
::rtry/when rtry/conflict-exception?
|
||||
::sm/params schema:create-file-thumbnail}
|
||||
::sm/params [:map {:title "create-file-thumbnail"}
|
||||
[:file-id ::sm/uuid]
|
||||
[:revn :int]
|
||||
[:media ::media/upload]]
|
||||
}
|
||||
|
||||
[cfg {:keys [::rpc/profile-id file-id] :as params}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
(when-not (db/read-only? conn)
|
||||
(let [cfg (update cfg ::sto/storage media/configure-assets-storage)
|
||||
media (create-file-thumbnail! cfg params)]
|
||||
{:uri (files/resolve-public-uri (:id media))})))))
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
(when-not (db/read-only? conn)
|
||||
(let [media (-> cfg
|
||||
(update ::sto/storage media/configure-assets-storage)
|
||||
(assoc ::db/conn conn)
|
||||
(create-file-thumbnail! params))]
|
||||
|
||||
{:uri (files/resolve-public-uri (:id media))}))))
|
||||
|
||||
@@ -6,19 +6,18 @@
|
||||
|
||||
(ns app.rpc.commands.files-update
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.files.changes :as cpc]
|
||||
[app.common.files.migrations :as fmg]
|
||||
[app.common.files.validate :as val]
|
||||
[app.common.files.features :as ffeat]
|
||||
[app.common.files.migrations :as pmg]
|
||||
[app.common.logging :as l]
|
||||
[app.common.pages :as cp]
|
||||
[app.common.pages.changes :as cpc]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.schema.generators :as smg]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.http.errors :as errors]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.loggers.webhooks :as webhooks]
|
||||
[app.metrics :as mtx]
|
||||
@@ -26,39 +25,42 @@
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.climit :as climit]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.objects-map :as omap]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.set :as set]
|
||||
[promesa.exec :as px]))
|
||||
[app.util.time :as dt]))
|
||||
|
||||
;; --- SCHEMA
|
||||
|
||||
(def ^:private
|
||||
schema:update-file
|
||||
[:map {:title "update-file"}
|
||||
(sm/def! ::changes
|
||||
[:vector ::cpc/change])
|
||||
|
||||
(sm/def! ::change-with-metadata
|
||||
[:map {:title "ChangeWithMetadata"}
|
||||
[:changes ::changes]
|
||||
[:hint-origin {:optional true} :keyword]
|
||||
[:hint-events {:optional true} [:vector :string]]])
|
||||
|
||||
(sm/def! ::update-file-params
|
||||
[:map {:title "UpdateFileParams"}
|
||||
[:id ::sm/uuid]
|
||||
[:session-id ::sm/uuid]
|
||||
[:revn {:min 0} :int]
|
||||
[:features {:optional true} ::cfeat/features]
|
||||
[:changes {:optional true} [:vector ::cpc/change]]
|
||||
[:features {:optional true
|
||||
:gen/max 3
|
||||
:gen/gen (smg/subseq files/supported-features)}
|
||||
::sm/set-of-strings]
|
||||
[:changes {:optional true} ::changes]
|
||||
[:changes-with-metadata {:optional true}
|
||||
[:vector [:map
|
||||
[:changes [:vector ::cpc/change]]
|
||||
[:hint-origin {:optional true} :keyword]
|
||||
[:hint-events {:optional true} [:vector :string]]]]]
|
||||
[:skip-validate {:optional true} :boolean]])
|
||||
[:vector ::change-with-metadata]]])
|
||||
|
||||
(def ^:private
|
||||
schema:update-file-result
|
||||
[:vector {:title "update-file-result"}
|
||||
[:map
|
||||
[:changes [:vector ::cpc/change]]
|
||||
(sm/def! ::update-file-result
|
||||
[:vector {:title "UpdateFileResults"}
|
||||
[:map {:title "UpdateFileResult"}
|
||||
[:changes ::changes]
|
||||
[:file-id ::sm/uuid]
|
||||
[:id ::sm/uuid]
|
||||
[:revn {:min 0} :int]
|
||||
@@ -70,26 +72,14 @@
|
||||
;; to all clients using it.
|
||||
|
||||
(def ^:private library-change-types
|
||||
#{:add-color
|
||||
:mod-color
|
||||
:del-color
|
||||
:add-media
|
||||
:mod-media
|
||||
:del-media
|
||||
:add-component
|
||||
:mod-component
|
||||
:del-component
|
||||
:restore-component
|
||||
:add-typography
|
||||
:mod-typography
|
||||
:del-typography})
|
||||
#{:add-color :mod-color :del-color
|
||||
:add-media :mod-media :del-media
|
||||
:add-component :mod-component :del-component
|
||||
:add-typography :mod-typography :del-typography})
|
||||
|
||||
(def ^:private file-change-types
|
||||
#{:add-obj
|
||||
:mod-obj
|
||||
:del-obj
|
||||
:reg-objects
|
||||
:mov-objects})
|
||||
#{:add-obj :mod-obj :del-obj
|
||||
:reg-objects :mov-objects})
|
||||
|
||||
(defn- library-change?
|
||||
[{:keys [type] :as change}]
|
||||
@@ -116,13 +106,20 @@
|
||||
|
||||
(defn- wrap-with-pointer-map-context
|
||||
[f]
|
||||
(fn [cfg {:keys [id] :as file}]
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)
|
||||
pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
|
||||
(fn [{:keys [::db/conn] :as cfg} {:keys [id] :as file}]
|
||||
(binding [pmap/*tracked* (atom {})
|
||||
pmap/*load-fn* (partial files/load-pointer conn id)
|
||||
ffeat/*wrap-with-pointer-map-fn* pmap/wrap]
|
||||
(let [result (f cfg file)]
|
||||
(feat.fdata/persist-pointers! cfg id)
|
||||
(files/persist-pointers! conn id)
|
||||
result))))
|
||||
|
||||
(defn- wrap-with-objects-map-context
|
||||
[f]
|
||||
(fn [cfg file]
|
||||
(binding [ffeat/*wrap-with-objects-map-fn* omap/wrap]
|
||||
(f cfg file))))
|
||||
|
||||
(declare get-lagged-changes)
|
||||
(declare send-notifications!)
|
||||
(declare update-file)
|
||||
@@ -135,85 +132,74 @@
|
||||
;; database.
|
||||
|
||||
(sv/defmethod ::update-file
|
||||
{::climit/id [[:update-file/by-profile ::rpc/profile-id]
|
||||
[:update-file/global]]
|
||||
{::climit/id :update-file-by-id
|
||||
::climit/key-fn :id
|
||||
::webhooks/event? true
|
||||
::webhooks/batch-timeout (dt/duration "2m")
|
||||
::webhooks/batch-key (webhooks/key-fn ::rpc/profile-id :id)
|
||||
|
||||
::sm/params schema:update-file
|
||||
::sm/result schema:update-file-result
|
||||
::sm/params ::update-file-params
|
||||
::sm/result ::update-file-result
|
||||
|
||||
::doc/module :files
|
||||
::doc/added "1.17"}
|
||||
[cfg {:keys [::rpc/profile-id id] :as params}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(files/check-edition-permissions! conn profile-id id)
|
||||
(db/xact-lock! conn id)
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(files/check-edition-permissions! conn profile-id id)
|
||||
(db/xact-lock! conn id)
|
||||
|
||||
(let [file (get-file conn id)
|
||||
team (teams/get-team conn
|
||||
:profile-id profile-id
|
||||
:team-id (:team-id file))
|
||||
|
||||
features (-> (cfeat/get-team-enabled-features cf/flags team)
|
||||
(cfeat/check-client-features! (:features params))
|
||||
(cfeat/check-file-features! (:features file) (:features params)))
|
||||
|
||||
params (assoc params
|
||||
:profile-id profile-id
|
||||
:features features
|
||||
:team team
|
||||
:file file)
|
||||
|
||||
tpoint (dt/tpoint)]
|
||||
|
||||
;; When the newly computed features do not match exactly the
;; features defined on the team row, we update it.
(when (not= features (:features team))
|
||||
(let [features (db/create-array conn "text" features)]
|
||||
(db/update! conn :team
|
||||
{:features features}
|
||||
{:id (:id team)})))
|
||||
|
||||
(binding [l/*context* (some-> (meta params)
|
||||
(get :app.http/request)
|
||||
(errors/request->context))]
|
||||
(-> (update-file cfg params)
|
||||
(rph/with-defer #(let [elapsed (tpoint)]
|
||||
(l/trace :hint "update-file" :time (dt/format-duration elapsed))))))))))
|
||||
(let [cfg (assoc cfg ::db/conn conn)
|
||||
params (assoc params :profile-id profile-id)
|
||||
tpoint (dt/tpoint)]
|
||||
(-> (update-file cfg params)
|
||||
(rph/with-defer #(let [elapsed (tpoint)]
|
||||
(l/trace :hint "update-file" :time (dt/format-duration elapsed))))))))
|
||||
|
||||
(defn update-file
|
||||
[{:keys [::mtx/metrics] :as cfg}
|
||||
{:keys [file features changes changes-with-metadata] :as params}]
|
||||
(let [features (-> features
|
||||
(set/difference cfeat/frontend-only-features)
|
||||
(set/union (:features file)))
|
||||
[{:keys [::db/conn ::mtx/metrics] :as cfg} {:keys [profile-id id changes changes-with-metadata] :as params}]
|
||||
(let [file (get-file conn id)
|
||||
features (->> (concat (:features file)
|
||||
(:features params))
|
||||
(into (files/get-default-features))
|
||||
(files/check-features-compatibility!))]
|
||||
|
||||
update-fn (cond-> update-file*
|
||||
(contains? features "fdata/pointer-map")
|
||||
(wrap-with-pointer-map-context))
|
||||
(files/check-edition-permissions! conn profile-id (:id file))
|
||||
|
||||
changes (if changes-with-metadata
|
||||
(->> changes-with-metadata (mapcat :changes) vec)
|
||||
(vec changes))]
|
||||
(binding [ffeat/*current* features
|
||||
ffeat/*previous* (:features file)]
|
||||
|
||||
(when (> (:revn params)
|
||||
(:revn file))
|
||||
(ex/raise :type :validation
|
||||
:code :revn-conflict
|
||||
:hint "The incoming revision number is greater that stored version."
|
||||
:context {:incoming-revn (:revn params)
|
||||
:stored-revn (:revn file)}))
|
||||
(let [update-fn (cond-> update-file*
|
||||
(contains? features "storage/pointer-map")
|
||||
(wrap-with-pointer-map-context)
|
||||
|
||||
(mtx/run! metrics {:id :update-file-changes :inc (count changes)})
|
||||
(contains? features "storage/objects-map")
|
||||
(wrap-with-objects-map-context))
|
||||
|
||||
(binding [cfeat/*current* features
|
||||
cfeat/*previous* (:features file)]
|
||||
(let [file (assoc file :features features)
|
||||
params (-> params
|
||||
(assoc :file file)
|
||||
(assoc :changes changes)
|
||||
(assoc ::created-at (dt/now)))]
|
||||
file (assoc file :features features)
|
||||
changes (if changes-with-metadata
|
||||
(->> changes-with-metadata (mapcat :changes) vec)
|
||||
(vec changes))
|
||||
|
||||
params (-> params
|
||||
(assoc :file file)
|
||||
(assoc :changes changes)
|
||||
(assoc ::created-at (dt/now)))]
|
||||
|
||||
(when (> (:revn params)
|
||||
(:revn file))
|
||||
(ex/raise :type :validation
|
||||
:code :revn-conflict
|
||||
:hint "The incoming revision number is greater that stored version."
|
||||
:context {:incoming-revn (:revn params)
|
||||
:stored-revn (:revn file)}))
|
||||
|
||||
(mtx/run! metrics {:id :update-file-changes :inc (count changes)})
|
||||
|
||||
(when (not= features (:features file))
|
||||
(let [features (db/create-array conn "text" features)]
|
||||
(db/update! conn :file
|
||||
{:features features}
|
||||
{:id id})))
|
||||
|
||||
(-> (update-fn cfg params)
|
||||
(vary-meta assoc ::audit/replace-props
|
||||
@@ -224,12 +210,12 @@
|
||||
:team-id (:team-id file)}))))))
|
||||
|
||||
(defn- update-file*
|
||||
[{:keys [::db/conn ::wrk/executor] :as cfg}
|
||||
{:keys [profile-id file changes session-id ::created-at skip-validate] :as params}]
|
||||
(let [;; Process the file data on a separate thread to avoid doing the
;; CPU-intensive operation on a vthread.
file (px/invoke! executor (partial update-file-data cfg file changes skip-validate))
features (db/create-array conn "text" (:features file))]
[{:keys [::db/conn] :as cfg} {:keys [profile-id file changes session-id ::created-at] :as params}]
(let [;; Process the file data in the CLIMIT context; scheduling it to be
;; executed on a separate executor to avoid doing the CPU-intensive
;; operation on a vthread.
file (-> (climit/configure cfg :update-file)
(climit/submit! (partial update-file-data file changes)))]
|
||||
|
||||
(db/insert! conn :file-change
|
||||
{:id (uuid/next)
|
||||
@@ -241,14 +227,11 @@
|
||||
:features (db/create-array conn "text" (:features file))
|
||||
:data (when (take-snapshot? file)
|
||||
(:data file))
|
||||
:changes (blob/encode changes)}
|
||||
{::db/return-keys false})
|
||||
:changes (blob/encode changes)})
|
||||
|
||||
(db/update! conn :file
|
||||
{:revn (:revn file)
|
||||
:data (:data file)
|
||||
:version (:version file)
|
||||
:features features
|
||||
:data-backend nil
|
||||
:modified-at created-at
|
||||
:has-media-trimmed false}
|
||||
@@ -265,88 +248,24 @@
|
||||
;; Retrieve and return lagged data
|
||||
(get-lagged-changes conn params))))
|
||||
|
||||
(defn- soft-validate-file-schema!
|
||||
[file]
|
||||
(try
|
||||
(val/validate-file-schema! file)
|
||||
(catch Throwable cause
|
||||
(l/error :hint "file schema validation error" :cause cause))))
|
||||
|
||||
(defn- soft-validate-file!
|
||||
[file libs]
|
||||
(try
|
||||
(val/validate-file! file libs)
|
||||
(catch Throwable cause
|
||||
(l/error :hint "file validation error"
|
||||
:cause cause))))
|
||||
|
||||
(defn- update-file-data
|
||||
[{:keys [::db/conn] :as cfg} file changes skip-validate]
|
||||
(let [file (update file :data (fn [data]
|
||||
(-> data
|
||||
(blob/decode)
|
||||
(assoc :id (:id file)))))
|
||||
[file changes]
|
||||
(-> file
|
||||
(update :revn inc)
|
||||
(update :data (fn [data]
|
||||
(cond-> data
|
||||
:always
|
||||
(-> (blob/decode)
|
||||
(assoc :id (:id file))
|
||||
(pmg/migrate-data))
|
||||
|
||||
;; To avoid the unnecessary overhead of creating multiple pointers
;; and dealing internally with objects-map in its worst case (when
;; probably all shapes and all pointers will be read anyway), we just
;; realize/resolve them before applying the migration to the file
|
||||
file (if (fmg/need-migration? file)
|
||||
(-> file
|
||||
(update :data feat.fdata/process-pointers deref)
|
||||
(update :data feat.fdata/process-objects (partial into {}))
|
||||
(fmg/migrate-file))
|
||||
file)
|
||||
(and (contains? ffeat/*current* "components/v2")
|
||||
(not (contains? ffeat/*previous* "components/v2")))
|
||||
(ctf/migrate-to-components-v2)
|
||||
|
||||
;; WARNING: this ruins performance; maybe we need to find
|
||||
;; some other way to do general validation
|
||||
libs (when (and (or (contains? cf/flags :file-validation)
|
||||
(contains? cf/flags :soft-file-validation))
|
||||
(not skip-validate))
|
||||
(->> (files/get-file-libraries conn (:id file))
|
||||
(into [file] (map (fn [{:keys [id]}]
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)
|
||||
pmap/*tracked* nil]
|
||||
;; We do not resolve the objects maps here because there is a
;; lower probability that all shapes need to be loaded into
;; memory, so we leave them in lazy status
|
||||
(-> (files/get-file cfg id :migrate? false)
|
||||
(update :data feat.fdata/process-pointers deref) ; ensure all pointers resolved
|
||||
(update :data feat.fdata/process-objects (partial into {}))
|
||||
(fmg/migrate-file))))))
|
||||
(d/index-by :id)))
|
||||
|
||||
|
||||
file (-> (files/check-version! file)
|
||||
(update :revn inc)
|
||||
(update :data cpc/process-changes changes)
|
||||
(update :data d/without-nils))]
|
||||
|
||||
(when (contains? cf/flags :soft-file-validation)
|
||||
(soft-validate-file! file libs))
|
||||
|
||||
(when (contains? cf/flags :soft-file-schema-validation)
|
||||
(soft-validate-file-schema! file))
|
||||
|
||||
(when (and (contains? cf/flags :file-validation)
|
||||
(not skip-validate))
|
||||
(val/validate-file! file libs))
|
||||
|
||||
(when (and (contains? cf/flags :file-schema-validation)
|
||||
(not skip-validate))
|
||||
(val/validate-file-schema! file))
|
||||
|
||||
(cond-> file
|
||||
(contains? cfeat/*current* "fdata/objects-map")
|
||||
(feat.fdata/enable-objects-map)
|
||||
|
||||
(contains? cfeat/*current* "fdata/pointer-map")
|
||||
(feat.fdata/enable-pointer-map)
|
||||
|
||||
:always
|
||||
(update :data blob/encode))))
|
||||
:always
|
||||
(-> (cp/process-changes changes)
|
||||
(blob/encode)))))))
|
||||
|
||||
(defn- take-snapshot?
|
||||
"Defines the rule when file `data` snapshot should be saved."
|
||||
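;; The predicate body is not included in this hunk; a hypothetical sketch of
;; such a rule (the revision threshold below is an assumption for
;; illustration only, not the real implementation):
(comment
  (defn- take-snapshot?
    [{:keys [revn]}]
    (zero? (mod revn 20))))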
@@ -375,7 +294,7 @@
|
||||
(vec)))
|
||||
|
||||
(defn- send-notifications!
|
||||
[cfg {:keys [file team changes session-id] :as params}]
|
||||
[{:keys [::db/conn] :as cfg} {:keys [file changes session-id] :as params}]
|
||||
(let [lchanges (filter library-change? changes)
|
||||
msgbus (::mbus/msgbus cfg)]
|
||||
|
||||
@@ -389,12 +308,14 @@
|
||||
:changes changes})
|
||||
|
||||
(when (and (:is-shared file) (seq lchanges))
|
||||
(mbus/pub! msgbus
|
||||
:topic (:id team)
|
||||
:message {:type :library-change
|
||||
:profile-id (:profile-id params)
|
||||
:file-id (:id file)
|
||||
:session-id session-id
|
||||
:revn (:revn file)
|
||||
:modified-at (dt/now)
|
||||
:changes lchanges}))))
|
||||
(let [team-id (or (:team-id file)
|
||||
(files/get-team-id conn (:project-id file)))]
|
||||
(mbus/pub! msgbus
|
||||
:topic team-id
|
||||
:message {:type :library-change
|
||||
:profile-id (:profile-id params)
|
||||
:file-id (:id file)
|
||||
:session-id session-id
|
||||
:revn (:revn file)
|
||||
:modified-at (dt/now)
|
||||
:changes lchanges})))))
|
||||
|
||||
@@ -8,15 +8,14 @@
|
||||
(:require
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.spec :as us]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.media :as media]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.climit :as-alias climit]
|
||||
[app.rpc.climit :as climit]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.projects :as projects]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
@@ -26,28 +25,38 @@
|
||||
[app.storage :as sto]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[promesa.exec :as px]))
|
||||
[clojure.spec.alpha :as s]))
|
||||
|
||||
(def valid-weight #{100 200 300 400 500 600 700 800 900 950})
|
||||
(def valid-style #{"normal" "italic"})
|
||||
|
||||
(s/def ::data (s/map-of ::us/string any?))
|
||||
(s/def ::file-id ::us/uuid)
|
||||
(s/def ::font-id ::us/uuid)
|
||||
(s/def ::id ::us/uuid)
|
||||
(s/def ::name ::us/not-empty-string)
|
||||
(s/def ::project-id ::us/uuid)
|
||||
(s/def ::share-id ::us/uuid)
|
||||
(s/def ::style valid-style)
|
||||
(s/def ::team-id ::us/uuid)
|
||||
(s/def ::weight valid-weight)
|
||||
|
||||
;; --- QUERY: Get font variants
|
||||
|
||||
(def ^:private
|
||||
schema:get-font-variants
|
||||
[:schema {:title "get-font-variants"}
|
||||
[:and
|
||||
[:map
|
||||
[:team-id {:optional true} ::sm/uuid]
|
||||
[:file-id {:optional true} ::sm/uuid]
|
||||
[:project-id {:optional true} ::sm/uuid]
|
||||
[:share-id {:optional true} ::sm/uuid]]
|
||||
[::sm/contains-any #{:team-id :file-id :project-id}]]])
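
;; Illustrative params for the schema above (uuid values are placeholders);
;; at least one of :team-id, :file-id or :project-id must be present:
(comment
  ;; valid
  {:team-id #uuid "00000000-0000-0000-0000-000000000000"}
  ;; invalid: none of the required keys is present
  {:share-id #uuid "00000000-0000-0000-0000-000000000000"})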
|
||||
(s/def ::get-font-variants
|
||||
(s/and
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:opt-un [::team-id
|
||||
::file-id
|
||||
::project-id
|
||||
::share-id])
|
||||
(fn [o]
|
||||
(or (contains? o :team-id)
|
||||
(contains? o :file-id)
|
||||
(contains? o :project-id)))))
|
||||
|
||||
(sv/defmethod ::get-font-variants
|
||||
{::doc/added "1.18"
|
||||
::sm/params schema:get-font-variants}
|
||||
{::doc/added "1.18"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id file-id project-id share-id] :as params}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(cond
|
||||
@@ -71,39 +80,34 @@
|
||||
perms (files/get-permissions conn profile-id file-id share-id)]
|
||||
(files/check-read-permissions! perms)
|
||||
(db/query conn :team-font-variant
|
||||
{:team-id (:team-id project)
|
||||
:deleted-at nil})))))
|
||||
{:team-id (:team-id project)
|
||||
:deleted-at nil})))))
|
||||
|
||||
|
||||
(declare create-font-variant)
|
||||
|
||||
(def ^:private schema:create-font-variant
|
||||
[:map {:title "create-font-variant"}
|
||||
[:team-id ::sm/uuid]
|
||||
[:data [:map-of :string :any]]
|
||||
[:font-id ::sm/uuid]
|
||||
[:font-family :string]
|
||||
[:font-weight [::sm/one-of {:format "number"} valid-weight]]
|
||||
[:font-style [::sm/one-of {:format "string"} valid-style]]])
|
||||
(s/def ::create-font-variant
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::team-id
|
||||
::data
|
||||
::font-id
|
||||
::font-family
|
||||
::font-weight
|
||||
::font-style]))
|
||||
|
||||
(sv/defmethod ::create-font-variant
|
||||
{::doc/added "1.18"
|
||||
::climit/id [[:process-font/by-profile ::rpc/profile-id]
|
||||
[:process-font/global]]
|
||||
::webhooks/event? true
|
||||
::sm/params schema:create-font-variant}
|
||||
[cfg {:keys [::rpc/profile-id team-id] :as params}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn] :as cfg}]
|
||||
(let [cfg (update cfg ::sto/storage media/configure-assets-storage)]
|
||||
(teams/check-edition-permissions! conn profile-id team-id)
|
||||
(quotes/check-quote! conn {::quotes/id ::quotes/font-variants-per-team
|
||||
::quotes/profile-id profile-id
|
||||
::quotes/team-id team-id})
|
||||
(create-font-variant cfg (assoc params :profile-id profile-id))))))
|
||||
::webhooks/event? true}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id] :as params}]
|
||||
(let [cfg (update cfg ::sto/storage media/configure-assets-storage)]
|
||||
(teams/check-edition-permissions! pool profile-id team-id)
|
||||
(quotes/check-quote! pool {::quotes/id ::quotes/font-variants-per-team
|
||||
::quotes/profile-id profile-id
|
||||
::quotes/team-id team-id})
|
||||
(create-font-variant cfg (assoc params :profile-id profile-id))))
|
||||
|
||||
(defn create-font-variant
|
||||
[{:keys [::sto/storage ::db/conn ::wrk/executor]} {:keys [data] :as params}]
|
||||
[{:keys [::sto/storage ::db/pool] :as cfg} {:keys [data] :as params}]
|
||||
(letfn [(generate-missing! [data]
|
||||
(let [data (media/run {:cmd :generate-fonts :input data})]
|
||||
(when (and (not (contains? data "font/otf"))
|
||||
@@ -131,7 +135,6 @@
|
||||
ttf-params (prepare-font data "font/ttf")
|
||||
wf1-params (prepare-font data "font/woff")
|
||||
wf2-params (prepare-font data "font/woff2")]
|
||||
|
||||
(cond-> {}
|
||||
(some? otf-params)
|
||||
(assoc :otf (sto/put-object! storage otf-params))
|
||||
@@ -143,7 +146,7 @@
|
||||
(assoc :woff2 (sto/put-object! storage wf2-params)))))
|
||||
|
||||
(insert-font-variant! [{:keys [woff1 woff2 otf ttf]}]
|
||||
(db/insert! conn :team-font-variant
|
||||
(db/insert! pool :team-font-variant
|
||||
{:id (uuid/next)
|
||||
:team-id (:team-id params)
|
||||
:font-id (:font-id params)
|
||||
@@ -153,114 +156,74 @@
|
||||
:woff1-file-id (:id woff1)
|
||||
:woff2-file-id (:id woff2)
|
||||
:otf-file-id (:id otf)
|
||||
:ttf-file-id (:id ttf)}))]
|
||||
:ttf-file-id (:id ttf)}))
|
||||
]
|
||||
|
||||
(let [data (px/invoke! executor (partial generate-missing! data))
|
||||
(let [data (-> (climit/configure cfg :process-font)
|
||||
(climit/submit! (partial generate-missing! data)))
|
||||
assets (persist-fonts-files! data)
|
||||
result (insert-font-variant! assets)]
|
||||
(vary-meta result assoc ::audit/replace-props (update params :data (comp vec keys))))))
|
||||
|
||||
;; --- UPDATE FONT FAMILY
|
||||
|
||||
(def ^:private
|
||||
schema:update-font
|
||||
[:map {:title "update-font"}
|
||||
[:team-id ::sm/uuid]
|
||||
[:id ::sm/uuid]
|
||||
[:name :string]])
|
||||
(s/def ::update-font
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::team-id ::id ::name]))
|
||||
|
||||
(sv/defmethod ::update-font
|
||||
{::doc/added "1.18"
|
||||
::webhooks/event? true
|
||||
::sm/params schema:update-font}
|
||||
[cfg {:keys [::rpc/profile-id team-id id name]}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn]}]
|
||||
(teams/check-edition-permissions! conn profile-id team-id)
|
||||
|
||||
(db/update! conn :team-font-variant
|
||||
{:font-family name}
|
||||
{:font-id id
|
||||
:team-id team-id})
|
||||
|
||||
(rph/with-meta (rph/wrap nil)
|
||||
{::audit/replace-props {:id id
|
||||
:name name
|
||||
:team-id team-id
|
||||
:profile-id profile-id}}))))
|
||||
::webhooks/event? true}
|
||||
[{:keys [::db/pool]} {:keys [::rpc/profile-id team-id id name]}]
|
||||
(db/with-atomic [conn pool]
|
||||
(teams/check-edition-permissions! conn profile-id team-id)
|
||||
(rph/with-meta
|
||||
(db/update! conn :team-font-variant
|
||||
{:font-family name}
|
||||
{:font-id id
|
||||
:team-id team-id})
|
||||
{::audit/replace-props {:id id
|
||||
:name name
|
||||
:team-id team-id
|
||||
:profile-id profile-id}})))
|
||||
|
||||
;; --- DELETE FONT
|
||||
|
||||
(def ^:private
|
||||
schema:delete-font
|
||||
[:map {:title "delete-font"}
|
||||
[:team-id ::sm/uuid]
|
||||
[:id ::sm/uuid]])
|
||||
(s/def ::delete-font
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::team-id ::id]))
|
||||
|
||||
(sv/defmethod ::delete-font
|
||||
{::doc/added "1.18"
|
||||
::webhooks/event? true
|
||||
::sm/params schema:delete-font}
|
||||
[cfg {:keys [::rpc/profile-id id team-id]}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn ::sto/storage] :as cfg}]
|
||||
(teams/check-edition-permissions! conn profile-id team-id)
|
||||
(let [fonts (db/query conn :team-font-variant
|
||||
{:team-id team-id
|
||||
:font-id id
|
||||
:deleted-at nil}
|
||||
{::sql/for-update true})
|
||||
storage (media/configure-assets-storage storage conn)
|
||||
tnow (dt/now)]
|
||||
|
||||
(when-not (seq fonts)
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found))
|
||||
|
||||
(doseq [font fonts]
|
||||
(db/update! conn :team-font-variant
|
||||
{:deleted-at tnow}
|
||||
{:id (:id font)})
|
||||
(some->> (:woff1-file-id font) (sto/touch-object! storage))
|
||||
(some->> (:woff2-file-id font) (sto/touch-object! storage))
|
||||
(some->> (:ttf-file-id font) (sto/touch-object! storage))
|
||||
(some->> (:otf-file-id font) (sto/touch-object! storage)))
|
||||
|
||||
(rph/with-meta (rph/wrap)
|
||||
{::audit/props {:id id
|
||||
:team-id team-id
|
||||
:name (:font-family (peek fonts))
|
||||
:profile-id profile-id}})))))
|
||||
::webhooks/event? true}
|
||||
[{:keys [::db/pool]} {:keys [::rpc/profile-id id team-id]}]
|
||||
(db/with-atomic [conn pool]
|
||||
(teams/check-edition-permissions! conn profile-id team-id)
|
||||
(let [font (db/update! conn :team-font-variant
|
||||
{:deleted-at (dt/now)}
|
||||
{:font-id id :team-id team-id})]
|
||||
(rph/with-meta (rph/wrap)
|
||||
{::audit/props {:id id
|
||||
:team-id team-id
|
||||
:name (:font-family font)
|
||||
:profile-id profile-id}}))))
|
||||
|
||||
;; --- DELETE FONT VARIANT
|
||||
|
||||
(def ^:private schema:delete-font-variant
|
||||
[:map {:title "delete-font-variant"}
|
||||
[:team-id ::sm/uuid]
|
||||
[:id ::sm/uuid]])
|
||||
(s/def ::delete-font-variant
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::team-id ::id]))
|
||||
|
||||
(sv/defmethod ::delete-font-variant
|
||||
{::doc/added "1.18"
|
||||
::webhooks/event? true
|
||||
::sm/params schema:delete-font-variant}
|
||||
[cfg {:keys [::rpc/profile-id id team-id]}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn ::sto/storage] :as cfg}]
|
||||
(teams/check-edition-permissions! conn profile-id team-id)
|
||||
(let [variant (db/get conn :team-font-variant
|
||||
{:id id :team-id team-id}
|
||||
{::sql/for-update true})
|
||||
storage (media/configure-assets-storage storage conn)]
|
||||
|
||||
(db/update! conn :team-font-variant
|
||||
::webhooks/event? true}
|
||||
[{:keys [::db/pool]} {:keys [::rpc/profile-id id team-id]}]
|
||||
(db/with-atomic [conn pool]
|
||||
(teams/check-edition-permissions! conn profile-id team-id)
|
||||
(let [variant (db/update! conn :team-font-variant
|
||||
{:deleted-at (dt/now)}
|
||||
{:id (:id variant)})
|
||||
{:id id :team-id team-id})]
|
||||
(rph/with-meta (rph/wrap)
|
||||
{::audit/props {:font-family (:font-family variant)
|
||||
:font-id (:font-id variant)}}))))
|
||||
|
||||
(some->> (:woff1-file-id variant) (sto/touch-object! storage))
|
||||
(some->> (:woff2-file-id variant) (sto/touch-object! storage))
|
||||
(some->> (:ttf-file-id variant) (sto/touch-object! storage))
|
||||
(some->> (:otf-file-id variant) (sto/touch-object! storage))
|
||||
|
||||
(rph/with-meta (rph/wrap)
|
||||
{::audit/props {:font-family (:font-family variant)
|
||||
:font-id (:font-id variant)}})))))
|
||||
|
||||
@@ -18,7 +18,6 @@
|
||||
[app.rpc.commands.profile :as profile]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.services :as sv]
|
||||
[clojure.spec.alpha :as s]))
|
||||
@@ -41,7 +40,7 @@
|
||||
{::rpc/auth false
|
||||
::doc/added "1.15"
|
||||
::doc/module :auth}
|
||||
[{:keys [::setup/props ::ldap/provider] :as cfg} params]
|
||||
[{:keys [::main/props ::ldap/provider] :as cfg} params]
|
||||
(when-not provider
|
||||
(ex/raise :type :restriction
|
||||
:code :ldap-not-initialized
|
||||
@@ -79,13 +78,13 @@
|
||||
::audit/profile-id (:id profile)}))))))
|
||||
|
||||
(defn- login-or-register
|
||||
[cfg info]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn] :as cfg}]
|
||||
(or (some->> (:email info)
|
||||
(profile/clean-email)
|
||||
(profile/get-profile-by-email conn))
|
||||
(->> (assoc info :is-active true :is-demo false)
|
||||
(auth/create-profile! conn)
|
||||
(auth/create-profile-rels! conn)
|
||||
(profile/strip-private-attrs))))))
|
||||
[{:keys [::db/pool] :as cfg} info]
|
||||
(db/with-atomic [conn pool]
|
||||
(or (some->> (:email info)
|
||||
(profile/get-profile-by-email conn)
|
||||
(profile/decode-row))
|
||||
(->> (assoc info :is-active true :is-demo false)
|
||||
(auth/create-profile! conn)
|
||||
(auth/create-profile-rels! conn)
|
||||
(profile/strip-private-attrs)))))
|
||||
|
||||
|
||||
@@ -7,252 +7,276 @@
|
||||
(ns app.rpc.commands.management
|
||||
"A collection of RPC methods for manage the files, projects and team organization."
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.binfile.v1 :as bf.v1]
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.files.migrations :as pmg]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.spec :as us]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.http.sse :as sse]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.commands.binfile :as binfile]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.projects :as proj]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
[app.rpc.commands.teams :as teams :refer [create-project-role create-project]]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.setup :as-alias setup]
|
||||
[app.setup.templates :as tmpl]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[clojure.walk :as walk]
|
||||
[promesa.exec :as px]))
|
||||
|
||||
;; --- COMMAND: Duplicate File
|
||||
|
||||
(defn duplicate-file
|
||||
[{:keys [::db/conn ::bfc/timestamp] :as cfg} {:keys [profile-id file-id name reset-shared-flag] :as params}]
|
||||
(let [;; We don't touch the original file on duplication
|
||||
file (bfc/get-file cfg file-id)
|
||||
project-id (:project-id file)
|
||||
file (-> file
|
||||
(update :id bfc/lookup-index)
|
||||
(update :project-id bfc/lookup-index)
|
||||
(cond-> (string? name)
|
||||
(assoc :name name))
|
||||
(cond-> (true? reset-shared-flag)
|
||||
(assoc :is-shared false)))
|
||||
(declare duplicate-file)
|
||||
|
||||
flibs (bfc/get-files-rels cfg #{file-id})
|
||||
fmeds (bfc/get-file-media cfg file)]
|
||||
(s/def ::id ::us/uuid)
|
||||
(s/def ::project-id ::us/uuid)
|
||||
(s/def ::file-id ::us/uuid)
|
||||
(s/def ::team-id ::us/uuid)
|
||||
(s/def ::name ::us/string)
|
||||
|
||||
(when (uuid? profile-id)
|
||||
(proj/check-edition-permissions! conn profile-id project-id))
|
||||
|
||||
(vswap! bfc/*state* update :index bfc/update-index fmeds :id)
|
||||
|
||||
;; Process and persist file
|
||||
(let [file (->> (bfc/process-file file)
|
||||
(bfc/persist-file! cfg))]
|
||||
|
||||
;; The file profile creation is optional, so when no profile is
|
||||
;; present (when this function is called from profile less
|
||||
;; environment: SREPL) we just omit the creation of the relation
|
||||
(when (uuid? profile-id)
|
||||
(db/insert! conn :file-profile-rel
|
||||
{:file-id (:id file)
|
||||
:profile-id profile-id
|
||||
:is-owner true
|
||||
:is-admin true
|
||||
:can-edit true}
|
||||
{::db/return-keys? false}))
|
||||
|
||||
(doseq [params (sequence (comp
|
||||
(map #(bfc/remap-id % :file-id))
|
||||
(map #(bfc/remap-id % :library-file-id))
|
||||
(map #(assoc % :synced-at timestamp))
|
||||
(map #(assoc % :created-at timestamp)))
|
||||
flibs)]
|
||||
(db/insert! conn :file-library-rel params ::db/return-keys false))
|
||||
|
||||
(doseq [params (sequence (comp
|
||||
(map #(bfc/remap-id % :id))
|
||||
(map #(assoc % :created-at timestamp))
|
||||
(map #(bfc/remap-id % :file-id)))
|
||||
fmeds)]
|
||||
(db/insert! conn :file-media-object params ::db/return-keys false))
|
||||
|
||||
file)))
|
||||
|
||||
(def ^:private
|
||||
schema:duplicate-file
|
||||
(sm/define
|
||||
[:map {:title "duplicate-file"}
|
||||
[:file-id ::sm/uuid]
|
||||
[:name {:optional true} :string]]))
|
||||
(s/def ::duplicate-file
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::file-id]
|
||||
:opt-un [::name]))
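;; A rough illustrative sketch (hypothetical, not from the penpot source) of
;; how a schema like the one above validates the RPC params, written against
;; plain malli. `sm/define` and `::sm/uuid` come from app.common.schema, a
;; malli wrapper whose API is not shown in this diff, so `:uuid` is used as a
;; stand-in below.
(comment
  (require '[malli.core :as m])
  (let [schema [:map {:title "duplicate-file"}
                [:file-id :uuid]
                [:name {:optional true} :string]]]
    [(m/validate schema {:file-id (random-uuid)})              ;; => true
     (m/validate schema {:file-id (random-uuid) :name "copy"}) ;; => true
     (m/validate schema {:name "missing file-id"})]))          ;; => false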
|
||||
|
||||
(sv/defmethod ::duplicate-file
|
||||
"Duplicate a single file in the same team."
|
||||
{::doc/added "1.16"
|
||||
::webhooks/event? true
|
||||
::sm/params schema:duplicate-file}
|
||||
[cfg {:keys [::rpc/profile-id file-id] :as params}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
|
||||
::webhooks/event? true}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(duplicate-file conn (assoc params :profile-id profile-id))))
|
||||
|
||||
(binding [bfc/*state* (volatile! {:index {file-id (uuid/next)}})]
|
||||
(duplicate-file (assoc cfg ::bfc/timestamp (dt/now))
|
||||
(-> params
|
||||
(assoc :profile-id profile-id)
|
||||
(assoc :reset-shared-flag true)))))))
|
(defn- remap-id
  [item index key]
  (cond-> item
    (contains? item key)
    (assoc key (get index (get item key) (get item key)))))
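;; A small usage sketch for `remap-id` above (hypothetical uuids, not from the
;; penpot source): a key present in the item and found in the index is
;; rewritten, anything else is left untouched.
(comment
  (let [old-id #uuid "00000000-0000-0000-0000-000000000001"
        new-id #uuid "00000000-0000-0000-0000-000000000002"
        index  {old-id new-id}]
    [(remap-id {:file-id old-id} index :file-id)    ;; => {:file-id new-id}
     (remap-id {:name "no file"} index :file-id)])) ;; => {:name "no file"}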
|
||||
|
||||
(defn- process-file
|
||||
[conn {:keys [id] :as file} index]
|
||||
(letfn [(process-form [form]
|
||||
(cond-> form
|
||||
;; Relink library items
|
||||
(and (map? form)
|
||||
(uuid? (:component-file form)))
|
||||
(update :component-file #(get index % %))
|
||||
|
||||
(and (map? form)
|
||||
(uuid? (:fill-color-ref-file form)))
|
||||
(update :fill-color-ref-file #(get index % %))
|
||||
|
||||
(and (map? form)
|
||||
(uuid? (:stroke-color-ref-file form)))
|
||||
(update :stroke-color-ref-file #(get index % %))
|
||||
|
||||
(and (map? form)
|
||||
(uuid? (:typography-ref-file form)))
|
||||
(update :typography-ref-file #(get index % %))
|
||||
|
||||
;; Relink Image Shapes
|
||||
(and (map? form)
|
||||
(map? (:metadata form))
|
||||
(= :image (:type form)))
|
||||
(update-in [:metadata :id] #(get index % %))))
|
;; A function responsible for analyzing all file data and
;; replacing the old :component-file references with the new
;; ones, using the provided file index
(relink-shapes [data]
  (walk/postwalk process-form data))

;; A function responsible for processing the :media attr of file
;; data and remapping the old ids to the new ones
(relink-media [media]
|
||||
(reduce-kv (fn [res k v]
|
||||
(let [id (get index k)]
|
||||
(if (uuid? id)
|
||||
(-> res
|
||||
(assoc id (assoc v :id id))
|
||||
(dissoc k))
|
||||
res)))
|
||||
media
|
||||
media))]
|
||||
(-> file
|
||||
(update :id #(get index %))
|
||||
(update :data
|
||||
(fn [data]
|
||||
(binding [pmap/*load-fn* (partial files/load-pointer conn id)
|
||||
pmap/*tracked* (atom {})]
|
||||
(let [file-id (get index id)
|
||||
data (-> data
|
||||
(blob/decode)
|
||||
(assoc :id file-id)
|
||||
(pmg/migrate-data)
|
||||
(update :pages-index relink-shapes)
|
||||
(update :components relink-shapes)
|
||||
(update :media relink-media)
|
||||
(d/without-nils)
|
||||
(files/process-pointers pmap/clone)
|
||||
(blob/encode))]
|
||||
(files/persist-pointers! conn file-id)
|
||||
data)))))))
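;; A tiny standalone sketch (hypothetical ids, not from the penpot source) of
;; the postwalk-based relinking idea used by `process-file` above: every map
;; carrying a :component-file that appears in the index is rewritten to the
;; duplicated file id. `walk` is the clojure.walk alias from the ns form.
(comment
  (let [index {#uuid "00000000-0000-0000-0000-000000000001"
               #uuid "00000000-0000-0000-0000-000000000002"}]
    (walk/postwalk
     (fn [form]
       (if (and (map? form) (uuid? (:component-file form)))
         (update form :component-file #(get index % %))
         form))
     {:objects {:shape-1 {:component-file
                          #uuid "00000000-0000-0000-0000-000000000001"}}})))
;; => {:objects {:shape-1 {:component-file #uuid "00000000-0000-0000-0000-000000000002"}}}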
|
||||
|
||||
(def sql:retrieve-used-libraries
|
||||
"select flr.*
|
||||
from file_library_rel as flr
|
||||
inner join file as l on (flr.library_file_id = l.id)
|
||||
where flr.file_id = ?
|
||||
and l.deleted_at is null")
|
||||
|
||||
(def sql:retrieve-used-media-objects
|
||||
"select fmo.*
|
||||
from file_media_object as fmo
|
||||
inner join storage_object as so on (fmo.media_id = so.id)
|
||||
where fmo.file_id = ?
|
||||
and so.deleted_at is null")
|
||||
|
||||
(defn duplicate-file*
|
||||
[conn {:keys [profile-id file index project-id name flibs fmeds]} {:keys [reset-shared-flag]}]
|
||||
(let [flibs (or flibs (db/exec! conn [sql:retrieve-used-libraries (:id file)]))
|
||||
fmeds (or fmeds (db/exec! conn [sql:retrieve-used-media-objects (:id file)]))
|
||||
|
;; use a single uniform creation/modification date
now    (dt/now)
ignore (dt/plus now (dt/duration {:seconds 5}))

;; add all file media objects to the index
index  (reduce #(assoc %1 (:id %2) (uuid/next)) index fmeds)
|
||||
|
||||
flibs-xf (comp
|
||||
(map #(remap-id % index :file-id))
|
||||
(map #(remap-id % index :library-file-id))
|
||||
(map #(assoc % :synced-at now))
|
||||
(map #(assoc % :created-at now)))
|
||||
|
||||
;; remap all file-library-rel row
|
||||
flibs (sequence flibs-xf flibs)
|
||||
|
||||
fmeds-xf (comp
|
||||
(map #(assoc % :id (get index (:id %))))
|
||||
(map #(assoc % :created-at now))
|
||||
(map #(remap-id % index :file-id)))
|
||||
|
||||
;; remap all file-media-object rows
|
||||
fmeds (sequence fmeds-xf fmeds)
|
||||
|
||||
file (cond-> file
|
||||
(some? project-id)
|
||||
(assoc :project-id project-id)
|
||||
|
||||
(some? name)
|
||||
(assoc :name name)
|
||||
|
||||
(true? reset-shared-flag)
|
||||
(assoc :is-shared false))
|
||||
|
||||
file (-> file
|
||||
(assoc :created-at now)
|
||||
(assoc :modified-at now)
|
||||
(assoc :ignore-sync-until ignore))
|
||||
|
||||
file (process-file conn file index)]
|
||||
|
||||
(db/insert! conn :file file)
|
||||
(db/insert! conn :file-profile-rel
|
||||
{:file-id (:id file)
|
||||
:profile-id profile-id
|
||||
:is-owner true
|
||||
:is-admin true
|
||||
:can-edit true})
|
||||
|
||||
(doseq [params flibs]
|
||||
(db/insert! conn :file-library-rel params))
|
||||
|
||||
(doseq [params fmeds]
|
||||
(db/insert! conn :file-media-object params))
|
||||
|
||||
file))
|
||||
|
||||
(defn duplicate-file
|
||||
[conn {:keys [profile-id file-id] :as params}]
|
||||
(let [file (db/get-by-id conn :file file-id)
|
||||
index {file-id (uuid/next)}
|
||||
params (assoc params :index index :file file)]
|
||||
(proj/check-edition-permissions! conn profile-id (:project-id file))
|
||||
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
|
||||
(-> (duplicate-file* conn params {:reset-shared-flag true})
|
||||
(update :data blob/decode)
|
||||
(update :features db/decode-pgarray #{}))))
|
||||
|
||||
;; --- COMMAND: Duplicate Project
|
||||
|
||||
(defn duplicate-project
|
||||
[{:keys [::db/conn ::bfc/timestamp] :as cfg} {:keys [profile-id project-id name] :as params}]
|
||||
(binding [bfc/*state* (volatile! {:index {project-id (uuid/next)}})]
|
||||
(let [project (-> (db/get-by-id conn :project project-id)
|
||||
(assoc :created-at timestamp)
|
||||
(assoc :modified-at timestamp)
|
||||
(assoc :is-pinned false)
|
||||
(update :id bfc/lookup-index)
|
||||
(cond-> (string? name)
|
||||
(assoc :name name)))
|
||||
(declare duplicate-project)
|
||||
|
||||
files (bfc/get-project-files cfg project-id)]
|
||||
|
||||
;; Update index with the project files and the project-id
|
||||
(vswap! bfc/*state* update :index bfc/update-index files)
|
||||
|
||||
|
;; Check that the source team allows creating a new project for the current user
(teams/check-edition-permissions! conn profile-id (:team-id project))

;; Create the duplicated project and assign the current profile as
;; the project owner
(let [project (teams/create-project conn project)]
  ;; The project profile relation is optional, so when no profile is
  ;; present (when this function is called from a profile-less
  ;; environment: SREPL) we just omit the creation of the relation
  (when (uuid? profile-id)
    (teams/create-project-role conn profile-id (:id project) :owner))
|
||||
|
||||
(doseq [file-id files]
|
||||
(let [params (-> params
|
||||
(dissoc :name)
|
||||
(assoc :file-id file-id)
|
||||
(assoc :reset-shared-flag false))]
|
||||
(duplicate-file cfg params)))
|
||||
|
||||
project))))
|
||||
|
||||
(def ^:private
|
||||
schema:duplicate-project
|
||||
(sm/define
|
||||
[:map {:title "duplicate-project"}
|
||||
[:project-id ::sm/uuid]
|
||||
[:name {:optional true} :string]]))
|
||||
(s/def ::duplicate-project
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::project-id]
|
||||
:opt-un [::name]))
|
||||
|
||||
(sv/defmethod ::duplicate-project
|
||||
"Duplicate an entire project with all the files"
|
||||
{::doc/added "1.16"
|
||||
::webhooks/event? true
|
||||
::sm/params schema:duplicate-project}
|
||||
[cfg {:keys [::rpc/profile-id] :as params}]
|
||||
(db/tx-run! cfg (fn [cfg]
|
||||
;; Defer all constraints
|
||||
(db/exec-one! cfg ["SET CONSTRAINTS ALL DEFERRED"])
|
||||
(-> (assoc cfg ::bfc/timestamp (dt/now))
|
||||
(duplicate-project (assoc params :profile-id profile-id))))))
|
||||
::webhooks/event? true}
|
||||
[{:keys [::db/pool] :as cfg} params]
|
||||
(db/with-atomic [conn pool]
|
||||
(duplicate-project conn (assoc params :profile-id (::rpc/profile-id params)))))
|
||||
|
||||
(defn duplicate-team
|
||||
[{:keys [::db/conn ::bfc/timestamp] :as cfg} & {:keys [profile-id team-id name] :as params}]
|
||||
(defn duplicate-project
|
||||
[conn {:keys [profile-id project-id name] :as params}]
|
||||
|
;; Check that the source team-id is allowed to be read by the user when a
;; profile-id is present; it can be omitted if this function is called
;; from SREPL helpers where no profile is available
(when (uuid? profile-id)
  (teams/check-read-permissions! conn profile-id team-id))

;; Defer all constraints
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
|
||||
|
||||
(binding [bfc/*state* (volatile! {:index {team-id (uuid/next)}})]
|
||||
(let [projs (bfc/get-team-projects cfg team-id)
|
||||
files (bfc/get-team-files cfg team-id)
|
||||
frels (bfc/get-files-rels cfg files)
|
||||
(let [project (-> (db/get-by-id conn :project project-id)
|
||||
(assoc :is-pinned false))
|
||||
|
||||
team (-> (db/get-by-id conn :team team-id)
|
||||
(assoc :created-at timestamp)
|
||||
(assoc :modified-at timestamp)
|
||||
(update :id bfc/lookup-index)
|
||||
(cond-> (string? name)
|
||||
(assoc :name name)))
|
||||
files (db/query conn :file
|
||||
{:project-id (:id project)
|
||||
:deleted-at nil}
|
||||
{:columns [:id]})
|
||||
|
||||
fonts (db/query conn :team-font-variant
|
||||
{:team-id team-id})]
|
||||
project (cond-> project
|
||||
(string? name)
|
||||
(assoc :name name)
|
||||
|
||||
(vswap! bfc/*state* update :index
|
||||
(fn [index]
|
||||
(-> index
|
||||
(bfc/update-index projs)
|
||||
(bfc/update-index files)
|
||||
(bfc/update-index fonts :id))))
|
||||
:always
|
||||
(assoc :id (uuid/next)))]
|
||||
|
||||
;; FIXME: disallow clone default team
|
||||
;; Create the new team in the database
|
||||
(db/insert! conn :team team)
|
||||
;; Check if the source team-id allow creating new project for current user
|
||||
(teams/check-edition-permissions! conn profile-id (:team-id project))
|
||||
|
||||
;; Duplicate team <-> profile relations
|
||||
(doseq [params frels]
|
||||
(let [params (-> params
|
||||
(assoc :id (uuid/next))
|
||||
(update :team-id bfc/lookup-index)
|
||||
(assoc :created-at timestamp)
|
||||
(assoc :modified-at timestamp))]
|
||||
(db/insert! conn :team-profile-rel params
|
||||
{::db/return-keys false})))
|
||||
;; create the duplicated project and assign the current profile as
|
||||
;; a project owner
|
||||
(create-project conn project)
|
||||
(create-project-role conn profile-id (:id project) :owner)
|
||||
|
||||
;; Duplicate team fonts
|
||||
(doseq [font fonts]
|
||||
(let [params (-> font
|
||||
(update :id bfc/lookup-index)
|
||||
(update :team-id bfc/lookup-index)
|
||||
(assoc :created-at timestamp)
|
||||
(assoc :modified-at timestamp))]
|
||||
(db/insert! conn :team-font-variant params
|
||||
{::db/return-keys false})))
|
||||
;; duplicate all files
|
||||
(let [index (reduce #(assoc %1 (:id %2) (uuid/next)) {} files)
|
||||
params (-> params
|
||||
(dissoc :name)
|
||||
(assoc :project-id (:id project))
|
||||
(assoc :index index))]
|
||||
(doseq [{:keys [id]} files]
|
||||
(let [file (db/get-by-id conn :file id)
|
||||
params (assoc params :file file)
|
||||
opts {:reset-shared-flag false}]
|
||||
(duplicate-file* conn params opts))))
|
||||
|
||||
;; Duplicate projects; We don't reuse the `duplicate-project`
|
||||
;; here because we handle files duplication by whole team
|
||||
;; instead of by project and we want to preserve some project
|
||||
;; props which are reset on the `duplicate-project` impl
|
||||
(doseq [project-id projs]
|
||||
(let [project (db/get conn :project {:id project-id})
|
||||
project (-> project
|
||||
(assoc :created-at timestamp)
|
||||
(assoc :modified-at timestamp)
|
||||
(update :id bfc/lookup-index)
|
||||
(update :team-id bfc/lookup-index))]
|
||||
(teams/create-project conn project)
|
||||
|
||||
;; The project profile creation is optional, so when no profile is
|
||||
;; present (when this function is called from profile less
|
||||
;; environment: SREPL) we just omit the creation of the relation
|
||||
(when (uuid? profile-id)
|
||||
(teams/create-project-role conn profile-id (:id project) :owner))))
|
||||
|
||||
(doseq [file-id files]
|
||||
(let [params (-> params
|
||||
(dissoc :name)
|
||||
(assoc :file-id file-id)
|
||||
(assoc :reset-shared-flag false))]
|
||||
(duplicate-file cfg params)))
|
||||
|
||||
team)))
|
||||
;; return the created project
|
||||
project))
|
||||
|
||||
;; --- COMMAND: Move file
|
||||
|
||||
(def sql:get-files
|
||||
"select id, features, project_id from file where id = ANY(?)")
|
||||
(def sql:retrieve-files
|
||||
"select id, project_id from file where id = ANY(?)")
|
||||
|
||||
(def sql:move-files
|
||||
"update file set project_id = ? where id = ANY(?)")
|
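;; A sketch of how this statement is used further down in `move-files`: the set
;; of file ids travels as a single PostgreSQL uuid[] parameter built with
;; db/create-array, so `id = ANY(?)` matches all of them in one statement.
;; Hypothetical ids; `conn` and `project-id` stand for the connection and the
;; destination project bound inside the function below.
(comment
  (let [ids  #{(uuid/next) (uuid/next)}
        fids (db/create-array conn "uuid" ids)]
    (db/exec-one! conn [sql:move-files project-id fids])))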
||||
@@ -273,20 +297,14 @@
|
||||
and rel.library_file_id = br.library_file_id")
|
||||
|
||||
(defn move-files
|
||||
[{:keys [::db/conn] :as cfg} {:keys [profile-id ids project-id] :as params}]
|
||||
[conn {:keys [profile-id ids project-id] :as params}]
|
||||
|
||||
(let [fids (db/create-array conn "uuid" ids)
|
||||
files (->> (db/exec! conn [sql:get-files fids])
|
||||
(map files/decode-row))
|
||||
files (db/exec! conn [sql:retrieve-files fids])
|
||||
source (into #{} (map :project-id) files)
|
||||
pids (->> (conj source project-id)
|
||||
(db/create-array conn "uuid"))]
|
||||
|
||||
(when (contains? source project-id)
|
||||
(ex/raise :type :validation
|
||||
:code :cant-move-to-same-project
|
||||
:hint "Unable to move a file to the same project"))
|
||||
|
||||
;; Check if we have permissions on the destination project
|
||||
(proj/check-edition-permissions! conn profile-id project-id)
|
||||
|
||||
@@ -294,15 +312,10 @@
|
||||
(doseq [project-id source]
|
||||
(proj/check-edition-permissions! conn profile-id project-id))
|
||||
|
||||
;; Check the team compatibility
|
||||
(let [orig-team (teams/get-team conn :profile-id profile-id :project-id (first source))
|
||||
dest-team (teams/get-team conn :profile-id profile-id :project-id project-id)]
|
||||
(cfeat/check-teams-compatibility! orig-team dest-team)
|
||||
|
||||
;; Check if all pending to move files are compaib
|
||||
(let [features (cfeat/get-team-enabled-features cf/flags dest-team)]
|
||||
(doseq [file files]
|
||||
(cfeat/check-file-features! features (:features file)))))
|
||||
(when (contains? source project-id)
|
||||
(ex/raise :type :validation
|
||||
:code :cant-move-to-same-project
|
||||
:hint "Unable to move a file to the same project"))
|
||||
|
||||
;; move all files to the project
|
||||
(db/exec-one! conn [sql:move-files project-id fids])
|
||||
@@ -324,51 +337,36 @@
|
||||
|
||||
nil))
|
||||
|
||||
(def ^:private
|
||||
schema:move-files
|
||||
(sm/define
|
||||
[:map {:title "move-files"}
|
||||
[:ids ::sm/set-of-uuid]
|
||||
[:project-id ::sm/uuid]]))
|
||||
(s/def ::ids (s/every ::us/uuid :kind set?))
|
||||
(s/def ::move-files
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::ids ::project-id]))
|
||||
|
||||
(sv/defmethod ::move-files
|
"Move a set of files from one project to another."
|
||||
{::doc/added "1.16"
|
||||
::webhooks/event? true
|
||||
::sm/params schema:move-files}
|
||||
[cfg {:keys [::rpc/profile-id] :as params}]
|
||||
(db/tx-run! cfg #(move-files % (assoc params :profile-id profile-id))))
|
||||
::webhooks/event? true}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(move-files conn (assoc params :profile-id profile-id))))
|
||||
|
||||
;; --- COMMAND: Move project
|
||||
|
||||
(defn move-project
|
||||
[{:keys [::db/conn] :as cfg} {:keys [profile-id team-id project-id] :as params}]
|
||||
[conn {:keys [profile-id team-id project-id] :as params}]
|
||||
(let [project (db/get-by-id conn :project project-id {:columns [:id :team-id]})
|
||||
pids (->> (db/query conn :project {:team-id (:team-id project)} {:columns [:id]})
|
||||
(map :id)
|
||||
(db/create-array conn "uuid"))]
|
||||
|
||||
(teams/check-edition-permissions! conn profile-id (:team-id project))
|
||||
(teams/check-edition-permissions! conn profile-id team-id)
|
||||
|
||||
(when (= team-id (:team-id project))
|
||||
(ex/raise :type :validation
|
||||
:code :cant-move-to-same-team
|
||||
:hint "Unable to move a project to same team"))
|
||||
|
||||
(teams/check-edition-permissions! conn profile-id (:team-id project))
|
||||
(teams/check-edition-permissions! conn profile-id team-id)
|
||||
|
||||
;; Check the teams compatibility
(let [orig-team (teams/get-team conn :profile-id profile-id :team-id (:team-id project))
      dest-team (teams/get-team conn :profile-id profile-id :team-id team-id)]
  (cfeat/check-teams-compatibility! orig-team dest-team)

  ;; Check that all files pending to be moved are compatible with the
  ;; destination team features
  (let [features (cfeat/get-team-enabled-features cf/flags dest-team)]
    (doseq [file (->> (db/query conn :file
                                {:project-id project-id}
                                {:columns [:features]})
                      (map files/decode-row))]
      (cfeat/check-file-features! features (:features file)))))
|
||||
|
||||
;; move project to the destination team
|
||||
(db/update! conn :project
|
||||
{:team-id team-id}
|
||||
@@ -379,65 +377,67 @@
|
||||
|
||||
nil))
|
||||
|
||||
(def ^:private
|
||||
schema:move-project
|
||||
(sm/define
|
||||
[:map {:title "move-project"}
|
||||
[:team-id ::sm/uuid]
|
||||
[:project-id ::sm/uuid]]))
|
||||
(s/def ::move-project
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::team-id ::project-id]))
|
||||
|
||||
(sv/defmethod ::move-project
|
||||
"Move projects between teams"
|
||||
"Move projects between teams."
|
||||
{::doc/added "1.16"
|
||||
::webhooks/event? true
|
||||
::sm/params schema:move-project}
|
||||
[cfg {:keys [::rpc/profile-id] :as params}]
|
||||
(db/tx-run! cfg #(move-project % (assoc params :profile-id profile-id))))
|
||||
::webhooks/event? true}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(move-project conn (assoc params :profile-id profile-id))))
|
||||
|
||||
;; --- COMMAND: Clone Template
|
||||
|
||||
(defn- clone-template
|
||||
[{:keys [::wrk/executor ::bf.v1/project-id] :as cfg} template]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
                  ;; NOTE: the import process performs some operations that are
                  ;; not very friendly with virtual threads, so to avoid
                  ;; unexpected blocking of other concurrent operations we
                  ;; dispatch that operation to a dedicated executor.
                  (let [result (px/submit! executor (partial bf.v1/import-files! cfg template))]
                    (db/update! conn :project
                                {:modified-at (dt/now)}
                                {:id project-id})
                    (deref result)))))
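;; A minimal sketch of the dispatch pattern above (the import call itself is
;; hypothetical): hand the blocking work to the dedicated `executor` (the
;; ::wrk/executor destructured in `clone-template`) with px/submit!, keep doing
;; transactional work, and block on the result only at the end with deref.
(comment
  (let [result (px/submit! executor (fn [] (run-blocking-import!)))]
    ;; ... other work on the open connection goes here ...
    (deref result)))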
|
||||
(defn- clone-template!
|
||||
[{:keys [::db/conn] :as cfg} {:keys [profile-id template-id project-id]}]
|
||||
(let [template (tmpl/get-template-stream cfg template-id)
|
||||
project (db/get-by-id conn :project project-id {:columns [:id :team-id]})]
|
||||
|
||||
(def ^:private
|
||||
schema:clone-template
|
||||
(sm/define
|
||||
[:map {:title "clone-template"}
|
||||
[:project-id ::sm/uuid]
|
||||
[:template-id ::sm/word-string]]))
|
||||
|
||||
(sv/defmethod ::clone-template
|
"Clone the template identified by its id into the specified project."
|
||||
{::doc/added "1.16"
|
||||
::sse/stream? true
|
||||
::webhooks/event? true
|
||||
::sm/params schema:clone-template}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id project-id template-id] :as params}]
|
||||
(let [project (db/get-by-id pool :project project-id {:columns [:id :team-id]})
|
||||
_ (teams/check-edition-permissions! pool profile-id (:team-id project))
|
||||
template (tmpl/get-template-stream cfg template-id)
|
||||
params (-> cfg
|
||||
(assoc ::bf.v1/project-id (:id project))
|
||||
(assoc ::bf.v1/profile-id profile-id))]
|
||||
(when-not template
|
||||
(ex/raise :type :not-found
|
||||
:code :template-not-found
|
||||
:hint "template not found"))
|
||||
|
||||
(sse/response #(clone-template params template))))
|
||||
(teams/check-edition-permissions! conn profile-id (:team-id project))
|
||||
|
||||
(-> cfg
|
||||
;; FIXME: maybe reuse the conn instead of creating more
|
||||
;; connections in the import process?
|
||||
(dissoc ::db/conn)
|
||||
(assoc ::binfile/input template)
|
||||
(assoc ::binfile/project-id (:id project))
|
||||
(assoc ::binfile/ignore-index-errors? true)
|
||||
(assoc ::binfile/migrate? true)
|
||||
(binfile/import!))))
|
||||
|
||||
(def schema:clone-template
|
||||
[:map {:title "clone-template"}
|
||||
[:project-id ::sm/uuid]
|
||||
[:template-id ::sm/word-string]])
|
||||
|
||||
(sv/defmethod ::clone-template
|
"Clone the template identified by its id into the specified project."
|
||||
{::doc/added "1.16"
|
||||
::webhooks/event? true
|
||||
::sm/params schema:clone-template}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(-> (assoc cfg ::db/conn conn)
|
||||
(clone-template! (assoc params :profile-id profile-id)))))
|
||||
|
||||
;; --- COMMAND: Get list of builtin templates
|
||||
|
||||
(s/def ::retrieve-list-of-builtin-templates any?)
|
||||
|
||||
(sv/defmethod ::retrieve-list-of-builtin-templates
|
||||
{::doc/added "1.10"
|
||||
::doc/deprecated "1.19"}
|
||||
[cfg _params]
|
||||
(mapv #(select-keys % [:id :name]) (::setup/templates cfg)))
|
||||
|
||||
(sv/defmethod ::get-builtin-templates
|
||||
{::doc/added "1.19"}
|
||||
[cfg _params]
|
||||
|
||||
@@ -23,12 +23,9 @@
|
||||
[app.storage :as sto]
|
||||
[app.storage.tmp :as tmp]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[datoteka.io :as io]
|
||||
[promesa.exec :as px]))
|
||||
[datoteka.io :as io]))
|
||||
|
||||
(def default-max-file-size
|
||||
(* 1024 1024 10)) ; 10 MiB
|
||||
@@ -57,25 +54,20 @@
|
||||
:opt-un [::id]))
|
||||
|
||||
(sv/defmethod ::upload-file-media-object
|
||||
{::doc/added "1.17"
|
||||
::climit/id [[:process-image/by-profile ::rpc/profile-id]
|
||||
[:process-image/global]]}
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id content] :as params}]
|
||||
(let [cfg (update cfg ::sto/storage media/configure-assets-storage)]
|
||||
|
||||
(files/check-edition-permissions! pool profile-id file-id)
|
||||
(media/validate-media-type! content)
|
||||
(media/validate-media-size! content)
|
||||
|
||||
(db/run! cfg (fn [cfg]
|
||||
(let [object (create-file-media-object cfg params)
|
||||
props {:name (:name params)
|
||||
:file-id file-id
|
||||
:is-local (:is-local params)
|
||||
:size (:size content)
|
||||
:mtype (:mtype content)}]
|
||||
(with-meta object
|
||||
{::audit/replace-props props}))))))
|
||||
(let [object (create-file-media-object cfg params)
|
||||
props {:name (:name params)
|
||||
:file-id file-id
|
||||
:is-local (:is-local params)
|
||||
:size (:size content)
|
||||
:mtype (:mtype content)}]
|
||||
(with-meta object
|
||||
{::audit/replace-props props}))))
|
||||
|
||||
(defn- big-enough-for-thumbnail?
|
||||
"Checks if the provided image info is big enough for
|
||||
@@ -150,20 +142,17 @@
|
||||
(assoc ::image (process-main-image info)))))
|
||||
|
||||
(defn create-file-media-object
|
||||
[{:keys [::sto/storage ::db/conn ::wrk/executor]}
|
||||
[{:keys [::sto/storage ::db/pool] :as cfg}
|
||||
{:keys [id file-id is-local name content]}]
|
||||
|
||||
(let [result (px/invoke! executor (partial process-image content))
|
||||
(let [result (-> (climit/configure cfg :process-image)
|
||||
(climit/submit! (partial process-image content)))
|
||||
|
||||
image (sto/put-object! storage (::image result))
|
||||
thumb (when-let [params (::thumb result)]
|
||||
(sto/put-object! storage params))]
|
||||
|
||||
(db/update! conn :file
|
||||
{:modified-at (dt/now)
|
||||
:has-media-trimmed false}
|
||||
{:id file-id})
|
||||
|
||||
(db/exec-one! conn [sql:create-file-media-object
|
||||
(db/exec-one! pool [sql:create-file-media-object
|
||||
(or id (uuid/next))
|
||||
file-id is-local name
|
||||
(:id image)
|
||||
@@ -187,9 +176,9 @@
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id] :as params}]
|
||||
(let [cfg (update cfg ::sto/storage media/configure-assets-storage)]
|
||||
(files/check-edition-permissions! pool profile-id file-id)
|
||||
(create-file-media-object-from-url cfg (assoc params :profile-id profile-id))))
|
||||
(create-file-media-object-from-url cfg params)))
|
||||
|
||||
(defn download-image
|
||||
(defn- download-image
|
||||
[{:keys [::http/client]} uri]
|
||||
(letfn [(parse-and-validate [{:keys [headers] :as response}]
|
||||
(let [size (some-> (get headers "content-length") d/parse-integer)
|
||||
@@ -220,6 +209,7 @@
|
||||
{:method :get :uri uri}
|
||||
{:response-type :input-stream :sync? true})
|
||||
{:keys [size mtype]} (parse-and-validate response)
|
||||
|
||||
path (tmp/tempfile :prefix "penpot.media.download.")
|
||||
written (io/write-to-file! body path :size size)]
|
||||
|
||||
@@ -233,23 +223,14 @@
|
||||
:path path
|
||||
:mtype mtype})))
|
||||
|
||||
|
||||
(defn- create-file-media-object-from-url
|
||||
[cfg {:keys [url name] :as params}]
|
||||
(let [content (download-image cfg url)
|
||||
params (-> params
|
||||
(assoc :content content)
|
||||
(assoc :name (or name (:filename content))))]
|
||||
|
||||
;; NOTE: we use the climit here in a dynamic invocation because we don't
;; want to saturate the process-image limit with IO (the download of the
;; external image)

(-> cfg
    (assoc ::climit/id [[:process-image/by-profile (:profile-id params)]
                        [:process-image/global]])
    (assoc ::climit/label "create-file-media-object-from-url")
    (climit/invoke! #(db/run! %1 create-file-media-object %2) params))))
|
||||
|
||||
(create-file-media-object cfg params)))
|
||||
|
||||
;; --- Clone File Media object (Upload and create from url)
|
||||
|
||||
|
||||
@@ -8,12 +8,12 @@
|
||||
(:require
|
||||
[app.auth :as auth]
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.email :as eml]
|
||||
[app.http.session :as session]
|
||||
[app.loggers.audit :as audit]
|
||||
@@ -23,14 +23,11 @@
|
||||
[app.rpc.climit :as climit]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.setup :as-alias setup]
|
||||
[app.storage :as sto]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[cuerdas.core :as str]
|
||||
[promesa.exec :as px]))
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
(declare check-profile-existence!)
|
||||
(declare decode-row)
|
||||
@@ -40,39 +37,30 @@
|
||||
(declare strip-private-attrs)
|
||||
(declare verify-password)
|
||||
|
||||
(defn clean-email
  "Cleans and normalizes an email address string."
  [email]
  (let [email (str/lower email)
        email (if (str/starts-with? email "mailto:")
                (subs email 7)
                email)]
    email))
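;; A small usage sketch for `clean-email` above (hypothetical inputs, not from
;; the penpot source): the address is lower-cased and a leading "mailto:"
;; prefix is stripped.
(comment
  (clean-email "MAILTO:User@Example.COM") ;; => "user@example.com"
  (clean-email "User@Example.com"))       ;; => "user@example.com"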
|
||||
(def schema:profile
|
||||
[:map {:title "Profile"}
|
||||
[:id ::sm/uuid]
|
||||
[:fullname [::sm/word-string {:max 250}]]
|
||||
[:email ::sm/email]
|
||||
[:is-active {:optional true} :boolean]
|
||||
[:is-blocked {:optional true} :boolean]
|
||||
[:is-demo {:optional true} :boolean]
|
||||
[:is-muted {:optional true} :boolean]
|
||||
[:created-at {:optional true} ::sm/inst]
|
||||
[:modified-at {:optional true} ::sm/inst]
|
||||
[:default-project-id {:optional true} ::sm/uuid]
|
||||
[:default-team-id {:optional true} ::sm/uuid]
|
||||
[:props {:optional true}
|
||||
[:map-of {:title "ProfileProps"} :keyword :any]]])
|
||||
|
||||
(def ^:private
|
||||
schema:profile
|
||||
(sm/define
|
||||
[:map {:title "Profile"}
|
||||
[:id ::sm/uuid]
|
||||
[:fullname [::sm/word-string {:max 250}]]
|
||||
[:email ::sm/email]
|
||||
[:is-active {:optional true} :boolean]
|
||||
[:is-blocked {:optional true} :boolean]
|
||||
[:is-demo {:optional true} :boolean]
|
||||
[:is-muted {:optional true} :boolean]
|
||||
[:created-at {:optional true} ::sm/inst]
|
||||
[:modified-at {:optional true} ::sm/inst]
|
||||
[:default-project-id {:optional true} ::sm/uuid]
|
||||
[:default-team-id {:optional true} ::sm/uuid]
|
||||
[:props {:optional true}
|
||||
[:map-of {:title "ProfileProps"} :keyword :any]]]))
|
||||
(def profile?
|
||||
(sm/pred-fn schema:profile))
|
||||
|
||||
;; --- QUERY: Get profile (own)
|
||||
|
||||
(sv/defmethod ::get-profile
|
||||
{::rpc/auth false
|
||||
::doc/added "1.18"
|
||||
::sm/params [:map]
|
||||
::sm/result schema:profile}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id]}]
|
||||
;; We need to return the anonymous profile object in two cases, when
|
||||
@@ -93,13 +81,11 @@
|
||||
|
||||
;; --- MUTATION: Update Profile (own)
|
||||
|
||||
(def ^:private
|
||||
schema:update-profile
|
||||
(sm/define
|
||||
[:map {:title "update-profile"}
|
||||
[:fullname [::sm/word-string {:max 250}]]
|
||||
[:lang {:optional true} [:string {:max 5}]]
|
||||
[:theme {:optional true} [:string {:max 250}]]]))
|
||||
(def schema:update-profile
|
||||
[:map {:title "update-profile"}
|
||||
[:fullname [::sm/word-string {:max 250}]]
|
||||
[:lang {:optional true} [:string {:max 5}]]
|
||||
[:theme {:optional true} [:string {:max 250}]]])
|
||||
|
||||
(sv/defmethod ::update-profile
|
||||
{::doc/added "1.0"
|
||||
@@ -107,18 +93,23 @@
|
||||
::sm/result schema:profile}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id fullname lang theme] :as params}]
|
||||
|
||||
(dm/assert!
|
||||
"expected valid profile data"
|
||||
(profile? params))
|
||||
|
||||
(db/with-atomic [conn pool]
|
||||
;; NOTE: we need to retrieve the profile independently if we use
|
||||
;; it or not for explicit locking and avoid concurrent updates of
|
||||
;; the same row/object.
|
||||
(let [profile (-> (db/get-by-id conn :profile profile-id ::sql/for-update true)
|
||||
(let [profile (-> (db/get-by-id conn :profile profile-id ::db/for-update? true)
|
||||
(decode-row))
|
||||
|
||||
;; Update the profile map with direct params
|
||||
profile (-> profile
|
||||
(assoc :fullname fullname)
|
||||
(assoc :lang lang)
|
||||
(assoc :theme theme))]
|
||||
(assoc :theme theme))
|
||||
]
|
||||
|
||||
(db/update! conn :profile
|
||||
{:fullname fullname
|
||||
@@ -139,32 +130,32 @@
|
||||
(declare update-profile-password!)
|
||||
(declare invalidate-profile-session!)
|
||||
|
||||
(def ^:private
|
||||
schema:update-profile-password
|
||||
(sm/define
|
||||
[:map {:title "update-profile-password"}
|
||||
[:password [::sm/word-string {:max 500}]]
|
||||
;; Social registered users don't have old-password
|
||||
[:old-password {:optional true} [:maybe [::sm/word-string {:max 500}]]]]))
|
||||
(def schema:update-profile-password
|
||||
[:map {:title "update-profile-password"}
|
||||
[:password [::sm/word-string {:max 500}]]
|
||||
;; Social registered users don't have old-password
|
||||
[:old-password {:optional true} [:maybe [::sm/word-string {:max 500}]]]])
|
||||
|
||||
(sv/defmethod ::update-profile-password
|
||||
{::doc/added "1.0"
|
||||
{:doc/added "1.0"
|
||||
::sm/params schema:update-profile-password
|
||||
::climit/id :auth/global}
|
||||
[cfg {:keys [::rpc/profile-id password] :as params}]
|
||||
::sm/result :nil}
|
||||
|
||||
(db/tx-run! cfg (fn [cfg]
|
||||
(let [profile (validate-password! cfg (assoc params :profile-id profile-id))
|
||||
session-id (::session/id params)]
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id password] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [cfg (assoc cfg ::db/conn conn)
|
||||
profile (validate-password! cfg (assoc params :profile-id profile-id))
|
||||
session-id (::session/id params)]
|
||||
|
||||
(when (= (:email profile) (str/lower (:password params)))
|
||||
(ex/raise :type :validation
|
||||
:code :email-as-password
|
||||
:hint "you can't use your email as password"))
|
||||
(when (= (str/lower (:email profile))
|
||||
(str/lower (:password params)))
|
||||
(ex/raise :type :validation
|
||||
:code :email-as-password
|
||||
:hint "you can't use your email as password"))
|
||||
|
||||
(update-profile-password! cfg (assoc profile :password password))
|
||||
(invalidate-profile-session! cfg profile-id session-id)
|
||||
nil))))
|
||||
(update-profile-password! conn (assoc profile :password password))
|
||||
(invalidate-profile-session! cfg profile-id session-id)
|
||||
nil)))
|
||||
|
||||
(defn- invalidate-profile-session!
|
||||
"Removes all sessions except the current one."
|
||||
@@ -174,7 +165,7 @@
|
||||
|
||||
(defn- validate-password!
|
||||
[{:keys [::db/conn] :as cfg} {:keys [profile-id old-password] :as params}]
|
||||
(let [profile (db/get-by-id conn :profile profile-id ::sql/for-update true)]
|
||||
(let [profile (db/get-by-id conn :profile profile-id ::db/for-update? true)]
|
||||
(when (and (not= (:password profile) "!")
|
||||
(not (:valid (verify-password cfg old-password (:password profile)))))
|
||||
(ex/raise :type :validation
|
||||
@@ -182,23 +173,20 @@
|
||||
profile))
|
||||
|
||||
(defn update-profile-password!
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id password] :as profile}]
|
||||
[conn {:keys [id password] :as profile}]
|
||||
(when-not (db/read-only? conn)
|
||||
(db/update! conn :profile
|
||||
{:password (derive-password cfg password)}
|
||||
{:id id})
|
||||
nil))
|
||||
{:password (auth/derive-password password)}
|
||||
{:id id})))
|
||||
|
||||
;; --- MUTATION: Update Photo
|
||||
|
||||
(declare upload-photo)
|
||||
(declare update-profile-photo)
|
||||
|
||||
(def ^:private
|
||||
schema:update-profile-photo
|
||||
(sm/define
|
||||
[:map {:title "update-profile-photo"}
|
||||
[:file ::media/upload]]))
|
||||
(def schema:update-profile-photo
|
||||
[:map {:title "update-profile-photo"}
|
||||
[:file ::media/upload]])
|
||||
|
||||
(sv/defmethod ::update-profile-photo
|
||||
{:doc/added "1.1"
|
||||
@@ -212,9 +200,8 @@
|
||||
|
||||
(defn update-profile-photo
|
||||
[{:keys [::db/pool ::sto/storage] :as cfg} {:keys [profile-id file] :as params}]
|
||||
|
||||
(let [photo (upload-photo cfg params)
|
||||
profile (db/get-by-id pool :profile profile-id ::sql/for-update true)]
|
||||
profile (db/get-by-id pool :profile profile-id ::db/for-update? true)]
|
||||
|
||||
;; Schedule deletion of old photo
|
||||
(when-let [id (:photo-id profile)]
|
||||
@@ -233,7 +220,7 @@
|
||||
:file-mtype (:mtype file)}}))))
|
||||
|
||||
(defn- generate-thumbnail!
|
||||
[_ file]
|
||||
[file]
|
||||
(let [input (media/run {:cmd :info :input file})
|
||||
thumb (media/run {:cmd :profile-thumbnail
|
||||
:format :jpeg
|
||||
@@ -250,25 +237,20 @@
|
||||
:content-type (:mtype thumb)}))
|
||||
|
||||
(defn upload-photo
|
||||
[{:keys [::sto/storage ::wrk/executor] :as cfg} {:keys [file] :as params}]
|
||||
(let [params (-> cfg
|
||||
(assoc ::climit/id [[:process-image/by-profile (:profile-id params)]
|
||||
[:process-image/global]])
|
||||
(assoc ::climit/label "upload-photo")
|
||||
(assoc ::climit/executor executor)
|
||||
(climit/invoke! generate-thumbnail! file))]
|
||||
[{:keys [::sto/storage] :as cfg} {:keys [file]}]
|
||||
(let [params (-> (climit/configure cfg :process-image)
|
||||
(climit/submit! (partial generate-thumbnail! file)))]
|
||||
(sto/put-object! storage params)))
|
||||
|
||||
|
||||
;; --- MUTATION: Request Email Change
|
||||
|
||||
(declare ^:private request-email-change!)
|
||||
(declare ^:private change-email-immediately!)
|
||||
|
||||
(def ^:private
|
||||
schema:request-email-change
|
||||
(sm/define
|
||||
[:map {:title "request-email-change"}
|
||||
[:email ::sm/email]]))
|
||||
(def schema:request-email-change
|
||||
[:map {:title "request-email-change"}
|
||||
[:email ::sm/email]])
|
||||
|
||||
(sv/defmethod ::request-email-change
|
||||
{::doc/added "1.0"
|
||||
@@ -279,7 +261,7 @@
|
||||
cfg (assoc cfg ::conn conn)
|
||||
params (assoc params
|
||||
:profile profile
|
||||
:email (clean-email email))]
|
||||
:email (str/lower email))]
|
||||
(if (contains? cf/flags :smtp)
|
||||
(request-email-change! cfg params)
|
||||
(change-email-immediately! cfg params)))))
|
||||
@@ -297,12 +279,12 @@
|
||||
|
||||
(defn- request-email-change!
|
||||
[{:keys [::conn] :as cfg} {:keys [profile email] :as params}]
|
||||
(let [token (tokens/generate (::setup/props cfg)
|
||||
(let [token (tokens/generate (::main/props cfg)
|
||||
{:iss :change-email
|
||||
:exp (dt/in-future "15m")
|
||||
:profile-id (:id profile)
|
||||
:email email})
|
||||
ptoken (tokens/generate (::setup/props cfg)
|
||||
ptoken (tokens/generate (::main/props cfg)
|
||||
{:iss :profile-identity
|
||||
:profile-id (:id profile)
|
||||
:exp (dt/in-future {:days 30})})]
|
||||
@@ -333,18 +315,16 @@
|
||||
|
||||
;; --- MUTATION: Update Profile Props
|
||||
|
||||
(def ^:private
|
||||
schema:update-profile-props
|
||||
(sm/define
|
||||
[:map {:title "update-profile-props"}
|
||||
[:props [:map-of :keyword :any]]]))
|
||||
(def schema:update-profile-props
|
||||
[:map {:title "update-profile-props"}
|
||||
[:props [:map-of :keyword :any]]])
|
||||
|
||||
(sv/defmethod ::update-profile-props
|
||||
{::doc/added "1.0"
|
||||
::sm/params schema:update-profile-props}
|
||||
[{:keys [::db/pool]} {:keys [::rpc/profile-id props]}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [profile (get-profile conn profile-id ::sql/for-update true)
|
||||
(let [profile (get-profile conn profile-id ::db/for-update? true)
|
||||
props (reduce-kv (fn [props k v]
|
||||
;; We don't accept namespaced keys
|
||||
(if (simple-ident? k)
|
||||
@@ -418,9 +398,10 @@
|
||||
where email = ?
|
||||
and deleted_at is null) as val")
|
||||
|
||||
(defn- check-profile-existence!
|
||||
(defn check-profile-existence!
|
||||
[conn {:keys [email] :as params}]
|
||||
(let [result (db/exec-one! conn [sql:profile-existence email])]
|
||||
(let [email (str/lower email)
|
||||
result (db/exec-one! conn [sql:profile-existence email])]
|
||||
(when (:val result)
|
||||
(ex/raise :type :validation
|
||||
:code :email-already-exists))
|
||||
@@ -435,7 +416,7 @@
|
||||
(defn get-profile-by-email
  "Returns a profile looked up by email, or `nil` if no match is found."
  [conn email]
  (->> (db/exec! conn [sql:profile-by-email (clean-email email)])
  (->> (db/exec! conn [sql:profile-by-email (str/lower email)])
       (map decode-row)
       (first)))
|
||||
|
||||
@@ -450,13 +431,15 @@
|
||||
(into {} (filter (fn [[k _]] (simple-ident? k))) props))
|
||||
|
||||
(defn derive-password
|
||||
[{:keys [::wrk/executor]} password]
|
||||
[cfg password]
|
||||
(when password
|
||||
(px/invoke! executor (partial auth/derive-password password))))
|
||||
(-> (climit/configure cfg :derive-password)
|
||||
(climit/submit! (partial auth/derive-password password)))))
|
||||
|
||||
(defn verify-password
|
||||
[{:keys [::wrk/executor]} password password-data]
|
||||
(px/invoke! executor (partial auth/verify-password password password-data)))
|
||||
[cfg password password-data]
|
||||
(-> (climit/configure cfg :derive-password)
|
||||
(climit/submit! (partial auth/verify-password password password-data))))
|
||||
|
||||
(defn decode-row
|
||||
[{:keys [props] :as row}]
|
||||
|
||||
@@ -9,7 +9,6 @@
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.spec :as us]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as webhooks]
|
||||
[app.rpc :as-alias rpc]
|
||||
@@ -51,11 +50,11 @@
|
||||
is-owner (boolean (some :is-owner rows))
|
||||
is-admin (boolean (some :is-admin rows))
|
||||
can-edit (boolean (some :can-edit rows))]
|
||||
(when (seq rows)
|
||||
{:is-owner is-owner
|
||||
:is-admin (or is-owner is-admin)
|
||||
:can-edit (or is-owner is-admin can-edit)
|
||||
:can-read true})))
|
||||
(when (seq rows)
|
||||
{:is-owner is-owner
|
||||
:is-admin (or is-owner is-admin)
|
||||
:can-edit (or is-owner is-admin can-edit)
|
||||
:can-read true})))
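;; A tiny illustration (hypothetical rows, not from the penpot source) of how
;; the permission flags above compose: a single admin row implies edit and read
;; rights even when no row grants them explicitly.
(comment
  (let [rows     [{:is-owner false :is-admin true :can-edit false}]
        is-owner (boolean (some :is-owner rows))
        is-admin (boolean (some :is-admin rows))
        can-edit (boolean (some :can-edit rows))]
    (when (seq rows)
      {:is-owner is-owner
       :is-admin (or is-owner is-admin)
       :can-edit (or is-owner is-admin can-edit)
       :can-read true})))
;; => {:is-owner false, :is-admin true, :can-edit true, :can-read true}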
|
||||
|
||||
(def has-edit-permissions?
|
||||
(perms/make-edition-predicate-fn get-permissions))
|
||||
@@ -190,8 +189,8 @@
|
||||
{:project-id (:id project)
|
||||
:profile-id profile-id
|
||||
:team-id team-id
|
||||
:is-pinned false})
|
||||
(assoc project :is-pinned false))))
|
||||
:is-pinned true})
|
||||
(assoc project :is-pinned true))))
|
||||
|
||||
|
||||
;; --- MUTATION: Toggle Project Pin
|
||||
@@ -234,7 +233,7 @@
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id name] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(check-edition-permissions! conn profile-id id)
|
||||
(let [project (db/get-by-id conn :project id ::sql/for-update true)]
|
||||
(let [project (db/get-by-id conn :project id ::db/for-update? true)]
|
||||
(db/update! conn :project
|
||||
{:name name}
|
||||
{:id id})
|
||||
@@ -260,8 +259,7 @@
|
||||
(check-edition-permissions! conn profile-id id)
|
||||
(let [project (db/update! conn :project
|
||||
{:deleted-at (dt/now)}
|
||||
{:id id :is-default false}
|
||||
{::db/return-keys true})]
|
||||
{:id id :is-default false})]
|
||||
(rph/with-meta (rph/wrap)
|
||||
{::audit/props {:team-id (:team-id project)
|
||||
:name (:name project)
|
||||
|
||||
@@ -9,7 +9,6 @@
|
||||
[app.common.spec :as us]
|
||||
[app.db :as db]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.commands.files :refer [resolve-public-uri]]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.util.services :as sv]
|
||||
[clojure.spec.alpha :as s]))
|
||||
@@ -38,15 +37,12 @@
|
||||
)
|
||||
select distinct
|
||||
f.id,
|
||||
f.revn,
|
||||
f.project_id,
|
||||
f.created_at,
|
||||
f.modified_at,
|
||||
f.name,
|
||||
f.is_shared,
|
||||
ft.media_id
|
||||
f.is_shared
|
||||
from file as f
|
||||
left join file_thumbnail as ft on (ft.file_id = f.id and ft.revn = f.revn)
|
||||
inner join projects as pr on (f.project_id = pr.id)
|
||||
where f.name ilike ('%' || ? || '%')
|
||||
and (f.deleted_at is null or f.deleted_at > now())
|
||||
@@ -54,16 +50,10 @@
|
||||
|
||||
(defn search-files
|
||||
[conn profile-id team-id search-term]
|
||||
(->> (db/exec! conn [sql:search-files
|
||||
profile-id team-id
|
||||
profile-id team-id
|
||||
search-term])
|
||||
(mapv (fn [row]
|
||||
(if-let [media-id (:media-id row)]
|
||||
(-> row
|
||||
(dissoc :media-id)
|
||||
(assoc :thumbnail-uri (resolve-public-uri media-id)))
|
||||
(dissoc row :media-id))))))
|
||||
(db/exec! conn [sql:search-files
|
||||
profile-id team-id
|
||||
profile-id team-id
|
||||
search-term]))
|
||||
|
||||
(s/def ::team-id ::us/uuid)
|
||||
(s/def ::search-files ::us/string)
|
||||
|
||||
@@ -9,7 +9,6 @@
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.spec :as us]
|
||||
@@ -26,7 +25,6 @@
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.rpc.permissions :as perms]
|
||||
[app.rpc.quotes :as quotes]
|
||||
[app.setup :as-alias setup]
|
||||
[app.storage :as sto]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.services :as sv]
|
||||
@@ -57,11 +55,11 @@
|
||||
is-owner (boolean (some :is-owner rows))
|
||||
is-admin (boolean (some :is-admin rows))
|
||||
can-edit (boolean (some :can-edit rows))]
|
||||
(when (seq rows)
|
||||
{:is-owner is-owner
|
||||
:is-admin (or is-owner is-admin)
|
||||
:can-edit (or is-owner is-admin can-edit)
|
||||
:can-read true})))
|
||||
(when (seq rows)
|
||||
{:is-owner is-owner
|
||||
:is-admin (or is-owner is-admin)
|
||||
:can-edit (or is-owner is-admin can-edit)
|
||||
:can-read true})))
|
||||
|
||||
(def has-admin-permissions?
|
||||
(perms/make-admin-predicate-fn get-permissions))
|
||||
@@ -81,26 +79,22 @@
|
||||
(def check-read-permissions!
|
||||
(perms/make-check-fn has-read-permissions?))
|
||||
|
||||
(defn decode-row
|
||||
[{:keys [features] :as row}]
|
||||
(cond-> row
|
||||
(some? features) (assoc :features (db/decode-pgarray features #{}))))
|
||||
|
||||
;; --- Query: Teams
|
||||
|
||||
(declare get-teams)
|
||||
(declare retrieve-teams)
|
||||
|
||||
(def ^:private schema:get-teams
|
||||
[:map {:title "get-teams"}])
|
||||
(def counter (volatile! 0))
|
||||
|
||||
(s/def ::get-teams
|
||||
(s/keys :req [::rpc/profile-id]))
|
||||
|
||||
(sv/defmethod ::get-teams
|
||||
{::doc/added "1.17"
|
||||
::sm/params schema:get-teams}
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id] :as params}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(get-teams conn profile-id)))
|
||||
(retrieve-teams conn profile-id)))
|
||||
|
||||
(def sql:get-teams-with-permissions
|
||||
(def sql:teams
|
||||
"select t.*,
|
||||
tp.is_owner,
|
||||
tp.is_admin,
|
||||
@@ -125,77 +119,37 @@
|
||||
(dissoc :is-owner :is-admin :can-edit)
|
||||
(assoc :permissions permissions))))
|
||||
|
||||
(defn get-teams
|
||||
(defn retrieve-teams
|
||||
[conn profile-id]
|
||||
(let [profile (profile/get-profile conn profile-id)]
|
||||
(->> (db/exec! conn [sql:get-teams-with-permissions (:default-team-id profile) profile-id])
|
||||
(map decode-row)
|
||||
(map process-permissions)
|
||||
(vec))))
|
||||
(->> (db/exec! conn [sql:teams (:default-team-id profile) profile-id])
|
||||
(mapv process-permissions))))
|
||||
|
||||
;; --- Query: Team (by ID)
|
||||
|
||||
(declare get-team)
|
||||
(declare retrieve-team)
|
||||
|
||||
(def ^:private schema:get-team
|
||||
[:and
|
||||
[:map {:title "get-team"}
|
||||
[:id {:optional true} ::sm/uuid]
|
||||
[:file-id {:optional true} ::sm/uuid]]
|
||||
|
||||
[:fn (fn [params]
|
||||
(or (contains? params :id)
|
||||
(contains? params :file-id)))]])
|
||||
(s/def ::get-team
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::id]))
|
||||
|
||||
(sv/defmethod ::get-team
|
||||
{::doc/added "1.17"
|
||||
::sm/params schema:get-team}
|
||||
[{:keys [::db/pool]} {:keys [::rpc/profile-id id file-id]}]
|
||||
(get-team pool :profile-id profile-id :team-id id :file-id file-id))
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id]}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(retrieve-team conn profile-id id)))
|
||||
|
||||
(defn get-team
|
||||
[conn & {:keys [profile-id team-id project-id file-id] :as params}]
|
||||
|
||||
(dm/assert!
|
||||
"connection or pool is mandatory"
|
||||
(or (db/connection? conn)
|
||||
(db/pool? conn)))
|
||||
|
||||
(dm/assert!
|
||||
"profile-id is mandatory"
|
||||
(uuid? profile-id))
|
||||
|
||||
(let [{:keys [default-team-id] :as profile} (profile/get-profile conn profile-id)
|
||||
result (cond
|
||||
(some? team-id)
|
||||
(let [sql (str "WITH teams AS (" sql:get-teams-with-permissions
|
||||
") SELECT * FROM teams WHERE id=?")]
|
||||
(db/exec-one! conn [sql default-team-id profile-id team-id]))
|
||||
|
||||
(some? project-id)
|
||||
(let [sql (str "WITH teams AS (" sql:get-teams-with-permissions ") "
|
||||
"SELECT t.* FROM teams AS t "
|
||||
" JOIN project AS p ON (p.team_id = t.id) "
|
||||
" WHERE p.id=?")]
|
||||
(db/exec-one! conn [sql default-team-id profile-id project-id]))
|
||||
|
||||
(some? file-id)
|
||||
(let [sql (str "WITH teams AS (" sql:get-teams-with-permissions ") "
|
||||
"SELECT t.* FROM teams AS t "
|
||||
" JOIN project AS p ON (p.team_id = t.id) "
|
||||
" JOIN file AS f ON (f.project_id = p.id) "
|
||||
" WHERE f.id=?")]
|
||||
(db/exec-one! conn [sql default-team-id profile-id file-id]))
|
||||
|
||||
:else
|
||||
(throw (IllegalArgumentException. "invalid arguments")))]
|
||||
(defn retrieve-team
|
||||
[conn profile-id team-id]
|
||||
(let [profile (profile/get-profile conn profile-id)
|
||||
sql (str "WITH teams AS (" sql:teams ") SELECT * FROM teams WHERE id=?")
|
||||
result (db/exec-one! conn [sql (:default-team-id profile) profile-id team-id])]
|
||||
|
||||
(when-not result
|
||||
(ex/raise :type :not-found
|
||||
:code :team-does-not-exist))
|
||||
(-> result
|
||||
(decode-row)
|
||||
(process-permissions))))
|
||||
|
||||
(process-permissions result)))
|
||||
|
||||
;; --- Query: Team Members
|
||||
|
||||
@@ -211,48 +165,44 @@
|
||||
join profile as p on (p.id = tp.profile_id)
|
||||
where tp.team_id = ?")
|
||||
|
||||
(defn get-team-members
|
||||
(defn retrieve-team-members
|
||||
[conn team-id]
|
||||
(db/exec! conn [sql:team-members team-id]))
|
||||
|
||||
(def ^:private schema:get-team-memebrs
|
||||
[:map {:title "get-team-members"}
|
||||
[:team-id ::sm/uuid]])
|
||||
(s/def ::team-id ::us/uuid)
|
||||
(s/def ::get-team-members
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::team-id]))
|
||||
|
||||
(sv/defmethod ::get-team-members
|
||||
{::doc/added "1.17"
|
||||
::sm/params schema:get-team-memebrs}
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id]}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(check-read-permissions! conn profile-id team-id)
|
||||
(get-team-members conn team-id)))
|
||||
(retrieve-team-members conn team-id)))
|
||||
|
||||
|
||||
;; --- Query: Team Users
|
||||
|
||||
(declare get-users)
|
||||
(declare get-team-for-file)
|
||||
(declare retrieve-users)
|
||||
(declare retrieve-team-for-file)
|
||||
|
||||
(def ^:private schema:get-team-users
|
||||
[:and {:title "get-team-users"}
|
||||
[:map
|
||||
[:team-id {:optional true} ::sm/uuid]
|
||||
[:file-id {:optional true} ::sm/uuid]]
|
||||
[:fn #(or (contains? % :team-id)
|
||||
(contains? % :file-id))]])
|
||||
(s/def ::get-team-users
|
||||
(s/and (s/keys :req [::rpc/profile-id]
|
||||
:opt-un [::team-id ::file-id])
|
||||
#(or (:team-id %) (:file-id %))))
|
||||
|
||||
(sv/defmethod ::get-team-users
|
||||
"Get team users by team-id or by file-id"
|
||||
{::doc/added "1.17"
|
||||
::sm/params schema:get-team-users}
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id file-id]}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(if team-id
|
||||
(do
|
||||
(check-read-permissions! conn profile-id team-id)
|
||||
(get-users conn team-id))
|
||||
(let [{team-id :id} (get-team-for-file conn file-id)]
|
||||
(retrieve-users conn team-id))
|
||||
(let [{team-id :id} (retrieve-team-for-file conn file-id)]
|
||||
(check-read-permissions! conn profile-id team-id)
|
||||
(get-users conn team-id)))))
|
||||
(retrieve-users conn team-id)))))
|
||||
|
||||
;; This is a similar query to team members but can contain more data
|
||||
;; because some user can be explicitly added to project or file (not
|
||||
@@ -283,44 +233,44 @@
|
||||
join file as f on (p.id = f.project_id)
|
||||
where f.id = ?")
|
||||
|
||||
(defn get-users
|
||||
(defn retrieve-users
|
||||
[conn team-id]
|
||||
(db/exec! conn [sql:team-users team-id team-id team-id]))
|
||||
|
||||
(defn get-team-for-file
|
||||
(defn retrieve-team-for-file
|
||||
[conn file-id]
|
||||
(->> [sql:team-by-file file-id]
|
||||
(db/exec-one! conn)))
|
||||
|
||||
;; --- Query: Team Stats
|
||||
|
||||
(declare get-team-stats)
|
||||
(declare retrieve-team-stats)
|
||||
|
||||
(def ^:private schema:get-team-stats
|
||||
[:map {:title "get-team-stats"}
|
||||
[:team-id ::sm/uuid]])
|
||||
(s/def ::get-team-stats
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::team-id]))
|
||||
|
||||
(sv/defmethod ::get-team-stats
|
||||
{::doc/added "1.17"
|
||||
::sm/params schema:get-team-stats}
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id]}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(check-read-permissions! conn profile-id team-id)
|
||||
(get-team-stats conn team-id)))
|
||||
(retrieve-team-stats conn team-id)))
|
||||
|
||||
(def sql:team-stats
|
||||
"select (select count(*) from project where team_id = ?) as projects,
|
||||
(select count(*) from file as f join project as p on (p.id = f.project_id) where p.team_id = ?) as files")
|
||||
|
||||
(defn get-team-stats
|
||||
(defn retrieve-team-stats
|
||||
[conn team-id]
|
||||
(db/exec-one! conn [sql:team-stats team-id team-id]))


;; --- Query: Team invitations

(def ^:private schema:get-team-invitations
  [:map {:title "get-team-invitations"}
   [:team-id ::sm/uuid]])
(s/def ::get-team-invitations
  (s/keys :req [::rpc/profile-id]
          :req-un [::team-id]))

(def sql:team-invitations
  "select email_to as email, role, (valid_until < now()) as expired
@@ -332,8 +282,7 @@
       (mapv #(update % :role keyword))))

(sv/defmethod ::get-team-invitations
  {::doc/added "1.17"
   ::sm/params schema:get-team-invitations}
  {::doc/added "1.17"}
  [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id]}]
  (dm/with-open [conn (db/open pool)]
    (check-read-permissions! conn profile-id team-id)
@@ -348,32 +297,25 @@
(declare ^:private create-team-role)
(declare ^:private create-team-default-project)

(def ^:private schema:create-team
  [:map {:title "create-team"}
   [:name :string]
   [:features {:optional true} ::cfeat/features]
   [:id {:optional true} ::sm/uuid]])
(s/def ::create-team
  (s/keys :req [::rpc/profile-id]
          :req-un [::name]
          :opt-un [::id]))

(sv/defmethod ::create-team
  {::doc/added "1.17"
   ::sm/params schema:create-team}
  [cfg {:keys [::rpc/profile-id] :as params}]
  (db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
                    (quotes/check-quote! conn {::quotes/id ::quotes/teams-per-profile
                                               ::quotes/profile-id profile-id})
  {::doc/added "1.17"}
  [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id] :as params}]
  (db/with-atomic [conn pool]
    (quotes/check-quote! conn {::quotes/id ::quotes/teams-per-profile
                               ::quotes/profile-id profile-id})

    (let [features (-> (cfeat/get-enabled-features cf/flags)
                       (cfeat/check-client-features! (:features params)))]
      (create-team cfg (assoc params
                              :profile-id profile-id
                              :features features))))))
    (create-team conn (assoc params :profile-id profile-id))))

(defn create-team
  "This is a complete team creation process, it creates the team
  object and all related objects (default role and default project)."
  [cfg-or-conn params]
  (let [conn (db/get-connection cfg-or-conn)
        team (create-team* conn params)
  [conn params]
  (let [team (create-team* conn params)
        params (assoc params
                      :team-id (:id team)
                      :role :owner)
@@ -382,16 +324,13 @@
    (assoc team :default-project-id (:id project))))
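To make the docstring concrete, here is a rough sketch of the creation flow as it reads from the visible hunks; the team name is invented and the create-team-default-project call shape is not shown in this diff, so that line is a hypothetical reconstruction:

(comment
  (let [team    (create-team* conn {:name "Acme"})            ;; insert the team row
        params  {:profile-id profile-id
                 :team-id    (:id team)
                 :role       :owner}
        _       (create-team-role conn params)                ;; creator becomes owner
        project (create-team-default-project conn params)]    ;; hypothetical call shape
    (assoc team :default-project-id (:id project))))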

(defn- create-team*
  [conn {:keys [id name is-default features] :as params}]
  [conn {:keys [id name is-default] :as params}]
  (let [id (or id (uuid/next))
        is-default (if (boolean? is-default) is-default false)
        features (db/create-array conn "text" features)
        team (db/insert! conn :team
                         {:id id
                          :name name
                          :features features
                          :is-default is-default})]
    (decode-row team)))
        is-default (if (boolean? is-default) is-default false)]
    (db/insert! conn :team
                {:id id
                 :name name
                 :is-default is-default})))

(defn- create-team-role
  [conn {:keys [profile-id team-id role] :as params}]
@@ -417,16 +356,14 @@
;; namespace too.

(defn create-project
  [conn {:keys [id team-id name is-default created-at modified-at]}]
  [conn {:keys [id team-id name is-default] :as params}]
  (let [id (or id (uuid/next))
        is-default (if (boolean? is-default) is-default false)
        params {:id id
                :name name
                :team-id team-id
                :is-default is-default
                :created-at created-at
                :modified-at modified-at}]
    (db/insert! conn :project (d/without-nils params))))
        is-default (if (boolean? is-default) is-default false)]
    (db/insert! conn :project
                {:id id
                 :name name
                 :team-id team-id
                 :is-default is-default})))

(defn create-project-role
  [conn profile-id project-id role]
@@ -459,7 +396,7 @@
(defn leave-team
  [conn {:keys [profile-id id reassign-to]}]
  (let [perms (get-permissions conn profile-id id)
        members (get-team-members conn id)]
        members (retrieve-team-members conn id)]

    (cond
      ;; we can only proceed if there are more members in the team
@@ -543,15 +480,10 @@

(s/def ::team-id ::us/uuid)
(s/def ::member-id ::us/uuid)
(s/def ::role #{:owner :admin :editor})

;; Temporarily disabled viewer role
;; https://tree.taiga.io/project/penpot/issue/1083
(def valid-roles
  #{:owner :admin :editor #_:viewer})

(def schema:role
  [::sm/one-of valid-roles])
;; (s/def ::role #{:owner :admin :editor :viewer})
(s/def ::role #{:owner :admin :editor})
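Because valid-roles is a plain Clojure set, it also works as a membership predicate; a tiny sketch of how the new definition behaves while the viewer role stays commented out with #_:viewer:

(comment
  (valid-roles :editor)   ;; => :editor  (truthy, accepted)
  (valid-roles :admin)    ;; => :admin
  (valid-roles :viewer))  ;; => nil      (falsy, still disabled)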

(defn role->params
  [role]
@@ -568,7 +500,7 @@
  ;; convenience, if this becomes a bottleneck or problematic,
  ;; we will change it to more efficient fetch mechanisms.
  (let [perms (get-permissions conn profile-id team-id)
        members (get-team-members conn team-id)
        members (retrieve-team-members conn team-id)
        member (d/seek #(= member-id (:id %)) members)

        is-owner? (:is-owner perms)
@@ -664,7 +596,7 @@

(defn update-team-photo
  [{:keys [::db/pool ::sto/storage] :as cfg} {:keys [profile-id team-id] :as params}]
  (let [team (get-team pool :profile-id profile-id :team-id team-id)
  (let [team (retrieve-team pool profile-id team-id)
        photo (profile/upload-photo cfg params)]

    (db/with-atomic [conn pool]
@@ -676,8 +608,8 @@

      ;; Save new photo
      (db/update! pool :team
                  {:photo-id (:id photo)}
                  {:id team-id})
        {:photo-id (:id photo)}
        {:id team-id})

      (assoc team :photo-id (:id photo)))))

@@ -692,7 +624,7 @@

(defn- create-invitation-token
  [cfg {:keys [profile-id valid-until team-id member-id member-email role]}]
  (tokens/generate (::setup/props cfg)
  (tokens/generate (::main/props cfg)
                   {:iss :team-invitation
                    :exp valid-until
                    :profile-id profile-id
@@ -703,15 +635,14 @@

(defn- create-profile-identity-token
  [cfg profile]
  (tokens/generate (::setup/props cfg)
  (tokens/generate (::main/props cfg)
                   {:iss :profile-identity
                    :profile-id (:id profile)
                    :exp (dt/in-future {:days 30})}))

(defn- create-invitation
  [{:keys [::db/conn] :as cfg} {:keys [team profile role email] :as params}]
  (let [email (profile/clean-email email)
        member (profile/get-profile-by-email conn email)]
  (let [member (profile/get-profile-by-email conn email)]

    (when (and member (not (eml/allow-send-emails? conn member)))
      (ex/raise :type :validation
@@ -739,8 +670,7 @@
            (role->params role))]

    ;; Insert the invited member to the team
    (db/insert! conn :team-profile-rel params
                {::db/on-conflict-do-nothing? true})
    (db/insert! conn :team-profile-rel params {:on-conflict-do-nothing true})

    ;; If profile is not yet verified, mark it as verified because
    ;; accepting an invitation link serves as verification.
@@ -805,8 +735,7 @@
  (db/with-atomic [conn pool]
    (let [perms (get-permissions conn profile-id team-id)
          profile (db/get-by-id conn :profile profile-id)
          team (db/get-by-id conn :team team-id)
          emails (into #{} (map profile/clean-email) emails)]
          team (db/get-by-id conn :team team-id)]

      (run! (partial quotes/check-quote! conn)
            (list {::quotes/id ::quotes/invitations-per-team
@@ -837,7 +766,7 @@
           ;; We don't re-send invitation to already existing members
           (remove (partial contains? members))
           (map (fn [email]
                  {:email email
                  {:email (str/lower email)
                   :team team
                   :profile profile
                   :role role}))
@@ -855,37 +784,21 @@
  (s/merge ::create-team
           (s/keys :req-un [::emails ::role])))


(def ^:private schema:create-team-with-invitations
  [:map {:title "create-team-with-invitations"}
   [:name :string]
   [:features {:optional true} ::cfeat/features]
   [:id {:optional true} ::sm/uuid]
   [:emails ::sm/set-of-emails]
   [:role schema:role]])

(sv/defmethod ::create-team-with-invitations
  {::doc/added "1.17"
   ::sm/params schema:create-team-with-invitations}
  {::doc/added "1.17"}
  [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id emails role] :as params}]
  (db/with-atomic [conn pool]

    (let [features (-> (cfeat/get-enabled-features cf/flags)
                       (cfeat/check-client-features! (:features params)))
          params (assoc params
                        :profile-id profile-id
                        :features features)
          cfg (assoc cfg ::db/conn conn)
          team (create-team cfg params)
    (let [params (assoc params :profile-id profile-id)
          team (create-team conn params)
          profile (db/get-by-id conn :profile profile-id)
          emails (into #{} (map profile/clean-email) emails)]
          cfg (assoc cfg ::db/conn conn)]

      ;; Create invitations for all provided emails.
      (->> emails
           (map (fn [email]
                  {:team team
                   :profile profile
                   :email email
                   :email (str/lower email)
                   :role role}))
           (run! (partial create-invitation cfg)))

@@ -922,20 +835,17 @@
  {::doc/added "1.17"}
  [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id email] :as params}]
  (check-read-permissions! pool profile-id team-id)
  (let [email (profile/clean-email email)
        invit (-> (db/get pool :team-invitation
  (let [invit (-> (db/get pool :team-invitation
                          {:team-id team-id
                           :email-to email})
                           :email-to (str/lower email)})
                  (update :role keyword))

        member (profile/get-profile-by-email pool (:email-to invit))
        token (create-invitation-token cfg {:team-id (:team-id invit)
                                            :profile-id profile-id
                                            :valid-until (:valid-until invit)
                                            :role (:role invit)
                                            :member-id (:id member)
                                            :member-email (or (:email member)
                                                              (profile/clean-email (:email-to invit)))})]
                                            :member-email (or (:email member) (:email-to invit))})]
    {:token token}))

;; --- Mutation: Update invitation role
@@ -956,7 +866,7 @@

    (db/update! conn :team-invitation
                {:role (name role) :updated-at (dt/now)}
                {:team-id team-id :email-to (profile/clean-email email)})
                {:team-id team-id :email-to (str/lower email)})
    nil)))

;; --- Mutation: Delete invitation
@@ -977,6 +887,5 @@

    (let [invitation (db/delete! conn :team-invitation
                                 {:team-id team-id
                                  :email-to (profile/clean-email email)}
                                 {::db/return-keys true})]
                                  :email-to (str/lower email)})]
      (rph/wrap nil {::audit/props {:invitation-id (:id invitation)}})))))

Some files were not shown because too many files have changed in this diff.