mirror of
https://github.com/penpot/penpot.git
synced 2026-01-07 22:09:05 -05:00
Compare commits
10 Commits
hiru-trans
...
hiru-trans
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
8f4fcfe5eb | ||
|
|
7ba15b9a3a | ||
|
|
1cba7346fd | ||
|
|
f1bee5edf5 | ||
|
|
835ed342f8 | ||
|
|
de4c7762ad | ||
|
|
3112d04a99 | ||
|
|
3b26d05fed | ||
|
|
0e88398dc2 | ||
|
|
5d3eadc6db |
@@ -3,19 +3,19 @@ jobs:
|
||||
build:
|
||||
docker:
|
||||
- image: penpotapp/devenv:latest
|
||||
- image: cimg/postgres:14.5
|
||||
- image: cimg/postgres:13.5
|
||||
environment:
|
||||
POSTGRES_USER: penpot_test
|
||||
POSTGRES_PASSWORD: penpot_test
|
||||
POSTGRES_DB: penpot_test
|
||||
- image: cimg/redis:7.0.5
|
||||
- image: cimg/redis:6.2.6
|
||||
|
||||
working_directory: ~/repo
|
||||
resource_class: large
|
||||
|
||||
environment:
|
||||
# Customize the JVM maximum heap limit
|
||||
JVM_OPTS: -Xmx4g
|
||||
JVM_OPTS: -Xmx1g
|
||||
|
||||
steps:
|
||||
- checkout
|
||||
@@ -29,13 +29,6 @@ jobs:
|
||||
|
||||
- run: cd .clj-kondo && cat config.edn
|
||||
|
||||
- run:
|
||||
name: frontend styles prettier
|
||||
working_directory: "./frontend"
|
||||
command: |
|
||||
yarn install
|
||||
yarn run lint-scss
|
||||
|
||||
- run:
|
||||
name: common lint
|
||||
working_directory: "./common"
|
||||
@@ -50,6 +43,13 @@ jobs:
|
||||
clj-kondo --version
|
||||
clj-kondo --parallel --lint src/
|
||||
|
||||
- run:
|
||||
name: frontend styles prettier
|
||||
working_directory: "./frontend"
|
||||
command: |
|
||||
yarn install
|
||||
yarn run lint-scss
|
||||
|
||||
- run:
|
||||
name: backend lint
|
||||
working_directory: "./backend"
|
||||
@@ -57,42 +57,47 @@ jobs:
|
||||
clj-kondo --version
|
||||
clj-kondo --parallel --lint src/
|
||||
|
||||
- run:
|
||||
working_directory: "./common"
|
||||
name: common tests
|
||||
command: |
|
||||
yarn install
|
||||
yarn test
|
||||
clojure -X:dev:test :patterns '["common-tests.*-test"]'
|
||||
|
||||
environment:
|
||||
PATH: /usr/local/nodejs/bin/:/usr/local/bin:/bin:/usr/bin
|
||||
JVM_OPTS: -Xmx4g
|
||||
NODE_OPTIONS: --max-old-space-size=4096
|
||||
|
||||
# run backend test
|
||||
- run:
|
||||
name: backend test
|
||||
working_directory: "./backend"
|
||||
command: |
|
||||
clojure -X:dev:test :patterns '["backend-tests.*-test"]'
|
||||
|
||||
command: "clojure -X:dev:test"
|
||||
environment:
|
||||
PENPOT_TEST_DATABASE_URI: "postgresql://localhost/penpot_test"
|
||||
PENPOT_TEST_DATABASE_USERNAME: penpot_test
|
||||
PENPOT_TEST_DATABASE_PASSWORD: penpot_test
|
||||
PENPOT_TEST_REDIS_URI: "redis://localhost/1"
|
||||
JVM_OPTS: -Xmx4g
|
||||
|
||||
- run:
|
||||
name: frontend tests
|
||||
working_directory: "./frontend"
|
||||
command: |
|
||||
yarn install
|
||||
yarn test
|
||||
clojure -M:dev:shadow-cljs compile test
|
||||
node target/tests.js
|
||||
|
||||
environment:
|
||||
PATH: /usr/local/nodejs/bin/:/usr/local/bin:/bin:/usr/bin
|
||||
|
||||
- run:
|
||||
working_directory: "./common"
|
||||
name: common tests (cljs)
|
||||
command: |
|
||||
yarn install
|
||||
yarn run compile-test
|
||||
node target/test.js
|
||||
|
||||
environment:
|
||||
PATH: /usr/local/nodejs/bin/:/usr/local/bin:/bin:/usr/bin
|
||||
|
||||
- run:
|
||||
working_directory: "./common"
|
||||
name: common tests (clj)
|
||||
command: |
|
||||
clojure -X:dev:test
|
||||
|
||||
environment:
|
||||
PATH: /usr/local/nodejs/bin/:/usr/local/bin:/bin:/usr/bin
|
||||
NODE_OPTIONS: --max-old-space-size=4096
|
||||
|
||||
- save_cache:
|
||||
paths:
|
||||
|
||||
@@ -7,7 +7,6 @@
|
||||
app.common.data/export clojure.core/def
|
||||
app.db/with-atomic clojure.core/with-open
|
||||
app.common.data.macros/get-in clojure.core/get-in
|
||||
app.common.data.macros/with-open clojure.core/with-open
|
||||
app.common.data.macros/select-keys clojure.core/select-keys
|
||||
app.common.logging/with-context clojure.core/do}
|
||||
|
||||
|
||||
34
.gitignore
vendored
34
.gitignore
vendored
@@ -1,59 +1,55 @@
|
||||
*-init.clj
|
||||
*.jar
|
||||
*.orig
|
||||
*.penpot
|
||||
*.orig
|
||||
.calva
|
||||
.clj-kondo
|
||||
.cpcache
|
||||
.lein-deps-sum
|
||||
.lein-failures
|
||||
.lein-plugins/
|
||||
.lein-repl-history
|
||||
.lsp
|
||||
.nrepl-port
|
||||
.nyc_output
|
||||
.rebel_readline_history
|
||||
.repl
|
||||
.shadow-cljs
|
||||
/*.jpg
|
||||
/*.md
|
||||
/*.png
|
||||
/*.sql
|
||||
/*.txt
|
||||
/*.yml
|
||||
/*.zip
|
||||
/.clj-kondo/.cache
|
||||
/_dump
|
||||
/backend/*.md
|
||||
/backend/*.sql
|
||||
/backend/*.txt
|
||||
/backend/-
|
||||
/backend/assets/
|
||||
/backend/builtin-templates
|
||||
/backend/dist/
|
||||
/backend/logs/
|
||||
/backend/resources/public/assets
|
||||
/backend/resources/public/media
|
||||
/backend/target/
|
||||
/backend/builtin-templates
|
||||
/bundle*
|
||||
/cd.md
|
||||
/clj-profiler/
|
||||
/common/.shadow-cljs
|
||||
/common/coverage
|
||||
/common/target
|
||||
/deploy
|
||||
/docker/images/bundle*
|
||||
/exporter/.shadow-cljs
|
||||
/exporter/target
|
||||
/frontend/.shadow-cljs
|
||||
/frontend/package-lock.json
|
||||
/frontend/cypress/videos/*/
|
||||
/frontend/cypress/fixtures/validuser.json
|
||||
/frontend/cypress/videos/*/
|
||||
/frontend/cypress/videos/*/
|
||||
/frontend/dist/
|
||||
/frontend/npm-debug.log
|
||||
/frontend/out/
|
||||
/frontend/package-lock.json
|
||||
/frontend/resources/fonts/experiments
|
||||
/frontend/resources/public/*
|
||||
/frontend/target/
|
||||
/other/
|
||||
/scripts/
|
||||
/frontend/cypress/videos/*/
|
||||
/media
|
||||
/telemetry/
|
||||
/tmp/
|
||||
/vendor/**/target
|
||||
/vendor/svgclean/bundle*.js
|
||||
/web
|
||||
clj-profiler/
|
||||
figwheel_server.log
|
||||
node_modules
|
||||
|
||||
42
CHANGES.md
42
CHANGES.md
@@ -4,12 +4,6 @@
|
||||
|
||||
### :boom: Breaking changes & Deprecations
|
||||
### :sparkles: New features
|
||||
|
||||
- Adds layout flex functionality for boards
|
||||
- Better overlays interactions on boards inside boards [Taiga #4386](https://tree.taiga.io/project/penpot/us/4386)
|
||||
- Show board miniature in manual overlay setting [Taiga #4475](https://tree.taiga.io/project/penpot/issue/4475)
|
||||
- Handoff visual improvements [Taiga #3124](https://tree.taiga.io/project/penpot/us/3124)
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Add title to color bullets [Taiga #4218](https://tree.taiga.io/project/penpot/task/4218)
|
||||
@@ -17,35 +11,13 @@
|
||||
- Fix shortcut texts alignment [Taiga #4275](https://tree.taiga.io/project/penpot/issue/4275)
|
||||
- Fix some texts and a typo [Taiga #4215](https://tree.taiga.io/project/penpot/issue/4215)
|
||||
- Fix twitter support account link [Taiga #4279](https://tree.taiga.io/project/penpot/issue/4279)
|
||||
- Fix lang autodetect issue [Taiga #4277](https://tree.taiga.io/project/penpot/issue/4277)
|
||||
- Fix adding an extra page on import [Taiga #4543](https://tree.taiga.io/project/penpot/task/4543)
|
||||
|
||||
|
||||
### :arrow_up: Deps updates
|
||||
### :heart: Community contributions by (Thank you!)
|
||||
## 1.16.2-beta
|
||||
|
||||
## 1.16.1-beta
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Fix unexpected exception related to default nudge value
|
||||
- Fix firefox changing layer color type is not applied [Taiga #4292](https://tree.taiga.io/project/penpot/issue/4292)
|
||||
- Fix justify alignes text left [Taiga #4322](https://tree.taiga.io/project/penpot/issue/4322)
|
||||
- Fix text out of borders with "auto width" and center align [Taiga #4308](https://tree.taiga.io/project/penpot/issue/4308)
|
||||
- Fix wrong validation text after interaction with 2 and more files [Taiga #4276](https://tree.taiga.io/project/penpot/issue/4276)
|
||||
- Fix auto-width for texts can make text appear stretched [Github #2482](https://github.com/penpot/penpot/issues/2482)
|
||||
- Fix boards name do not disappear in focus mode [#4272](https://tree.taiga.io/project/penpot/issue/4272)
|
||||
- Fix wrong email in the info message at change email [Taiga #4274](https://tree.taiga.io/project/penpot/issue/4274)
|
||||
- Fix transform to path RMB menu item is not relevant if shape is already path [Taiga #4302](https://tree.taiga.io/project/penpot/issue/4302)
|
||||
- Fix join nodes icon is active when 2 already joined nodes are selected [Taiga #4370](https://tree.taiga.io/project/penpot/issue/4370)
|
||||
- Fix path nodes panel. "To curve" and "To corner" icons are active if node is already curved/cornered [Taiga #4371](https://tree.taiga.io/project/penpot/issue/4371)
|
||||
- Fix displaying comments settings are not applied via "Comments" menu drop-down on the top navbar on view mode [Taiga #4389](https://tree.taiga.io/project/penpot/issue/4389)
|
||||
- Fix bad behaviour on hovering and click nested artboards [Taiga #4018](https://tree.taiga.io/project/penpot/issue/4018) and [Taiga #4269](https://tree.taiga.io/project/penpot/us/4269)
|
||||
- Fix lang autodetect issue [Taiga #4277](https://tree.taiga.io/project/penpot/issue/4277)
|
||||
- Fix colorpicker does not close upon switching to Dashboard [Taiga #4408](https://tree.taiga.io/project/penpot/issue/4408)
|
||||
- Fix problem with auto-width/auto-height + lock-proportions
|
||||
|
||||
## 1.16.0-beta
|
||||
## :rocket: 1.16.0-beta
|
||||
|
||||
### :boom: Breaking changes & Deprecations
|
||||
|
||||
@@ -86,10 +58,6 @@
|
||||
- Fix ungroup does not work for typographies [Taiga #4195](https://tree.taiga.io/project/penpot/issue/4195)
|
||||
- Fix inviting to non existing users can fail [Taiga #4108](https://tree.taiga.io/project/penpot/issue/4108)
|
||||
- Fix components marked as touched when moved [Taiga #4061](https://tree.taiga.io/project/penpot/task/4061)
|
||||
- Fix boards grouped shouldn't show the title [Taiga #4251](https://tree.taiga.io/project/penpot/issue/4251)
|
||||
- Fix gradient handlers are under resize handlers[Taiga #4298](https://tree.taiga.io/project/penpot/issue/4298)
|
||||
- Fix grid not syncing immediately in multiuser [Taiga #4339](https://tree.taiga.io/project/penpot/issue/4339)
|
||||
- Fix custom font upload fails silently for unsupported formats [Taiga #4279](https://tree.taiga.io/project/penpot/issue/4280)
|
||||
|
||||
### :arrow_up: Deps updates
|
||||
### :heart: Community contributions by (Thank you!)
|
||||
@@ -103,17 +71,11 @@
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Fix artboard border radius [Taiga #4291](https://tree.taiga.io/project/penpot/issue/4291)
|
||||
- Fix copied & pasted layer is not visible [Taiga #4283](https://tree.taiga.io/project/penpot/issue/4283)
|
||||
- Fix notification to newsletter is shown in all cases [Taiga #4367](https://tree.taiga.io/project/penpot/issue/4367)
|
||||
- Fix comments section is not scrolling by mouse wheel [Taiga #4305](https://tree.taiga.io/project/penpot/issue/4305)
|
||||
- Fix justify alignes text left [Taiga #4322](https://tree.taiga.io/project/penpot/issue/4322)
|
||||
- Fix text out of borders with "auto width" and center align [Taiga #4308](https://tree.taiga.io/project/penpot/issue/4308)
|
||||
|
||||
## 1.15.4-beta
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Fix social buttons in register form [Taiga #4320](https://tree.taiga.io/project/penpot/issue/4320)
|
||||
- Remove gitter information from feedback page [Taiga #4157](https://tree.taiga.io/project/penpot/issue/4157)
|
||||
- Fix overlay remains open on frame change [Taiga #4066](https://tree.taiga.io/project/penpot/issue/4066)
|
||||
- Fix toggle overlay position [Taiga #4091](https://tree.taiga.io/project/penpot/issue/4091)
|
||||
|
||||
@@ -16,18 +16,19 @@
|
||||
:exclusions [org.eclipse.jetty/jetty-server
|
||||
org.eclipse.jetty/jetty-servlet]}
|
||||
|
||||
|
||||
io.prometheus/simpleclient_httpserver {:mvn/version "0.16.0"}
|
||||
|
||||
io.lettuce/lettuce-core {:mvn/version "6.2.1.RELEASE"}
|
||||
io.lettuce/lettuce-core {:mvn/version "6.2.0.RELEASE"}
|
||||
java-http-clj/java-http-clj {:mvn/version "0.4.3"}
|
||||
|
||||
funcool/yetti
|
||||
{:git/tag "v9.11"
|
||||
:git/sha "6f9197a"
|
||||
{:git/tag "v9.9"
|
||||
:git/sha "f0a455d"
|
||||
:git/url "https://github.com/funcool/yetti.git"
|
||||
:exclusions [org.slf4j/slf4j-api]}
|
||||
|
||||
com.github.seancorfield/next.jdbc {:mvn/version "1.3.834"}
|
||||
com.github.seancorfield/next.jdbc {:mvn/version "1.3.828"}
|
||||
metosin/reitit-core {:mvn/version "0.5.18"}
|
||||
org.postgresql/postgresql {:mvn/version "42.5.0"}
|
||||
com.zaxxer/HikariCP {:mvn/version "5.0.1"}
|
||||
@@ -37,8 +38,6 @@
|
||||
buddy/buddy-hashers {:mvn/version "1.8.158"}
|
||||
buddy/buddy-sign {:mvn/version "3.4.333"}
|
||||
|
||||
com.github.ben-manes.caffeine/caffeine {:mvn/version "3.1.1"}
|
||||
|
||||
org.jsoup/jsoup {:mvn/version "1.15.1"}
|
||||
org.im4java/im4java
|
||||
{:git/tag "1.4.0-penpot-2"
|
||||
@@ -63,6 +62,7 @@
|
||||
{:extra-deps
|
||||
{com.bhauman/rebel-readline {:mvn/version "RELEASE"}
|
||||
org.clojure/tools.namespace {:mvn/version "RELEASE"}
|
||||
org.clojure/test.check {:mvn/version "RELEASE"}
|
||||
clojure-humanize/clojure-humanize {:mvn/version "0.2.2"}
|
||||
org.clojure/data.csv {:mvn/version "RELEASE"}
|
||||
com.clojure-goes-fast/clj-async-profiler {:mvn/version "RELEASE"}
|
||||
|
||||
@@ -12,12 +12,9 @@
|
||||
[app.common.logging :as l]
|
||||
[app.common.perf :as perf]
|
||||
[app.common.pprint :as pp]
|
||||
[app.common.spec :as us]
|
||||
[app.common.transit :as t]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cfg]
|
||||
[app.main :as main]
|
||||
[app.srepl.helpers]
|
||||
[app.srepl.main :as srepl]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.fressian :as fres]
|
||||
@@ -29,13 +26,10 @@
|
||||
[clojure.pprint :refer [pprint print-table]]
|
||||
[clojure.repl :refer :all]
|
||||
[clojure.spec.alpha :as s]
|
||||
[clojure.stacktrace :as trace]
|
||||
[clojure.spec.gen.alpha :as sgen]
|
||||
[clojure.test :as test]
|
||||
[clojure.test.check.generators :as gen]
|
||||
[clojure.tools.namespace.repl :as repl]
|
||||
[clojure.walk :refer [macroexpand-all]]
|
||||
[criterium.core :as crit]
|
||||
[cuerdas.core :as str]
|
||||
[datoteka.core]
|
||||
[integrant.core :as ig]))
|
||||
|
||||
@@ -48,24 +42,24 @@
|
||||
|
||||
(defmacro run-quick-bench
|
||||
[& exprs]
|
||||
`(crit/with-progress-reporting (crit/quick-bench (do ~@exprs) :verbose)))
|
||||
`(with-progress-reporting (quick-bench (do ~@exprs) :verbose)))
|
||||
|
||||
(defmacro run-quick-bench'
|
||||
[& exprs]
|
||||
`(crit/quick-bench (do ~@exprs)))
|
||||
`(quick-bench (do ~@exprs)))
|
||||
|
||||
(defmacro run-bench
|
||||
[& exprs]
|
||||
`(crit/with-progress-reporting (crit/bench (do ~@exprs) :verbose)))
|
||||
`(with-progress-reporting (bench (do ~@exprs) :verbose)))
|
||||
|
||||
(defmacro run-bench'
|
||||
[& exprs]
|
||||
`(crit/bench (do ~@exprs)))
|
||||
`(bench (do ~@exprs)))
|
||||
|
||||
;; --- Development Stuff
|
||||
|
||||
(defn- run-tests
|
||||
([] (run-tests #"^backend-tests.*-test$"))
|
||||
([] (run-tests #"^app.*-test$"))
|
||||
([o]
|
||||
(repl/refresh)
|
||||
(cond
|
||||
@@ -80,15 +74,12 @@
|
||||
|
||||
(defn- start
|
||||
[]
|
||||
(try
|
||||
(alter-var-root #'system (fn [sys]
|
||||
(when sys (ig/halt! sys))
|
||||
(-> (merge main/system-config main/worker-config)
|
||||
(ig/prep)
|
||||
(ig/init))))
|
||||
:started
|
||||
(catch Throwable cause
|
||||
(ex/print-throwable cause))))
|
||||
(alter-var-root #'system (fn [sys]
|
||||
(when sys (ig/halt! sys))
|
||||
(-> (merge main/system-config main/worker-config)
|
||||
(ig/prep)
|
||||
(ig/init))))
|
||||
:started)
|
||||
|
||||
(defn- stop
|
||||
[]
|
||||
@@ -109,20 +100,12 @@
|
||||
|
||||
(defn compression-bench
|
||||
[data]
|
||||
(let [humanize (fn [v] (hum/filesize v :binary true :format " %.4f "))
|
||||
v1 (time (humanize (alength (blob/encode data {:version 1}))))
|
||||
v3 (time (humanize (alength (blob/encode data {:version 3}))))
|
||||
v4 (time (humanize (alength (blob/encode data {:version 4}))))
|
||||
v5 (time (humanize (alength (blob/encode data {:version 5}))))
|
||||
v6 (time (humanize (alength (blob/encode data {:version 6}))))
|
||||
]
|
||||
(let [humanize (fn [v] (hum/filesize v :binary true :format " %.4f "))]
|
||||
(print-table
|
||||
[{
|
||||
:v1 v1
|
||||
:v3 v3
|
||||
:v4 v4
|
||||
:v5 v5
|
||||
:v6 v6
|
||||
[{:v1 (humanize (alength (blob/encode data {:version 1})))
|
||||
:v2 (humanize (alength (blob/encode data {:version 2})))
|
||||
:v3 (humanize (alength (blob/encode data {:version 3})))
|
||||
:v4 (humanize (alength (blob/encode data {:version 4})))
|
||||
}])))
|
||||
|
||||
(defonce debug-tap
|
||||
|
||||
@@ -22,10 +22,6 @@
|
||||
:name "Circum Icons pack"
|
||||
:thumbnail-uri "https://penpot.app/images/libraries/cover-circum.jpg"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/binary-files/CircumIcons.penpot"}
|
||||
{:id "coreui"
|
||||
:name "CoreUI"
|
||||
:thumbnail-uri "https://penpot.app/images/libraries/cover-coreui.jpg"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/main/CoreUI%20DesignSystem%20(DEMO).penpot"}
|
||||
{:id "whiteboarding-kit"
|
||||
:name "Whiteboarding Kit"
|
||||
:thumbnail-uri "https://penpot.app/images/libraries/cover-whiteboards.jpg"
|
||||
|
||||
@@ -11,8 +11,7 @@ penpot - error list
|
||||
<main class="horizontal-list">
|
||||
<ul>
|
||||
{% for item in items %}
|
||||
<li><a class="date" href="/dbg/error/{{item.id}}">{{item.created-at}}</a>
|
||||
<span class="title">{{item.hint|abbreviate:150}}</span></li>
|
||||
<li><a href="/dbg/error/{{item.id}}">{{item.created-at}}</a></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
</main>
|
||||
|
||||
@@ -137,6 +137,8 @@ nav > div:not(:last-child) {
|
||||
margin: 0px;
|
||||
padding: 0px;
|
||||
flex-direction: column;
|
||||
flex-wrap: wrap;
|
||||
height: calc(100vh - 75px);
|
||||
justify-content: flex-start;
|
||||
}
|
||||
|
||||
@@ -149,31 +151,19 @@ nav > div:not(:last-child) {
|
||||
margin: 0px 20px;
|
||||
cursor: pointer;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
border-radius: 3px;
|
||||
}
|
||||
|
||||
|
||||
|
||||
.horizontal-list li:hover {
|
||||
background-color: #e9e9e9;
|
||||
}
|
||||
|
||||
.horizontal-list li > *:not(:last-child) {
|
||||
margin-right: 10px;
|
||||
}
|
||||
|
||||
.horizontal-list li > a {
|
||||
text-decoration: none;
|
||||
color: inherit;
|
||||
}
|
||||
|
||||
.horizontal-list li > .date {
|
||||
font-weight: 200;
|
||||
color: #686868;
|
||||
min-width: 210px;
|
||||
}
|
||||
|
||||
|
||||
form .row {
|
||||
padding: 5px 0;
|
||||
}
|
||||
|
||||
@@ -1,9 +0,0 @@
|
||||
;; Example climit.edn file
|
||||
;; Required: concurrency
|
||||
;; Optional: queue-size, ommited means Integer/MAX_VALUE
|
||||
{:update-file {:concurrency 1 :queue-size 3}
|
||||
:auth {:concurrency 128}
|
||||
:process-font {:concurrency 4 :queue-size 32}
|
||||
:process-image {:concurrency 8 :queue-size 32}
|
||||
:push-audit-events
|
||||
{:concurrency 1 :queue-size 3}}
|
||||
@@ -2,13 +2,11 @@
|
||||
<Configuration status="info" monitorInterval="30">
|
||||
<Appenders>
|
||||
<Console name="console" target="SYSTEM_OUT">
|
||||
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
|
||||
alwaysWriteExceptions="false" />
|
||||
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"/>
|
||||
</Console>
|
||||
|
||||
<RollingFile name="main" fileName="logs/main.log" filePattern="logs/main-%i.log">
|
||||
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
|
||||
alwaysWriteExceptions="false" />
|
||||
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"/>
|
||||
<Policies>
|
||||
<SizeBasedTriggeringPolicy size="50M"/>
|
||||
</Policies>
|
||||
@@ -34,8 +32,6 @@
|
||||
<Logger name="app.util.websocket" level="info" />
|
||||
<Logger name="app.redis" level="info" />
|
||||
<Logger name="app.rpc.rlimit" level="info" />
|
||||
<Logger name="app.rpc.climit" level="info" />
|
||||
<Logger name="app.rpc.mutations.files" level="info" />
|
||||
|
||||
<Logger name="app.cli" level="debug" additivity="false">
|
||||
<AppenderRef ref="console"/>
|
||||
|
||||
@@ -2,8 +2,7 @@
|
||||
<Configuration status="info" monitorInterval="60">
|
||||
<Appenders>
|
||||
<Console name="console" target="SYSTEM_OUT">
|
||||
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
|
||||
alwaysWriteExceptions="false" />
|
||||
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"/>
|
||||
</Console>
|
||||
</Appenders>
|
||||
|
||||
|
||||
@@ -1,10 +1,6 @@
|
||||
;; Example rlimit.edn file
|
||||
^{:refresh "30s"}
|
||||
{:default
|
||||
[[:default :window "200000/h"]]
|
||||
|
||||
#{:query/teams}
|
||||
[[:burst :bucket "5/1/5s"]]
|
||||
|
||||
#{:query/profile}
|
||||
[[:burst :bucket "100/60/1m"]]}
|
||||
|
||||
@@ -1,4 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
set -x
|
||||
|
||||
jcmd |grep "rebel" |sed -nE 's/^([0-9]+).*$/\1/p' | xargs kill -9
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
export PENPOT_HOST=devenv
|
||||
export PENPOT_TENANT=dev
|
||||
export PENPOT_FLAGS="$PENPOT_FLAGS enable-backend-asserts enable-audit-log enable-transit-readable-response enable-demo-users disable-secure-session-cookies enable-smtp"
|
||||
export PENPOT_FLAGS="$PENPOT_FLAGS enable-backend-asserts enable-audit-log enable-transit-readable-response enable-demo-users disable-secure-session-cookies enable-rpc-rate-limit enable-warn-rpc-rate-limits enable-smtp"
|
||||
|
||||
# export PENPOT_DATABASE_URI="postgresql://172.17.0.1:5432/penpot"
|
||||
# export PENPOT_DATABASE_USERNAME="penpot"
|
||||
@@ -16,6 +16,8 @@ export PENPOT_FLAGS="$PENPOT_FLAGS enable-backend-asserts enable-audit-log enabl
|
||||
# export PENPOT_LOGGERS_LOKI_URI="http://172.17.0.1:3100/loki/api/v1/push"
|
||||
# export PENPOT_AUDIT_LOG_ARCHIVE_URI="http://localhost:6070/api/audit"
|
||||
|
||||
export PENPOT_DEFAULT_RATE_LIMIT="default,window,10000/h"
|
||||
|
||||
# Initialize MINIO config
|
||||
mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin
|
||||
mc admin user add penpot-s3 penpot-devenv penpot-devenv
|
||||
@@ -29,7 +31,7 @@ export PENPOT_STORAGE_ASSETS_S3_ENDPOINT=http://minio:9000
|
||||
export PENPOT_STORAGE_ASSETS_S3_BUCKET=penpot
|
||||
|
||||
export OPTIONS="
|
||||
-A:jmx-remote -A:dev \
|
||||
-A:dev:jmx-remote \
|
||||
-J-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-J-Dlog4j2.configurationFile=log4j2-devenv.xml \
|
||||
-J-XX:+UseG1GC \
|
||||
|
||||
@@ -1,21 +1,19 @@
|
||||
#!/usr/bin/env bash
|
||||
set +e
|
||||
JAVA_CMD=$(type -p java)
|
||||
|
||||
set -e
|
||||
if [[ ! -n "$JAVA_CMD" ]]; then
|
||||
if [[ -n "$JAVA_HOME" ]] && [[ -x "$JAVA_HOME/bin/java" ]]; then
|
||||
JAVA_CMD="$JAVA_HOME/bin/java"
|
||||
else
|
||||
set +e
|
||||
JAVA_CMD=$(type -p java)
|
||||
set -e
|
||||
if [[ ! -n "$JAVA_CMD" ]]; then
|
||||
>&2 echo "Couldn't find 'java'. Please set JAVA_HOME."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
if [[ -n "$JAVA_HOME" ]] && [[ -x "$JAVA_HOME/bin/java" ]]; then
|
||||
JAVA_CMD="$JAVA_HOME/bin/java"
|
||||
else
|
||||
>&2 echo "Couldn't find 'java'. Please set JAVA_HOME."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -f ./environ ]; then
|
||||
source ./environ
|
||||
source ./environ
|
||||
fi
|
||||
|
||||
set -x
|
||||
|
||||
@@ -7,7 +7,6 @@
|
||||
(ns app.auth.oidc
|
||||
"OIDC client implementation."
|
||||
(:require
|
||||
[app.auth.oidc.providers :as-alias providers]
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
@@ -18,9 +17,7 @@
|
||||
[app.db :as db]
|
||||
[app.http.client :as http]
|
||||
[app.http.middleware :as hmw]
|
||||
[app.http.session :as session]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.main :as-alias main]
|
||||
[app.rpc.queries.profile :as profile]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.json :as json]
|
||||
@@ -50,11 +47,9 @@
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn- discover-oidc-config
|
||||
[cfg {:keys [::base-uri] :as opts}]
|
||||
[{:keys [http-client]} {:keys [base-uri] :as opts}]
|
||||
(let [discovery-uri (u/join base-uri ".well-known/openid-configuration")
|
||||
response (ex/try! (http/req! cfg
|
||||
{:method :get :uri (str discovery-uri)}
|
||||
{:sync? true}))]
|
||||
response (ex/try (http/req! http-client {:method :get :uri (str discovery-uri)} {:sync? true}))]
|
||||
(cond
|
||||
(ex/exception? response)
|
||||
(do
|
||||
@@ -64,7 +59,7 @@
|
||||
nil)
|
||||
|
||||
(= 200 (:status response))
|
||||
(let [data (json/decode (:body response))]
|
||||
(let [data (json/read (:body response))]
|
||||
{:token-uri (get data :token_endpoint)
|
||||
:auth-uri (get data :authorization_endpoint)
|
||||
:user-uri (get data :userinfo_endpoint)})
|
||||
@@ -78,15 +73,15 @@
|
||||
|
||||
(defn- prepare-oidc-opts
|
||||
[cfg]
|
||||
(let [opts {:base-uri (cf/get :oidc-base-uri)
|
||||
:client-id (cf/get :oidc-client-id)
|
||||
:client-secret (cf/get :oidc-client-secret)
|
||||
:token-uri (cf/get :oidc-token-uri)
|
||||
:auth-uri (cf/get :oidc-auth-uri)
|
||||
:user-uri (cf/get :oidc-user-uri)
|
||||
:scopes (cf/get :oidc-scopes #{"openid" "profile" "email"})
|
||||
:roles-attr (cf/get :oidc-roles-attr)
|
||||
:roles (cf/get :oidc-roles)
|
||||
(let [opts {:base-uri (:base-uri cfg)
|
||||
:client-id (:client-id cfg)
|
||||
:client-secret (:client-secret cfg)
|
||||
:token-uri (:token-uri cfg)
|
||||
:auth-uri (:auth-uri cfg)
|
||||
:user-uri (:user-uri cfg)
|
||||
:scopes (:scopes cfg #{"openid" "profile" "email"})
|
||||
:roles-attr (:roles-attr cfg)
|
||||
:roles (:roles cfg)
|
||||
:name "oidc"}
|
||||
|
||||
opts (d/without-nils opts)]
|
||||
@@ -101,12 +96,13 @@
|
||||
(some-> (discover-oidc-config cfg opts)
|
||||
(merge opts {:discover? true}))))))
|
||||
|
||||
(defmethod ig/pre-init-spec ::providers/generic [_]
|
||||
(s/keys :req [::http/client]))
|
||||
|
||||
(defmethod ig/init-key ::providers/generic
|
||||
(defmethod ig/prep-key ::generic-provider
|
||||
[_ cfg]
|
||||
(when (contains? cf/flags :login-with-oidc)
|
||||
(d/without-nils cfg))
|
||||
|
||||
(defmethod ig/init-key ::generic-provider
|
||||
[_ cfg]
|
||||
(when (:enabled? cfg)
|
||||
(if-let [opts (prepare-oidc-opts cfg)]
|
||||
(do
|
||||
(l/info :hint "provider initialized"
|
||||
@@ -114,10 +110,10 @@
|
||||
:method (if (:discover? opts) "discover" "manual")
|
||||
:client-id (:client-id opts)
|
||||
:client-secret (obfuscate-string (:client-secret opts))
|
||||
:scopes (str/join "," (:scopes opts))
|
||||
:auth-uri (:auth-uri opts)
|
||||
:user-uri (:user-uri opts)
|
||||
:token-uri (:token-uri opts)
|
||||
:scopes (str/join "," (:scopes opts))
|
||||
:auth-uri (:auth-uri opts)
|
||||
:user-uri (:user-uri opts)
|
||||
:token-uri (:token-uri opts)
|
||||
:roles-attr (:roles-attr opts)
|
||||
:roles (:roles opts))
|
||||
opts)
|
||||
@@ -129,17 +125,21 @@
|
||||
;; GOOGLE AUTH PROVIDER
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defmethod ig/init-key ::providers/google
|
||||
[_ _]
|
||||
(let [opts {:client-id (cf/get :google-client-id)
|
||||
:client-secret (cf/get :google-client-secret)
|
||||
(defmethod ig/prep-key ::google-provider
|
||||
[_ cfg]
|
||||
(d/without-nils cfg))
|
||||
|
||||
(defmethod ig/init-key ::google-provider
|
||||
[_ cfg]
|
||||
(let [opts {:client-id (:client-id cfg)
|
||||
:client-secret (:client-secret cfg)
|
||||
:scopes #{"openid" "email" "profile"}
|
||||
:auth-uri "https://accounts.google.com/o/oauth2/v2/auth"
|
||||
:token-uri "https://oauth2.googleapis.com/token"
|
||||
:user-uri "https://openidconnect.googleapis.com/v1/userinfo"
|
||||
:name "google"}]
|
||||
|
||||
(when (contains? cf/flags :login-with-google)
|
||||
(when (:enabled? cfg)
|
||||
(if (and (string? (:client-id opts))
|
||||
(string? (:client-secret opts)))
|
||||
(do
|
||||
@@ -158,29 +158,29 @@
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn- retrieve-github-email
|
||||
[cfg tdata info]
|
||||
[{:keys [http-client]} tdata info]
|
||||
(or (some-> info :email p/resolved)
|
||||
(->> (http/req! cfg
|
||||
{:uri "https://api.github.com/user/emails"
|
||||
:headers {"Authorization" (dm/str (:type tdata) " " (:token tdata))}
|
||||
:timeout 6000
|
||||
:method :get})
|
||||
(p/map (fn [{:keys [status body] :as response}]
|
||||
(-> (http/req! http-client {:uri "https://api.github.com/user/emails"
|
||||
:headers {"Authorization" (dm/str (:type tdata) " " (:token tdata))}
|
||||
:timeout 6000
|
||||
:method :get})
|
||||
(p/then (fn [{:keys [status body] :as response}]
|
||||
(when-not (s/int-in-range? 200 300 status)
|
||||
(ex/raise :type :internal
|
||||
:code :unable-to-retrieve-github-emails
|
||||
:hint "unable to retrieve github emails"
|
||||
:http-status status
|
||||
:http-body body))
|
||||
(->> response :body json/decode (filter :primary) first :email))))))
|
||||
(->> response :body json/read (filter :primary) first :email))))))
|
||||
|
||||
(defmethod ig/pre-init-spec ::providers/github [_]
|
||||
(s/keys :req [::http/client]))
|
||||
|
||||
(defmethod ig/init-key ::providers/github
|
||||
(defmethod ig/prep-key ::github-provider
|
||||
[_ cfg]
|
||||
(let [opts {:client-id (cf/get :github-client-id)
|
||||
:client-secret (cf/get :github-client-secret)
|
||||
(d/without-nils cfg))
|
||||
|
||||
(defmethod ig/init-key ::github-provider
|
||||
[_ cfg]
|
||||
(let [opts {:client-id (:client-id cfg)
|
||||
:client-secret (:client-secret cfg)
|
||||
:scopes #{"read:user" "user:email"}
|
||||
:auth-uri "https://github.com/login/oauth/authorize"
|
||||
:token-uri "https://github.com/login/oauth/access_token"
|
||||
@@ -191,7 +191,7 @@
|
||||
;; retrieve emails.
|
||||
:get-email-fn (partial retrieve-github-email cfg)}]
|
||||
|
||||
(when (contains? cf/flags :login-with-github)
|
||||
(when (:enabled? cfg)
|
||||
(if (and (string? (:client-id opts))
|
||||
(string? (:client-secret opts)))
|
||||
(do
|
||||
@@ -209,18 +209,22 @@
|
||||
;; GITLAB AUTH PROVIDER
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defmethod ig/init-key ::providers/gitlab
|
||||
[_ _]
|
||||
(let [base (cf/get :gitlab-base-uri "https://gitlab.com")
|
||||
(defmethod ig/prep-key ::gitlab-provider
|
||||
[_ cfg]
|
||||
(d/without-nils cfg))
|
||||
|
||||
(defmethod ig/init-key ::gitlab-provider
|
||||
[_ cfg]
|
||||
(let [base (:base-uri cfg "https://gitlab.com")
|
||||
opts {:base-uri base
|
||||
:client-id (cf/get :gitlab-client-id)
|
||||
:client-secret (cf/get :gitlab-client-secret)
|
||||
:client-id (:client-id cfg)
|
||||
:client-secret (:client-secret cfg)
|
||||
:scopes #{"openid" "profile" "email"}
|
||||
:auth-uri (str base "/oauth/authorize")
|
||||
:token-uri (str base "/oauth/token")
|
||||
:user-uri (str base "/oauth/userinfo")
|
||||
:name "gitlab"}]
|
||||
(when (contains? cf/flags :login-with-gitlab)
|
||||
(when (:enabled? cfg)
|
||||
(if (and (string? (:client-id opts))
|
||||
(string? (:client-secret opts)))
|
||||
(do
|
||||
@@ -241,7 +245,7 @@
|
||||
|
||||
(defn- build-redirect-uri
|
||||
[{:keys [provider] :as cfg}]
|
||||
(let [public (u/uri (cf/get :public-uri))]
|
||||
(let [public (u/uri (:public-uri cfg))]
|
||||
(str (assoc public :path (str "/api/auth/oauth/" (:name provider) "/callback")))))
|
||||
|
||||
(defn- build-auth-uri
|
||||
@@ -264,7 +268,7 @@
|
||||
props))
|
||||
|
||||
(defn retrieve-access-token
|
||||
[{:keys [provider] :as cfg} code]
|
||||
[{:keys [provider http-client] :as cfg} code]
|
||||
(let [params {:client_id (:client-id provider)
|
||||
:client_secret (:client-secret provider)
|
||||
:code code
|
||||
@@ -275,25 +279,25 @@
|
||||
"accept" "application/json"}
|
||||
:uri (:token-uri provider)
|
||||
:body (u/map->query-string params)}]
|
||||
(->> (http/req! cfg req)
|
||||
(p/map (fn [{:keys [status body] :as res}]
|
||||
(if (= status 200)
|
||||
(let [data (json/decode body)]
|
||||
{:token (get data :access_token)
|
||||
:type (get data :token_type)})
|
||||
(ex/raise :type :internal
|
||||
:code :unable-to-retrieve-token
|
||||
:http-status status
|
||||
:http-body body)))))))
|
||||
(p/then
|
||||
(http/req! http-client req)
|
||||
(fn [{:keys [status body] :as res}]
|
||||
(if (= status 200)
|
||||
(let [data (json/read body)]
|
||||
{:token (get data :access_token)
|
||||
:type (get data :token_type)})
|
||||
(ex/raise :type :internal
|
||||
:code :unable-to-retrieve-token
|
||||
:http-status status
|
||||
:http-body body))))))
|
||||
|
||||
(defn- retrieve-user-info
|
||||
[{:keys [provider] :as cfg} tdata]
|
||||
[{:keys [provider http-client] :as cfg} tdata]
|
||||
(letfn [(retrieve []
|
||||
(http/req! cfg
|
||||
{:uri (:user-uri provider)
|
||||
:headers {"Authorization" (str (:type tdata) " " (:token tdata))}
|
||||
:timeout 6000
|
||||
:method :get}))
|
||||
(http/req! http-client {:uri (:user-uri provider)
|
||||
:headers {"Authorization" (str (:type tdata) " " (:token tdata))}
|
||||
:timeout 6000
|
||||
:method :get}))
|
||||
(validate-response [response]
|
||||
(when-not (s/int-in-range? 200 300 (:status response))
|
||||
(ex/raise :type :internal
|
||||
@@ -316,7 +320,7 @@
|
||||
(get info attr-kw)))
|
||||
|
||||
(process-response [response]
|
||||
(p/let [info (-> response :body json/decode)
|
||||
(p/let [info (-> response :body json/read)
|
||||
email (get-email info)]
|
||||
{:backend (:name provider)
|
||||
:email email
|
||||
@@ -350,7 +354,7 @@
|
||||
::props]))
|
||||
|
||||
(defn retrieve-info
|
||||
[{:keys [provider] :as cfg} {:keys [params] :as request}]
|
||||
[{:keys [sprops provider] :as cfg} {:keys [params] :as request}]
|
||||
(letfn [(validate-oidc [info]
|
||||
;; If the provider is OIDC, we can proceed to check
|
||||
;; roles if they are defined.
|
||||
@@ -389,7 +393,7 @@
|
||||
|
||||
(let [state (get params :state)
|
||||
code (get params :code)
|
||||
state (tokens/verify (::main/props cfg) {:token state :iss :oauth})]
|
||||
state (tokens/verify sprops {:token state :iss :oauth})]
|
||||
(-> (p/resolved code)
|
||||
(p/then #(retrieve-access-token cfg %))
|
||||
(p/then #(retrieve-user-info cfg %))
|
||||
@@ -397,7 +401,7 @@
|
||||
(p/then' (partial post-process state))))))
|
||||
|
||||
(defn- retrieve-profile
|
||||
[{:keys [::db/pool ::wrk/executor] :as cfg} info]
|
||||
[{:keys [pool executor] :as cfg} info]
|
||||
(px/with-dispatch executor
|
||||
(with-open [conn (db/open pool)]
|
||||
(some->> (:email info)
|
||||
@@ -410,23 +414,23 @@
|
||||
(yrs/response :status 302 :headers {"location" (str uri)}))
|
||||
|
||||
(defn- generate-error-redirect
|
||||
[_ error]
|
||||
(let [uri (-> (u/uri (cf/get :public-uri))
|
||||
[cfg error]
|
||||
(let [uri (-> (u/uri (:public-uri cfg))
|
||||
(assoc :path "/#/auth/login")
|
||||
(assoc :query (u/map->query-string {:error "unable-to-auth" :hint (ex-message error)})))]
|
||||
(redirect-response uri)))
|
||||
|
||||
(defn- generate-redirect
|
||||
[{:keys [::session/session] :as cfg} request info profile]
|
||||
[{:keys [sprops session audit] :as cfg} request info profile]
|
||||
(if profile
|
||||
(let [sxf (session/create-fn session (:id profile))
|
||||
(let [sxf ((:create session) (:id profile))
|
||||
token (or (:invitation-token info)
|
||||
(tokens/generate (::main/props cfg)
|
||||
{:iss :auth
|
||||
:exp (dt/in-future "15m")
|
||||
:profile-id (:id profile)}))
|
||||
(tokens/generate sprops {:iss :auth
|
||||
:exp (dt/in-future "15m")
|
||||
:profile-id (:id profile)}))
|
||||
params {:token token}
|
||||
uri (-> (u/uri (cf/get :public-uri))
|
||||
|
||||
uri (-> (u/uri (:public-uri cfg))
|
||||
(assoc :path "/#/auth/verify-token")
|
||||
(assoc :query (u/map->query-string params)))]
|
||||
|
||||
@@ -434,12 +438,13 @@
|
||||
(ex/raise :type :restriction
|
||||
:code :profile-blocked))
|
||||
|
||||
(when-let [collector (::audit/collector cfg)]
|
||||
(audit/submit! collector {:type "command"
|
||||
:name "login"
|
||||
:profile-id (:id profile)
|
||||
:ip-addr (audit/parse-client-ip request)
|
||||
:props (audit/profile->props profile)}))
|
||||
(when (fn? audit)
|
||||
(audit :cmd :submit
|
||||
:type "command"
|
||||
:name "login"
|
||||
:profile-id (:id profile)
|
||||
:ip-addr (audit/parse-client-ip request)
|
||||
:props (audit/profile->props profile)))
|
||||
|
||||
(->> (redirect-response uri)
|
||||
(sxf request)))
|
||||
@@ -448,19 +453,19 @@
|
||||
:iss :prepared-register
|
||||
:is-active true
|
||||
:exp (dt/in-future {:hours 48}))
|
||||
token (tokens/generate (::main/props cfg) info)
|
||||
token (tokens/generate sprops info)
|
||||
params (d/without-nils
|
||||
{:token token
|
||||
:fullname (:fullname info)})
|
||||
uri (-> (u/uri (cf/get :public-uri))
|
||||
uri (-> (u/uri (:public-uri cfg))
|
||||
(assoc :path "/#/auth/register/validate")
|
||||
(assoc :query (u/map->query-string params)))]
|
||||
(redirect-response uri))))
|
||||
|
||||
(defn- auth-handler
|
||||
[cfg {:keys [params] :as request}]
|
||||
[{:keys [sprops] :as cfg} {:keys [params] :as request}]
|
||||
(let [props (audit/extract-utm-params params)
|
||||
state (tokens/generate (::main/props cfg)
|
||||
state (tokens/generate sprops
|
||||
{:iss :oauth
|
||||
:invitation-token (:invitation-token params)
|
||||
:props props
|
||||
@@ -486,7 +491,7 @@
|
||||
{:compile
|
||||
(fn [& _]
|
||||
(fn [handler]
|
||||
(fn [{:keys [::providers] :as cfg} request]
|
||||
(fn [{:keys [providers] :as cfg} request]
|
||||
(let [provider (some-> request :path-params :provider keyword)]
|
||||
(if-let [provider (get providers provider)]
|
||||
(handler (assoc cfg :provider provider) request)
|
||||
@@ -495,57 +500,44 @@
|
||||
:provider provider
|
||||
:hint "provider not configured"))))))})
|
||||
|
||||
|
||||
(s/def ::client-id ::cf/oidc-client-id)
|
||||
(s/def ::client-secret ::cf/oidc-client-secret)
|
||||
(s/def ::base-uri ::cf/oidc-base-uri)
|
||||
(s/def ::token-uri ::cf/oidc-token-uri)
|
||||
(s/def ::auth-uri ::cf/oidc-auth-uri)
|
||||
(s/def ::user-uri ::cf/oidc-user-uri)
|
||||
(s/def ::scopes ::cf/oidc-scopes)
|
||||
(s/def ::roles ::cf/oidc-roles)
|
||||
(s/def ::roles-attr ::cf/oidc-roles-attr)
|
||||
(s/def ::email-attr ::cf/oidc-email-attr)
|
||||
(s/def ::name-attr ::cf/oidc-name-attr)
|
||||
|
||||
;; FIXME: migrate to qualified-keywords
|
||||
(s/def ::provider
|
||||
(s/keys :req-un [::client-id
|
||||
::client-secret]
|
||||
:opt-un [::base-uri
|
||||
::token-uri
|
||||
::auth-uri
|
||||
::user-uri
|
||||
::scopes
|
||||
::roles
|
||||
::roles-attr
|
||||
::email-attr
|
||||
::name-attr]))
|
||||
|
||||
(s/def ::providers (s/map-of ::us/keyword (s/nilable ::provider)))
|
||||
(s/def ::public-uri ::us/not-empty-string)
|
||||
(s/def ::http-client ::http/client)
|
||||
(s/def ::session map?)
|
||||
(s/def ::sprops map?)
|
||||
(s/def ::providers map?)
|
||||
|
||||
(defmethod ig/pre-init-spec ::routes
|
||||
[_]
|
||||
(s/keys :req [::http/client
|
||||
::wrk/executor
|
||||
::main/props
|
||||
::db/pool
|
||||
::providers
|
||||
::session/session]))
|
||||
(s/keys :req-un [::public-uri
|
||||
::session
|
||||
::sprops
|
||||
::http-client
|
||||
::providers
|
||||
::db/pool
|
||||
::wrk/executor]))
|
||||
|
||||
(defmethod ig/init-key ::routes
|
||||
[_ {:keys [::wrk/executor ::session/session] :as cfg}]
|
||||
[_ {:keys [executor session] :as cfg}]
|
||||
(let [cfg (update cfg :provider d/without-nils)]
|
||||
["" {:middleware [[(:middleware session)]
|
||||
[hmw/with-dispatch executor]
|
||||
[hmw/with-config cfg]
|
||||
[provider-lookup]
|
||||
]}
|
||||
;; We maintain the both URI prefixes for backward compatibility.
|
||||
|
||||
["/auth/oauth"
|
||||
["/:provider"
|
||||
{:handler auth-handler
|
||||
:allowed-methods #{:post}}]
|
||||
["/:provider/callback"
|
||||
{:handler callback-handler
|
||||
:allowed-methods #{:get}}]]]))
|
||||
:allowed-methods #{:get}}]]
|
||||
|
||||
["/auth/oidc"
|
||||
["/:provider"
|
||||
{:handler auth-handler
|
||||
:allowed-methods #{:post}}]
|
||||
["/:provider/callback"
|
||||
{:handler callback-handler
|
||||
:allowed-methods #{:get}}]]]))
|
||||
|
||||
@@ -27,10 +27,6 @@
|
||||
clojure.lang.IRecord
|
||||
clojure.lang.IDeref)
|
||||
|
||||
(prefer-method print-method
|
||||
clojure.lang.IPersistentMap
|
||||
clojure.lang.IDeref)
|
||||
|
||||
(prefer-method pprint/simple-dispatch
|
||||
clojure.lang.IPersistentMap
|
||||
clojure.lang.IDeref)
|
||||
@@ -50,11 +46,9 @@
|
||||
:database-username "penpot"
|
||||
:database-password "penpot"
|
||||
|
||||
:default-blob-version 5
|
||||
:default-blob-version 4
|
||||
:loggers-zmq-uri "tcp://localhost:45556"
|
||||
|
||||
:rpc-rlimit-config (fs/path "resources/rlimit.edn")
|
||||
:rpc-climit-config (fs/path "resources/climit.edn")
|
||||
|
||||
:file-change-snapshot-every 5
|
||||
:file-change-snapshot-timeout "3h"
|
||||
@@ -92,7 +86,6 @@
|
||||
|
||||
(s/def ::default-rpc-rlimit ::us/vector-of-strings)
|
||||
(s/def ::rpc-rlimit-config ::fs/path)
|
||||
(s/def ::rpc-climit-config ::fs/path)
|
||||
|
||||
(s/def ::media-max-file-size ::us/integer)
|
||||
|
||||
@@ -100,17 +93,13 @@
|
||||
(s/def ::telemetry-enabled ::us/boolean)
|
||||
|
||||
(s/def ::audit-log-archive-uri ::us/string)
|
||||
(s/def ::audit-log-http-handler-concurrency ::us/integer)
|
||||
|
||||
(s/def ::admins ::us/set-of-strings)
|
||||
(s/def ::file-change-snapshot-every ::us/integer)
|
||||
(s/def ::file-change-snapshot-timeout ::dt/duration)
|
||||
|
||||
(s/def ::default-executor-parallelism ::us/integer)
|
||||
(s/def ::scheduled-executor-parallelism ::us/integer)
|
||||
|
||||
(s/def ::worker-default-parallelism ::us/integer)
|
||||
(s/def ::worker-webhook-parallelism ::us/integer)
|
||||
(s/def ::worker-executor-parallelism ::us/integer)
|
||||
|
||||
(s/def ::authenticated-cookie-domain ::us/string)
|
||||
(s/def ::authenticated-cookie-name ::us/string)
|
||||
@@ -155,6 +144,7 @@
|
||||
(s/def ::http-server-max-multipart-body-size ::us/integer)
|
||||
(s/def ::http-server-io-threads ::us/integer)
|
||||
(s/def ::http-server-worker-threads ::us/integer)
|
||||
(s/def ::initial-project-skey ::us/string)
|
||||
(s/def ::ldap-attrs-email ::us/string)
|
||||
(s/def ::ldap-attrs-fullname ::us/string)
|
||||
(s/def ::ldap-attrs-username ::us/string)
|
||||
@@ -178,6 +168,11 @@
|
||||
(s/def ::redis-uri ::us/string)
|
||||
(s/def ::registration-domain-whitelist ::us/set-of-strings)
|
||||
|
||||
(s/def ::semaphore-process-font ::us/integer)
|
||||
(s/def ::semaphore-process-image ::us/integer)
|
||||
(s/def ::semaphore-update-file ::us/integer)
|
||||
(s/def ::semaphore-auth ::us/integer)
|
||||
|
||||
(s/def ::smtp-default-from ::us/string)
|
||||
(s/def ::smtp-default-reply-to ::us/string)
|
||||
(s/def ::smtp-host ::us/string)
|
||||
@@ -208,7 +203,6 @@
|
||||
::admins
|
||||
::allow-demo-users
|
||||
::audit-log-archive-uri
|
||||
::audit-log-http-handler-concurrency
|
||||
::auth-token-cookie-name
|
||||
::auth-token-cookie-max-age
|
||||
::authenticated-cookie-name
|
||||
@@ -223,9 +217,7 @@
|
||||
::default-rpc-rlimit
|
||||
::error-report-webhook
|
||||
::default-executor-parallelism
|
||||
::scheduled-executor-parallelism
|
||||
::worker-default-parallelism
|
||||
::worker-webhook-parallelism
|
||||
::worker-executor-parallelism
|
||||
::file-change-snapshot-every
|
||||
::file-change-snapshot-timeout
|
||||
::user-feedback-destination
|
||||
@@ -254,6 +246,7 @@
|
||||
::http-server-max-multipart-body-size
|
||||
::http-server-io-threads
|
||||
::http-server-worker-threads
|
||||
::initial-project-skey
|
||||
::ldap-attrs-email
|
||||
::ldap-attrs-fullname
|
||||
::ldap-attrs-username
|
||||
@@ -345,8 +338,7 @@
|
||||
(when (ex/ex-info? e)
|
||||
(println ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;")
|
||||
(println "Error on validating configuration:")
|
||||
(println (some-> e ex-data ex/explain))
|
||||
(println (ex/explain (ex-data e)))
|
||||
(println (us/pretty-explain (ex-data e)))
|
||||
(println ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"))
|
||||
(throw e))))
|
||||
|
||||
|
||||
@@ -296,7 +296,6 @@
|
||||
(let [row (get* ds table params opts)]
|
||||
(when (and (not row) check-deleted?)
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found
|
||||
:table table
|
||||
:hint "database object not found"))
|
||||
row)))
|
||||
@@ -309,7 +308,6 @@
|
||||
(let [row (get* ds table params (assoc opts :check-deleted? check-not-found))]
|
||||
(when (and (not row) check-not-found)
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found
|
||||
:table table
|
||||
:hint "database object not found"))
|
||||
row)))
|
||||
@@ -354,13 +352,10 @@
|
||||
[v]
|
||||
(and (pgarray? v) (= "uuid" (.getBaseTypeName ^PgArray v))))
|
||||
|
||||
;; TODO rename to decode-pgarray-into
|
||||
(defn decode-pgarray
|
||||
([v] (decode-pgarray v []))
|
||||
([v in]
|
||||
(into in (some-> ^PgArray v .getArray)))
|
||||
([v in xf]
|
||||
(into in xf (some-> ^PgArray v .getArray))))
|
||||
([v] (some->> ^PgArray v .getArray vec))
|
||||
([v in] (some->> ^PgArray v .getArray (into in)))
|
||||
([v in xf] (some->> ^PgArray v .getArray (into in xf))))
|
||||
|
||||
(defn pgarray->set
|
||||
[v]
|
||||
@@ -422,53 +417,47 @@
|
||||
|
||||
(defn decode-json-pgobject
|
||||
[^PGobject o]
|
||||
(when o
|
||||
(let [typ (.getType o)
|
||||
val (.getValue o)]
|
||||
(if (or (= typ "json")
|
||||
(= typ "jsonb"))
|
||||
(json/decode val)
|
||||
val))))
|
||||
(let [typ (.getType o)
|
||||
val (.getValue o)]
|
||||
(if (or (= typ "json")
|
||||
(= typ "jsonb"))
|
||||
(json/read val)
|
||||
val)))
|
||||
|
||||
(defn decode-transit-pgobject
|
||||
[^PGobject o]
|
||||
(when o
|
||||
(let [typ (.getType o)
|
||||
val (.getValue o)]
|
||||
(if (or (= typ "json")
|
||||
(= typ "jsonb"))
|
||||
(t/decode-str val)
|
||||
val))))
|
||||
(let [typ (.getType o)
|
||||
val (.getValue o)]
|
||||
(if (or (= typ "json")
|
||||
(= typ "jsonb"))
|
||||
(t/decode-str val)
|
||||
val)))
|
||||
|
||||
(defn inet
|
||||
[ip-addr]
|
||||
(when ip-addr
|
||||
(doto (org.postgresql.util.PGobject.)
|
||||
(.setType "inet")
|
||||
(.setValue (str ip-addr)))))
|
||||
(doto (org.postgresql.util.PGobject.)
|
||||
(.setType "inet")
|
||||
(.setValue (str ip-addr))))
|
||||
|
||||
(defn decode-inet
|
||||
[^PGobject o]
|
||||
(when o
|
||||
(if (= "inet" (.getType o))
|
||||
(.getValue o)
|
||||
nil)))
|
||||
(if (= "inet" (.getType o))
|
||||
(.getValue o)
|
||||
nil))
|
||||
|
||||
(defn tjson
|
||||
"Encode as transit json."
|
||||
[data]
|
||||
(when data
|
||||
(doto (org.postgresql.util.PGobject.)
|
||||
(.setType "jsonb")
|
||||
(.setValue (t/encode-str data {:type :json-verbose})))))
|
||||
(doto (org.postgresql.util.PGobject.)
|
||||
(.setType "jsonb")
|
||||
(.setValue (t/encode-str data {:type :json-verbose}))))
|
||||
|
||||
(defn json
|
||||
"Encode as plain json."
|
||||
[data]
|
||||
(when data
|
||||
(doto (org.postgresql.util.PGobject.)
|
||||
(.setType "jsonb")
|
||||
(.setValue (json/encode-str data)))))
|
||||
(doto (org.postgresql.util.PGobject.)
|
||||
(.setType "jsonb")
|
||||
(.setValue (json/write-str data))))
|
||||
|
||||
;; --- Locks
|
||||
|
||||
@@ -499,18 +488,3 @@
|
||||
(let [n (xact-check-param n)
|
||||
row (exec-one! conn ["select pg_try_advisory_xact_lock(?::bigint) as lock" n])]
|
||||
(:lock row)))
|
||||
|
||||
(defn sql-exception?
|
||||
[cause]
|
||||
(instance? java.sql.SQLException cause))
|
||||
|
||||
(defn connection-error?
|
||||
[cause]
|
||||
(and (sql-exception? cause)
|
||||
(contains? #{"08003" "08006" "08001" "08004"}
|
||||
(.getSQLState ^java.sql.SQLException cause))))
|
||||
|
||||
(defn serialization-error?
|
||||
[cause]
|
||||
(and (sql-exception? cause)
|
||||
(= "40001" (.getSQLState ^java.sql.SQLException cause))))
|
||||
|
||||
@@ -11,7 +11,6 @@
|
||||
[app.common.transit :as t]
|
||||
[app.http.errors :as errors]
|
||||
[app.http.middleware :as mw]
|
||||
[app.http.session :as session]
|
||||
[app.metrics :as mtx]
|
||||
[app.worker :as wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
@@ -116,6 +115,7 @@
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(s/def ::assets map?)
|
||||
(s/def ::audit-handler fn?)
|
||||
(s/def ::awsns-handler fn?)
|
||||
(s/def ::debug-routes (s/nilable vector?))
|
||||
(s/def ::doc-routes (s/nilable vector?))
|
||||
@@ -123,7 +123,7 @@
|
||||
(s/def ::oauth map?)
|
||||
(s/def ::oidc-routes (s/nilable vector?))
|
||||
(s/def ::rpc-routes (s/nilable vector?))
|
||||
(s/def ::session ::session/session)
|
||||
(s/def ::session map?)
|
||||
(s/def ::storage map?)
|
||||
(s/def ::ws fn?)
|
||||
|
||||
@@ -137,6 +137,7 @@
|
||||
::awsns-handler
|
||||
::debug-routes
|
||||
::oidc-routes
|
||||
::audit-handler
|
||||
::rpc-routes
|
||||
::doc-routes]))
|
||||
|
||||
@@ -147,14 +148,13 @@
|
||||
[mw/format-response]
|
||||
[mw/params]
|
||||
[mw/parse-request]
|
||||
[session/middleware-1 session]
|
||||
[mw/errors errors/handle]
|
||||
[mw/restrict-methods]]}
|
||||
|
||||
["/metrics" {:handler (::mtx/handler metrics)
|
||||
:allowed-methods #{:get}}]
|
||||
|
||||
["/assets" {:middleware [[session/middleware-2 session]]}
|
||||
["/assets" {:middleware [(:middleware session)]}
|
||||
["/by-id/:id" {:handler (:objects-handler assets)}]
|
||||
["/by-file-media-id/:id" {:handler (:file-objects-handler assets)}]
|
||||
["/by-file-media-id/:id/thumbnail" {:handler (:file-thumbnails-handler assets)}]]
|
||||
@@ -165,12 +165,14 @@
|
||||
["/sns" {:handler (:awsns-handler cfg)
|
||||
:allowed-methods #{:post}}]]
|
||||
|
||||
["/ws/notifications" {:middleware [[session/middleware-2 session]]
|
||||
["/ws/notifications" {:middleware [(:middleware session)]
|
||||
:handler ws
|
||||
:allowed-methods #{:get}}]
|
||||
|
||||
["/api" {:middleware [[mw/cors]
|
||||
[session/middleware-2 session]]}
|
||||
[(:middleware session)]]}
|
||||
["/audit/events" {:handler (:audit-handler cfg)
|
||||
:allowed-methods #{:post}}]
|
||||
["/feedback" {:handler feedback
|
||||
:allowed-methods #{:post}}]
|
||||
(:doc-routes cfg)
|
||||
|
||||
@@ -12,9 +12,7 @@
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.http.client :as http]
|
||||
[app.main :as-alias main]
|
||||
[app.tokens :as tokens]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]
|
||||
@@ -28,21 +26,21 @@
|
||||
(declare parse-notification)
|
||||
(declare process-report)
|
||||
|
||||
(s/def ::http-client ::http/client)
|
||||
(s/def ::sprops map?)
|
||||
|
||||
(defmethod ig/pre-init-spec ::handler [_]
|
||||
(s/keys :req [::http/client
|
||||
::main/props
|
||||
::db/pool
|
||||
::wrk/executor]))
|
||||
(s/keys :req-un [::db/pool ::http-client ::sprops]))
|
||||
|
||||
(defmethod ig/init-key ::handler
|
||||
[_ {:keys [::wrk/executor] :as cfg}]
|
||||
[_ {:keys [executor] :as cfg}]
|
||||
(fn [request respond _]
|
||||
(let [data (-> request yrq/body slurp)]
|
||||
(px/run! executor #(handle-request cfg data)))
|
||||
(respond (yrs/response 200))))
|
||||
|
||||
(defn handle-request
|
||||
[cfg data]
|
||||
[{:keys [http-client] :as cfg} data]
|
||||
(try
|
||||
(let [body (parse-json data)
|
||||
mtype (get body "Type")]
|
||||
@@ -51,7 +49,7 @@
|
||||
(let [surl (get body "SubscribeURL")
|
||||
stopic (get body "TopicArn")]
|
||||
(l/info :action "subscription received" :topic stopic :url surl)
|
||||
(http/req! cfg {:uri surl :method :post :timeout 10000} {:sync? true}))
|
||||
(http/req! http-client {:uri surl :method :post :timeout 10000} {:sync? true}))
|
||||
|
||||
(= mtype "Notification")
|
||||
(when-let [message (parse-json (get body "Message"))]
|
||||
@@ -102,11 +100,10 @@
|
||||
(get mail "headers")))
|
||||
|
||||
(defn- extract-identity
|
||||
[cfg headers]
|
||||
[{:keys [sprops]} headers]
|
||||
(let [tdata (get headers "x-penpot-data")]
|
||||
(when-not (str/empty? tdata)
|
||||
(let [sprops (::main/props cfg)
|
||||
result (tokens/verify sprops {:token tdata :iss :profile-identity})]
|
||||
(let [result (tokens/verify sprops {:token tdata :iss :profile-identity})]
|
||||
(:profile-id result)))))
|
||||
|
||||
(defn- parse-notification
|
||||
@@ -139,7 +136,7 @@
|
||||
(j/read-value v)))
|
||||
|
||||
(defn- register-bounce-for-profile
|
||||
[{:keys [::db/pool]} {:keys [type kind profile-id] :as report}]
|
||||
[{:keys [pool]} {:keys [type kind profile-id] :as report}]
|
||||
(when (= kind "permanent")
|
||||
(db/with-atomic [conn pool]
|
||||
(db/insert! conn :profile-complaint-report
|
||||
@@ -168,7 +165,7 @@
|
||||
{:id profile-id}))))))
|
||||
|
||||
(defn- register-complaint-for-profile
|
||||
[{:keys [::db/pool]} {:keys [type profile-id] :as report}]
|
||||
[{:keys [pool]} {:keys [type profile-id] :as report}]
|
||||
(db/with-atomic [conn pool]
|
||||
(db/insert! conn :profile-complaint-report
|
||||
{:profile-id profile-id
|
||||
|
||||
@@ -7,41 +7,34 @@
|
||||
(ns app.http.client
|
||||
"Http client abstraction layer."
|
||||
(:require
|
||||
[app.common.spec :as us]
|
||||
[app.worker :as wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[java-http-clj.core :as http])
|
||||
(:import
|
||||
java.net.http.HttpClient))
|
||||
[java-http-clj.core :as http]))
|
||||
|
||||
(s/def ::client #(instance? HttpClient %))
|
||||
(s/def ::client-holder
|
||||
(s/keys :req [::client]))
|
||||
(s/def ::client fn?)
|
||||
|
||||
(defmethod ig/pre-init-spec ::client [_]
|
||||
(s/keys :req [::wrk/executor]))
|
||||
(defmethod ig/pre-init-spec :app.http/client [_]
|
||||
(s/keys :req-un [::wrk/executor]))
|
||||
|
||||
(defmethod ig/init-key ::client
|
||||
[_ {:keys [::wrk/executor] :as cfg}]
|
||||
(http/build-client {:executor executor
|
||||
:connect-timeout 30000 ;; 10s
|
||||
:follow-redirects :always}))
|
||||
|
||||
(defn send!
|
||||
([client req] (send! client req {}))
|
||||
([client req {:keys [response-type sync?] :or {response-type :string sync? false}}]
|
||||
(us/assert! ::client client)
|
||||
(if sync?
|
||||
(http/send req {:client client :as response-type})
|
||||
(http/send-async req {:client client :as response-type}))))
|
||||
(defmethod ig/init-key :app.http/client
|
||||
[_ {:keys [executor] :as cfg}]
|
||||
(let [client (http/build-client {:executor executor
|
||||
:connect-timeout 30000 ;; 10s
|
||||
:follow-redirects :always})]
|
||||
(with-meta
|
||||
(fn send
|
||||
([req] (send req {}))
|
||||
([req {:keys [response-type sync?] :or {response-type :string sync? false}}]
|
||||
(if sync?
|
||||
(http/send req {:client client :as response-type})
|
||||
(http/send-async req {:client client :as response-type}))))
|
||||
{::client client})))
|
||||
|
||||
(defn req!
|
||||
"A convencience toplevel function for gradual migration to a new API
|
||||
convention."
|
||||
([{:keys [::client] :as holder} request]
|
||||
(us/assert! ::client-holder holder)
|
||||
(send! client request {}))
|
||||
([{:keys [::client] :as holder} request options]
|
||||
(us/assert! ::client-holder holder)
|
||||
(send! client request options)))
|
||||
([client request]
|
||||
(client request))
|
||||
([client request options]
|
||||
(client request options)))
|
||||
|
||||
@@ -14,9 +14,8 @@
[app.config :as cf]
[app.db :as db]
[app.http.middleware :as mw]
[app.http.session :as session]
[app.rpc.commands.binfile :as binf]
[app.rpc.commands.files.create :refer [create-file]]
[app.rpc.mutations.files :refer [create-file]]
[app.rpc.queries.profile :as profile]
[app.util.blob :as blob]
[app.util.template :as tmpl]
@@ -244,19 +243,15 @@
(yrs/response 404 "not found")))))

(def sql:error-reports
"SELECT id, created_at,
content->>'~:hint' AS hint
FROM server_error_report
ORDER BY created_at DESC
LIMIT 100")
"select id, created_at from server_error_report order by created_at desc limit 100")

(defn error-list-handler
[{:keys [pool]} request]
(when-not (authorized? pool request)
(ex/raise :type :authentication
:code :only-admins-allowed))
(let [items (->> (db/exec! pool [sql:error-reports])
(map #(update % :created-at dt/format-instant :rfc1123)))]
(let [items (db/exec! pool [sql:error-reports])
items (map #(update % :created-at dt/format-instant :rfc1123) items)]
(yrs/response :status 200
:body (-> (io/resource "app/templates/error-list.tmpl")
(tmpl/render {:items items}))
@@ -382,15 +377,17 @@
:code :only-admins-allowed))))))})


(s/def ::session map?)

(defmethod ig/pre-init-spec ::routes [_]
(s/keys :req-un [::db/pool ::wrk/executor ::session/session]))
(s/keys :req-un [::db/pool ::wrk/executor ::session]))

(defmethod ig/init-key ::routes
[_ {:keys [session pool executor] :as cfg}]
[["/readyz" {:middleware [[mw/with-dispatch executor]
[mw/with-config cfg]]
:handler health-handler}]
["/dbg" {:middleware [[session/middleware-2 session]
["/dbg" {:middleware [[(:middleware session)]
[with-authorization pool]
[mw/with-dispatch executor]
[mw/with-config cfg]]}

@@ -7,9 +7,9 @@
(ns app.http.errors
"Error handling for the http server."
(:require
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.spec :as us]
[app.http :as-alias http]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
@@ -26,18 +26,16 @@

(defn get-context
[request]
(let [claims (:session-token-claims request)]
(merge
*context*
{:path (:path request)
:method (:method request)
:params (:params request)
:ip-addr (parse-client-ip request)}
(d/without-nils
{:user-agent (yrq/get-header request "user-agent")
:frontend-version (or (yrq/get-header request "x-frontend-version")
"unknown")
:profile-id (:uid claims)}))))
(merge
*context*
{:path (:path request)
:method (:method request)
:params (:params request)
:ip-addr (parse-client-ip request)
:profile-id (:profile-id request)}
(let [headers (:headers request)]
{:user-agent (get headers "user-agent")
:frontend-version (get headers "x-frontend-version" "unknown")})))

(defmulti handle-exception
(fn [err & _rest]
@@ -63,7 +61,7 @@
(let [{:keys [code] :as data} (ex-data err)]
(cond
(= code :spec-validation)
(let [explain (ex/explain data)]
(let [explain (us/pretty-explain data)]
(yrs/response :status 400
:body (-> data
(dissoc ::s/problems ::s/value)
@@ -77,11 +75,11 @@

(defmethod handle-exception :assertion
[error request]
(let [edata (ex-data error)
explain (ex/explain edata)]
(l/error :hint (ex-message error)
:cause error
::l/context (get-context request))
(let [edata (ex-data error)
explain (us/pretty-explain edata)]
(l/error ::l/raw (str (ex-message error) "\n" explain)
::l/context (get-context request)
:cause error)
(yrs/response :status 500
:body {:type :server-error
:code :assertion
@@ -93,29 +91,12 @@
[err _]
(yrs/response 404 (ex-data err)))

(defmethod handle-exception :internal
[error request]
(let [{:keys [code] :as edata} (ex-data error)]
(cond
(= :concurrency-limit-reached code)
(yrs/response 429)

:else
(do
(l/error :hint (ex-message error)
:cause error
::l/context (get-context request))
(yrs/response 500 {:type :server-error
:code :unhandled
:hint (ex-message error)
:data edata})))))

(defmethod handle-exception org.postgresql.util.PSQLException
[error request]
(let [state (.getSQLState ^java.sql.SQLException error)]
(l/error :hint (ex-message error)
:cause error
::l/context (get-context request))
(l/error ::l/raw (ex-message error)
::l/context (get-context request)
:cause error)
(cond
(= state "57014")
(yrs/response 504 {:type :server-error
@@ -140,9 +121,9 @@
;; This means that exception is not a controlled exception.
(nil? edata)
(do
(l/error :hint (ex-message error)
:cause error
::l/context (get-context request))
(l/error ::l/raw (ex-message error)
::l/context (get-context request)
:cause error)
(yrs/response 500 {:type :server-error
:code :unexpected
:hint (ex-message error)}))
@@ -158,9 +139,9 @@

:else
(do
(l/error :hint (ex-message error)
:cause error
::l/context (get-context request))
(l/error ::l/raw (ex-message error)
::l/context (get-context request)
:cause error)
(yrs/response 500 {:type :server-error
:code :unhandled
:hint (ex-message error)

@@ -19,7 +19,6 @@
[yetti.request :as yrq]
[yetti.response :as yrs])
(:import
com.fasterxml.jackson.core.JsonParseException
com.fasterxml.jackson.core.io.JsonEOFException
io.undertow.server.RequestTooBigException
java.io.OutputStream))
@@ -32,12 +31,6 @@
{:name ::params
:compile (constantly ymw/wrap-params)})

(def ^:private json-mapper
(json/mapper
{:encode-key-fn str/camel
:decode-key-fn (comp keyword str/kebab)
:pretty true}))

(defn wrap-parse-request
[handler]
(letfn [(process-request [request]
@@ -52,7 +45,7 @@

(str/starts-with? header "application/json")
(with-open [is (yrq/body request)]
(let [params (json/decode is json-mapper)]
(let [params (json/read is)]
(-> request
(assoc :body-params params)
(update :params merge params))))
@@ -67,13 +60,10 @@
:code :request-body-too-large
:hint (ex-message cause)))


(or (instance? JsonEOFException cause)
(instance? JsonParseException cause))
(instance? JsonEOFException cause)
(raise (ex/error :type :validation
:code :malformed-json
:hint (ex-message cause)
:cause cause))
:hint (ex-message cause)))
:else
(raise cause)))]

@@ -123,32 +113,7 @@
(finally
(.close ^OutputStream output-stream))))))

(json-streamable-body [data]
(reify yrs/StreamableResponseBody
(-write-body-to-stream [_ _ output-stream]
(try

(with-open [bos (buffered-output-stream output-stream buffer-size)]
(json/write! bos data json-mapper))

(catch java.io.IOException _cause
;; Do nothing, EOF means client closes connection abruptly
nil)
(catch Throwable cause
(l/warn :hint "unexpected error on encoding response"
:cause cause))
(finally
(.close ^OutputStream output-stream))))))

(format-response-with-json [response _]
(let [body (yrs/body response)]
(if (or (boolean? body) (coll? body))
(-> response
(update :headers assoc "content-type" "application/json")
(assoc :body (json-streamable-body body)))
response)))

(format-response-with-transit [response request]
(format-response [response request]
(let [body (yrs/body response)]
(if (or (boolean? body) (coll? body))
(let [qs (yrq/query request)
@@ -161,20 +126,6 @@
(assoc :body (transit-streamable-body body opts))))
response)))

(format-response [response request]
(let [accept (yrq/get-header request "accept")]
(cond
(or (= accept "application/transit+json")
(str/includes? accept "application/transit+json"))
(format-response-with-transit response request)

(or (= accept "application/json")
(str/includes? accept "application/json"))
(format-response-with-json response request)

:else
(format-response-with-transit response request))))

(process-response [response request]
(cond-> response
(map? response) (format-response request)))]

|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.http.session
|
||||
(:refer-clojure :exclude [read])
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.logging :as l]
|
||||
@@ -21,10 +20,6 @@
|
||||
[promesa.exec :as px]
|
||||
[yetti.request :as yrq]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; DEFAULTS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
;; A default cookie name for storing the session.
|
||||
(def default-auth-token-cookie-name "auth-token")
|
||||
|
||||
@@ -38,55 +33,35 @@
|
||||
;; Default age for automatic session renewal
|
||||
(def default-renewal-max-age (dt/duration {:hours 6}))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; PROTOCOLS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
(defprotocol ISessionStore
|
||||
(read-session [store key])
|
||||
(write-session [store key data])
|
||||
(update-session [store data])
|
||||
(delete-session [store key]))
|
||||
|
||||
(defprotocol ISessionManager
|
||||
(read [_ key])
|
||||
(decode [_ key])
|
||||
(write! [_ key data])
|
||||
(update! [_ data])
|
||||
(delete! [_ key]))
|
||||
|
||||
(s/def ::session #(satisfies? ISessionManager %))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; STORAGE IMPL
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn- prepare-session-params
|
||||
[sprops data]
|
||||
(let [profile-id (:profile-id data)
|
||||
user-agent (:user-agent data)
|
||||
created-at (or (:created-at data) (dt/now))
|
||||
token (tokens/generate sprops {:iss "authentication"
|
||||
:iat created-at
|
||||
:uid profile-id})]
|
||||
{:user-agent user-agent
|
||||
:profile-id profile-id
|
||||
:created-at created-at
|
||||
:updated-at created-at
|
||||
:id token}))
|
||||
|
||||
(defn- database-manager
|
||||
(defn- make-database-store
|
||||
[{:keys [pool sprops executor]}]
|
||||
(reify ISessionManager
|
||||
(read [_ token]
|
||||
(reify ISessionStore
|
||||
(read-session [_ token]
|
||||
(px/with-dispatch executor
|
||||
(db/exec-one! pool (sql/select :http-session {:id token}))))
|
||||
|
||||
(decode [_ token]
|
||||
(write-session [_ _ data]
|
||||
(px/with-dispatch executor
|
||||
(tokens/verify sprops {:token token :iss "authentication"})))
|
||||
(let [profile-id (:profile-id data)
|
||||
user-agent (:user-agent data)
|
||||
created-at (or (:created-at data) (dt/now))
|
||||
token (tokens/generate sprops {:iss "authentication"
|
||||
:iat created-at
|
||||
:uid profile-id})
|
||||
params {:user-agent user-agent
|
||||
:profile-id profile-id
|
||||
:created-at created-at
|
||||
:updated-at created-at
|
||||
:id token}]
|
||||
(db/insert! pool :http-session params))))
|
||||
|
||||
(write! [_ _ data]
|
||||
(px/with-dispatch executor
|
||||
(let [params (prepare-session-params sprops data)]
|
||||
(db/insert! pool :http-session params)
|
||||
params)))
|
||||
|
||||
(update! [_ data]
|
||||
(update-session [_ data]
|
||||
(let [updated-at (dt/now)]
|
||||
(px/with-dispatch executor
|
||||
(db/update! pool :http-session
|
||||
@@ -94,154 +69,83 @@
|
||||
{:id (:id data)})
|
||||
(assoc data :updated-at updated-at))))
|
||||
|
||||
(delete! [_ token]
|
||||
(delete-session [_ token]
|
||||
(px/with-dispatch executor
|
||||
(db/delete! pool :http-session {:id token})
|
||||
nil))))
|
||||
|
||||
(defn inmemory-manager
|
||||
[{:keys [sprops executor]}]
|
||||
(defn make-inmemory-store
|
||||
[{:keys [sprops]}]
|
||||
(let [cache (atom {})]
|
||||
(reify ISessionManager
|
||||
(read [_ token]
|
||||
(reify ISessionStore
|
||||
(read-session [_ token]
|
||||
(p/do (get @cache token)))
|
||||
|
||||
(decode [_ token]
|
||||
(px/with-dispatch executor
|
||||
(tokens/verify sprops {:token token :iss "authentication"})))
|
||||
|
||||
(write! [_ _ data]
|
||||
(write-session [_ _ data]
|
||||
(p/do
|
||||
(let [{:keys [token] :as params} (prepare-session-params sprops data)]
|
||||
(let [profile-id (:profile-id data)
|
||||
user-agent (:user-agent data)
|
||||
created-at (or (:created-at data) (dt/now))
|
||||
token (tokens/generate sprops {:iss "authentication"
|
||||
:iat created-at
|
||||
:uid profile-id})
|
||||
params {:user-agent user-agent
|
||||
:created-at created-at
|
||||
:updated-at created-at
|
||||
:profile-id profile-id
|
||||
:id token}]
|
||||
|
||||
(swap! cache assoc token params)
|
||||
params)))
|
||||
|
||||
(update! [_ data]
|
||||
(p/do
|
||||
(let [updated-at (dt/now)]
|
||||
(swap! cache update (:id data) assoc :updated-at updated-at)
|
||||
(assoc data :updated-at updated-at))))
|
||||
(update-session [_ data]
|
||||
(let [updated-at (dt/now)]
|
||||
(swap! cache update (:id data) assoc :updated-at updated-at)
|
||||
(assoc data :updated-at updated-at)))
|
||||
|
||||
(delete! [_ token]
|
||||
(delete-session [_ token]
|
||||
(p/do
|
||||
(swap! cache dissoc token)
|
||||
nil)))))
|
||||
|
||||
(s/def ::sprops map?)
|
||||
(defmethod ig/pre-init-spec ::manager [_]
|
||||
(defmethod ig/pre-init-spec ::store [_]
|
||||
(s/keys :req-un [::db/pool ::wrk/executor ::sprops]))
|
||||
|
||||
(defmethod ig/init-key ::manager
|
||||
(defmethod ig/init-key ::store
|
||||
[_ {:keys [pool] :as cfg}]
|
||||
(if (db/read-only? pool)
|
||||
(inmemory-manager cfg)
|
||||
(database-manager cfg)))
|
||||
(make-inmemory-store cfg)
|
||||
(make-database-store cfg)))
|
||||
|
||||
(defmethod ig/halt-key! ::manager
|
||||
(defmethod ig/halt-key! ::store
|
||||
[_ _])
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; MANAGER IMPL
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(declare assign-auth-token-cookie)
|
||||
(declare assign-authenticated-cookie)
|
||||
(declare clear-auth-token-cookie)
|
||||
(declare clear-authenticated-cookie)
|
||||
|
||||
(defn create-fn
|
||||
[manager profile-id]
|
||||
(fn [request response]
|
||||
(let [uagent (yrq/get-header request "user-agent")
|
||||
params {:profile-id profile-id
|
||||
:user-agent uagent}]
|
||||
(-> (write! manager nil params)
|
||||
(p/then (fn [session]
|
||||
(l/trace :hint "create" :profile-id profile-id)
|
||||
(-> response
|
||||
(assign-auth-token-cookie session)
|
||||
(assign-authenticated-cookie session))))))))
|
||||
(defn delete-fn
|
||||
[manager]
|
||||
(letfn [(delete [{:keys [profile-id] :as request}]
|
||||
(let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
|
||||
cookie (yrq/get-cookie request cname)]
|
||||
(l/trace :hint "delete" :profile-id profile-id)
|
||||
(some->> (:value cookie) (delete! manager))))]
|
||||
(fn [request response]
|
||||
(p/do
|
||||
(delete request)
|
||||
(-> response
|
||||
(assoc :status 204)
|
||||
(assoc :body nil)
|
||||
(clear-auth-token-cookie)
|
||||
(clear-authenticated-cookie))))))
|
||||
|
||||
(def middleware-1
|
||||
(letfn [(wrap-handler [manager handler request respond raise]
|
||||
(try
|
||||
(let [claims (some->> (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
|
||||
(yrq/get-cookie request)
|
||||
(decode manager))
|
||||
request (cond-> request
|
||||
(some? claims)
|
||||
(assoc :session-token-claims claims))]
|
||||
(handler request respond raise))
|
||||
(catch Throwable _
|
||||
(handler request respond raise))))]
|
||||
|
||||
{:name :session-1
|
||||
:compile (fn [& _]
|
||||
(fn [handler manager]
|
||||
(partial wrap-handler manager handler)))}))
|
||||
|
||||
(def middleware-2
|
||||
(letfn [(wrap-handler [manager handler request respond raise]
|
||||
(-> (retrieve-session manager request)
|
||||
(p/finally (fn [session cause]
|
||||
(cond
|
||||
(some? cause)
|
||||
(raise cause)
|
||||
|
||||
(nil? session)
|
||||
(handler request respond raise)
|
||||
|
||||
:else
|
||||
(let [request (-> request
|
||||
(assoc :profile-id (:profile-id session))
|
||||
(assoc :session-id (:id session)))
|
||||
respond (cond-> respond
|
||||
(renew-session? session)
|
||||
(wrap-respond manager session))]
|
||||
(handler request respond raise)))))))
|
||||
|
||||
(retrieve-session [manager request]
|
||||
(let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
|
||||
cookie (yrq/get-cookie request cname)]
|
||||
(some->> (:value cookie) (read manager))))
|
||||
|
||||
(renew-session? [{:keys [updated-at] :as session}]
|
||||
(and (dt/instant? updated-at)
|
||||
(let [elapsed (dt/diff updated-at (dt/now))]
|
||||
(neg? (compare default-renewal-max-age elapsed)))))
|
||||
|
||||
;; Wrap respond with session renewal code
|
||||
(wrap-respond [respond manager session]
|
||||
(fn [response]
|
||||
(p/let [session (update! manager session)]
|
||||
(-> response
|
||||
(assign-auth-token-cookie session)
|
||||
(assign-authenticated-cookie session)
|
||||
(respond)))))]
|
||||
|
||||
{:name :session-2
|
||||
:compile (fn [& _]
|
||||
(fn [handler manager]
|
||||
(partial wrap-handler manager handler)))}))
|
||||
|
||||
;; --- IMPL
|
||||
|
||||
(defn- assign-auth-token-cookie
|
||||
(defn- create-session!
|
||||
[store profile-id user-agent]
|
||||
(let [params {:user-agent user-agent
|
||||
:profile-id profile-id}]
|
||||
(write-session store nil params)))
|
||||
|
||||
(defn- update-session!
|
||||
[store session]
|
||||
(update-session store session))
|
||||
|
||||
(defn- delete-session!
|
||||
[store {:keys [cookies] :as request}]
|
||||
(let [name (cf/get :auth-token-cookie-name default-auth-token-cookie-name)]
|
||||
(when-let [token (get-in cookies [name :value])]
|
||||
(delete-session store token))))
|
||||
|
||||
(defn- retrieve-session
|
||||
[store request]
|
||||
(let [cookie-name (cf/get :auth-token-cookie-name default-auth-token-cookie-name)]
|
||||
(when-let [cookie (yrq/get-cookie request cookie-name)]
|
||||
(read-session store (:value cookie)))))
|
||||
|
||||
(defn assign-auth-token-cookie
|
||||
[response {token :id updated-at :updated-at}]
|
||||
(let [max-age (cf/get :auth-token-cookie-max-age default-cookie-max-age)
|
||||
created-at (or updated-at (dt/now))
|
||||
@@ -260,7 +164,7 @@
|
||||
:secure secure?}]
|
||||
(update response :cookies assoc name cookie)))
|
||||
|
||||
(defn- assign-authenticated-cookie
|
||||
(defn assign-authenticated-cookie
|
||||
[response {updated-at :updated-at}]
|
||||
(let [max-age (cf/get :auth-token-cookie-max-age default-cookie-max-age)
|
||||
created-at (or updated-at (dt/now))
|
||||
@@ -281,23 +185,96 @@
|
||||
(string? domain)
|
||||
(update :cookies assoc name cookie))))
|
||||
|
||||
(defn- clear-auth-token-cookie
|
||||
(defn clear-auth-token-cookie
|
||||
[response]
|
||||
(let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name)]
|
||||
(update response :cookies assoc cname {:path "/" :value "" :max-age -1})))
|
||||
(let [name (cf/get :auth-token-cookie-name default-auth-token-cookie-name)]
|
||||
(update response :cookies assoc name {:path "/" :value "" :max-age -1})))
|
||||
|
||||
(defn- clear-authenticated-cookie
|
||||
[response]
|
||||
(let [cname (cf/get :authenticated-cookie-name default-authenticated-cookie-name)
|
||||
(let [name (cf/get :authenticated-cookie-name default-authenticated-cookie-name)
|
||||
domain (cf/get :authenticated-cookie-domain)]
|
||||
(cond-> response
|
||||
(string? domain)
|
||||
(update :cookies assoc cname {:domain domain :path "/" :value "" :max-age -1}))))
|
||||
(update :cookies assoc name {:domain domain :path "/" :value "" :max-age -1}))))
|
||||
|
||||
(defn- make-middleware
|
||||
[{:keys [store] :as cfg}]
|
||||
(letfn [;; Check if time reached for automatic session renewal
|
||||
(renew-session? [{:keys [updated-at] :as session}]
|
||||
(and (dt/instant? updated-at)
|
||||
(let [elapsed (dt/diff updated-at (dt/now))]
|
||||
(neg? (compare default-renewal-max-age elapsed)))))
|
||||
|
||||
;; Wrap respond with session renewal code
|
||||
(wrap-respond [respond session]
|
||||
(fn [response]
|
||||
(p/let [session (update-session! store session)]
|
||||
(-> response
|
||||
(assign-auth-token-cookie session)
|
||||
(assign-authenticated-cookie session)
|
||||
(respond)))))]
|
||||
|
||||
{:name :session
|
||||
:compile (fn [& _]
|
||||
(fn [handler]
|
||||
(fn [request respond raise]
|
||||
(try
|
||||
(-> (retrieve-session store request)
|
||||
(p/finally (fn [session cause]
|
||||
(cond
|
||||
(some? cause)
|
||||
(raise cause)
|
||||
|
||||
(nil? session)
|
||||
(handler request respond raise)
|
||||
|
||||
:else
|
||||
(let [request (-> request
|
||||
(assoc :profile-id (:profile-id session))
|
||||
(assoc :session-id (:id session)))
|
||||
respond (cond-> respond
|
||||
(renew-session? session)
|
||||
(wrap-respond session))]
|
||||
(handler request respond raise))))))
|
||||
|
||||
(catch Throwable cause
|
||||
(raise cause))))))}))
|
||||
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; TASK: SESSION GC
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; --- STATE INIT: SESSION
|
||||
|
||||
(s/def ::store #(satisfies? ISessionStore %))
|
||||
|
||||
(defmethod ig/pre-init-spec :app.http/session [_]
|
||||
(s/keys :req-un [::store]))
|
||||
|
||||
(defmethod ig/prep-key :app.http/session
|
||||
[_ cfg]
|
||||
(d/merge {:buffer-size 128}
|
||||
(d/without-nils cfg)))
|
||||
|
||||
(defmethod ig/init-key :app.http/session
|
||||
[_ {:keys [store] :as cfg}]
|
||||
(-> cfg
|
||||
(assoc :middleware (make-middleware cfg))
|
||||
(assoc :create (fn [profile-id]
|
||||
(fn [request response]
|
||||
(p/let [uagent (yrq/get-header request "user-agent")
|
||||
session (create-session! store profile-id uagent)]
|
||||
(-> response
|
||||
(assign-auth-token-cookie session)
|
||||
(assign-authenticated-cookie session))))))
|
||||
(assoc :delete (fn [request response]
|
||||
(p/do
|
||||
(delete-session! store request)
|
||||
(-> response
|
||||
(assoc :status 204)
|
||||
(assoc :body nil)
|
||||
(clear-auth-token-cookie)
|
||||
(clear-authenticated-cookie)))))))
|
||||
|
||||
;; --- STATE INIT: SESSION GC
|
||||
|
||||
(declare sql:delete-expired)
|
||||
|
||||
|
||||
@@ -15,25 +15,19 @@
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.http.client :as http]
[app.loggers.audit.tasks :as-alias tasks]
[app.loggers.webhooks :as-alias webhooks]
[app.main :as-alias main]
[app.metrics :as mtx]
[app.tokens :as tokens]
[app.util.async :as aa]
[app.util.time :as dt]
[app.worker :as wrk]
[clojure.core.async :as a]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[integrant.core :as ig]
[lambdaisland.uri :as u]
[promesa.core :as p]
[promesa.exec :as px]
[yetti.request :as yrq]))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; HELPERS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
[yetti.request :as yrq]
[yetti.response :as yrs]))

(defn parse-client-ip
[request]
@@ -55,22 +49,10 @@
(assoc (->> sk str/kebab (keyword "penpot")) v))))]
(reduce-kv process-param {} params)))

(def ^:private
profile-props
[:id
:is-active
:is-muted
:auth-backend
:email
:default-team-id
:default-project-id
:fullname
:lang])

(defn profile->props
[profile]
(-> profile
(select-keys profile-props)
(select-keys [:id :is-active :is-muted :auth-backend :email :default-team-id :default-project-id :fullname :lang])
(merge (:props profile))
(d/without-nils)))

@@ -97,119 +79,180 @@

(update event :props #(into {} xform %))))

;; --- SPECS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; HTTP Handler
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(declare persist-http-events)

(s/def ::profile-id ::us/uuid)
(s/def ::name ::us/string)
(s/def ::type ::us/string)
(s/def ::props (s/map-of ::us/keyword any?))
(s/def ::ip-addr ::us/string)
(s/def ::timestamp dt/instant?)
(s/def ::context (s/map-of ::us/keyword any?))

(s/def ::webhooks/event? ::us/boolean)
(s/def ::webhooks/batch-timeout ::dt/duration)
(s/def ::webhooks/batch-key
(s/or :fn fn? :str string? :kw keyword?))
(s/def ::frontend-event
(s/keys :req-un [::type ::name ::props ::timestamp ::profile-id]
:opt-un [::context]))

(s/def ::event
(s/keys :req-un [::type ::name ::profile-id]
:opt-un [::ip-addr ::props]
:opt [::webhooks/event?
::webhooks/batch-timeout
::webhooks/batch-key]))
(s/def ::frontend-events (s/every ::frontend-event))

(defmethod ig/init-key ::http-handler
[_ {:keys [executor pool] :as cfg}]
(if (or (db/read-only? pool) (not (contains? cf/flags :audit-log)))
(do
(l/warn :hint "audit log http handler disabled or db is read-only")
(fn [_ respond _]
(respond (yrs/response 204))))

(letfn [(handler [{:keys [profile-id] :as request}]
(let [events (->> (:events (:params request))
(remove #(not= profile-id (:profile-id %)))
(us/conform ::frontend-events))

ip-addr (parse-client-ip request)
cfg (-> cfg
(assoc :source "frontend")
(assoc :events events)
(assoc :ip-addr ip-addr))]
(persist-http-events cfg)))

(handle-error [cause]
(let [xdata (ex-data cause)]
(if (= :spec-validation (:code xdata))
(l/error ::l/raw (str "spec validation on persist-events:\n" (us/pretty-explain xdata)))
(l/error :hint "error on persist-events" :cause cause))))]

(fn [request respond _]
;; Fire and forget, log error in case of error
(-> (px/submit! executor #(handler request))
(p/catch handle-error))

(respond (yrs/response 204))))))

(defn- persist-http-events
[{:keys [pool events ip-addr source] :as cfg}]
(let [columns [:id :name :source :type :tracked-at :profile-id :ip-addr :props :context]
prepare-xf (map (fn [event]
[(uuid/next)
(:name event)
source
(:type event)
(:timestamp event)
(:profile-id event)
(db/inet ip-addr)
(db/tjson (:props event))
(db/tjson (d/without-nils (:context event)))]))]
(when (seq events)
(->> (into [] prepare-xf events)
(db/insert-multi! pool :audit-log columns)))))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; COLLECTOR
;; Collector
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

;; Defines a service that collects the audit/activity log using
;; internal database. Later this audit log can be transferred to
;; an external storage and data cleared.

(s/def ::collector
(s/keys :req [::wrk/executor ::db/pool]))
(declare persist-events)

(defmethod ig/pre-init-spec ::collector [_]
(s/keys :req [::db/pool ::wrk/executor ::mtx/metrics]))
(s/keys :req-un [::db/pool ::wrk/executor]))

(s/def ::ip-addr string?)
(s/def ::backend-event
(s/keys :req-un [::type ::name ::profile-id]
:opt-un [::ip-addr ::props]))

(def ^:private backend-event-xform
(comp
(filter #(us/valid? ::backend-event %))
(map clean-props)))

(defmethod ig/init-key ::collector
[_ {:keys [::db/pool] :as cfg}]
[_ {:keys [pool] :as cfg}]
(cond
(not (contains? cf/flags :audit-log))
(do
(l/info :hint "audit log collection disabled")
(constantly nil))

(db/read-only? pool)
(l/warn :hint "audit: disabled (db is read-only)")
(do
(l/warn :hint "audit log collection disabled, db is read-only")
(constantly nil))

:else
cfg))
(let [input (a/chan 512 backend-event-xform)
buffer (aa/batch input {:max-batch-size 100
:max-batch-age (* 10 1000) ; 10s
:init []})]
(l/info :hint "audit log collector initialized")
(a/go-loop []
(when-let [[_type events] (a/<! buffer)]
(let [res (a/<! (persist-events cfg events))]
(when (ex/exception? res)
(l/error :hint "error on persisting events" :cause res))
(recur))))

(defn- persist-event!
[pool event]
(us/verify! ::event event)
(let [params {:id (uuid/next)
:name (:name event)
:type (:type event)
:profile-id (:profile-id event)
:tracked-at (dt/now)
:ip-addr (:ip-addr event)
:props (:props event)}]
(fn [& {:keys [cmd] :as params}]
(case cmd
:stop
(a/close! input)

(when (contains? cf/flags :audit-log)
(db/insert! pool :audit-log
(-> params
(update :props db/tjson)
(update :ip-addr db/inet)
(assoc :source "backend"))))
:submit
(let [params (-> params
(dissoc :cmd)
(assoc :tracked-at (dt/now)))]
(when-not (a/offer! input params)
(l/warn :hint "activity channel is full"))))))))

(when (and (contains? cf/flags :webhooks)
(::webhooks/event? event))
(let [batch-key (::webhooks/batch-key event)
batch-timeout (::webhooks/batch-timeout event)]
(wrk/submit! ::wrk/conn pool
::wrk/task :process-webhook-event
::wrk/queue :webhooks
::wrk/max-retries 0
::wrk/delay (or batch-timeout 0)
::wrk/label (cond
(fn? batch-key) (batch-key (:props event))
(keyword? batch-key) (name batch-key)
(string? batch-key) batch-key
:else "default")
::wrk/dedupe true
::webhooks/event (-> params
(dissoc :ip-addr)
(dissoc :type)))))))

(defn submit!
"Submit audit event to the collector."
[{:keys [::wrk/executor ::db/pool] :as collector} params]
(us/assert! ::collector collector)
(->> (px/submit! executor (partial persist-event! pool (d/without-nils params)))
(p/merr (fn [cause]
(l/error :hint "audit: unexpected error processing event" :cause cause)
(p/resolved nil)))))
(defn- persist-events
[{:keys [pool executor] :as cfg} events]
(letfn [(event->row [event]
[(uuid/next)
(:name event)
(:type event)
(:profile-id event)
(:tracked-at event)
(some-> (:ip-addr event) db/inet)
(db/tjson (:props event))
"backend"])]
(aa/with-thread executor
(when (seq events)
(db/with-atomic [conn pool]
(db/insert-multi! conn :audit-log
[:id :name :type :profile-id :tracked-at :ip-addr :props :source]
(sequence (keep event->row) events)))))))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; TASK: ARCHIVE
;; Archive Task
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

;; This is a task responsible to send the accumulated events to
;; This is a task responsible to send the accumulated events to an
;; external service for archival.

(declare archive-events)

(s/def ::tasks/uri ::us/string)
(s/def ::http-client fn?)
(s/def ::uri ::us/string)
(s/def ::sprops map?)

(defmethod ig/pre-init-spec ::tasks/archive-task [_]
(s/keys :req [::db/pool ::main/props ::http/client]))
(defmethod ig/pre-init-spec ::archive-task [_]
(s/keys :req-un [::db/pool ::sprops ::http-client]
:opt-un [::uri]))

(defmethod ig/init-key ::tasks/archive
[_ cfg]
(fn [params]
(defmethod ig/init-key ::archive-task
[_ {:keys [uri] :as cfg}]
(fn [props]
;; NOTE: this let allows overwrite default configured values from
;; the repl, when manually invoking the task.
(let [enabled (or (contains? cf/flags :audit-log-archive)
(:enabled params false))
uri (cf/get :audit-log-archive-uri)
uri (or uri (:uri params))
cfg (assoc cfg ::uri uri)]
(:enabled props false))
uri (or uri (:uri props))
cfg (assoc cfg :uri uri)]

(when (and enabled (not uri))
(ex/raise :type :internal
@@ -221,21 +264,20 @@
(let [n (archive-events cfg)]
(if n
(do
(px/sleep 100)
(aa/thread-sleep 100)
(recur (+ total n)))
(when (pos? total)
(l/debug :hint "events archived" :total total)))))))))
(l/trace :hint "events chunk archived" :num total)))))))))

(def ^:private sql:retrieve-batch-of-audit-log
"select *
from audit_log
(def sql:retrieve-batch-of-audit-log
"select * from audit_log
where archived_at is null
order by created_at asc
limit 256
for update skip locked;")

(defn archive-events
[{:keys [::db/pool ::uri] :as cfg}]
[{:keys [pool uri sprops http-client] :as cfg}]
(letfn [(decode-row [{:keys [props ip-addr context] :as row}]
(cond-> row
(db/pgobject? props)
@@ -259,10 +301,9 @@
:context]))

(send [events]
(let [token (tokens/generate (::main/props cfg)
{:iss "authentication"
:iat (dt/now)
:uid uuid/zero})
(let [token (tokens/generate sprops {:iss "authentication"
:iat (dt/now)
:uid uuid/zero})
body (t/encode {:events events})
headers {"content-type" "application/transit+json"
"origin" (cf/get :public-uri)
@@ -272,7 +313,7 @@
:method :post
:headers headers
:body body}
resp (http/req! cfg params {:sync? true})]
resp (http-client params {:sync? true})]
(if (= (:status resp) 204)
true
(do
@@ -293,7 +334,7 @@
(map row->event))
events (into [] xform rows)]
(when-not (empty? events)
(l/trace :hint "archive events chunk" :uri uri :events (count events))
(l/debug :action "archive-events" :uri uri :events (count events))
(when (send events)
(mark-as-archived conn rows)
(count events)))))))
@@ -302,7 +343,7 @@
;; GC Task
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(def ^:private sql:clean-archived
(def sql:clean-archived
"delete from audit_log
where archived_at is not null")

@@ -313,10 +354,9 @@
(l/debug :hint "delete archived audit log entries" :deleted result)
result))

(defmethod ig/pre-init-spec ::tasks/gc [_]
(s/keys :req [::db/pool]))
(defmethod ig/pre-init-spec ::gc-task [_]
(s/keys :req-un [::db/pool]))

(defmethod ig/init-key ::tasks/gc
(defmethod ig/init-key ::gc-task
[_ cfg]
(fn [_]
(clean-archived cfg)))
(partial clean-archived cfg))

@@ -8,55 +8,58 @@
"A Loki integration."
(:require
[app.common.logging :as l]
[app.config :as cf]
[app.http.client :as http]
[app.loggers.zmq :as lzmq]
[app.common.spec :as us]
[app.config :as cfg]
[app.util.json :as json]
[clojure.core.async :as a]
[clojure.spec.alpha :as s]
[integrant.core :as ig]
[promesa.exec :as px]))
[integrant.core :as ig]))

(declare ^:private handle-event)
(declare ^:private start-rcv-loop)

(s/def ::uri ::us/string)
(s/def ::receiver fn?)
(s/def ::http-client fn?)

(defmethod ig/pre-init-spec ::reporter [_]
(s/keys :req [::http/client
::lzmq/receiver]))
(s/keys :req-un [ ::receiver ::http-client]
:opt-un [::uri]))

(defmethod ig/init-key ::reporter
[_ cfg]
(when-let [uri (cf/get :loggers-loki-uri)]
(px/thread
{:name "penpot/loki-reporter"}
(l/info :hint "reporter started" :uri uri)
(let [input (a/chan (a/dropping-buffer 2048))
cfg (assoc cfg ::uri uri)]
[_ {:keys [receiver uri] :as cfg}]
(when uri
(l/info :msg "initializing loki reporter" :uri uri)
(let [input (a/chan (a/dropping-buffer 2048))]
(receiver :sub input)

(try
(lzmq/sub! (::lzmq/receiver cfg) input)
(loop []
(when-let [msg (a/<!! input)]
(handle-event cfg msg)
(recur)))
(doto (Thread. #(start-rcv-loop cfg input))
(.setDaemon true)
(.setName "penpot/loki-sender")
(.start))

(catch InterruptedException _
(l/debug :hint "reporter interrupted"))
(catch Throwable cause
(l/error :hint "unexpected exception"
:cause cause))
(finally
(a/close! input)
(l/info :hint "reporter terminated")))))))
input)))

(defmethod ig/halt-key! ::reporter
[_ thread]
(some-> thread px/interrupt!))
[_ output]
(when output
(a/close! output)))

(defn- start-rcv-loop
[cfg input]
(loop []
(let [msg (a/<!! input)]
(when-not (nil? msg)
(handle-event cfg msg)
(recur))))

(l/info :msg "stopping error reporting loop"))

(defn- prepare-payload
[event]
(let [labels {:host (cf/get :host)
:tenant (cf/get :tenant)
:version (:full cf/version)
(let [labels {:host (cfg/get :host)
:tenant (cfg/get :tenant)
:version (:full cfg/version)
:logger (:logger/name event)
:level (:logger/level event)}]
{:streams
@@ -66,15 +69,15 @@
(when-let [error (:trace event)]
(str "\n" error)))]]}]}))


(defn- make-request
[{:keys [::uri] :as cfg} payload]
(http/req! cfg
{:uri uri
:timeout 3000
:method :post
:headers {"content-type" "application/json"}
:body (json/encode payload)}
{:sync? true}))
[{:keys [http-client uri] :as cfg} payload]
(http-client {:uri uri
:timeout 3000
:method :post
:headers {"content-type" "application/json"}
:body (json/write payload)}
{:sync? true}))

(defn- handle-event
[cfg event]
@@ -82,6 +85,7 @@
(let [payload (prepare-payload event)
response (make-request cfg payload)]
(when-not (= 204 (:status response))
(map? response)
(l/error :hint "error on sending log to loki (unexpected response)"
:response (pr-str response))))
(catch Throwable cause

@@ -9,69 +9,67 @@
(:require
[app.common.logging :as l]
[app.config :as cf]
[app.http.client :as http]
[app.loggers.database :as ldb]
[app.loggers.zmq :as lzmq]
[app.util.json :as json]
[clojure.core.async :as a]
[clojure.spec.alpha :as s]
[integrant.core :as ig]
[promesa.exec :as px]))
[promesa.core :as p]))

(defonce enabled (atom true))

(defn- send-mattermost-notification!
[cfg {:keys [host id public-uri] :as event}]
(let [text (str "Exception on (host: " host ", url: " public-uri "/dbg/error/" id ")\n"
(when-let [pid (:profile-id event)]
(str "- profile-id: #uuid-" pid "\n")))
resp (http/req! cfg
{:uri (cf/get :error-report-webhook)
:method :post
:headers {"content-type" "application/json"}
:body (json/encode-str {:text text})}
{:sync? true})]

(when (not= 200 (:status resp))
(l/warn :hint "error on sending data"
:response (pr-str resp)))))
[{:keys [http-client] :as cfg} {:keys [host id public-uri] :as event}]
(let [uri (:uri cfg)
text (str "Exception on (host: " host ", url: " public-uri "/dbg/error/" id ")\n"
(when-let [pid (:profile-id event)]
(str "- profile-id: #uuid-" pid "\n")))]
(p/then
(http-client {:uri uri
:method :post
:headers {"content-type" "application/json"}
:body (json/write-str {:text text})})
(fn [{:keys [status] :as rsp}]
(when (not= status 200)
(l/warn :hint "error on sending data to mattermost"
:response (pr-str rsp)))))))

(defn handle-event
[cfg event]
(try
(let [event (ldb/parse-event event)]
(when @enabled
(send-mattermost-notification! cfg event)))
(catch Throwable cause
(l/warn :hint "unhandled error"
:cause cause))))
(let [ch (a/chan)]
(-> (p/let [event (ldb/parse-event event)]
(send-mattermost-notification! cfg event))
(p/finally (fn [_ cause]
(when cause
(l/warn :hint "unexpected exception on error reporter" :cause cause))
(a/close! ch))))
ch))

(s/def ::http-client fn?)
(s/def ::uri ::cf/error-report-webhook)

(defmethod ig/pre-init-spec ::reporter [_]
(s/keys :req [::http/client
::lzmq/receiver]))
(s/keys :req-un [::http-client ::receiver]
:opt-un [::uri]))

(defmethod ig/init-key ::reporter
[_ cfg]
(when-let [uri (cf/get :error-report-webhook)]
(px/thread
{:name "penpot/mattermost-reporter"}
(l/info :msg "initializing error reporter" :uri uri)
(let [input (a/chan (a/sliding-buffer 128)
(filter #(= (:logger/level %) "error")))]
(try
(lzmq/sub! (::lzmq/receiver cfg) input)
(loop []
(when-let [msg (a/<!! input)]
(handle-event cfg msg)
(recur)))
(catch InterruptedException _
(l/debug :hint "reporter interrupted"))
(catch Throwable cause
(l/error :hint "unexpected error" :cause cause))
(finally
(a/close! input)
(l/info :hint "reporter terminated")))))))
[_ {:keys [receiver uri] :as cfg}]
(when uri
(l/info :msg "initializing mattermost error reporter" :uri uri)
(let [output (a/chan (a/sliding-buffer 128)
(filter (fn [event]
(= (:logger/level event) "error"))))]
(receiver :sub output)
(a/go-loop []
(let [msg (a/<! output)]
(if (nil? msg)
(l/info :msg "stopping error reporting loop")
(do
(a/<! (handle-event cfg msg))
(recur)))))
output)))

(defmethod ig/halt-key! ::reporter
[_ thread]
(some-> thread px/interrupt!))
[_ output]
(when output
(a/close! output)))

@@ -1,174 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC

(ns app.loggers.webhooks
"A mattermost integration for error reporting."
(:require
[app.common.data :as d]
[app.common.logging :as l]
[app.common.transit :as t]
[app.common.uri :as uri]
[app.config :as cf]
[app.db :as db]
[app.http.client :as http]
[app.util.json :as json]
[app.util.time :as dt]
[app.worker :as wrk]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[integrant.core :as ig]))

;; --- PROC

(defn- lookup-webhooks-by-team
[pool team-id]
(db/exec! pool ["select * from webhook where team_id=? and is_active=true" team-id]))

(defn- lookup-webhooks-by-project
[pool project-id]
(let [sql [(str "select * from webhook as w"
" join project as p on (p.team_id = w.team_id)"
" where p.id = ? and w.is_active = true")
project-id]]
(db/exec! pool sql)))

(defn- lookup-webhooks-by-file
[pool file-id]
(let [sql [(str "select * from webhook as w"
" join project as p on (p.team_id = w.team_id)"
" join file as f on (f.project_id = p.id)"
" where f.id = ? and w.is_active = true")
file-id]]
(db/exec! pool sql)))

(defn- lookup-webhooks
[{:keys [::db/pool]} {:keys [props] :as event}]
(or (some->> (:team-id props) (lookup-webhooks-by-team pool))
(some->> (:project-id props) (lookup-webhooks-by-project pool))
(some->> (:file-id props) (lookup-webhooks-by-file pool))))

(defmethod ig/pre-init-spec ::process-event-handler [_]
(s/keys :req [::db/pool]))

(defmethod ig/init-key ::process-event-handler
[_ {:keys [::db/pool] :as cfg}]
(fn [{:keys [props] :as task}]
(let [event (::event props)]

(l/debug :hint "process webhook event"
:name (:name event))

(when-let [items (lookup-webhooks cfg event)]
;; (app.common.pprint/pprint items)
(l/trace :hint "webhooks found for event" :total (count items))

(db/with-atomic [conn pool]
(doseq [item items]
(wrk/submit! ::wrk/conn conn
::wrk/task :run-webhook
::wrk/queue :webhooks
::wrk/max-retries 3
::event event
::config item)))))))

;; --- RUN

(declare interpret-exception)
(declare interpret-response)

(def ^:private json-mapper
(json/mapper
{:encode-key-fn str/camel
:decode-key-fn (comp keyword str/kebab)
:pretty true}))

(defmethod ig/pre-init-spec ::run-webhook-handler [_]
(s/keys :req [::http/client ::db/pool]))

(defmethod ig/prep-key ::run-webhook-handler
[_ cfg]
(merge {::max-errors 3} (d/without-nils cfg)))

(defmethod ig/init-key ::run-webhook-handler
[_ {:keys [::db/pool ::max-errors] :as cfg}]
(letfn [(update-webhook! [whook err]
(if err
(let [sql [(str "update webhook "
" set error_code=?, "
" error_count=error_count+1 "
" where id=?")
err
(:id whook)]
res (db/exec-one! pool sql {:return-keys true})]
(when (>= (:error-count res) max-errors)
(db/update! pool :webhook {:is-active false} {:id (:id whook)})))

(db/update! pool :webhook
{:updated-at (dt/now)
:error-code nil
:error-count 0}
{:id (:id whook)})))

(report-delivery! [whook req rsp err]
(db/insert! pool :webhook-delivery
{:webhook-id (:id whook)
:created-at (dt/now)
:error-code err
:req-data (db/tjson req)
:rsp-data (db/tjson rsp)}))]

(fn [{:keys [props] :as task}]
(let [event (::event props)
whook (::config props)

body (case (:mtype whook)
"application/json" (json/encode-str event json-mapper)
"application/transit+json" (t/encode-str event)
"application/x-www-form-urlencoded" (uri/map->query-string event))]

(l/debug :hint "run webhook"
:event-name (:name event)
:webhook-id (:id whook)
:webhook-uri (:uri whook)
:webhook-mtype (:mtype whook))

(let [req {:uri (:uri whook)
:headers {"content-type" (:mtype whook)
"user-agent" (str/ffmt "penpot/%" (:main cf/version))}
:timeout (dt/duration "4s")
:method :post
:body body}]
(try
(let [rsp (http/req! cfg req {:response-type :input-stream :sync? true})
err (interpret-response rsp)]
(report-delivery! whook req rsp err)
(update-webhook! whook err))
(catch Throwable cause
(let [err (interpret-exception cause)]
(report-delivery! whook req nil err)
(update-webhook! whook err)
(when (= err "unknown")
(l/error :hint "unknown error on webhook request"
:cause cause))))))))))

(defn interpret-response
[{:keys [status] :as response}]
(when-not (or (= 200 status)
(= 204 status))
(str/ffmt "unexpected-status:%" status)))

(defn interpret-exception
[cause]
(cond
(instance? javax.net.ssl.SSLHandshakeException cause)
"ssl-validation-error"

(instance? java.net.ConnectException cause)
"connection-error"

(instance? java.net.http.HttpConnectTimeoutException cause)
"timeout"
))
@@ -9,15 +9,13 @@
(:require
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.config :as cf]
[app.loggers.zmq.receiver :as-alias receiver]
[app.common.spec :as us]
[app.util.json :as json]
[app.util.time :as dt]
[clojure.core.async :as a]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[integrant.core :as ig]
[promesa.exec :as px])
[integrant.core :as ig])
(:import
org.zeromq.SocketType
org.zeromq.ZMQ$Socket
@@ -26,56 +24,38 @@
(declare prepare)
(declare start-rcv-loop)

(s/def ::endpoint ::us/string)

(defmethod ig/pre-init-spec ::receiver [_]
(s/keys :opt-un [::endpoint]))

(defmethod ig/init-key ::receiver
[_ cfg]
(let [uri (cf/get :loggers-zmq-uri)
buffer (a/chan 1)
[_ {:keys [endpoint] :as cfg}]
(l/info :msg "initializing ZMQ receiver" :bind endpoint)
(let [buffer (a/chan 1)
output (a/chan 1 (comp (filter map?)
(keep prepare)))
mult (a/mult output)
thread (when uri
(px/thread
{:name "penpot/zmq-receiver"
:daemon false}
(l/info :hint "receiver started")
(try
(start-rcv-loop buffer uri)
(catch InterruptedException _
(l/debug :hint "receiver interrupted"))
(catch java.lang.IllegalStateException cause
(if (= "errno 4" (ex-message cause))
(l/debug :hint "receiver interrupted")
(l/error :hint "unhandled error" :cause cause)))
(catch Throwable cause
(l/error :hint "unhandled error" :cause cause))
(finally
(l/info :hint "receiver terminated")))))]
mult (a/mult output)]
(when endpoint
(let [thread (Thread. #(start-rcv-loop {:out buffer :endpoint endpoint}))]
(.setDaemon thread false)
(.setName thread "penpot/zmq-logger-receiver")
(.start thread)))

(a/pipe buffer output)
(-> cfg
(assoc ::receiver/mult mult)
(assoc ::receiver/thread thread)
(assoc ::receiver/output output)
(assoc ::receiver/buffer buffer))))

(s/def ::receiver/mult some?)
(s/def ::receiver/thread #(instance? Thread %))
(s/def ::receiver/output some?)
(s/def ::receiver/buffer some?)
(s/def ::receiver
(s/keys :req [::receiver/mult
::receiver/thread
::receiver/output
::receiver/buffer]))

(defn sub!
[{:keys [::receiver/mult]} ch]
(a/tap mult ch))
(with-meta
(fn [cmd ch]
(case cmd
:sub (a/tap mult ch)
:unsub (a/untap mult ch))
ch)
{::output output
::buffer buffer
::mult mult})))

(defmethod ig/halt-key! ::receiver
[_ {:keys [::receiver/buffer ::receiver/thread]}]
(some-> thread px/interrupt!)
(some-> buffer a/close!))
[_ f]
(a/close! (::buffer (meta f))))

(def ^:private json-mapper
(json/mapper
@@ -83,23 +63,23 @@
:decode-key-fn (comp keyword str/kebab)}))

(defn- start-rcv-loop
[output endpoint]
(let [zctx (ZContext. 1)
socket (.. zctx (createSocket SocketType/SUB))]
(try
(.. socket (connect ^String endpoint))
(.. socket (subscribe ""))
(.. socket (setReceiveTimeOut 5000))
(loop []
(let [msg (.recv ^ZMQ$Socket socket)
msg (ex/ignoring (json/decode msg json-mapper))
msg (if (nil? msg) :empty msg)]
(when (a/>!! output msg)
(recur))))

(finally
(.close ^java.lang.AutoCloseable socket)
(.destroy ^ZContext zctx)))))
([] (start-rcv-loop nil))
([{:keys [out endpoint] :or {endpoint "tcp://localhost:5556"}}]
(let [out (or out (a/chan 1))
zctx (ZContext. 1)
socket (.. zctx (createSocket SocketType/SUB))]
(.. socket (connect ^String endpoint))
(.. socket (subscribe ""))
(.. socket (setReceiveTimeOut 5000))
(loop []
(let [msg (.recv ^ZMQ$Socket socket)
msg (ex/ignoring (json/read msg json-mapper))
msg (if (nil? msg) :empty msg)]
(if (a/>!! out msg)
(recur)
(do
(.close ^java.lang.AutoCloseable socket)
(.destroy ^ZContext zctx))))))))

(s/def ::logger-name string?)
(s/def ::level string?)

@@ -6,227 +6,103 @@
|
||||
|
||||
(ns app.main
|
||||
(:require
|
||||
[app.auth.oidc :as-alias oidc]
|
||||
[app.auth.oidc.providers :as-alias oidc.providers]
|
||||
[app.auth.oidc]
|
||||
[app.common.logging :as l]
|
||||
[app.config :as cf]
|
||||
[app.db :as-alias db]
|
||||
[app.http.client :as-alias http.client]
|
||||
[app.http.session :as-alias http.session]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.audit.tasks :as-alias audit.tasks]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.loggers.zmq :as-alias lzmq]
|
||||
[app.metrics :as-alias mtx]
|
||||
[app.metrics.definition :as-alias mdef]
|
||||
[app.redis :as-alias rds]
|
||||
[app.storage :as-alias sto]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig])
|
||||
(:gen-class))
|
||||
|
||||
(def default-metrics
|
||||
{:update-file-changes
|
||||
{::mdef/name "penpot_rpc_update_file_changes_total"
|
||||
::mdef/help "A total number of changes submitted to update-file."
|
||||
::mdef/type :counter}
|
||||
|
||||
:update-file-bytes-processed
|
||||
{::mdef/name "penpot_rpc_update_file_bytes_processed_total"
|
||||
::mdef/help "A total number of bytes processed by update-file."
|
||||
::mdef/type :counter}
|
||||
|
||||
:rpc-mutation-timing
|
||||
{::mdef/name "penpot_rpc_mutation_timing"
|
||||
::mdef/help "RPC mutation method call timing."
|
||||
::mdef/labels ["name"]
|
||||
::mdef/type :histogram}
|
||||
|
||||
:rpc-command-timing
|
||||
{::mdef/name "penpot_rpc_command_timing"
|
||||
::mdef/help "RPC command method call timing."
|
||||
::mdef/labels ["name"]
|
||||
::mdef/type :histogram}
|
||||
|
||||
:rpc-query-timing
|
||||
{::mdef/name "penpot_rpc_query_timing"
|
||||
::mdef/help "RPC query method call timing."
|
||||
::mdef/labels ["name"]
|
||||
::mdef/type :histogram}
|
||||
|
||||
:websocket-active-connections
|
||||
{::mdef/name "penpot_websocket_active_connections"
|
||||
::mdef/help "Active websocket connections gauge"
|
||||
::mdef/type :gauge}
|
||||
|
||||
:websocket-messages-total
|
||||
{::mdef/name "penpot_websocket_message_total"
|
||||
::mdef/help "Counter of processed messages."
|
||||
::mdef/labels ["op"]
|
||||
::mdef/type :counter}
|
||||
|
||||
:websocket-session-timing
|
||||
{::mdef/name "penpot_websocket_session_timing"
|
||||
::mdef/help "Websocket session timing (seconds)."
|
||||
::mdef/type :summary}
|
||||
|
||||
:session-update-total
|
||||
{::mdef/name "penpot_http_session_update_total"
|
||||
::mdef/help "A counter of session update batch events."
|
||||
::mdef/type :counter}
|
||||
|
||||
:tasks-timing
|
||||
{::mdef/name "penpot_tasks_timing"
|
||||
::mdef/help "Background tasks timing (milliseconds)."
|
||||
::mdef/labels ["name"]
|
||||
::mdef/type :summary}
|
||||
|
||||
:redis-eval-timing
|
||||
{::mdef/name "penpot_redis_eval_timing"
|
||||
::mdef/help "Redis EVAL commands execution timings (ms)"
|
||||
::mdef/labels ["name"]
|
||||
::mdef/type :summary}
|
||||
|
||||
:rpc-climit-queue-size
{::mdef/name "penpot_rpc_climit_queue_size"
::mdef/help "Current number of queued submissions on the CLIMIT."
::mdef/labels ["name"]
::mdef/type :gauge}

:rpc-climit-concurrency
{::mdef/name "penpot_rpc_climit_concurrency"
::mdef/help "Current number of used concurrency capacity on the CLIMIT"
::mdef/labels ["name"]
::mdef/type :gauge}

:rpc-climit-timing
{::mdef/name "penpot_rpc_climit_timing"
::mdef/help "Summary of the time between queuing and executing on the CLIMIT"
::mdef/labels ["name"]
::mdef/type :summary}

:audit-http-handler-queue-size
{::mdef/name "penpot_audit_http_handler_queue_size"
::mdef/help "Current number of queued submissions on the audit log http handler"
::mdef/labels []
::mdef/type :gauge}

:audit-http-handler-concurrency
{::mdef/name "penpot_audit_http_handler_concurrency"
::mdef/help "Current number of used concurrency capacity on the audit log http handler"
::mdef/labels []
::mdef/type :gauge}

:audit-http-handler-timing
{::mdef/name "penpot_audit_http_handler_timing"
::mdef/help "Summary of the time between queuing and executing on the audit log http handler"
::mdef/labels []
::mdef/type :summary}

:executors-active-threads
{::mdef/name "penpot_executors_active_threads"
::mdef/help "Current number of threads available in the executor service."
::mdef/labels ["name"]
::mdef/type :gauge}

:executors-completed-tasks
{::mdef/name "penpot_executors_completed_tasks_total"
::mdef/help "Approximate number of completed tasks by the executor."
::mdef/labels ["name"]
::mdef/type :counter}

:executors-running-threads
{::mdef/name "penpot_executors_running_threads"
::mdef/help "Current number of threads with state RUNNING."
::mdef/labels ["name"]
::mdef/type :gauge}

:executors-queued-submissions
{::mdef/name "penpot_executors_queued_submissions"
::mdef/help "Current number of queued submissions."
::mdef/labels ["name"]
::mdef/type :gauge}})

(def system-config
{::db/pool
{:app.db/pool
{:uri (cf/get :database-uri)
:username (cf/get :database-username)
:password (cf/get :database-password)
:read-only (cf/get :database-readonly false)
:metrics (ig/ref ::mtx/metrics)
:metrics (ig/ref :app.metrics/metrics)
:migrations (ig/ref :app.migrations/all)
:name :main
:min-size (cf/get :database-min-pool-size 0)
:max-size (cf/get :database-max-pool-size 60)}

;; Default thread pool for IO operations
::wrk/executor
{::wrk/parallelism (cf/get :default-executor-parallelism 100)}
[::default :app.worker/executor]
{:parallelism (cf/get :default-executor-parallelism 70)}

::wrk/scheduled-executor
{::wrk/parallelism (cf/get :scheduled-executor-parallelism 20)}
;; Dedicated thread pool for background tasks execution.
[::worker :app.worker/executor]
{:parallelism (cf/get :worker-executor-parallelism 20)}

::wrk/monitor
{::mtx/metrics (ig/ref ::mtx/metrics)
::wrk/name "default"
::wrk/executor (ig/ref ::wrk/executor)}
:app.worker/scheduler
{:parallelism 1
:prefix :scheduler}

:app.worker/executors
{:default (ig/ref [::default :app.worker/executor])
:worker (ig/ref [::worker :app.worker/executor])}

:app.worker/executor-monitor
{:metrics (ig/ref :app.metrics/metrics)
:executors (ig/ref :app.worker/executors)}

:app.migrations/migrations
{}

::mtx/metrics
{:default default-metrics}
:app.metrics/metrics
{}

:app.migrations/all
{:main (ig/ref :app.migrations/migrations)}

::rds/redis
{::rds/uri (cf/get :redis-uri)
::mtx/metrics (ig/ref ::mtx/metrics)}
:app.redis/redis
{:uri (cf/get :redis-uri)
:metrics (ig/ref :app.metrics/metrics)}

:app.msgbus/msgbus
{:backend (cf/get :msgbus-backend :redis)
:executor (ig/ref ::wrk/executor)
:redis (ig/ref ::rds/redis)}
:executor (ig/ref [::default :app.worker/executor])
:redis (ig/ref :app.redis/redis)}

:app.storage.tmp/cleaner
{::wrk/executor (ig/ref ::wrk/executor)
::wrk/scheduled-executor (ig/ref ::wrk/scheduled-executor)}
{:executor (ig/ref [::worker :app.worker/executor])
:scheduler (ig/ref :app.worker/scheduler)}

::sto/gc-deleted-task
{:pool (ig/ref ::db/pool)
:storage (ig/ref ::sto/storage)
:executor (ig/ref ::wrk/executor)}
:app.storage/gc-deleted-task
{:pool (ig/ref :app.db/pool)
:storage (ig/ref :app.storage/storage)
:executor (ig/ref [::worker :app.worker/executor])}

::sto/gc-touched-task
{:pool (ig/ref ::db/pool)}
:app.storage/gc-touched-task
{:pool (ig/ref :app.db/pool)}

::http.client/client
{::wrk/executor (ig/ref ::wrk/executor)}
:app.http/client
{:executor (ig/ref [::default :app.worker/executor])}

:app.http.session/manager
{:pool (ig/ref ::db/pool)
:app.http/session
{:store (ig/ref :app.http.session/store)}

:app.http.session/store
{:pool (ig/ref :app.db/pool)
:sprops (ig/ref :app.setup/props)
:executor (ig/ref ::wrk/executor)}
:executor (ig/ref [::default :app.worker/executor])}

:app.http.session/gc-task
{:pool (ig/ref ::db/pool)
{:pool (ig/ref :app.db/pool)
:max-age (cf/get :auth-token-cookie-max-age)}

:app.http.awsns/handler
{::props (ig/ref :app.setup/props)
::db/pool (ig/ref ::db/pool)
::http.client/client (ig/ref ::http.client/client)
::wrk/executor (ig/ref ::wrk/executor)}
{:sprops (ig/ref :app.setup/props)
:pool (ig/ref :app.db/pool)
:http-client (ig/ref :app.http/client)
:executor (ig/ref [::worker :app.worker/executor])}

:app.http/server
{:port (cf/get :http-server-port)
:host (cf/get :http-server-host)
:router (ig/ref :app.http/router)
:metrics (ig/ref ::mtx/metrics)
:executor (ig/ref ::wrk/executor)
:metrics (ig/ref :app.metrics/metrics)
:executor (ig/ref [::default :app.worker/executor])
:io-threads (cf/get :http-server-io-threads)
:max-body-size (cf/get :http-server-max-body-size)
:max-multipart-body-size (cf/get :http-server-max-multipart-body-size)}
@@ -245,98 +121,117 @@
:bind-password (cf/get :ldap-bind-password)
|
||||
:enabled? (contains? cf/flags :login-with-ldap)}
|
||||
|
||||
::oidc.providers/google
|
||||
{}
|
||||
:app.auth.oidc/google-provider
|
||||
{:enabled? (contains? cf/flags :login-with-google)
|
||||
:client-id (cf/get :google-client-id)
|
||||
:client-secret (cf/get :google-client-secret)}
|
||||
|
||||
::oidc.providers/github
|
||||
{::http.client/client (ig/ref ::http.client/client)}
|
||||
:app.auth.oidc/github-provider
|
||||
{:enabled? (contains? cf/flags :login-with-github)
|
||||
:http-client (ig/ref :app.http/client)
|
||||
:client-id (cf/get :github-client-id)
|
||||
:client-secret (cf/get :github-client-secret)}
|
||||
|
||||
::oidc.providers/gitlab
|
||||
{}
|
||||
:app.auth.oidc/gitlab-provider
|
||||
{:enabled? (contains? cf/flags :login-with-gitlab)
|
||||
:base-uri (cf/get :gitlab-base-uri "https://gitlab.com")
|
||||
:client-id (cf/get :gitlab-client-id)
|
||||
:client-secret (cf/get :gitlab-client-secret)}
|
||||
|
||||
::oidc.providers/generic
|
||||
{::http.client/client (ig/ref ::http.client/client)}
|
||||
:app.auth.oidc/generic-provider
|
||||
{:enabled? (contains? cf/flags :login-with-oidc)
|
||||
:http-client (ig/ref :app.http/client)
|
||||
|
||||
::oidc/routes
|
||||
{::http.client/client (ig/ref ::http.client/client)
|
||||
::db/pool (ig/ref ::db/pool)
|
||||
::props (ig/ref :app.setup/props)
|
||||
::wrk/executor (ig/ref ::wrk/executor)
|
||||
::oidc/providers {:google (ig/ref ::oidc.providers/google)
|
||||
:github (ig/ref ::oidc.providers/github)
|
||||
:gitlab (ig/ref ::oidc.providers/gitlab)
|
||||
:oidc (ig/ref ::oidc.providers/generic)}
|
||||
::audit/collector (ig/ref ::audit/collector)
|
||||
::http.session/session (ig/ref :app.http.session/manager)}
|
||||
:client-id (cf/get :oidc-client-id)
|
||||
:client-secret (cf/get :oidc-client-secret)
|
||||
|
||||
:base-uri (cf/get :oidc-base-uri)
|
||||
|
||||
:token-uri (cf/get :oidc-token-uri)
|
||||
:auth-uri (cf/get :oidc-auth-uri)
|
||||
:user-uri (cf/get :oidc-user-uri)
|
||||
|
||||
:scopes (cf/get :oidc-scopes)
|
||||
:roles-attr (cf/get :oidc-roles-attr)
|
||||
:roles (cf/get :oidc-roles)}
|
||||
|
||||
:app.auth.oidc/routes
|
||||
{:providers {:google (ig/ref :app.auth.oidc/google-provider)
|
||||
:github (ig/ref :app.auth.oidc/github-provider)
|
||||
:gitlab (ig/ref :app.auth.oidc/gitlab-provider)
|
||||
:oidc (ig/ref :app.auth.oidc/generic-provider)}
|
||||
:sprops (ig/ref :app.setup/props)
|
||||
:http-client (ig/ref :app.http/client)
|
||||
:pool (ig/ref :app.db/pool)
|
||||
:session (ig/ref :app.http/session)
|
||||
:public-uri (cf/get :public-uri)
|
||||
:executor (ig/ref [::default :app.worker/executor])}
|
||||
|
||||
;; TODO: revisit the dependencies of this service; many of them look unused
|
||||
:app.http/router
|
||||
{:assets (ig/ref :app.http.assets/handlers)
|
||||
:feedback (ig/ref :app.http.feedback/handler)
|
||||
:session (ig/ref :app.http.session/manager)
|
||||
:session (ig/ref :app.http/session)
|
||||
:awsns-handler (ig/ref :app.http.awsns/handler)
|
||||
:debug-routes (ig/ref :app.http.debug/routes)
|
||||
:oidc-routes (ig/ref ::oidc/routes)
|
||||
:oidc-routes (ig/ref :app.auth.oidc/routes)
|
||||
:ws (ig/ref :app.http.websocket/handler)
|
||||
:metrics (ig/ref ::mtx/metrics)
|
||||
:metrics (ig/ref :app.metrics/metrics)
|
||||
:public-uri (cf/get :public-uri)
|
||||
:storage (ig/ref ::sto/storage)
|
||||
:storage (ig/ref :app.storage/storage)
|
||||
:audit-handler (ig/ref :app.loggers.audit/http-handler)
|
||||
:rpc-routes (ig/ref :app.rpc/routes)
|
||||
:doc-routes (ig/ref :app.rpc.doc/routes)
|
||||
:executor (ig/ref ::wrk/executor)}
|
||||
:executor (ig/ref [::default :app.worker/executor])}
|
||||
|
||||
:app.http.debug/routes
|
||||
{:pool (ig/ref ::db/pool)
|
||||
:executor (ig/ref ::wrk/executor)
|
||||
:storage (ig/ref ::sto/storage)
|
||||
:session (ig/ref :app.http.session/manager)}
|
||||
{:pool (ig/ref :app.db/pool)
|
||||
:executor (ig/ref [::worker :app.worker/executor])
|
||||
:storage (ig/ref :app.storage/storage)
|
||||
:session (ig/ref :app.http/session)}
|
||||
|
||||
:app.http.websocket/handler
|
||||
{:pool (ig/ref ::db/pool)
|
||||
:metrics (ig/ref ::mtx/metrics)
|
||||
{:pool (ig/ref :app.db/pool)
|
||||
:metrics (ig/ref :app.metrics/metrics)
|
||||
:msgbus (ig/ref :app.msgbus/msgbus)}
|
||||
|
||||
:app.http.assets/handlers
|
||||
{:metrics (ig/ref ::mtx/metrics)
|
||||
{:metrics (ig/ref :app.metrics/metrics)
|
||||
:assets-path (cf/get :assets-path)
|
||||
:storage (ig/ref ::sto/storage)
|
||||
:executor (ig/ref ::wrk/executor)
|
||||
:storage (ig/ref :app.storage/storage)
|
||||
:executor (ig/ref [::default :app.worker/executor])
|
||||
:cache-max-age (dt/duration {:hours 24})
|
||||
:signature-max-age (dt/duration {:hours 24 :minutes 5})}
|
||||
|
||||
:app.http.feedback/handler
|
||||
{:pool (ig/ref ::db/pool)
|
||||
:executor (ig/ref ::wrk/executor)}
|
||||
{:pool (ig/ref :app.db/pool)
|
||||
:executor (ig/ref [::default :app.worker/executor])}
|
||||
|
||||
:app.rpc/climit
|
||||
{:metrics (ig/ref ::mtx/metrics)
|
||||
:executor (ig/ref ::wrk/executor)}
|
||||
:app.rpc/semaphores
|
||||
{:metrics (ig/ref :app.metrics/metrics)
|
||||
:executor (ig/ref [::default :app.worker/executor])}
|
||||
|
||||
:app.rpc/rlimit
|
||||
{:executor (ig/ref ::wrk/executor)
|
||||
:scheduled-executor (ig/ref ::wrk/scheduled-executor)}
|
||||
{:executor (ig/ref [::worker :app.worker/executor])
|
||||
:scheduler (ig/ref :app.worker/scheduler)}
|
||||
|
||||
:app.rpc/methods
|
||||
{::audit/collector (ig/ref ::audit/collector)
|
||||
::http.client/client (ig/ref ::http.client/client)
|
||||
::db/pool (ig/ref ::db/pool)
|
||||
::wrk/executor (ig/ref ::wrk/executor)
|
||||
|
||||
:pool (ig/ref ::db/pool)
|
||||
:session (ig/ref :app.http.session/manager)
|
||||
:sprops (ig/ref :app.setup/props)
|
||||
:metrics (ig/ref ::mtx/metrics)
|
||||
:storage (ig/ref ::sto/storage)
|
||||
:msgbus (ig/ref :app.msgbus/msgbus)
|
||||
:public-uri (cf/get :public-uri)
|
||||
:redis (ig/ref ::rds/redis)
|
||||
:ldap (ig/ref :app.auth.ldap/provider)
|
||||
:http-client (ig/ref ::http.client/client)
|
||||
:climit (ig/ref :app.rpc/climit)
|
||||
:rlimit (ig/ref :app.rpc/rlimit)
|
||||
:executor (ig/ref ::wrk/executor)
|
||||
:templates (ig/ref :app.setup/builtin-templates)
|
||||
{:pool (ig/ref :app.db/pool)
|
||||
:session (ig/ref :app.http/session)
|
||||
:sprops (ig/ref :app.setup/props)
|
||||
:metrics (ig/ref :app.metrics/metrics)
|
||||
:storage (ig/ref :app.storage/storage)
|
||||
:msgbus (ig/ref :app.msgbus/msgbus)
|
||||
:public-uri (cf/get :public-uri)
|
||||
:redis (ig/ref :app.redis/redis)
|
||||
:audit (ig/ref :app.loggers.audit/collector)
|
||||
:ldap (ig/ref :app.auth.ldap/provider)
|
||||
:http-client (ig/ref :app.http/client)
|
||||
:rlimit (ig/ref :app.rpc/rlimit)
|
||||
:executors (ig/ref :app.worker/executors)
|
||||
:executor (ig/ref [::default :app.worker/executor])
|
||||
:templates (ig/ref :app.setup/builtin-templates)
|
||||
:semaphores (ig/ref :app.rpc/semaphores)
|
||||
}
|
||||
|
||||
:app.rpc.doc/routes
|
||||
@@ -345,25 +240,20 @@
|
||||
:app.rpc/routes
|
||||
{:methods (ig/ref :app.rpc/methods)}
|
||||
|
||||
::wrk/registry
|
||||
{:metrics (ig/ref ::mtx/metrics)
|
||||
:app.worker/registry
|
||||
{:metrics (ig/ref :app.metrics/metrics)
|
||||
:tasks
|
||||
{:sendmail (ig/ref :app.emails/handler)
|
||||
:objects-gc (ig/ref :app.tasks.objects-gc/handler)
|
||||
:file-gc (ig/ref :app.tasks.file-gc/handler)
|
||||
:file-xlog-gc (ig/ref :app.tasks.file-xlog-gc/handler)
|
||||
:storage-gc-deleted (ig/ref ::sto/gc-deleted-task)
|
||||
:storage-gc-touched (ig/ref ::sto/gc-touched-task)
|
||||
:storage-gc-deleted (ig/ref :app.storage/gc-deleted-task)
|
||||
:storage-gc-touched (ig/ref :app.storage/gc-touched-task)
|
||||
:tasks-gc (ig/ref :app.tasks.tasks-gc/handler)
|
||||
:telemetry (ig/ref :app.tasks.telemetry/handler)
|
||||
:session-gc (ig/ref :app.http.session/gc-task)
|
||||
:audit-log-archive (ig/ref ::audit.tasks/archive)
|
||||
:audit-log-gc (ig/ref ::audit.tasks/gc)
|
||||
|
||||
:process-webhook-event
|
||||
(ig/ref ::webhooks/process-event-handler)
|
||||
:run-webhook
|
||||
(ig/ref ::webhooks/run-webhook-handler)}}
|
||||
:audit-log-archive (ig/ref :app.loggers.audit/archive-task)
|
||||
:audit-log-gc (ig/ref :app.loggers.audit/gc-task)}}
|
||||
|
||||
|
||||
:app.emails/sendmail
|
||||
@@ -378,78 +268,78 @@
|
||||
|
||||
:app.emails/handler
|
||||
{:sendmail (ig/ref :app.emails/sendmail)
|
||||
:metrics (ig/ref ::mtx/metrics)}
|
||||
:metrics (ig/ref :app.metrics/metrics)}
|
||||
|
||||
:app.tasks.tasks-gc/handler
|
||||
{:pool (ig/ref ::db/pool)
|
||||
{:pool (ig/ref :app.db/pool)
|
||||
:max-age cf/deletion-delay}
|
||||
|
||||
:app.tasks.objects-gc/handler
|
||||
{:pool (ig/ref ::db/pool)
|
||||
:storage (ig/ref ::sto/storage)}
|
||||
{:pool (ig/ref :app.db/pool)
|
||||
:storage (ig/ref :app.storage/storage)}
|
||||
|
||||
:app.tasks.file-gc/handler
|
||||
{:pool (ig/ref ::db/pool)}
|
||||
{:pool (ig/ref :app.db/pool)}
|
||||
|
||||
:app.tasks.file-xlog-gc/handler
|
||||
{:pool (ig/ref ::db/pool)}
|
||||
{:pool (ig/ref :app.db/pool)}
|
||||
|
||||
:app.tasks.telemetry/handler
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
::http.client/client (ig/ref ::http.client/client)
|
||||
::props (ig/ref :app.setup/props)}
|
||||
{:pool (ig/ref :app.db/pool)
|
||||
:version (:full cf/version)
|
||||
:uri (cf/get :telemetry-uri)
|
||||
:sprops (ig/ref :app.setup/props)
|
||||
:http-client (ig/ref :app.http/client)}
|
||||
|
||||
:app.srepl/server
|
||||
{:port (cf/get :srepl-port)
|
||||
:host (cf/get :srepl-host)}
|
||||
|
||||
:app.setup/builtin-templates
|
||||
{::http.client/client (ig/ref ::http.client/client)}
|
||||
{:http-client (ig/ref :app.http/client)}
|
||||
|
||||
:app.setup/props
|
||||
{:pool (ig/ref ::db/pool)
|
||||
{:pool (ig/ref :app.db/pool)
|
||||
:key (cf/get :secret-key)}
|
||||
|
||||
::lzmq/receiver
|
||||
{}
|
||||
:app.loggers.zmq/receiver
|
||||
{:endpoint (cf/get :loggers-zmq-uri)}
|
||||
|
||||
::audit/collector
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
::wrk/executor (ig/ref ::wrk/executor)
|
||||
::mtx/metrics (ig/ref ::mtx/metrics)}
|
||||
:app.loggers.audit/http-handler
|
||||
{:pool (ig/ref :app.db/pool)
|
||||
:executor (ig/ref [::default :app.worker/executor])}
|
||||
|
||||
::audit.tasks/archive
|
||||
{::props (ig/ref :app.setup/props)
|
||||
::db/pool (ig/ref ::db/pool)
|
||||
::http.client/client (ig/ref ::http.client/client)}
|
||||
:app.loggers.audit/collector
|
||||
{:pool (ig/ref :app.db/pool)
|
||||
:executor (ig/ref [::worker :app.worker/executor])}
|
||||
|
||||
::audit.tasks/gc
|
||||
{::db/pool (ig/ref ::db/pool)}
|
||||
:app.loggers.audit/archive-task
|
||||
{:uri (cf/get :audit-log-archive-uri)
|
||||
:sprops (ig/ref :app.setup/props)
|
||||
:pool (ig/ref :app.db/pool)
|
||||
:http-client (ig/ref :app.http/client)}
|
||||
|
||||
::webhooks/process-event-handler
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
::http.client/client (ig/ref ::http.client/client)}
|
||||
|
||||
::webhooks/run-webhook-handler
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
::http.client/client (ig/ref ::http.client/client)}
|
||||
:app.loggers.audit/gc-task
|
||||
{:pool (ig/ref :app.db/pool)}
|
||||
|
||||
:app.loggers.loki/reporter
|
||||
{::lzmq/receiver (ig/ref ::lzmq/receiver)
|
||||
::http.client/client (ig/ref ::http.client/client)}
|
||||
{:uri (cf/get :loggers-loki-uri)
|
||||
:receiver (ig/ref :app.loggers.zmq/receiver)
|
||||
:http-client (ig/ref :app.http/client)}
|
||||
|
||||
:app.loggers.mattermost/reporter
|
||||
{::lzmq/receiver (ig/ref ::lzmq/receiver)
|
||||
::http.client/client (ig/ref ::http.client/client)}
|
||||
{:uri (cf/get :error-report-webhook)
|
||||
:receiver (ig/ref :app.loggers.zmq/receiver)
|
||||
:http-client (ig/ref :app.http/client)}
|
||||
|
||||
:app.loggers.database/reporter
|
||||
{:receiver (ig/ref :app.loggers.zmq/receiver)
|
||||
:pool (ig/ref ::db/pool)
|
||||
:executor (ig/ref ::wrk/executor)}
|
||||
:pool (ig/ref :app.db/pool)
|
||||
:executor (ig/ref [::worker :app.worker/executor])}
|
||||
|
||||
::sto/storage
|
||||
{:pool (ig/ref ::db/pool)
|
||||
:executor (ig/ref ::wrk/executor)
|
||||
:app.storage/storage
|
||||
{:pool (ig/ref :app.db/pool)
|
||||
:executor (ig/ref [::default :app.worker/executor])
|
||||
|
||||
:backends
|
||||
{:assets-s3 (ig/ref [::assets :app.storage.s3/backend])
|
||||
@@ -463,7 +353,7 @@
|
||||
{:region (cf/get :storage-assets-s3-region)
|
||||
:endpoint (cf/get :storage-assets-s3-endpoint)
|
||||
:bucket (cf/get :storage-assets-s3-bucket)
|
||||
:executor (ig/ref ::wrk/executor)}
|
||||
:executor (ig/ref [::default :app.worker/executor])}
|
||||
|
||||
[::assets :app.storage.fs/backend]
|
||||
{:directory (cf/get :storage-assets-fs-directory)}
|
||||
@@ -471,11 +361,12 @@
|
||||
|
||||
|
||||
(def worker-config
|
||||
{::wrk/cron
|
||||
{::wrk/scheduled-executor (ig/ref ::wrk/scheduled-executor)
|
||||
::wrk/registry (ig/ref ::wrk/registry)
|
||||
::db/pool (ig/ref ::db/pool)
|
||||
::wrk/entries
|
||||
{:app.worker/cron
|
||||
{:executor (ig/ref [::worker :app.worker/executor])
|
||||
:scheduler (ig/ref :app.worker/scheduler)
|
||||
:tasks (ig/ref :app.worker/registry)
|
||||
:pool (ig/ref :app.db/pool)
|
||||
:entries
|
||||
[{:cron #app/cron "0 0 * * * ?" ;; hourly
|
||||
:task :file-xlog-gc}
|
||||
|
||||
@@ -508,27 +399,11 @@
|
||||
{:cron #app/cron "30 */5 * * * ?" ;; every 5m
|
||||
:task :audit-log-gc})]}
|
||||
|
||||
::wrk/dispatcher
|
||||
{::rds/redis (ig/ref ::rds/redis)
|
||||
::mtx/metrics (ig/ref ::mtx/metrics)
|
||||
::db/pool (ig/ref ::db/pool)}
|
||||
|
||||
[::default ::wrk/worker]
|
||||
{::wrk/parallelism (cf/get ::worker-default-parallelism 1)
|
||||
::wrk/queue :default
|
||||
::rds/redis (ig/ref ::rds/redis)
|
||||
::wrk/registry (ig/ref ::wrk/registry)
|
||||
::mtx/metrics (ig/ref ::mtx/metrics)
|
||||
::db/pool (ig/ref ::db/pool)}
|
||||
|
||||
[::webhook ::wrk/worker]
|
||||
{::wrk/parallelism (cf/get ::worker-webhook-parallelism 1)
|
||||
::wrk/queue :webhooks
|
||||
::rds/redis (ig/ref ::rds/redis)
|
||||
::wrk/registry (ig/ref ::wrk/registry)
|
||||
::mtx/metrics (ig/ref ::mtx/metrics)
|
||||
::db/pool (ig/ref ::db/pool)}})
|
||||
|
||||
:app.worker/worker
|
||||
{:executor (ig/ref [::worker :app.worker/executor])
|
||||
:tasks (ig/ref :app.worker/registry)
|
||||
:metrics (ig/ref :app.metrics/metrics)
|
||||
:pool (ig/ref :app.db/pool)}})
|
||||
|
||||
(def system nil)
|
||||
|
||||
@@ -542,7 +417,7 @@
|
||||
(merge worker-config))
|
||||
(ig/prep)
|
||||
(ig/init))))
|
||||
(l/info :hint "welcome to penpot"
|
||||
(l/info :msg "welcome to penpot"
|
||||
:flags (str/join "," (map name cf/flags))
|
||||
:worker? (contains? cf/flags :backend-worker)
|
||||
:version (:full cf/version)))
|
||||
@@ -555,9 +430,4 @@
|
||||
|
||||
(defn -main
|
||||
[& _args]
|
||||
(try
|
||||
(start)
|
||||
(catch Throwable cause
|
||||
(l/error :hint (ex-message cause)
|
||||
:cause cause)
|
||||
(System/exit -1))))
|
||||
(start))
|
||||
|
||||
@@ -38,6 +38,110 @@
|
||||
;; METRICS SERVICE PROVIDER
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def default-metrics
|
||||
{:update-file-changes
|
||||
{::mdef/name "penpot_rpc_update_file_changes_total"
|
||||
::mdef/help "A total number of changes submitted to update-file."
|
||||
::mdef/type :counter}
|
||||
|
||||
:update-file-bytes-processed
|
||||
{::mdef/name "penpot_rpc_update_file_bytes_processed_total"
|
||||
::mdef/help "A total number of bytes processed by update-file."
|
||||
::mdef/type :counter}
|
||||
|
||||
:rpc-mutation-timing
|
||||
{::mdef/name "penpot_rpc_mutation_timing"
|
||||
::mdef/help "RPC mutation method call timing."
|
||||
::mdef/labels ["name"]
|
||||
::mdef/type :histogram}
|
||||
|
||||
:rpc-command-timing
|
||||
{::mdef/name "penpot_rpc_command_timing"
|
||||
::mdef/help "RPC command method call timing."
|
||||
::mdef/labels ["name"]
|
||||
::mdef/type :histogram}
|
||||
|
||||
:rpc-query-timing
|
||||
{::mdef/name "penpot_rpc_query_timing"
|
||||
::mdef/help "RPC query method call timing."
|
||||
::mdef/labels ["name"]
|
||||
::mdef/type :histogram}
|
||||
|
||||
:websocket-active-connections
|
||||
{::mdef/name "penpot_websocket_active_connections"
|
||||
::mdef/help "Active websocket connections gauge"
|
||||
::mdef/type :gauge}
|
||||
|
||||
:websocket-messages-total
|
||||
{::mdef/name "penpot_websocket_message_total"
|
||||
::mdef/help "Counter of processed messages."
|
||||
::mdef/labels ["op"]
|
||||
::mdef/type :counter}
|
||||
|
||||
:websocket-session-timing
|
||||
{::mdef/name "penpot_websocket_session_timing"
|
||||
::mdef/help "Websocket session timing (seconds)."
|
||||
::mdef/type :summary}
|
||||
|
||||
:session-update-total
|
||||
{::mdef/name "penpot_http_session_update_total"
|
||||
::mdef/help "A counter of session update batch events."
|
||||
::mdef/type :counter}
|
||||
|
||||
:tasks-timing
|
||||
{::mdef/name "penpot_tasks_timing"
|
||||
::mdef/help "Background tasks timing (milliseconds)."
|
||||
::mdef/labels ["name"]
|
||||
::mdef/type :summary}
|
||||
|
||||
:redis-eval-timing
|
||||
{::mdef/name "penpot_redis_eval_timing"
|
||||
::mdef/help "Redis EVAL commands execution timings (ms)"
|
||||
::mdef/labels ["name"]
|
||||
::mdef/type :summary}
|
||||
|
||||
:semaphore-queued-submissions
|
||||
{::mdef/name "penpot_semaphore_queued_submissions"
|
||||
::mdef/help "Current number of queued submissions on SEMAPHORE."
|
||||
::mdef/labels ["name"]
|
||||
::mdef/type :gauge}
|
||||
|
||||
:semaphore-used-permits
|
||||
{::mdef/name "penpot_semaphore_used_permits"
|
||||
::mdef/help "Current number of used permits on SEMAPHORE."
|
||||
::mdef/labels ["name"]
|
||||
::mdef/type :gauge}
|
||||
|
||||
:semaphore-timing
|
||||
{::mdef/name "penpot_semaphore_timing"
|
||||
::mdef/help "Total timing of SEMAPHORE."
|
||||
::mdef/labels ["name"]
|
||||
::mdef/type :summary}
|
||||
|
||||
:executors-active-threads
|
||||
{::mdef/name "penpot_executors_active_threads"
|
||||
::mdef/help "Current number of threads available in the executor service."
|
||||
::mdef/labels ["name"]
|
||||
::mdef/type :gauge}
|
||||
|
||||
:executors-completed-tasks
|
||||
{::mdef/name "penpot_executors_completed_tasks_total"
|
||||
::mdef/help "Approximate number of completed tasks by the executor."
|
||||
::mdef/labels ["name"]
|
||||
::mdef/type :counter}
|
||||
|
||||
:executors-running-threads
|
||||
{::mdef/name "penpot_executors_running_threads"
|
||||
::mdef/help "Current number of threads with state RUNNING."
|
||||
::mdef/labels ["name"]
|
||||
::mdef/type :gauge}
|
||||
|
||||
:executors-queued-submissions
|
||||
{::mdef/name "penpot_executors_queued_submissions"
|
||||
::mdef/help "Current number of queued submissions."
|
||||
::mdef/labels ["name"]
|
||||
::mdef/type :gauge}})
|
||||
|
||||
(s/def ::mdef/name string?)
|
||||
(s/def ::mdef/help string?)
|
||||
(s/def ::mdef/labels (s/every string? :kind vector?))
|
||||
@@ -65,13 +169,8 @@
|
||||
::handler
|
||||
::definitions]))
|
||||
|
||||
(s/def ::default ::definitions)
|
||||
|
||||
(defmethod ig/pre-init-spec ::metrics [_]
|
||||
(s/keys :req-un [::default]))
|
||||
|
||||
(defmethod ig/init-key ::metrics
|
||||
[_ cfg]
|
||||
[_ _]
|
||||
(l/info :action "initialize metrics")
|
||||
(let [registry (create-registry)
|
||||
definitions (reduce-kv (fn [res k v]
|
||||
@@ -79,7 +178,7 @@
|
||||
(create-collector)
|
||||
(assoc res k)))
|
||||
{}
|
||||
(:default cfg))]
|
||||
default-metrics)]
|
||||
|
||||
(us/verify! ::definitions definitions)
|
||||
|
||||
|
||||
@@ -247,30 +247,6 @@
|
||||
|
||||
{:name "0079-mod-profile-table"
|
||||
:fn (mg/resource "app/migrations/sql/0079-mod-profile-table.sql")}
|
||||
|
||||
{:name "0080-mod-index-names"
|
||||
:fn (mg/resource "app/migrations/sql/0080-mod-index-names.sql")}
|
||||
|
||||
{:name "0081-add-deleted-at-index-to-file-table"
|
||||
:fn (mg/resource "app/migrations/sql/0081-add-deleted-at-index-to-file-table.sql")}
|
||||
|
||||
{:name "0082-add-features-column-to-file-table"
|
||||
:fn (mg/resource "app/migrations/sql/0082-add-features-column-to-file-table.sql")}
|
||||
|
||||
{:name "0083-add-file-data-fragment-table"
|
||||
:fn (mg/resource "app/migrations/sql/0083-add-file-data-fragment-table.sql")}
|
||||
|
||||
{:name "0084-add-features-column-to-file-change-table"
|
||||
:fn (mg/resource "app/migrations/sql/0084-add-features-column-to-file-change-table.sql")}
|
||||
|
||||
{:name "0085-add-webhook-table"
|
||||
:fn (mg/resource "app/migrations/sql/0085-add-webhook-table.sql")}
|
||||
|
||||
{:name "0086-add-webhook-delivery-table"
|
||||
:fn (mg/resource "app/migrations/sql/0086-add-webhook-delivery-table.sql")}
|
||||
|
||||
{:name "0087-mod-task-table"
|
||||
:fn (mg/resource "app/migrations/sql/0087-mod-task-table.sql")}
|
||||
])
|
||||
|
||||
|
||||
|
||||
@@ -1,11 +0,0 @@
|
||||
ALTER INDEX team_font_variant_deleted_at_idx
|
||||
RENAME TO team_font_variant__deleted_at__idx;
|
||||
|
||||
ALTER INDEX team_deleted_at_idx
|
||||
RENAME TO team__deleted_at__idx;
|
||||
|
||||
ALTER INDEX profile_deleted_at_idx
|
||||
RENAME TO profile__deleted_at__idx;
|
||||
|
||||
ALTER INDEX project_deleted_at_idx
|
||||
RENAME TO project__deleted_at__idx;
|
||||
@@ -1,3 +0,0 @@
|
||||
CREATE INDEX file__deleted_at__idx
|
||||
ON file (deleted_at, id)
|
||||
WHERE deleted_at IS NOT NULL;
|
||||
@@ -1,2 +0,0 @@
|
||||
ALTER TABLE file
|
||||
ADD COLUMN features text[] DEFAULT NULL;
|
||||
@@ -1,15 +0,0 @@
|
||||
CREATE TABLE file_data_fragment (
|
||||
id uuid NOT NULL,
|
||||
file_id uuid NOT NULL REFERENCES file(id) ON DELETE CASCADE DEFERRABLE,
|
||||
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
|
||||
metadata jsonb NULL,
|
||||
content bytea NOT NULL,
|
||||
|
||||
PRIMARY KEY (file_id, id)
|
||||
);
|
||||
|
||||
ALTER TABLE file_data_fragment
|
||||
ALTER COLUMN metadata SET STORAGE external,
|
||||
ALTER COLUMN content SET STORAGE external;
|
||||
@@ -1,8 +0,0 @@
|
||||
ALTER TABLE file_change
|
||||
ADD COLUMN features text[] DEFAULT NULL;
|
||||
|
||||
ALTER TABLE file_change
|
||||
ALTER COLUMN features SET STORAGE external;
|
||||
|
||||
ALTER TABLE file
|
||||
ALTER COLUMN features SET STORAGE external;
|
||||
@@ -1,25 +0,0 @@
|
||||
CREATE TABLE webhook (
|
||||
id uuid PRIMARY KEY,
|
||||
team_id uuid NOT NULL REFERENCES team(id) ON DELETE CASCADE DEFERRABLE,
|
||||
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
updated_at timestamptz NOT NULL DEFAULT now(),
|
||||
|
||||
uri text NOT NULL,
|
||||
mtype text NOT NULL,
|
||||
|
||||
error_code text NULL,
|
||||
error_count smallint DEFAULT 0,
|
||||
|
||||
is_active boolean DEFAULT true,
|
||||
secret_key text NULL
|
||||
);
|
||||
|
||||
ALTER TABLE webhook
|
||||
ALTER COLUMN uri SET STORAGE external,
|
||||
ALTER COLUMN mtype SET STORAGE external,
|
||||
ALTER COLUMN error_code SET STORAGE external,
|
||||
ALTER COLUMN secret_key SET STORAGE external;
|
||||
|
||||
|
||||
CREATE INDEX webhook__team_id__idx ON webhook (team_id);
|
||||
@@ -1,16 +0,0 @@
|
||||
CREATE TABLE webhook_delivery (
|
||||
webhook_id uuid NOT NULL REFERENCES webhook(id) ON DELETE CASCADE DEFERRABLE,
|
||||
created_at timestamptz NOT NULL DEFAULT now(),
|
||||
|
||||
error_code text NULL,
|
||||
|
||||
req_data jsonb NULL,
|
||||
rsp_data jsonb NULL,
|
||||
|
||||
PRIMARY KEY (webhook_id, created_at)
|
||||
);
|
||||
|
||||
ALTER TABLE webhook_delivery
|
||||
ALTER COLUMN error_code SET STORAGE external,
|
||||
ALTER COLUMN req_data SET STORAGE external,
|
||||
ALTER COLUMN rsp_data SET STORAGE external;
|
||||
@@ -1,9 +0,0 @@
|
||||
ALTER TABLE task
|
||||
ADD COLUMN label text NULL;
|
||||
|
||||
ALTER TABLE task
|
||||
ALTER COLUMN label SET STORAGE external;
|
||||
|
||||
CREATE INDEX task__label__idx
|
||||
ON task (label, name, queue)
|
||||
WHERE status = 'new';
|
||||
@@ -20,8 +20,7 @@
|
||||
[clojure.core.async :as a]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]))
|
||||
[promesa.core :as p]))
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
@@ -53,8 +52,8 @@
|
||||
(s/def ::rcv-ch ::aa/channel)
|
||||
(s/def ::pub-ch ::aa/channel)
|
||||
(s/def ::state ::us/agent)
|
||||
(s/def ::pconn ::redis/connection-holder)
|
||||
(s/def ::sconn ::redis/connection-holder)
|
||||
(s/def ::pconn ::redis/connection)
|
||||
(s/def ::sconn ::redis/connection)
|
||||
(s/def ::msgbus
|
||||
(s/keys :req [::cmd-ch ::rcv-ch ::pub-ch ::state ::pconn ::sconn ::wrk/executor]))
|
||||
|
||||
@@ -123,8 +122,8 @@
|
||||
|
||||
(defn- redis-disconnect
|
||||
[{:keys [::pconn ::sconn] :as cfg}]
|
||||
(d/close! pconn)
|
||||
(d/close! sconn))
|
||||
(redis/close! pconn)
|
||||
(redis/close! sconn))
|
||||
|
||||
(defn- conj-subscription
|
||||
"A low level function that is responsible to create on-demand
|
||||
@@ -206,33 +205,31 @@
|
||||
(when-let [closed (a/<! (send-to-topic topic message))]
|
||||
(send-via executor state unsubscribe-channels cfg closed nil))))
|
||||
]
|
||||
(px/thread
|
||||
{:name "penpot/msgbus-io-loop"}
|
||||
(loop []
|
||||
(let [[val port] (a/alts!! [pub-ch rcv-ch])]
|
||||
(cond
|
||||
(nil? val)
|
||||
(do
|
||||
(l/trace :hint "stopping io-loop, nil received")
|
||||
(send-via executor state (fn [state]
|
||||
(->> (vals state)
|
||||
(mapcat identity)
|
||||
(filter some?)
|
||||
(run! a/close!))
|
||||
nil)))
|
||||
|
||||
(= port rcv-ch)
|
||||
(do
|
||||
(a/<!! (process-incoming val))
|
||||
(recur))
|
||||
(a/go-loop []
|
||||
(let [[val port] (a/alts! [pub-ch rcv-ch])]
|
||||
(cond
|
||||
(nil? val)
|
||||
(do
|
||||
(l/trace :hint "stopping io-loop, nil received")
|
||||
(send-via executor state (fn [state]
|
||||
(->> (vals state)
|
||||
(mapcat identity)
|
||||
(filter some?)
|
||||
(run! a/close!))
|
||||
nil)))
|
||||
|
||||
(= port pub-ch)
|
||||
(let [result (a/<!! (redis-pub cfg val))]
|
||||
(when (ex/exception? result)
|
||||
(l/error :hint "unexpected error on publishing"
|
||||
:message val
|
||||
:cause result))
|
||||
(recur))))))))
|
||||
(= port rcv-ch)
|
||||
(do
|
||||
(a/<! (process-incoming val))
|
||||
(recur))
|
||||
|
||||
(= port pub-ch)
|
||||
(let [result (a/<! (redis-pub cfg val))]
|
||||
(when (ex/exception? result)
|
||||
(l/error :hint "unexpected error on publishing" :message val
|
||||
:cause result))
|
||||
(recur)))))))
|
||||
|
||||
(defn- redis-pub
|
||||
"Publish a message to the redis server. Asynchronous operation,
|
||||
|
||||
@@ -21,19 +21,13 @@
|
||||
[promesa.core :as p])
|
||||
(:import
|
||||
clojure.lang.IDeref
|
||||
clojure.lang.MapEntry
|
||||
io.lettuce.core.KeyValue
|
||||
io.lettuce.core.RedisClient
|
||||
io.lettuce.core.RedisCommandInterruptedException
|
||||
io.lettuce.core.RedisCommandTimeoutException
|
||||
io.lettuce.core.RedisException
|
||||
io.lettuce.core.RedisURI
|
||||
io.lettuce.core.ScriptOutputType
|
||||
io.lettuce.core.api.StatefulConnection
|
||||
io.lettuce.core.api.StatefulRedisConnection
|
||||
io.lettuce.core.api.async.RedisAsyncCommands
|
||||
io.lettuce.core.api.async.RedisScriptingAsyncCommands
|
||||
io.lettuce.core.api.sync.RedisCommands
|
||||
io.lettuce.core.codec.ByteArrayCodec
|
||||
io.lettuce.core.codec.RedisCodec
|
||||
io.lettuce.core.codec.StringCodec
|
||||
@@ -51,12 +45,13 @@
|
||||
|
||||
(declare initialize-resources)
|
||||
(declare shutdown-resources)
|
||||
(declare connect*)
|
||||
(declare connect)
|
||||
(declare close!)
|
||||
|
||||
(s/def ::timer
|
||||
#(instance? Timer %))
|
||||
|
||||
(s/def ::default-connection
|
||||
(s/def ::connection
|
||||
#(or (instance? StatefulRedisConnection %)
|
||||
(and (instance? IDeref %)
|
||||
(instance? StatefulRedisConnection (deref %)))))
|
||||
@@ -66,13 +61,6 @@
|
||||
(and (instance? IDeref %)
|
||||
(instance? StatefulRedisPubSubConnection (deref %)))))
|
||||
|
||||
(s/def ::connection
|
||||
(s/or :default ::default-connection
|
||||
:pubsub ::pubsub-connection))
|
||||
|
||||
(s/def ::connection-holder
|
||||
(s/keys :req [::connection]))
|
||||
|
||||
(s/def ::redis-uri
|
||||
#(instance? RedisURI %))
|
||||
|
||||
@@ -87,37 +75,32 @@
|
||||
(s/def ::connect? ::us/boolean)
|
||||
(s/def ::io-threads ::us/integer)
|
||||
(s/def ::worker-threads ::us/integer)
|
||||
(s/def ::cache #(instance? clojure.lang.Atom %))
|
||||
|
||||
(s/def ::redis
|
||||
(s/keys :req [::resources
|
||||
::redis-uri
|
||||
::timer
|
||||
::mtx/metrics]
|
||||
:opt [::connection
|
||||
::cache]))
|
||||
(s/keys :req [::resources ::redis-uri ::timer ::mtx/metrics]
|
||||
:opt [::connection]))
|
||||
|
||||
(defmethod ig/pre-init-spec ::redis [_]
|
||||
(s/keys :req-un [::uri ::mtx/metrics]
|
||||
:opt-un [::timeout
|
||||
::connect?
|
||||
::io-threads
|
||||
::worker-threads]))
|
||||
|
||||
(defmethod ig/prep-key ::redis
|
||||
[_ cfg]
|
||||
(let [runtime (Runtime/getRuntime)
|
||||
cpus (.availableProcessors ^Runtime runtime)]
|
||||
(merge {::timeout (dt/duration "10s")
|
||||
::io-threads (max 3 cpus)
|
||||
::worker-threads (max 3 cpus)}
|
||||
(d/without-nils cfg))))
|
||||
|
||||
(defmethod ig/pre-init-spec ::redis [_]
|
||||
(s/keys :req [::uri ::mtx/metrics]
|
||||
:opt [::timeout
|
||||
::connect?
|
||||
::io-threads
|
||||
::worker-threads]))
|
||||
(merge {:timeout (dt/duration 5000)
|
||||
:io-threads (max 3 cpus)
|
||||
:worker-threads (max 3 cpus)}
|
||||
(d/without-nils cfg))))
|
||||
|
||||
(defmethod ig/init-key ::redis
|
||||
[_ {:keys [::connect?] :as cfg}]
|
||||
(let [state (initialize-resources cfg)]
|
||||
(cond-> state
|
||||
connect? (assoc ::connection (connect* cfg {})))))
|
||||
[_ {:keys [connect?] :as cfg}]
|
||||
(let [cfg (initialize-resources cfg)]
|
||||
(cond-> cfg
|
||||
connect? (assoc ::connection (connect cfg)))))
|
||||
|
||||
(defmethod ig/halt-key! ::redis
|
||||
[_ state]
|
||||
@@ -131,7 +114,7 @@
|
||||
|
||||
(defn- initialize-resources
|
||||
"Initialize redis connection resources"
|
||||
[{:keys [::uri ::io-threads ::worker-threads ::connect?] :as cfg}]
|
||||
[{:keys [uri io-threads worker-threads connect? metrics] :as cfg}]
|
||||
(l/info :hint "initialize redis resources"
|
||||
:uri uri
|
||||
:io-threads io-threads
|
||||
@@ -148,32 +131,34 @@
|
||||
redis-uri (RedisURI/create ^String uri)]
|
||||
|
||||
(-> cfg
|
||||
(assoc ::resources resources)
|
||||
(assoc ::timer timer)
|
||||
(assoc ::mtx/metrics metrics)
|
||||
(assoc ::cache (atom {}))
|
||||
(assoc ::redis-uri redis-uri))))
|
||||
(assoc ::timer timer)
|
||||
(assoc ::redis-uri redis-uri)
|
||||
(assoc ::resources resources))))
|
||||
|
||||
(defn- shutdown-resources
|
||||
[{:keys [::resources ::cache ::timer]}]
|
||||
(run! d/close! (vals @cache))
|
||||
(run! close! (vals @cache))
|
||||
(when resources
|
||||
(.shutdown ^ClientResources resources))
|
||||
(when timer
|
||||
(.stop ^Timer timer)))
|
||||
|
||||
(defn connect*
|
||||
[{:keys [::resources ::redis-uri] :as state}
|
||||
{:keys [timeout codec type]
|
||||
:or {codec default-codec type :default}}]
|
||||
(defn connect
|
||||
[{:keys [::resources ::redis-uri] :as cfg}
|
||||
& {:keys [timeout codec type] :or {codec default-codec type :default}}]
|
||||
|
||||
(us/assert! ::resources resources)
|
||||
|
||||
(let [client (RedisClient/create ^ClientResources resources ^RedisURI redis-uri)
|
||||
timeout (or timeout (::timeout state))
|
||||
timeout (or timeout (:timeout cfg))
|
||||
conn (case type
|
||||
:default (.connect ^RedisClient client ^RedisCodec codec)
|
||||
:pubsub (.connectPubSub ^RedisClient client ^RedisCodec codec))]
|
||||
|
||||
(.setTimeout ^StatefulConnection conn ^Duration timeout)
|
||||
|
||||
(reify
|
||||
IDeref
|
||||
(deref [_] conn)
|
||||
@@ -183,113 +168,53 @@
|
||||
(.close ^StatefulConnection conn)
|
||||
(.shutdown ^RedisClient client)))))
|
||||
|
||||
(defn connect
|
||||
[state & {:as opts}]
|
||||
(let [connection (connect* state opts)]
|
||||
(-> state
|
||||
(assoc ::connection connection)
|
||||
(dissoc ::cache)
|
||||
(vary-meta assoc `d/close! (fn [_] (d/close! connection))))))
|
||||
|
||||
(defn get-or-connect
|
||||
[{:keys [::cache] :as state} key options]
|
||||
(-> state
|
||||
(assoc ::connection
|
||||
(or (get @cache key)
|
||||
(-> (swap! cache (fn [cache]
|
||||
(when-let [prev (get cache key)]
|
||||
(d/close! prev))
|
||||
(assoc cache key (connect* state options))))
|
||||
(get key))))
|
||||
(dissoc ::cache)))
|
||||
(assoc state ::connection
|
||||
(or (get @cache key)
|
||||
(-> (swap! cache (fn [cache]
|
||||
(when-let [prev (get cache key)]
|
||||
(close! prev))
|
||||
(assoc cache key (connect state options))))
|
||||
(get key)))))
|
||||
|
||||
(defn add-listener!
|
||||
[{:keys [::connection] :as conn} listener]
|
||||
(us/assert! ::connection-holder conn)
|
||||
(us/assert! ::pubsub-connection connection)
|
||||
[conn listener]
|
||||
(us/assert! ::pubsub-connection @conn)
|
||||
(us/assert! ::pubsub-listener listener)
|
||||
(.addListener ^StatefulRedisPubSubConnection @connection
|
||||
|
||||
(.addListener ^StatefulRedisPubSubConnection @conn
|
||||
^RedisPubSubListener listener)
|
||||
conn)
|
||||
|
||||
(defn publish!
|
||||
[{:keys [::connection] :as conn} topic message]
|
||||
[conn topic message]
|
||||
(us/assert! ::us/string topic)
|
||||
(us/assert! ::us/bytes message)
|
||||
(us/assert! ::connection-holder conn)
|
||||
(us/assert! ::default-connection connection)
|
||||
(us/assert! ::connection @conn)
|
||||
|
||||
(let [pcomm (.async ^StatefulRedisConnection @connection)]
|
||||
(let [pcomm (.async ^StatefulRedisConnection @conn)]
|
||||
(.publish ^RedisAsyncCommands pcomm ^String topic ^bytes message)))
|
||||
|
||||
(defn subscribe!
|
||||
"Blocking operation, intended to be used on a thread/agent thread."
|
||||
[{:keys [::connection] :as conn} & topics]
|
||||
(us/assert! ::connection-holder conn)
|
||||
(us/assert! ::pubsub-connection connection)
|
||||
(try
|
||||
(let [topics (into-array String (map str topics))
|
||||
cmd (.sync ^StatefulRedisPubSubConnection @connection)]
|
||||
(.subscribe ^RedisPubSubCommands cmd topics))
|
||||
(catch RedisCommandInterruptedException cause
|
||||
(throw (InterruptedException. (ex-message cause))))))
|
||||
"Blocking operation, intended to be used on a worker/agent thread."
|
||||
[conn & topics]
|
||||
(us/assert! ::pubsub-connection @conn)
|
||||
(let [topics (into-array String (map str topics))
|
||||
cmd (.sync ^StatefulRedisPubSubConnection @conn)]
|
||||
(.subscribe ^RedisPubSubCommands cmd topics)))
|
||||
|
||||
(defn unsubscribe!
|
||||
"Blocking operation, intended to be used on a thread/agent thread."
|
||||
[{:keys [::connection] :as conn} & topics]
|
||||
(us/assert! ::connection-holder conn)
|
||||
(us/assert! ::pubsub-connection connection)
|
||||
(try
|
||||
(let [topics (into-array String (map str topics))
|
||||
cmd (.sync ^StatefulRedisPubSubConnection @connection)]
|
||||
(.unsubscribe ^RedisPubSubCommands cmd topics))
|
||||
(catch RedisCommandInterruptedException cause
|
||||
(throw (InterruptedException. (ex-message cause))))))
|
||||
|
||||
(defn rpush!
|
||||
[{:keys [::connection] :as conn} key payload]
|
||||
(us/assert! ::connection-holder conn)
|
||||
(us/assert! (or (and (vector? payload)
|
||||
(every? bytes? payload))
|
||||
(bytes? payload)))
|
||||
(try
|
||||
(let [cmd (.sync ^StatefulRedisConnection @connection)
|
||||
data (if (vector? payload) payload [payload])
|
||||
vals (make-array (. Class (forName "[B")) (count data))]
|
||||
|
||||
(loop [i 0 xs (seq data)]
|
||||
(when xs
|
||||
(aset ^"[[B" vals i ^bytes (first xs))
|
||||
(recur (inc i) (next xs))))
|
||||
|
||||
(.rpush ^RedisCommands cmd
|
||||
^String key
|
||||
^"[[B" vals))
|
||||
|
||||
(catch RedisCommandInterruptedException cause
|
||||
(throw (InterruptedException. (ex-message cause))))))
|
||||
|
||||
(defn blpop!
|
||||
[{:keys [::connection] :as conn} timeout & keys]
|
||||
(us/assert! ::connection-holder conn)
|
||||
(try
|
||||
(let [keys (into-array Object (map str keys))
|
||||
cmd (.sync ^StatefulRedisConnection @connection)
|
||||
timeout (/ (double (inst-ms timeout)) 1000.0)]
|
||||
(when-let [res (.blpop ^RedisCommands cmd
|
||||
^double timeout
|
||||
^"[Ljava.lang.String;" keys)]
|
||||
(MapEntry/create
|
||||
(.getKey ^KeyValue res)
|
||||
(.getValue ^KeyValue res))))
|
||||
(catch RedisCommandInterruptedException cause
|
||||
(throw (InterruptedException. (ex-message cause))))))
|
||||
"Blocking operation, intended to be used on a worker/agent thread."
|
||||
[conn & topics]
|
||||
(us/assert! ::pubsub-connection @conn)
|
||||
(let [topics (into-array String (map str topics))
|
||||
cmd (.sync ^StatefulRedisPubSubConnection @conn)]
|
||||
(.unsubscribe ^RedisPubSubCommands cmd topics)))
|
||||
|
||||
(defn open?
|
||||
[{:keys [::connection] :as conn}]
|
||||
(us/assert! ::connection-holder conn)
|
||||
(us/assert! ::pubsub-connection connection)
|
||||
(.isOpen ^StatefulConnection @connection))
|
||||
[conn]
|
||||
(.isOpen ^StatefulConnection @conn))
|
||||
|
||||
(defn pubsub-listener
|
||||
[& {:keys [on-message on-subscribe on-unsubscribe]}]
|
||||
@@ -318,6 +243,10 @@
|
||||
(when on-unsubscribe
|
||||
(on-unsubscribe nil topic count)))))
|
||||
|
||||
(defn close!
|
||||
[o]
|
||||
(.close ^AutoCloseable o))
|
||||
|
||||
(def ^:private scripts-cache (atom {}))
|
||||
(def noop-fn (constantly nil))
|
||||
|
||||
@@ -333,12 +262,12 @@
|
||||
::rscript/vals]))
|
||||
|
||||
(defn eval!
|
||||
[{:keys [::mtx/metrics ::connection] :as state} script]
|
||||
(us/assert! ::redis state)
|
||||
(us/assert! ::connection-holder state)
|
||||
[{:keys [::mtx/metrics] :as state} script]
|
||||
(us/assert! ::rscript/script script)
|
||||
(us/assert! ::redis state)
|
||||
|
||||
(let [cmd (.async ^StatefulRedisConnection @connection)
|
||||
(let [rconn (-> state ::connection deref)
|
||||
cmd (.async ^StatefulRedisConnection rconn)
|
||||
keys (into-array String (map str (::rscript/keys script)))
|
||||
vals (into-array String (map str (::rscript/vals script)))
|
||||
sname (::rscript/name script)]
|
||||
@@ -347,52 +276,44 @@
|
||||
(if (instance? io.lettuce.core.RedisNoScriptException cause)
|
||||
(do
|
||||
(l/error :hint "no script found" :name sname :cause cause)
|
||||
(->> (load-script)
|
||||
(p/mapcat eval-script)))
|
||||
(-> (load-script)
|
||||
(p/then eval-script)))
|
||||
(if-let [on-error (::rscript/on-error script)]
|
||||
(on-error cause)
|
||||
(p/rejected cause))))
|
||||
|
||||
(eval-script [sha]
|
||||
(let [tpoint (dt/tpoint)]
|
||||
(->> (.evalsha ^RedisScriptingAsyncCommands cmd
|
||||
^String sha
|
||||
^ScriptOutputType ScriptOutputType/MULTI
|
||||
^"[Ljava.lang.String;" keys
|
||||
^"[Ljava.lang.String;" vals)
|
||||
(p/fmap (fn [result]
|
||||
(let [elapsed (tpoint)]
|
||||
(mtx/run! metrics {:id :redis-eval-timing
|
||||
:labels [(name sname)]
|
||||
:val (inst-ms elapsed)})
|
||||
(l/trace :hint "eval script"
|
||||
:name (name sname)
|
||||
:sha sha
|
||||
:params (str/join "," (::rscript/vals script))
|
||||
:elapsed (dt/format-duration elapsed))
|
||||
result)))
|
||||
(p/merr on-error))))
|
||||
(-> (.evalsha ^RedisScriptingAsyncCommands cmd
|
||||
^String sha
|
||||
^ScriptOutputType ScriptOutputType/MULTI
|
||||
^"[Ljava.lang.String;" keys
|
||||
^"[Ljava.lang.String;" vals)
|
||||
(p/then (fn [result]
|
||||
(let [elapsed (tpoint)]
|
||||
(mtx/run! metrics {:id :redis-eval-timing
|
||||
:labels [(name sname)]
|
||||
:val (inst-ms elapsed)})
|
||||
(l/trace :hint "eval script"
|
||||
:name (name sname)
|
||||
:sha sha
|
||||
:params (str/join "," (::rscript/vals script))
|
||||
:elapsed (dt/format-duration elapsed))
|
||||
result)))
|
||||
(p/catch on-error))))
|
||||
|
||||
(read-script []
|
||||
(-> script ::rscript/path io/resource slurp))
|
||||
|
||||
(load-script []
|
||||
(l/trace :hint "load script" :name sname)
|
||||
(->> (.scriptLoad ^RedisScriptingAsyncCommands cmd
|
||||
(-> (.scriptLoad ^RedisScriptingAsyncCommands cmd
|
||||
^String (read-script))
|
||||
(p/map (fn [sha]
|
||||
(p/then (fn [sha]
|
||||
(swap! scripts-cache assoc sname sha)
|
||||
sha))))]
|
||||
|
||||
(if-let [sha (get @scripts-cache sname)]
|
||||
(eval-script sha)
|
||||
(->> (load-script)
|
||||
(p/mapcat eval-script))))))
|
||||
|
||||
(defn timeout-exception?
|
||||
[cause]
|
||||
(instance? RedisCommandTimeoutException cause))
|
||||
|
||||
(defn exception?
|
||||
[cause]
|
||||
(instance? RedisException cause))
|
||||
(-> (load-script)
|
||||
(p/then eval-script))))))
|
||||
|
||||
@@ -6,33 +6,23 @@
|
||||
|
||||
(ns app.rpc
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
[app.http :as-alias http]
|
||||
[app.http.client :as-alias http.client]
|
||||
[app.http.session :as-alias http.session]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.metrics :as mtx]
|
||||
[app.msgbus :as-alias mbus]
|
||||
[app.rpc.climit :as climit]
|
||||
[app.rpc.cond :as cond]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.rpc.retry :as retry]
|
||||
[app.rpc.rlimit :as rlimit]
|
||||
[app.storage :as-alias sto]
|
||||
[app.rpc.semaphore :as-alias rsem]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as ts]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]
|
||||
[yetti.request :as yrq]
|
||||
[yetti.response :as yrs]))
|
||||
|
||||
(defn- default-handler
|
||||
@@ -41,29 +31,23 @@
|
||||
|
||||
(defn- handle-response-transformation
|
||||
[response request mdata]
|
||||
(let [transform-fn (reduce (fn [res-fn transform-fn]
|
||||
(fn [request response]
|
||||
(p/then (res-fn request response) #(transform-fn request %))))
|
||||
(constantly response)
|
||||
(::response-transform-fns mdata))]
|
||||
(transform-fn request response)))
|
||||
(let [response (if (sv/wrapped? response) @response response)]
|
||||
(if-let [transform-fn (:transform-response mdata)]
|
||||
(p/do (transform-fn request response))
|
||||
(p/resolved response))))
|
||||
|
||||
(defn- handle-before-comple-hook
|
||||
[response mdata]
|
||||
(doseq [hook-fn (::before-complete-fns mdata)]
|
||||
(when-let [hook-fn (:before-complete mdata)]
|
||||
(ex/ignoring (hook-fn)))
|
||||
response)
|
||||
|
||||
(defn- handle-response
|
||||
[request result]
|
||||
(if (fn? result)
|
||||
(p/wrap (result request))
|
||||
(let [mdata (meta result)]
|
||||
(p/-> (yrs/response {:status (::http/status mdata 200)
|
||||
:headers (::http/headers mdata {})
|
||||
:body (rph/unwrap result)})
|
||||
(handle-response-transformation request mdata)
|
||||
(handle-before-comple-hook mdata)))))
|
||||
(let [mdata (meta result)]
|
||||
(p/-> (yrs/response 200 result (::http/headers mdata {}))
|
||||
(handle-response-transformation request mdata)
|
||||
(handle-before-comple-hook mdata))))
|
||||
|
||||
(defn- rpc-query-handler
|
||||
"Ring handler that dispatches query requests and convert between
|
||||
@@ -88,7 +72,7 @@
|
||||
internal async flow into ring async flow."
|
||||
[methods {:keys [profile-id session-id params] :as request} respond raise]
|
||||
(let [type (keyword (:type params))
|
||||
data (into {::http/request request} params)
|
||||
data (into {::request request} params)
|
||||
data (if profile-id
|
||||
(assoc data :profile-id profile-id ::session-id session-id)
|
||||
(dissoc data :profile-id))
|
||||
@@ -106,20 +90,18 @@
|
||||
internal async flow into ring async flow."
|
||||
[methods {:keys [profile-id session-id params] :as request} respond raise]
|
||||
(let [cmd (keyword (:command params))
|
||||
etag (yrq/get-header request "if-none-match")
|
||||
data (into {::http/request request ::cond/key etag} params)
|
||||
data (into {::request request} params)
|
||||
data (if profile-id
|
||||
(assoc data :profile-id profile-id ::session-id session-id)
|
||||
(dissoc data :profile-id))
|
||||
|
||||
method (get methods cmd default-handler)]
|
||||
(binding [cond/*enabled* true]
|
||||
(-> (method data)
|
||||
(p/then (partial handle-response request))
|
||||
(p/then respond)
|
||||
(p/catch (fn [cause]
|
||||
(let [context {:profile-id profile-id}]
|
||||
(raise (ex/wrap-with-context cause context)))))))))
|
||||
(-> (method data)
|
||||
(p/then (partial handle-response request))
|
||||
(p/then respond)
|
||||
(p/catch (fn [cause]
|
||||
(let [context {:profile-id profile-id}]
|
||||
(raise (ex/wrap-with-context cause context))))))))
|
||||
|
||||
(defn- wrap-metrics
|
||||
"Wrap service method with metrics measurement."
|
||||
@@ -141,69 +123,44 @@
|
||||
[{:keys [executor] :as cfg} f mdata]
|
||||
(with-meta
|
||||
(fn [cfg params]
|
||||
(->> (px/submit! executor (px/wrap-bindings #(f cfg params)))
|
||||
(p/mapcat p/wrap)
|
||||
(p/map rph/wrap)))
|
||||
(-> (px/submit! executor #(f cfg params))
|
||||
(p/bind p/wrap)))
|
||||
mdata))
|
||||
|
||||
(defn- wrap-audit
|
||||
[cfg f mdata]
|
||||
(if-let [collector (::audit/collector cfg)]
|
||||
(letfn [(handle-audit [params result]
|
||||
(let [resultm (meta result)
|
||||
request (::http/request params)
|
||||
profile-id (or (::audit/profile-id resultm)
|
||||
(:profile-id result)
|
||||
(:profile-id params)
|
||||
uuid/zero)
|
||||
|
||||
props (or (::audit/replace-props resultm)
|
||||
(-> params
|
||||
(d/without-qualified)
|
||||
(merge (::audit/props resultm))
|
||||
(dissoc :profile-id)
|
||||
(dissoc :type)))
|
||||
|
||||
event {:type (or (::audit/type resultm)
|
||||
[{:keys [audit] :as cfg} f mdata]
|
||||
(if audit
|
||||
(with-meta
|
||||
(fn [cfg {:keys [::request] :as params}]
|
||||
(p/finally (f cfg params)
|
||||
(fn [result _]
|
||||
(when result
|
||||
(let [resultm (meta result)
|
||||
profile-id (or (::audit/profile-id resultm)
|
||||
(:profile-id result)
|
||||
(:profile-id params))
|
||||
props (or (::audit/replace-props resultm)
|
||||
(-> params
|
||||
(merge (::audit/props resultm))
|
||||
(dissoc :type)))]
|
||||
(audit :cmd :submit
|
||||
:type (or (::audit/type resultm)
|
||||
(::type cfg))
|
||||
:name (or (::audit/name resultm)
|
||||
(::sv/name mdata))
|
||||
:profile-id profile-id
|
||||
:ip-addr (some-> request audit/parse-client-ip)
|
||||
:props props
|
||||
::webhooks/batch-key
|
||||
(or (::webhooks/batch-key mdata)
|
||||
(::webhooks/batch-key resultm))
|
||||
|
||||
::webhooks/batch-timeout
|
||||
(or (::webhooks/batch-timeout mdata)
|
||||
(::webhooks/batch-timeout resultm))
|
||||
|
||||
::webhooks/event?
|
||||
(or (::webhooks/event? mdata)
|
||||
(::webhooks/event? resultm)
|
||||
false)}]
|
||||
|
||||
(audit/submit! collector event)))
|
||||
|
||||
(handle-request [cfg params]
|
||||
(->> (f cfg params)
|
||||
(p/mcat (fn [result]
|
||||
(->> (handle-audit params result)
|
||||
(p/map (constantly result)))))))]
|
||||
(if-not (::audit/skip mdata)
|
||||
(with-meta handle-request mdata)
|
||||
f))
|
||||
:props (dissoc props ::request)))))))
|
||||
mdata)
|
||||
f))
|
||||
|
||||
(defn- wrap
|
||||
[cfg f mdata]
|
||||
(let [f (as-> f $
|
||||
(wrap-dispatch cfg $ mdata)
|
||||
(cond/wrap cfg $ mdata)
|
||||
(retry/wrap-retry cfg $ mdata)
|
||||
(wrap-metrics cfg $ mdata)
|
||||
(climit/wrap cfg $ mdata)
|
||||
(retry/wrap-retry cfg $ mdata)
|
||||
(rsem/wrap cfg $ mdata)
|
||||
(rlimit/wrap cfg $ mdata)
|
||||
(wrap-audit cfg $ mdata))
|
||||
|
||||
@@ -215,7 +172,6 @@
|
||||
(fn [{:keys [::request] :as params}]
|
||||
;; Raise authentication error when rpc method requires auth but
|
||||
;; no profile-id is found in the request.
|
||||
|
||||
(p/do!
|
||||
(if (and auth? (not (uuid? (:profile-id params))))
|
||||
(ex/raise :type :authentication
|
||||
@@ -223,6 +179,7 @@
|
||||
:hint "authentication required for this endpoint")
|
||||
(let [params (us/conform spec (dissoc params ::request))]
|
||||
(f cfg (assoc params ::request request))))))
|
||||
|
||||
mdata)))
|
||||
|
||||
(defn- process-method
|
||||
@@ -267,40 +224,32 @@
|
||||
'app.rpc.commands.comments
|
||||
'app.rpc.commands.management
|
||||
'app.rpc.commands.verify-token
|
||||
'app.rpc.commands.search
|
||||
'app.rpc.commands.auth
|
||||
'app.rpc.commands.ldap
|
||||
'app.rpc.commands.demo
|
||||
'app.rpc.commands.webhooks
|
||||
'app.rpc.commands.audit
|
||||
'app.rpc.commands.files
|
||||
'app.rpc.commands.files.update
|
||||
'app.rpc.commands.files.create
|
||||
'app.rpc.commands.files.temp)
|
||||
'app.rpc.commands.files)
|
||||
(map (partial process-method cfg))
|
||||
(into {}))))
|
||||
|
||||
(s/def ::audit (s/nilable fn?))
|
||||
(s/def ::http-client fn?)
|
||||
(s/def ::ldap (s/nilable map?))
|
||||
(s/def ::msgbus ::mbus/msgbus)
|
||||
(s/def ::climit (s/nilable ::climit/climit))
|
||||
(s/def ::rlimit (s/nilable ::rlimit/rlimit))
|
||||
|
||||
(s/def ::public-uri ::us/not-empty-string)
|
||||
(s/def ::session map?)
|
||||
(s/def ::storage some?)
|
||||
(s/def ::sprops map?)
|
||||
|
||||
(defmethod ig/pre-init-spec ::methods [_]
|
||||
(s/keys :req [::audit/collector
|
||||
::http.client/client
|
||||
::db/pool
|
||||
::wrk/executor]
|
||||
:req-un [::sto/storage
|
||||
::http.session/session
|
||||
(s/keys :req-un [::storage
|
||||
::session
|
||||
::sprops
|
||||
::audit
|
||||
::public-uri
|
||||
::msgbus
|
||||
::rlimit
|
||||
::climit
|
||||
::wrk/executor
|
||||
::http-client
|
||||
::rsem/semaphores
|
||||
::rlimit/rlimit
|
||||
::mtx/metrics
|
||||
::db/pool
|
||||
::ldap]))
|
||||
|
||||
@@ -1,204 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.rpc.climit
"Concurrency limiter for RPC."
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.config :as cf]
|
||||
[app.metrics :as mtx]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.util.services :as-alias sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.edn :as edn]
|
||||
[clojure.spec.alpha :as s]
|
||||
[datoteka.fs :as fs]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]
|
||||
[promesa.exec.bulkhead :as pxb])
|
||||
(:import
|
||||
com.github.benmanes.caffeine.cache.Cache
|
||||
com.github.benmanes.caffeine.cache.CacheLoader
|
||||
com.github.benmanes.caffeine.cache.Caffeine
|
||||
com.github.benmanes.caffeine.cache.RemovalListener))
|
||||
|
||||
(defn- capacity-exception?
|
||||
[o]
|
||||
(and (ex/ex-info? o)
|
||||
(let [data (ex-data o)]
|
||||
(and (= :bulkhead-error (:type data))
|
||||
(= :capacity-limit-reached (:code data))))))
|
||||
|
||||
(defn invoke!
|
||||
[limiter f]
|
||||
(->> (px/submit! limiter f)
|
||||
(p/hcat (fn [result cause]
|
||||
(cond
|
||||
(capacity-exception? cause)
|
||||
(p/rejected
|
||||
(ex/error :type :internal
|
||||
:code :concurrency-limit-reached
|
||||
:queue (-> limiter meta :bkey name)
|
||||
:cause cause))
|
||||
|
||||
(some? cause)
|
||||
(p/rejected cause)
|
||||
|
||||
:else
|
||||
(p/resolved result))))))
|
||||
|
||||
(defn- create-limiter
|
||||
[{:keys [executor metrics concurrency queue-size bkey skey]}]
|
||||
(let [labels (into-array String [(name bkey)])
|
||||
on-queue (fn [instance]
|
||||
(l/trace :hint "enqueued"
|
||||
:key (name bkey)
|
||||
:skey (str skey)
|
||||
:queue-size (get instance ::pxb/current-queue-size)
|
||||
:concurrency (get instance ::pxb/current-concurrency))
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-queue-size
|
||||
:val (get instance ::pxb/current-queue-size)
|
||||
:labels labels)
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-concurrency
|
||||
:val (get instance ::pxb/current-concurrency)
|
||||
:labels labels))
|
||||
|
||||
on-run (fn [instance task]
|
||||
(let [elapsed (- (inst-ms (dt/now))
|
||||
(inst-ms task))]
|
||||
(l/trace :hint "execute"
|
||||
:key (name bkey)
|
||||
:skey (str skey)
|
||||
:elapsed (str elapsed "ms"))
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-timing
|
||||
:val elapsed
|
||||
:labels labels)
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-queue-size
|
||||
:val (get instance ::pxb/current-queue-size)
|
||||
:labels labels)
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-concurrency
|
||||
:val (get instance ::pxb/current-concurrency)
|
||||
:labels labels)))
|
||||
|
||||
options {:executor executor
|
||||
:concurrency concurrency
|
||||
:queue-size (or queue-size Integer/MAX_VALUE)
|
||||
:on-queue on-queue
|
||||
:on-run on-run}]
|
||||
|
||||
(-> (pxb/create options)
|
||||
(vary-meta assoc :bkey bkey :skey skey))))
|
||||
|
||||
(defn- create-cache
|
||||
[{:keys [executor] :as params} config]
|
||||
(let [listener (reify RemovalListener
|
||||
(onRemoval [_ key _val cause]
|
||||
(l/trace :hint "cache: remove" :key key :reason (str cause))))
|
||||
|
||||
loader (reify CacheLoader
|
||||
(load [_ key]
|
||||
(let [[bkey skey] key]
|
||||
(when-let [config (get config bkey)]
|
||||
(-> (merge params config)
|
||||
(assoc :bkey bkey)
|
||||
(assoc :skey skey)
|
||||
(create-limiter))))))]
|
||||
|
||||
(.. (Caffeine/newBuilder)
|
||||
(weakValues)
|
||||
(executor executor)
|
||||
(removalListener listener)
|
||||
(build loader))))
|
||||
|
||||
(defprotocol IConcurrencyManager)
|
||||
|
||||
(s/def ::concurrency ::us/integer)
|
||||
(s/def ::queue-size ::us/integer)
|
||||
(s/def ::config
|
||||
(s/map-of keyword?
|
||||
(s/keys :req-un [::concurrency]
|
||||
:opt-un [::queue-size])))
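;; Not part of the diff: an illustrative EDN file that would satisfy the
;; ::config spec above and be referenced by the :rpc-climit-config setting.
;; The queue names and the numbers are assumptions for illustration.
;;
;; {:auth              {:concurrency 4 :queue-size 128}
;;  :push-audit-events {:concurrency 1}}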
|
||||
|
||||
(defmethod ig/prep-key ::rpc/climit
|
||||
[_ cfg]
|
||||
(merge {:path (cf/get :rpc-climit-config)}
|
||||
(d/without-nils cfg)))
|
||||
|
||||
(defmethod ig/pre-init-spec ::rpc/climit [_]
|
||||
(s/keys :req-un [::wrk/executor ::mtx/metrics ::fs/path]))
|
||||
|
||||
(defmethod ig/init-key ::rpc/climit
|
||||
[_ {:keys [path] :as params}]
|
||||
(when (contains? cf/flags :rpc-climit)
|
||||
(if-let [config (some->> path slurp edn/read-string)]
|
||||
(do
|
||||
(l/info :hint "initializing concurrency limit" :config (str path))
|
||||
(us/verify! ::config config)
|
||||
|
||||
(let [cache (create-cache params config)]
|
||||
^{::cache cache}
|
||||
(reify
|
||||
IConcurrencyManager
|
||||
clojure.lang.IDeref
|
||||
(deref [_] config)
|
||||
|
||||
clojure.lang.ILookup
|
||||
(valAt [_ key]
|
||||
(let [key (if (vector? key) key [key])]
|
||||
(.get ^Cache cache key))))))
|
||||
|
||||
(l/warn :hint "unable to load configuration" :config (str path)))))
|
||||
|
||||
|
||||
(s/def ::climit #(satisfies? IConcurrencyManager %))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; PUBLIC API
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defmacro with-dispatch
|
||||
[lim & body]
|
||||
`(if ~lim
|
||||
(invoke! ~lim (^:once fn [] (p/wrap (do ~@body))))
|
||||
(p/wrap (do ~@body))))
|
||||
|
||||
(defn wrap
|
||||
[{:keys [climit]} f {:keys [::queue ::key-fn] :as mdata}]
|
||||
(if (and (some? climit)
|
||||
(some? queue))
|
||||
(if-let [config (get @climit queue)]
|
||||
(do
|
||||
(l/debug :hint "wrap: instrumenting method"
|
||||
:limit-name (name queue)
|
||||
:service-name (::sv/name mdata)
|
||||
:queue-size (or (:queue-size config) Integer/MAX_VALUE)
|
||||
:concurrency (:concurrency config)
|
||||
:keyed? (some? key-fn))
|
||||
(if (some? key-fn)
|
||||
(fn [cfg params]
|
||||
(let [key [queue (key-fn params)]
|
||||
lim (get climit key)]
|
||||
(invoke! lim (partial f cfg params))))
|
||||
|
||||
(let [lim (get climit queue)]
|
||||
(fn [cfg params]
|
||||
(invoke! lim (partial f cfg params))))))
|
||||
(do
|
||||
(l/warn :hint "wrap: no config found"
|
||||
:queue (name queue)
|
||||
:service (::sv/name mdata))
|
||||
f))
|
||||
f))
|
||||
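;; Not part of the diff: a minimal usage sketch of the concurrency limiter,
;; kept in a rich-comment block. The queue name :example, the `climit`
;; binding and `do-expensive-work` are hypothetical; ::climit/queue and
;; ::climit/key-fn metadata, keyword lookup on the manager, and the
;; `with-dispatch` macro are the pieces defined above.
(comment
  ;; An RPC method opts into a bulkhead by naming a queue from the EDN
  ;; config; ::climit/key-fn optionally creates one bulkhead per key
  ;; (here, per profile).
  (sv/defmethod ::my-heavy-operation
    {::climit/queue :example
     ::climit/key-fn :profile-id}
    [cfg params]
    ...)

  ;; Arbitrary asynchronous work can also be guarded directly:
  (with-dispatch (get climit :example)
    (do-expensive-work params)))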
@@ -1,86 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.rpc.commands.audit
|
||||
"Audit Log related RPC methods"
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.http :as-alias http]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.rpc.climit :as-alias climit]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]))
|
||||
|
||||
(defn- event->row [event]
|
||||
[(uuid/next)
|
||||
(:name event)
|
||||
(:source event)
|
||||
(:type event)
|
||||
(:timestamp event)
|
||||
(:profile-id event)
|
||||
(db/inet (:ip-addr event))
|
||||
(db/tjson (:props event))
|
||||
(db/tjson (d/without-nils (:context event)))])
|
||||
|
||||
(def ^:private event-columns
|
||||
[:id :name :source :type :tracked-at
|
||||
:profile-id :ip-addr :props :context])
|
||||
|
||||
(defn- handle-events
|
||||
[{:keys [::db/pool]} {:keys [profile-id events ::http/request] :as params}]
|
||||
(let [ip-addr (audit/parse-client-ip request)
|
||||
xform (comp
|
||||
(map #(assoc % :profile-id profile-id))
|
||||
(map #(assoc % :ip-addr ip-addr))
|
||||
(map #(assoc % :source "frontend"))
|
||||
(filter :profile-id)
|
||||
(map event->row))
|
||||
events (sequence xform events)]
|
||||
(when (seq events)
|
||||
(db/insert-multi! pool :audit-log event-columns events))))
|
||||
|
||||
(s/def ::profile-id ::us/uuid)
|
||||
(s/def ::name ::us/string)
|
||||
(s/def ::type ::us/string)
|
||||
(s/def ::props (s/map-of ::us/keyword any?))
|
||||
(s/def ::timestamp dt/instant?)
|
||||
(s/def ::context (s/map-of ::us/keyword any?))
|
||||
|
||||
(s/def ::event
|
||||
(s/keys :req-un [::type ::name ::props ::timestamp]
|
||||
:opt-un [::context]))
|
||||
|
||||
(s/def ::events (s/every ::event))
|
||||
|
||||
(s/def ::push-audit-events
|
||||
(s/keys :req-un [::events ::profile-id]))
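;; Not part of the diff: an illustrative params map that would satisfy the
;; ::push-audit-events spec above. The event name, type and props are
;; hypothetical, and the profile id is elided.
;;
;; {:profile-id #uuid "..."
;;  :events [{:name "navigate"
;;            :type "action"
;;            :props {:route "dashboard"}
;;            :timestamp (dt/now)}]}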
|
||||
|
||||
(sv/defmethod ::push-audit-events
|
||||
{::climit/queue :push-audit-events
|
||||
::climit/key-fn :profile-id
|
||||
::audit/skip true
|
||||
::doc/added "1.17"}
|
||||
[{:keys [::db/pool ::wrk/executor] :as cfg} params]
|
||||
(if (or (db/read-only? pool)
|
||||
(not (contains? cf/flags :audit-log)))
|
||||
(do
|
||||
(l/warn :hint "audit: http handler disabled or db is read-only")
|
||||
(rph/wrap nil))
|
||||
|
||||
(->> (px/submit! executor #(handle-events cfg params))
|
||||
(p/fmap (constantly nil)))))
|
||||
|
||||
@@ -13,13 +13,11 @@
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.emails :as eml]
|
||||
[app.http.session :as session]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.rpc.climit :as climit]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.rpc.mutations.teams :as teams]
|
||||
[app.rpc.queries.profile :as profile]
|
||||
[app.rpc.semaphore :as rsem]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
@@ -135,10 +133,10 @@
|
||||
{:invitation-token (:invitation-token params)}
|
||||
profile)]
|
||||
|
||||
(-> response
|
||||
(rph/with-transform (session/create-fn session (:id profile)))
|
||||
(rph/with-meta {::audit/props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)}))))))
|
||||
(with-meta response
|
||||
{:transform-response ((:create session) (:id profile))
|
||||
::audit/props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)})))))
|
||||
|
||||
(s/def ::login-with-password
|
||||
(s/keys :req-un [::email ::password]
|
||||
@@ -147,7 +145,7 @@
|
||||
(sv/defmethod ::login-with-password
|
||||
"Performs authentication using penpot password."
|
||||
{:auth false
|
||||
::climit/queue :auth
|
||||
::rsem/queue :auth
|
||||
::doc/added "1.15"}
|
||||
[cfg params]
|
||||
(login-with-password cfg params))
|
||||
@@ -162,7 +160,8 @@
|
||||
{:auth false
|
||||
::doc/added "1.15"}
|
||||
[{:keys [session] :as cfg} _]
|
||||
(rph/with-transform {} (session/delete-fn session)))
|
||||
(with-meta {}
|
||||
{:transform-response (:delete session)}))
|
||||
|
||||
;; ---- COMMAND: Recover Profile
|
||||
|
||||
@@ -187,7 +186,7 @@
|
||||
|
||||
(sv/defmethod ::recover-profile
|
||||
{:auth false
|
||||
::climit/queue :auth
|
||||
::rsem/queue :auth
|
||||
::doc/added "1.15"}
|
||||
[cfg params]
|
||||
(recover-profile cfg params))
|
||||
@@ -288,8 +287,7 @@
|
||||
props (-> (audit/extract-utm-params params)
|
||||
(merge (:props params))
|
||||
(merge {:viewed-tutorial? false
|
||||
:viewed-walkthrough? false
|
||||
:nudge {:big 10 :small 1}})
|
||||
:viewed-walkthrough? false})
|
||||
(db/tjson))
|
||||
|
||||
password (if-let [password (:password params)]
|
||||
@@ -376,6 +374,8 @@
|
||||
(create-profile conn)
|
||||
(create-profile-relations conn)
|
||||
(profile/decode-profile-row)))
|
||||
audit-fn (:audit cfg)
|
||||
|
||||
invitation (when-let [token (:invitation-token params)]
|
||||
(tokens/verify sprops {:token token :iss :team-invitation}))]
|
||||
|
||||
@@ -384,11 +384,10 @@
|
||||
;; accordingly.
|
||||
(when-let [id (:profile-id claims)]
|
||||
(db/update! conn :profile {:modified-at (dt/now)} {:id id})
|
||||
(when-let [collector (::audit/collector cfg)]
|
||||
(audit/submit! collector
|
||||
{:type "fact"
|
||||
:name "register-profile-retry"
|
||||
:profile-id id})))
|
||||
(audit-fn :cmd :submit
|
||||
:type "fact"
|
||||
:name "register-profile-retry"
|
||||
:profile-id id))
|
||||
|
||||
(cond
|
||||
;; If invitation token comes in params, this is because the
|
||||
@@ -401,33 +400,33 @@
|
||||
(let [claims (assoc invitation :member-id (:id profile))
|
||||
token (tokens/generate sprops claims)
|
||||
resp {:invitation-token token}]
|
||||
(-> resp
|
||||
(rph/with-transform (session/create-fn session (:id profile)))
|
||||
(rph/with-meta {::audit/replace-props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)})))
|
||||
(with-meta resp
|
||||
{:transform-response ((:create session) (:id profile))
|
||||
::audit/replace-props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)}))
|
||||
|
||||
;; If auth backend is different from "penpot" means user is
|
||||
;; registering using third party auth mechanism; in this case
|
||||
;; we need to mark this session as logged.
|
||||
(not= "penpot" (:auth-backend profile))
|
||||
(-> (profile/strip-private-attrs profile)
|
||||
(rph/with-transform (session/create-fn session (:id profile)))
|
||||
(rph/with-meta {::audit/replace-props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)}))
|
||||
(with-meta (profile/strip-private-attrs profile)
|
||||
{:transform-response ((:create session) (:id profile))
|
||||
::audit/replace-props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)})
|
||||
|
||||
;; If the `:enable-insecure-register` flag is set, we proceed
|
||||
;; to sign in the user directly, without email verification.
|
||||
(true? is-active)
|
||||
(-> (profile/strip-private-attrs profile)
|
||||
(rph/with-transform (session/create-fn session (:id profile)))
|
||||
(rph/with-meta {::audit/replace-props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)}))
|
||||
(with-meta (profile/strip-private-attrs profile)
|
||||
{:transform-response ((:create session) (:id profile))
|
||||
::audit/replace-props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)})
|
||||
|
||||
;; In all other cases, send a verification email.
|
||||
:else
|
||||
(do
|
||||
(send-email-verification! conn sprops profile)
|
||||
(rph/with-meta profile
|
||||
(with-meta profile
|
||||
{::audit/replace-props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)})))))
|
||||
|
||||
@@ -436,7 +435,7 @@
|
||||
|
||||
(sv/defmethod ::register-profile
|
||||
{:auth false
|
||||
::climit/queue :auth
|
||||
::rsem/queue :auth
|
||||
::doc/added "1.15"}
|
||||
[{:keys [pool] :as cfg} params]
|
||||
(db/with-atomic [conn pool]
|
||||
|
||||
@@ -16,15 +16,14 @@
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.media :as media]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.queries.files :as files]
|
||||
[app.rpc.queries.projects :as projects]
|
||||
[app.storage :as sto]
|
||||
[app.storage.tmp :as tmp]
|
||||
[app.tasks.file-gc]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.fressian :as fres]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[clojure.spec.alpha :as s]
|
||||
@@ -290,11 +289,9 @@
|
||||
|
||||
(defn- retrieve-file
|
||||
[pool file-id]
|
||||
(with-open [conn (db/open pool)]
|
||||
(binding [pmap/*load-fn* (partial files/load-pointer conn file-id)]
|
||||
(some-> (db/get* conn :file {:id file-id})
|
||||
(files/decode-row)
|
||||
(update :data files/process-pointers deref)))))
|
||||
(->> (db/query pool :file {:id file-id})
|
||||
(map files/decode-row)
|
||||
(first)))
|
||||
|
||||
(def ^:private sql:file-media-objects
|
||||
"SELECT * FROM file_media_object WHERE id = ANY(?)")
|
||||
@@ -637,7 +634,7 @@
|
||||
|
||||
params {:id file-id'
|
||||
:project-id project-id
|
||||
:name (:name file)
|
||||
:name (str "Imported: " (:name file))
|
||||
:revn (:revn file)
|
||||
:is-shared (:is-shared file)
|
||||
:data (blob/encode data)
|
||||
@@ -810,7 +807,7 @@
|
||||
cs (volatile! nil)]
|
||||
(try
|
||||
(l/info :hint "start exportation" :export-id id)
|
||||
(with-open [^AutoCloseable output (io/output-stream output)]
|
||||
(with-open [output (io/output-stream output)]
|
||||
(binding [*position* (atom 0)]
|
||||
(write-export! (assoc cfg ::output output))))
|
||||
|
||||
@@ -833,7 +830,7 @@
|
||||
(defn export-to-tmpfile!
|
||||
[cfg]
|
||||
(let [path (tmp/tempfile :prefix "penpot.export.")]
|
||||
(with-open [^AutoCloseable output (io/output-stream path)]
|
||||
(with-open [output (io/output-stream path)]
|
||||
(export! cfg output)
|
||||
path)))
|
||||
|
||||
@@ -845,7 +842,7 @@
|
||||
(try
|
||||
(l/info :hint "start importation" :import-id id)
|
||||
(binding [*position* (atom 0)]
|
||||
(with-open [^AutoCloseable input (io/input-stream input)]
|
||||
(with-open [input (io/input-stream input)]
|
||||
(read-import! (assoc cfg ::input input))))
|
||||
|
||||
(catch Throwable cause
|
||||
@@ -873,7 +870,7 @@
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id include-libraries? embed-assets?] :as params}]
|
||||
(files/check-read-permissions! pool profile-id file-id)
|
||||
(let [body (reify yrs/StreamableResponseBody
|
||||
(let [resp (reify yrs/StreamableResponseBody
|
||||
(-write-body-to-stream [_ _ output-stream]
|
||||
(-> cfg
|
||||
(assoc ::file-ids [file-id])
|
||||
@@ -881,8 +878,11 @@
|
||||
(assoc ::include-libraries? include-libraries?)
|
||||
(export! output-stream))))]
|
||||
|
||||
(fn [_]
|
||||
(yrs/response 200 body {"content-type" "application/octet-stream"}))))
|
||||
(with-meta (sv/wrap nil)
|
||||
{:transform-response (fn [_ response]
|
||||
(-> response
|
||||
(assoc :body resp)
|
||||
(assoc :headers {"content-type" "application/octet-stream"})))})))
|
||||
|
||||
(s/def ::file ::media/upload)
|
||||
(s/def ::import-binfile
|
||||
|
||||
@@ -10,9 +10,8 @@
|
||||
[app.common.geom.point :as gpt]
|
||||
[app.common.spec :as us]
|
||||
[app.db :as db]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.queries.files :as files]
|
||||
[app.rpc.queries.teams :as teams]
|
||||
[app.rpc.retry :as retry]
|
||||
[app.util.blob :as blob]
|
||||
@@ -44,7 +43,6 @@
|
||||
#(or (:file-id %) (:team-id %))))
|
||||
|
||||
(sv/defmethod ::get-comment-threads
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [pool] :as cfg} params]
|
||||
(with-open [conn (db/open pool)]
|
||||
(retrieve-comment-threads conn params)))
|
||||
@@ -247,8 +245,7 @@
|
||||
(sv/defmethod ::create-comment-thread
|
||||
{::retry/max-retries 3
|
||||
::retry/matches retry/conflict-db-insert?
|
||||
::doc/added "1.15"
|
||||
::webhooks/event? true}
|
||||
::doc/added "1.15"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id share-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
@@ -367,8 +364,7 @@
|
||||
:opt-un [::share-id]))
|
||||
|
||||
(sv/defmethod ::create-comment
|
||||
{::doc/added "1.15"
|
||||
::webhooks/event? true}
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [pool] :as cfg} params]
|
||||
(db/with-atomic [conn pool]
|
||||
(create-comment conn params)))
|
||||
@@ -487,8 +483,7 @@
|
||||
(s/keys :req-un [::profile-id ::id]))
|
||||
|
||||
(sv/defmethod ::delete-comment
|
||||
{::doc/added "1.15"
|
||||
::webhooks/event? true}
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [comment (db/get-by-id conn :comment id {:for-update true})]
|
||||
@@ -534,3 +529,4 @@
|
||||
:frame-id frame-id}
|
||||
{:id (:id thread)})
|
||||
nil)))
|
||||
|
||||
|
||||
@@ -6,367 +6,20 @@
|
||||
|
||||
(ns app.rpc.commands.files
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.geom.shapes :as gsh]
|
||||
[app.common.pages.helpers :as cph]
|
||||
[app.common.pages.migrations :as pmg]
|
||||
[app.common.spec :as us]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.types.shape-tree :as ctt]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.rpc.commands.files.thumbnails :as-alias thumbs]
|
||||
[app.rpc.cond :as-alias cond]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.rpc.permissions :as perms]
|
||||
[app.rpc.queries.projects :as projects]
|
||||
[app.rpc.queries.share-link :refer [retrieve-share-link]]
|
||||
[app.rpc.queries.teams :as teams]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.rpc.queries.files :as files]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[clojure.set :as set]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
;; --- FEATURES
|
||||
|
||||
(def supported-features
|
||||
#{"storage/objects-map"
|
||||
"storage/pointer-map"
|
||||
"components/v2"})
|
||||
|
||||
(def default-features #{})
|
||||
|
||||
;; --- SPECS
|
||||
|
||||
(s/def ::features ::us/set-of-strings)
|
||||
(s/def ::file-id ::us/uuid)
|
||||
(s/def ::frame-id ::us/uuid)
|
||||
(s/def ::id ::us/uuid)
|
||||
(s/def ::is-shared ::us/boolean)
|
||||
(s/def ::name ::us/string)
|
||||
(s/def ::profile-id ::us/uuid)
|
||||
(s/def ::project-id ::us/uuid)
|
||||
(s/def ::search-term ::us/string)
|
||||
(s/def ::team-id ::us/uuid)
|
||||
|
||||
;; --- HELPERS
|
||||
|
||||
(def long-cache-duration
|
||||
(dt/duration {:days 7}))
|
||||
|
||||
(defn decode-row
|
||||
[{:keys [data changes features] :as row}]
|
||||
(when row
|
||||
(cond-> row
|
||||
features (assoc :features (db/decode-pgarray features #{}))
|
||||
changes (assoc :changes (blob/decode changes))
|
||||
data (assoc :data (blob/decode data)))))
|
||||
|
||||
;; --- FILE PERMISSIONS
|
||||
|
||||
(def ^:private sql:file-permissions
|
||||
"select fpr.is_owner,
|
||||
fpr.is_admin,
|
||||
fpr.can_edit
|
||||
from file_profile_rel as fpr
|
||||
where fpr.file_id = ?
|
||||
and fpr.profile_id = ?
|
||||
union all
|
||||
select tpr.is_owner,
|
||||
tpr.is_admin,
|
||||
tpr.can_edit
|
||||
from team_profile_rel as tpr
|
||||
inner join project as p on (p.team_id = tpr.team_id)
|
||||
inner join file as f on (p.id = f.project_id)
|
||||
where f.id = ?
|
||||
and tpr.profile_id = ?
|
||||
union all
|
||||
select ppr.is_owner,
|
||||
ppr.is_admin,
|
||||
ppr.can_edit
|
||||
from project_profile_rel as ppr
|
||||
inner join file as f on (f.project_id = ppr.project_id)
|
||||
where f.id = ?
|
||||
and ppr.profile_id = ?")
|
||||
|
||||
(defn get-file-permissions
|
||||
[conn profile-id file-id]
|
||||
(when (and profile-id file-id)
|
||||
(db/exec! conn [sql:file-permissions
|
||||
file-id profile-id
|
||||
file-id profile-id
|
||||
file-id profile-id])))
|
||||
|
||||
(defn get-permissions
|
||||
([conn profile-id file-id]
|
||||
(let [rows (get-file-permissions conn profile-id file-id)
|
||||
is-owner (boolean (some :is-owner rows))
|
||||
is-admin (boolean (some :is-admin rows))
|
||||
can-edit (boolean (some :can-edit rows))]
|
||||
(when (seq rows)
|
||||
{:type :membership
|
||||
:is-owner is-owner
|
||||
:is-admin (or is-owner is-admin)
|
||||
:can-edit (or is-owner is-admin can-edit)
|
||||
:can-read true
|
||||
:is-logged (some? profile-id)})))
|
||||
|
||||
([conn profile-id file-id share-id]
|
||||
(let [perms (get-permissions conn profile-id file-id)
|
||||
ldata (retrieve-share-link conn file-id share-id)]
|
||||
|
||||
;; NOTE: in a future when share-link becomes more powerful and
|
||||
;; will allow us specify which parts of the app is available, we
|
||||
;; will probably need to tweak this function in order to expose
|
||||
;; this flags to the frontend.
|
||||
(cond
|
||||
(some? perms) perms
|
||||
(some? ldata) {:type :share-link
|
||||
:can-read true
|
||||
:pages (:pages ldata)
|
||||
:is-logged (some? profile-id)
|
||||
:who-comment (:who-comment ldata)
|
||||
:who-inspect (:who-inspect ldata)}))))
|
||||
|
||||
(def has-edit-permissions?
|
||||
(perms/make-edition-predicate-fn get-permissions))
|
||||
|
||||
(def has-read-permissions?
|
||||
(perms/make-read-predicate-fn get-permissions))
|
||||
|
||||
(def has-comment-permissions?
|
||||
(perms/make-comment-predicate-fn get-permissions))
|
||||
|
||||
(def check-edition-permissions!
|
||||
(perms/make-check-fn has-edit-permissions?))
|
||||
|
||||
(def check-read-permissions!
|
||||
(perms/make-check-fn has-read-permissions?))
|
||||
|
||||
;; A user has comment permissions if she has read permissions, or comment permissions
|
||||
(defn check-comment-permissions!
|
||||
[conn profile-id file-id share-id]
|
||||
(let [can-read (has-read-permissions? conn profile-id file-id)
|
||||
can-comment (has-comment-permissions? conn profile-id file-id share-id)]
|
||||
(when-not (or can-read can-comment)
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found
|
||||
:hint "not found"))))
|
||||
|
||||
;; --- HELPERS
|
||||
|
||||
(defn get-team-id
|
||||
[conn project-id]
|
||||
(:team-id (db/get-by-id conn :project project-id {:columns [:team-id]})))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; FEATURES: pointer-map
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn check-features-compatibility!
|
||||
[features]
|
||||
(let [not-supported (set/difference features supported-features)]
|
||||
(when (seq not-supported)
|
||||
(ex/raise :type :restriction
|
||||
:code :features-not-supported
|
||||
:feature (first not-supported)
|
||||
:hint (format "features %s not supported" (str/join "," not-supported))))
|
||||
features))
|
||||
|
||||
(defn load-pointer
|
||||
[conn file-id id]
|
||||
(let [row (db/get conn :file-data-fragment
|
||||
{:id id :file-id file-id}
|
||||
{:columns [:content]
|
||||
:check-deleted? false})]
|
||||
(blob/decode (:content row))))
|
||||
|
||||
(defn persist-pointers!
|
||||
[conn file-id]
|
||||
(doseq [[id item] @pmap/*tracked*]
|
||||
(when (pmap/modified? item)
|
||||
(let [content (-> item deref blob/encode)]
|
||||
(db/insert! conn :file-data-fragment
|
||||
{:id id
|
||||
:file-id file-id
|
||||
:content content})))))
|
||||
|
||||
(defn process-pointers
|
||||
[file update-fn]
|
||||
(update file :data (fn resolve-fn [data]
|
||||
(cond-> data
|
||||
(contains? data :pages-index)
|
||||
(update :pages-index resolve-fn)
|
||||
|
||||
:always
|
||||
(update-vals (fn [val]
|
||||
(if (pmap/pointer-map? val)
|
||||
(update-fn val)
|
||||
val)))))))
|
||||
[clojure.spec.alpha :as s]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; QUERY COMMANDS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn handle-file-features
|
||||
[{:keys [features] :as file} client-features]
|
||||
(when (and (contains? features "components/v2")
|
||||
(not (contains? client-features "components/v2")))
|
||||
(ex/raise :type :restriction
|
||||
:code :feature-mismatch
|
||||
:feature "components/v2"
|
||||
:hint "file has 'components/v2' feature enabled but frontend didn't specifies it"))
|
||||
;; --- Query: File Libraries used by a File
|
||||
|
||||
(cond-> file
|
||||
(and (contains? client-features "components/v2")
|
||||
(not (contains? features "components/v2")))
|
||||
(update :data ctf/migrate-to-components-v2)
|
||||
|
||||
(and (contains? features "storage/pointer-map")
|
||||
(not (contains? client-features "storage/pointer-map")))
|
||||
(process-pointers deref)))
|
||||
|
||||
|
||||
;; --- COMMAND QUERY: get-file (by id)
|
||||
|
||||
(defn get-file
|
||||
[conn id client-features]
|
||||
;; here we check if client requested features are supported
|
||||
(check-features-compatibility! client-features)
|
||||
|
||||
(binding [pmap/*load-fn* (partial load-pointer conn id)]
|
||||
(-> (db/get-by-id conn :file id)
|
||||
(decode-row)
|
||||
(pmg/migrate-file)
|
||||
(handle-file-features client-features))))
|
||||
|
||||
(defn get-minimal-file
|
||||
[{:keys [pool] :as cfg} id]
|
||||
(db/get pool :file {:id id} {:columns [:id :modified-at :revn]}))
|
||||
|
||||
(defn get-file-etag
|
||||
[{:keys [modified-at revn]}]
|
||||
(str (dt/format-instant modified-at :iso) "-" revn))
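;; Not part of the diff: the etag used by the conditional-request machinery
;; is just the ISO-formatted modification instant and the revision number
;; joined by a dash. Illustrative call, with an approximate result:
;;
;; (get-file-etag {:modified-at (dt/now) :revn 42})
;; ;; => e.g. "2022-11-08T10:00:00.000Z-42"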
|
||||
|
||||
(s/def ::get-file
|
||||
(s/keys :req-un [::profile-id ::id]
|
||||
:opt-un [::features]))
|
||||
|
||||
(sv/defmethod ::get-file
|
||||
"Retrieve a file by its ID. Only authenticated users."
|
||||
{::doc/added "1.17"
|
||||
::cond/get-object #(get-minimal-file %1 (:id %2))
|
||||
::cond/key-fn get-file-etag}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id id features] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(let [perms (get-permissions conn profile-id id)]
|
||||
(check-read-permissions! perms)
|
||||
(let [file (-> (get-file conn id features)
|
||||
(assoc :permissions perms))]
|
||||
(vary-meta file assoc ::cond/key (get-file-etag file))))))
|
||||
|
||||
|
||||
;; --- COMMAND QUERY: get-file-fragment (by id)
|
||||
|
||||
(defn- get-file-fragment
|
||||
[conn file-id fragment-id]
|
||||
(some-> (db/get conn :file-data-fragment {:file-id file-id :id fragment-id})
|
||||
(update :content blob/decode)))
|
||||
|
||||
(s/def ::share-id ::us/uuid)
|
||||
(s/def ::fragment-id ::us/uuid)
|
||||
|
||||
(s/def ::get-file-fragment
|
||||
(s/keys :req-un [::file-id ::fragment-id]
|
||||
:opt-un [::share-id ::profile-id]))
|
||||
|
||||
(sv/defmethod ::get-file-fragment
|
||||
"Retrieve a file by its ID. Only authenticated users."
|
||||
{::doc/added "1.17"
|
||||
:auth false}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id fragment-id share-id] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(let [perms (get-permissions conn profile-id file-id share-id)]
|
||||
(check-read-permissions! perms)
|
||||
(-> (get-file-fragment conn file-id fragment-id)
|
||||
(rph/with-http-cache long-cache-duration)))))
|
||||
|
||||
;; --- COMMAND QUERY: get-file-object-thumbnails
|
||||
|
||||
(defn get-object-thumbnails
|
||||
([conn file-id]
|
||||
(let [sql (str/concat
|
||||
"select object_id, data "
|
||||
" from file_object_thumbnail"
|
||||
" where file_id=?")]
|
||||
(->> (db/exec! conn [sql file-id])
|
||||
(d/index-by :object-id :data))))
|
||||
|
||||
([conn file-id object-ids]
|
||||
(let [sql (str/concat
|
||||
"select object_id, data "
|
||||
" from file_object_thumbnail"
|
||||
" where file_id=? and object_id = ANY(?)")
|
||||
ids (db/create-array conn "text" (seq object-ids))]
|
||||
(->> (db/exec! conn [sql file-id ids])
|
||||
(d/index-by :object-id :data)))))
|
||||
|
||||
(s/def ::get-file-object-thumbnails
|
||||
(s/keys :req-un [::profile-id ::file-id]))
|
||||
|
||||
(sv/defmethod ::get-file-object-thumbnails
|
||||
"Retrieve a file object thumbnails."
|
||||
{::doc/added "1.17"
|
||||
::cond/get-object #(get-minimal-file %1 (:file-id %2))
|
||||
::cond/reuse-key? true
|
||||
::cond/key-fn get-file-etag}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(check-read-permissions! conn profile-id file-id)
|
||||
(get-object-thumbnails conn file-id)))
|
||||
|
||||
|
||||
;; --- COMMAND QUERY: get-project-files
|
||||
|
||||
(def ^:private sql:project-files
|
||||
"select f.id,
|
||||
f.project_id,
|
||||
f.created_at,
|
||||
f.modified_at,
|
||||
f.name,
|
||||
f.revn,
|
||||
f.is_shared
|
||||
from file as f
|
||||
where f.project_id = ?
|
||||
and f.deleted_at is null
|
||||
order by f.modified_at desc")
|
||||
|
||||
(s/def ::get-project-files
|
||||
(s/keys :req-un [::profile-id ::project-id]))
|
||||
|
||||
(defn get-project-files
|
||||
[conn project-id]
|
||||
(db/exec! conn [sql:project-files project-id]))
|
||||
|
||||
(sv/defmethod ::get-project-files
|
||||
"Get all files for the specified project."
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id project-id] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(projects/check-read-permissions! conn profile-id project-id)
|
||||
(get-project-files conn project-id)))
|
||||
|
||||
|
||||
;; --- COMMAND QUERY: has-file-libraries
|
||||
|
||||
(declare get-has-file-libraries)
|
||||
(declare retrieve-has-file-libraries)
|
||||
|
||||
(s/def ::file-id ::us/uuid)
|
||||
(s/def ::profile-id ::us/uuid)
|
||||
@@ -379,8 +32,8 @@
|
||||
{::doc/added "1.15.1"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(check-read-permissions! pool profile-id file-id)
|
||||
(get-has-file-libraries conn params)))
|
||||
(files/check-read-permissions! pool profile-id file-id)
|
||||
(retrieve-has-file-libraries conn params)))
|
||||
|
||||
(def ^:private sql:has-file-libraries
|
||||
"SELECT COUNT(*) > 0 AS has_libraries
|
||||
@@ -390,599 +43,8 @@
|
||||
AND (fl.deleted_at IS NULL OR
|
||||
fl.deleted_at > now())")
|
||||
|
||||
(defn- get-has-file-libraries
|
||||
(defn- retrieve-has-file-libraries
|
||||
[conn {:keys [file-id]}]
|
||||
(let [row (db/exec-one! conn [sql:has-file-libraries file-id])]
|
||||
(:has-libraries row)))
|
||||
|
||||
|
||||
;; --- QUERY COMMAND: get-page
|
||||
|
||||
(defn- prune-objects
|
||||
"Given the page data and the object-id returns the page data with all
|
||||
other not needed objects removed from the `:objects` data
|
||||
structure."
|
||||
[{:keys [objects] :as page} object-id]
|
||||
(let [objects (cph/get-children-with-self objects object-id)]
|
||||
(assoc page :objects (d/index-by :id objects))))
|
||||
|
||||
(defn- prune-thumbnails
|
||||
"Given the page data, removes the `:thumbnail` prop from all
|
||||
shapes."
|
||||
[page]
|
||||
(update page :objects update-vals #(dissoc % :thumbnail)))
|
||||
|
||||
(defn get-page
|
||||
[conn {:keys [file-id page-id object-id features]}]
|
||||
(let [file (get-file conn file-id features)
|
||||
page-id (or page-id (-> file :data :pages first))
|
||||
page (dm/get-in file [:data :pages-index page-id])]
|
||||
(cond-> (prune-thumbnails page)
|
||||
(uuid? object-id)
|
||||
(prune-objects object-id))))
|
||||
|
||||
(s/def ::page-id ::us/uuid)
|
||||
(s/def ::object-id ::us/uuid)
|
||||
(s/def ::get-page
|
||||
(s/and
|
||||
(s/keys :req-un [::profile-id ::file-id]
|
||||
:opt-un [::page-id ::object-id ::features])
|
||||
(fn [obj]
|
||||
(if (contains? obj :object-id)
|
||||
(contains? obj :page-id)
|
||||
true))))
|
||||
|
||||
(sv/defmethod ::get-page
|
||||
"Retrieves the page data from file and returns it. If no page-id is
|
||||
specified, the first page will be returned. If object-id is
|
||||
specified, only that object and its children will be returned in the
|
||||
page objects data structure.
|
||||
|
||||
If you specify the object-id, the page-id parameter becomes
|
||||
mandatory.
|
||||
|
||||
Mainly used for rendering purposes."
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(check-read-permissions! conn profile-id file-id)
|
||||
(get-page conn params)))
|
||||
|
||||
|
||||
;; --- COMMAND QUERY: get-team-shared-files
|
||||
|
||||
(def ^:private sql:team-shared-files
|
||||
"select f.id,
|
||||
f.revn,
|
||||
f.data,
|
||||
f.project_id,
|
||||
f.created_at,
|
||||
f.modified_at,
|
||||
f.name,
|
||||
f.is_shared
|
||||
from file as f
|
||||
inner join project as p on (p.id = f.project_id)
|
||||
where f.is_shared = true
|
||||
and f.deleted_at is null
|
||||
and p.deleted_at is null
|
||||
and p.team_id = ?
|
||||
order by f.modified_at desc")
|
||||
|
||||
(defn get-team-shared-files
|
||||
[conn {:keys [team-id] :as params}]
|
||||
(let [assets-sample
|
||||
(fn [assets limit]
|
||||
(let [sorted-assets (->> (vals assets)
|
||||
(sort-by #(str/lower (:name %))))]
|
||||
|
||||
{:count (count sorted-assets)
|
||||
:sample (into [] (take limit sorted-assets))}))
|
||||
|
||||
library-summary
|
||||
(fn [data]
|
||||
{:components (assets-sample (:components data) 4)
|
||||
:colors (assets-sample (:colors data) 3)
|
||||
:typographies (assets-sample (:typographies data) 3)})
|
||||
|
||||
xform (comp
|
||||
(map decode-row)
|
||||
(map #(assoc % :library-summary (library-summary (:data %))))
|
||||
(map #(dissoc % :data)))]
|
||||
|
||||
(into #{} xform (db/exec! conn [sql:team-shared-files team-id]))))
|
||||
|
||||
(s/def ::get-team-shared-files
|
||||
(s/keys :req-un [::profile-id ::team-id]))
|
||||
|
||||
(sv/defmethod ::get-team-shared-files
|
||||
"Get all file (libraries) for the specified team."
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [pool] :as cfg} params]
|
||||
(with-open [conn (db/open pool)]
|
||||
(get-team-shared-files conn params)))
|
||||
|
||||
|
||||
;; --- COMMAND QUERY: get-file-libraries
|
||||
|
||||
(def ^:private sql:file-libraries
|
||||
"WITH RECURSIVE libs AS (
|
||||
SELECT fl.*, flr.synced_at
|
||||
FROM file AS fl
|
||||
JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
|
||||
WHERE flr.file_id = ?::uuid
|
||||
UNION
|
||||
SELECT fl.*, flr.synced_at
|
||||
FROM file AS fl
|
||||
JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
|
||||
JOIN libs AS l ON (flr.file_id = l.id)
|
||||
)
|
||||
SELECT l.id,
|
||||
l.data,
|
||||
l.features,
|
||||
l.project_id,
|
||||
l.created_at,
|
||||
l.modified_at,
|
||||
l.deleted_at,
|
||||
l.name,
|
||||
l.revn,
|
||||
l.synced_at
|
||||
FROM libs AS l
|
||||
WHERE l.deleted_at IS NULL OR l.deleted_at > now();")
|
||||
|
||||
(defn get-file-libraries
|
||||
[conn file-id client-features]
|
||||
(check-features-compatibility! client-features)
|
||||
(->> (db/exec! conn [sql:file-libraries file-id])
|
||||
(mapv (fn [{:keys [id] :as row}]
|
||||
(binding [pmap/*load-fn* (partial load-pointer conn id)]
|
||||
(-> (decode-row row)
|
||||
(assoc :is-indirect false)
|
||||
(update :data dissoc :pages-index)
|
||||
(handle-file-features client-features)))))))
|
||||
|
||||
(s/def ::get-file-libraries
|
||||
(s/keys :req-un [::profile-id ::file-id]
|
||||
:opt-un [::features]))
|
||||
|
||||
(sv/defmethod ::get-file-libraries
|
||||
"Get libraries used by the specified file."
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id features] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(check-read-permissions! conn profile-id file-id)
|
||||
(get-file-libraries conn file-id features)))
|
||||
|
||||
|
||||
;; --- COMMAND QUERY: Files that use this File library
|
||||
|
||||
(def ^:private sql:library-using-files
|
||||
"SELECT f.id,
|
||||
f.name
|
||||
FROM file_library_rel AS flr
|
||||
JOIN file AS f ON (f.id = flr.file_id)
|
||||
WHERE flr.library_file_id = ?
|
||||
AND (f.deleted_at IS NULL OR f.deleted_at > now())")
|
||||
|
||||
(defn get-library-file-references
|
||||
[conn file-id]
|
||||
(db/exec! conn [sql:library-using-files file-id]))
|
||||
|
||||
(s/def ::get-library-file-references
|
||||
(s/keys :req-un [::profile-id ::file-id]))
|
||||
|
||||
(sv/defmethod ::get-library-file-references
|
||||
"Returns all the file references that use specified file (library) id."
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(check-read-permissions! conn profile-id file-id)
|
||||
(get-library-file-references conn file-id)))
|
||||
|
||||
|
||||
;; --- COMMAND QUERY: get-team-recent-files
|
||||
|
||||
(def sql:team-recent-files
|
||||
"with recent_files as (
|
||||
select f.id,
|
||||
f.revn,
|
||||
f.project_id,
|
||||
f.created_at,
|
||||
f.modified_at,
|
||||
f.name,
|
||||
f.is_shared,
|
||||
row_number() over w as row_num
|
||||
from file as f
|
||||
join project as p on (p.id = f.project_id)
|
||||
where p.team_id = ?
|
||||
and p.deleted_at is null
|
||||
and f.deleted_at is null
|
||||
window w as (partition by f.project_id order by f.modified_at desc)
|
||||
order by f.modified_at desc
|
||||
)
|
||||
select * from recent_files where row_num <= 10;")
|
||||
|
||||
(defn get-team-recent-files
|
||||
[conn team-id]
|
||||
(db/exec! conn [sql:team-recent-files team-id]))
|
||||
|
||||
(s/def ::get-team-recent-files
|
||||
(s/keys :req-un [::profile-id ::team-id]))
|
||||
|
||||
(sv/defmethod ::get-team-recent-files
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id team-id]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(teams/check-read-permissions! conn profile-id team-id)
|
||||
(get-team-recent-files conn team-id)))
|
||||
|
||||
|
||||
;; --- COMMAND QUERY: get-file-thumbnail
|
||||
|
||||
(defn get-file-thumbnail
|
||||
[conn file-id revn]
|
||||
(let [sql (sql/select :file-thumbnail
|
||||
(cond-> {:file-id file-id}
|
||||
revn (assoc :revn revn))
|
||||
{:limit 1
|
||||
:order-by [[:revn :desc]]})
|
||||
row (db/exec-one! conn sql)]
|
||||
(when-not row
|
||||
(ex/raise :type :not-found
|
||||
:code :file-thumbnail-not-found))
|
||||
|
||||
{:data (:data row)
|
||||
:props (some-> (:props row) db/decode-transit-pgobject)
|
||||
:revn (:revn row)
|
||||
:file-id (:file-id row)}))
|
||||
|
||||
(s/def ::revn ::us/integer)
|
||||
|
||||
(s/def ::get-file-thumbnail
|
||||
(s/keys :req-un [::profile-id ::file-id]
|
||||
:opt-un [::revn]))
|
||||
|
||||
(sv/defmethod ::get-file-thumbnail
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [pool]} {:keys [profile-id file-id revn]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(check-read-permissions! conn profile-id file-id)
|
||||
(-> (get-file-thumbnail conn file-id revn)
|
||||
(rph/with-http-cache long-cache-duration))))
|
||||
|
||||
|
||||
;; --- COMMAND QUERY: get-file-data-for-thumbnail
|
||||
|
||||
(defn get-file-data-for-thumbnail
|
||||
[conn {:keys [data id] :as file}]
|
||||
(letfn [;; function responsible for finding the frame marked to be
;; used as thumbnail; the returned frame always has
;; the :page-id set to the page it belongs to.
|
||||
(get-thumbnail-frame [data]
|
||||
(d/seek :use-for-thumbnail?
|
||||
(for [page (-> data :pages-index vals)
|
||||
frame (-> page :objects ctt/get-frames)]
|
||||
(assoc frame :page-id (:id page)))))
|
||||
|
||||
;; function responsible for filtering all unneeded shapes out of
;; the objects data structure when a concrete frame is provided. If no
;; frame is given, the objects are returned untouched.
|
||||
(filter-objects [objects frame-id]
|
||||
(d/index-by :id (cph/get-children-with-self objects frame-id)))
|
||||
|
||||
;; function responsible for assoc'ing the available thumbnails
;; to frames and removing all children shapes from objects when a
;; thumbnail is available
|
||||
(assoc-thumbnails [objects page-id thumbnails]
|
||||
(loop [objects objects
|
||||
frames (filter cph/frame-shape? (vals objects))]
|
||||
|
||||
(if-let [frame (-> frames first)]
|
||||
(let [frame-id (:id frame)
|
||||
object-id (str page-id frame-id)
|
||||
frame (if-let [thumb (get thumbnails object-id)]
|
||||
(assoc frame :thumbnail thumb :shapes [])
|
||||
(dissoc frame :thumbnail))
|
||||
|
||||
children-ids
|
||||
(cph/get-children-ids objects frame-id)
|
||||
|
||||
bounds
|
||||
(when (:show-content frame)
|
||||
(gsh/selection-rect (concat [frame] (->> children-ids (map (d/getf objects))))))
|
||||
|
||||
frame
|
||||
(cond-> frame
|
||||
(some? bounds)
|
||||
(assoc :children-bounds bounds))]
|
||||
|
||||
(if (:thumbnail frame)
|
||||
(recur (-> objects
|
||||
(assoc frame-id frame)
|
||||
(d/without-keys children-ids))
|
||||
(rest frames))
|
||||
(recur (assoc objects frame-id frame)
|
||||
(rest frames))))
|
||||
|
||||
objects)))]
|
||||
|
||||
(let [frame (get-thumbnail-frame data)
|
||||
frame-id (:id frame)
|
||||
page-id (or (:page-id frame)
|
||||
(-> data :pages first))
|
||||
|
||||
page (dm/get-in data [:pages-index page-id])
|
||||
frame-ids (if (some? frame) (list frame-id) (map :id (ctt/get-frames (:objects page))))
|
||||
|
||||
obj-ids (map #(str page-id %) frame-ids)
|
||||
thumbs (get-object-thumbnails conn id obj-ids)]
|
||||
|
||||
(cond-> page
|
||||
;; If we have a frame, we need to specify it at the page level
;; and remove all other unrelated objects.
|
||||
(some? frame-id)
|
||||
(-> (assoc :thumbnail-frame-id frame-id)
|
||||
(update :objects filter-objects frame-id))
|
||||
|
||||
;; Assoc the available thumbnails and prune non-visible shapes
;; to avoid transferring unnecessary data.
|
||||
:always
|
||||
(update :objects assoc-thumbnails page-id thumbs)))))
|
||||
|
||||
(s/def ::get-file-data-for-thumbnail
|
||||
(s/keys :req-un [::profile-id ::file-id]
|
||||
:opt-un [::features]))
|
||||
|
||||
(sv/defmethod ::get-file-data-for-thumbnail
|
||||
"Retrieves the data for generate the thumbnail of the file. Used
|
||||
mainly for render thumbnails on dashboard."
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id features] :as props}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(check-read-permissions! conn profile-id file-id)
|
||||
(let [file (get-file conn file-id features)]
|
||||
{:file-id file-id
|
||||
:revn (:revn file)
|
||||
:page (get-file-data-for-thumbnail conn file)})))
|
||||
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; MUTATION COMMANDS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
;; --- MUTATION COMMAND: rename-file
|
||||
|
||||
(defn rename-file
|
||||
[conn {:keys [id name] :as params}]
|
||||
(-> (db/update! conn :file
|
||||
{:name name
|
||||
:modified-at (dt/now)}
|
||||
{:id id})
|
||||
(select-keys [:id :name :created-at :modified-at])))
|
||||
|
||||
(s/def ::rename-file
|
||||
(s/keys :req-un [::profile-id ::name ::id]))
|
||||
|
||||
(sv/defmethod ::rename-file
|
||||
{::doc/added "1.17"
|
||||
::webhooks/event? true}
|
||||
[{:keys [pool] :as cfg} {:keys [id profile-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(check-edition-permissions! conn profile-id id)
|
||||
(rename-file conn params)))
|
||||
|
||||
|
||||
;; --- MUTATION COMMAND: set-file-shared
|
||||
|
||||
(defn unlink-files
|
||||
[conn {:keys [id] :as params}]
|
||||
(db/delete! conn :file-library-rel {:library-file-id id}))
|
||||
|
||||
(defn set-file-shared
|
||||
[conn {:keys [id is-shared] :as params}]
|
||||
(-> (db/update! conn :file
|
||||
{:is-shared is-shared}
|
||||
{:id id})
|
||||
(select-keys [:id :name :is-shared])))
|
||||
|
||||
(defn absorb-library
|
||||
"Find all files using a shared library, and absorb all library assets
|
||||
into the file local libraries"
|
||||
[conn {:keys [id] :as params}]
|
||||
(let [library (db/get-by-id conn :file id)]
|
||||
(when (:is-shared library)
|
||||
(let [ldata (-> library decode-row pmg/migrate-file :data)]
|
||||
(->> (db/query conn :file-library-rel {:library-file-id id})
|
||||
(map :file-id)
|
||||
(keep #(db/get-by-id conn :file % {:check-deleted? false}))
|
||||
(map decode-row)
|
||||
(map pmg/migrate-file)
|
||||
(run! (fn [{:keys [id data revn] :as file}]
|
||||
(let [data (ctf/absorb-assets data ldata)]
|
||||
(db/update! conn :file
|
||||
{:revn (inc revn)
|
||||
:data (blob/encode data)
|
||||
:modified-at (dt/now)}
|
||||
{:id id})))))))))
|
||||
|
||||
(s/def ::set-file-shared
|
||||
(s/keys :req-un [::profile-id ::id ::is-shared]))
|
||||
|
||||
(sv/defmethod ::set-file-shared
|
||||
{::doc/added "1.17"
|
||||
::webhooks/event? true}
|
||||
[{:keys [pool] :as cfg} {:keys [id profile-id is-shared] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(check-edition-permissions! conn profile-id id)
|
||||
(when-not is-shared
|
||||
(absorb-library conn params)
|
||||
(unlink-files conn params))
|
||||
(set-file-shared conn params)))
|
||||
|
||||
|
||||
;; --- MUTATION COMMAND: delete-file
|
||||
|
||||
(defn mark-file-deleted
|
||||
[conn {:keys [id] :as params}]
|
||||
(db/update! conn :file
|
||||
{:deleted-at (dt/now)}
|
||||
{:id id})
|
||||
nil)
|
||||
|
||||
(s/def ::delete-file
|
||||
(s/keys :req-un [::id ::profile-id]))
|
||||
|
||||
(sv/defmethod ::delete-file
|
||||
{::doc/added "1.17"
|
||||
::webhooks/event? true}
|
||||
[{:keys [pool] :as cfg} {:keys [id profile-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(check-edition-permissions! conn profile-id id)
|
||||
(absorb-library conn params)
|
||||
(mark-file-deleted conn params)))
|
||||
|
||||
;; --- MUTATION COMMAND: link-file-to-library
|
||||
|
||||
(def sql:link-file-to-library
|
||||
"insert into file_library_rel (file_id, library_file_id)
|
||||
values (?, ?)
|
||||
on conflict do nothing;")
|
||||
|
||||
(defn link-file-to-library
|
||||
[conn {:keys [file-id library-id] :as params}]
|
||||
(db/exec-one! conn [sql:link-file-to-library file-id library-id]))
|
||||
|
||||
(s/def ::link-file-to-library
|
||||
(s/keys :req-un [::profile-id ::file-id ::library-id]))
|
||||
|
||||
(sv/defmethod ::link-file-to-library
|
||||
{::doc/added "1.17"
|
||||
::webhooks/event? true}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id library-id] :as params}]
|
||||
(when (= file-id library-id)
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-library
|
||||
:hint "A file cannot be linked to itself"))
|
||||
(db/with-atomic [conn pool]
|
||||
(check-edition-permissions! conn profile-id file-id)
|
||||
(check-edition-permissions! conn profile-id library-id)
|
||||
(link-file-to-library conn params)))
|
||||
|
||||
;; --- MUTATION COMMAND: unlink-file-from-library
|
||||
|
||||
(defn unlink-file-from-library
|
||||
[conn {:keys [file-id library-id] :as params}]
|
||||
(db/delete! conn :file-library-rel
|
||||
{:file-id file-id
|
||||
:library-file-id library-id}))
|
||||
|
||||
(s/def ::unlink-file-from-library
|
||||
(s/keys :req-un [::profile-id ::file-id ::library-id]))
|
||||
|
||||
(sv/defmethod ::unlink-file-from-library
|
||||
{::doc/added "1.17"
|
||||
::webhooks/event? true}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(check-edition-permissions! conn profile-id file-id)
|
||||
(unlink-file-from-library conn params)))
|
||||
|
||||
|
||||
;; --- MUTATION COMMAND: update-sync
|
||||
|
||||
(defn update-sync
|
||||
[conn {:keys [file-id library-id] :as params}]
|
||||
(db/update! conn :file-library-rel
|
||||
{:synced-at (dt/now)}
|
||||
{:file-id file-id
|
||||
:library-file-id library-id}))
|
||||
|
||||
(s/def ::update-file-library-sync-status
|
||||
(s/keys :req-un [::profile-id ::file-id ::library-id]))
|
||||
|
||||
;; TODO: improve naming
|
||||
|
||||
(sv/defmethod ::update-file-library-sync-status
|
||||
"Update the synchronization statos of a file->library link"
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(check-edition-permissions! conn profile-id file-id)
|
||||
(update-sync conn params)))
|
||||
|
||||
|
||||
;; --- MUTATION COMMAND: ignore-sync
|
||||
|
||||
(defn ignore-sync
|
||||
[conn {:keys [file-id date] :as params}]
|
||||
(db/update! conn :file
|
||||
{:ignore-sync-until date}
|
||||
{:id file-id}))
|
||||
|
||||
(s/def ::ignore-file-library-sync-status
|
||||
(s/keys :req-un [::profile-id ::file-id ::date]))
|
||||
|
||||
;; TODO: improve naming
|
||||
(sv/defmethod ::ignore-file-library-sync-status
|
||||
"Ignore updates in linked files"
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(check-edition-permissions! conn profile-id file-id)
|
||||
(ignore-sync conn params)))
|
||||
|
||||
|
||||
;; --- MUTATION COMMAND: upsert-file-object-thumbnail
|
||||
|
||||
(def sql:upsert-object-thumbnail
|
||||
"insert into file_object_thumbnail(file_id, object_id, data)
|
||||
values (?, ?, ?)
|
||||
on conflict(file_id, object_id) do
|
||||
update set data = ?;")
|
||||
|
||||
(defn upsert-file-object-thumbnail!
|
||||
[conn {:keys [file-id object-id data]}]
|
||||
(if data
|
||||
(db/exec-one! conn [sql:upsert-object-thumbnail file-id object-id data data])
|
||||
(db/delete! conn :file-object-thumbnail {:file-id file-id :object-id object-id})))
|
||||
|
||||
(s/def ::data (s/nilable ::us/string))
|
||||
(s/def ::thumbs/object-id ::us/string)
|
||||
(s/def ::upsert-file-object-thumbnail
|
||||
(s/keys :req-un [::profile-id ::file-id ::thumbs/object-id ::data]))
|
||||
|
||||
(sv/defmethod ::upsert-file-object-thumbnail
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(check-edition-permissions! conn profile-id file-id)
|
||||
(upsert-file-object-thumbnail! conn params)
|
||||
nil))
|
||||
|
||||
;; --- MUTATION COMMAND: upsert-file-thumbnail
|
||||
|
||||
(def sql:upsert-file-thumbnail
|
||||
"insert into file_thumbnail (file_id, revn, data, props)
|
||||
values (?, ?, ?, ?::jsonb)
|
||||
on conflict(file_id, revn) do
|
||||
update set data = ?, props=?, updated_at=now();")
|
||||
|
||||
(defn upsert-file-thumbnail
|
||||
[conn {:keys [file-id revn data props]}]
|
||||
(let [props (db/tjson (or props {}))]
|
||||
(db/exec-one! conn [sql:upsert-file-thumbnail
|
||||
file-id revn data props data props])))
|
||||
|
||||
(s/def ::revn ::us/integer)
|
||||
(s/def ::props map?)
|
||||
(s/def ::upsert-file-thumbnail
|
||||
(s/keys :req-un [::profile-id ::file-id ::revn ::data ::props]))
|
||||
|
||||
(sv/defmethod ::upsert-file-thumbnail
|
||||
"Creates or updates the file thumbnail. Mainly used for paint the
|
||||
grid thumbnails."
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(check-edition-permissions! conn profile-id file-id)
|
||||
(upsert-file-thumbnail conn params)
|
||||
nil))
|
||||
|
||||
@@ -1,87 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.rpc.commands.files.create
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.files.features :as ffeat]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.permissions :as perms]
|
||||
[app.rpc.queries.projects :as proj]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.objects-map :as omap]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
[clojure.spec.alpha :as s]))
|
||||
|
||||
(defn create-file-role!
|
||||
[conn {:keys [file-id profile-id role]}]
|
||||
(let [params {:file-id file-id
|
||||
:profile-id profile-id}]
|
||||
(->> (perms/assign-role-flags params role)
|
||||
(db/insert! conn :file-profile-rel))))
|
||||
|
||||
(defn create-file
|
||||
[conn {:keys [id name project-id is-shared data revn
|
||||
modified-at deleted-at create-page
|
||||
ignore-sync-until features]
|
||||
:or {is-shared false revn 0 create-page true}
|
||||
:as params}]
|
||||
(let [id (or id (:id data) (uuid/next))
|
||||
features (-> (into files/default-features features)
|
||||
(files/check-features-compatibility!))
|
||||
|
||||
data (or data
|
||||
(binding [ffeat/*current* features
|
||||
ffeat/*wrap-with-objects-map-fn* (if (features "storage/objects-map") omap/wrap identity)
|
||||
ffeat/*wrap-with-pointer-map-fn* (if (features "storage/pointer-map") pmap/wrap identity)]
|
||||
(if create-page
|
||||
(ctf/make-file-data id)
|
||||
(ctf/make-file-data id nil))))
|
||||
|
||||
features (db/create-array conn "text" features)
|
||||
file (db/insert! conn :file
|
||||
(d/without-nils
|
||||
{:id id
|
||||
:project-id project-id
|
||||
:name name
|
||||
:revn revn
|
||||
:is-shared is-shared
|
||||
:data (blob/encode data)
|
||||
:features features
|
||||
:ignore-sync-until ignore-sync-until
|
||||
:modified-at modified-at
|
||||
:deleted-at deleted-at}))]
|
||||
|
||||
(->> (assoc params :file-id id :role :owner)
|
||||
(create-file-role! conn))
|
||||
|
||||
(files/decode-row file)))
|
||||
|
||||
(s/def ::create-file
|
||||
(s/keys :req-un [::files/profile-id
|
||||
::files/name
|
||||
::files/project-id]
|
||||
:opt-un [::files/id
|
||||
::files/is-shared
|
||||
::files/features]))
|
||||
|
||||
(sv/defmethod ::create-file
|
||||
{::doc/added "1.17"
|
||||
::webhooks/event? true}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id project-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(proj/check-edition-permissions! conn profile-id project-id)
|
||||
(let [team-id (files/get-team-id conn project-id)]
|
||||
(-> (create-file conn params)
|
||||
(vary-meta assoc ::audit/props {:team-id team-id})))))
|
||||
|
||||
@@ -1,106 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.rpc.commands.files.temp
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.pages :as cp]
|
||||
[app.common.spec :as us]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.files.create :as files.create]
|
||||
[app.rpc.commands.files.update :as files.update]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.queries.projects :as proj]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[clojure.spec.alpha :as s]))
|
||||
|
||||
;; --- MUTATION COMMAND: create-temp-file
|
||||
|
||||
(s/def ::create-page ::us/boolean)
|
||||
|
||||
(s/def ::create-temp-file
|
||||
(s/keys :req-un [::files/profile-id
|
||||
::files/name
|
||||
::files/project-id]
|
||||
:opt-un [::files/id
|
||||
::files/is-shared
|
||||
::files/features
|
||||
::create-page]))
|
||||
|
||||
(sv/defmethod ::create-temp-file
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id project-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(proj/check-edition-permissions! conn profile-id project-id)
|
||||
(files.create/create-file conn (assoc params :deleted-at (dt/in-future {:days 1})))))
|
||||
|
||||
;; --- MUTATION COMMAND: update-temp-file
|
||||
|
||||
(defn update-temp-file
|
||||
[conn {:keys [profile-id session-id id revn changes] :as params}]
|
||||
(db/insert! conn :file-change
|
||||
{:id (uuid/next)
|
||||
:session-id session-id
|
||||
:profile-id profile-id
|
||||
:created-at (dt/now)
|
||||
:file-id id
|
||||
:revn revn
|
||||
:data nil
|
||||
:changes (blob/encode changes)}))
|
||||
|
||||
(s/def ::update-temp-file
|
||||
(s/keys :req-un [::files.update/changes
|
||||
::files.update/revn
|
||||
::files.update/session-id
|
||||
::files/id]))
|
||||
|
||||
(sv/defmethod ::update-temp-file
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [pool] :as cfg} params]
|
||||
(db/with-atomic [conn pool]
|
||||
(update-temp-file conn params)
|
||||
nil))
|
||||
|
||||
;; --- MUTATION COMMAND: persist-temp-file
|
||||
|
||||
(defn persist-temp-file
|
||||
[conn {:keys [id] :as params}]
|
||||
(let [file (db/get-by-id conn :file id)
|
||||
revs (db/query conn :file-change
|
||||
{:file-id id}
|
||||
{:order-by [[:revn :asc]]})
|
||||
revn (count revs)]
|
||||
|
||||
(when (nil? (:deleted-at file))
|
||||
(ex/raise :type :validation
|
||||
:code :cant-persist-already-persisted-file))
|
||||
|
||||
(loop [revs (seq revs)
|
||||
data (blob/decode (:data file))]
|
||||
(if-let [rev (first revs)]
|
||||
(recur (rest revs)
|
||||
(->> rev :changes blob/decode (cp/process-changes data)))
|
||||
(db/update! conn :file
|
||||
{:deleted-at nil
|
||||
:revn revn
|
||||
:data (blob/encode data)}
|
||||
{:id id})))
|
||||
nil))
|
||||
|
||||
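;; The loop/recur above is a plain left fold of the pending change batches over
;; the stored data blob; an equivalent illustrative sketch using reduce (with
;; `file` and `revs` bound as in the let above):
(comment
  (reduce (fn [data rev]
            (->> rev :changes blob/decode (cp/process-changes data)))
          (blob/decode (:data file))
          revs))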
(s/def ::persist-temp-file
|
||||
(s/keys :req-un [::files/id
|
||||
::files/profile-id]))
|
||||
|
||||
(sv/defmethod ::persist-temp-file
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [id profile-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(files/check-edition-permissions! conn profile-id id)
|
||||
(persist-temp-file conn params)))
|
||||
@@ -1,304 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.rpc.commands.files.update
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.files.features :as ffeat]
|
||||
[app.common.logging :as l]
|
||||
[app.common.pages :as cp]
|
||||
[app.common.pages.migrations :as pmg]
|
||||
[app.common.spec :as us]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.metrics :as mtx]
|
||||
[app.msgbus :as mbus]
|
||||
[app.rpc.climit :as-alias climit]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.objects-map :as omap]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[clojure.spec.alpha :as s]))
|
||||
|
||||
;; --- SPECS
|
||||
|
||||
(s/def ::changes
|
||||
(s/coll-of map? :kind vector?))
|
||||
|
||||
(s/def ::hint-origin ::us/keyword)
|
||||
(s/def ::hint-events
|
||||
(s/every ::us/keyword :kind vector?))
|
||||
|
||||
(s/def ::change-with-metadata
|
||||
(s/keys :req-un [::changes]
|
||||
:opt-un [::hint-origin
|
||||
::hint-events]))
|
||||
|
||||
(s/def ::changes-with-metadata
|
||||
(s/every ::change-with-metadata :kind vector?))
|
||||
|
||||
(s/def ::session-id ::us/uuid)
|
||||
(s/def ::revn ::us/integer)
|
||||
(s/def ::update-file
|
||||
(s/and
|
||||
(s/keys :req-un [::files/id ::files/profile-id ::session-id ::revn]
|
||||
:opt-un [::changes ::changes-with-metadata ::features])
|
||||
(fn [o]
|
||||
(or (contains? o :changes)
|
||||
(contains? o :changes-with-metadata)))))
|
||||
|
||||
;; --- HELPERS
|
||||
|
||||
;; File changes that affect the library and must be notified
;; to all clients using it.
|
||||
|
||||
(def ^:private library-change-types
|
||||
#{:add-color :mod-color :del-color
|
||||
:add-media :mod-media :del-media
|
||||
:add-component :mod-component :del-component
|
||||
:add-typography :mod-typography :del-typography})
|
||||
|
||||
(def ^:private file-change-types
|
||||
#{:add-obj :mod-obj :del-obj
|
||||
:reg-objects :mov-objects})
|
||||
|
||||
(defn- library-change?
|
||||
[{:keys [type] :as change}]
|
||||
(or (contains? library-change-types type)
|
||||
(and (contains? file-change-types type)
|
||||
(some? (:component-id change)))))
|
||||
|
||||
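;; An illustrative sketch of how the predicate above classifies changes (the
;; change maps are made-up examples, not taken from this diff):
(comment
  (library-change? {:type :mod-color})                          ; => true, always a library change
  (library-change? {:type :mod-obj})                            ; => false, plain object change
  (library-change? {:type :mod-obj :component-id (uuid/next)})  ; => true, the object belongs to a component
  )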
(def ^:private sql:get-file
|
||||
"SELECT f.*, p.team_id
|
||||
FROM file AS f
|
||||
JOIN project AS p ON (p.id = f.project_id)
|
||||
WHERE f.id = ?
|
||||
AND (f.deleted_at IS NULL OR
|
||||
f.deleted_at > now())
|
||||
FOR KEY SHARE")
|
||||
|
||||
(defn get-file
|
||||
[conn id]
|
||||
(let [file (db/exec-one! conn [sql:get-file id])]
|
||||
(when-not file
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found
|
||||
:hint (format "file with id '%s' does not exists" id)))
|
||||
(update file :features db/decode-pgarray #{})))
|
||||
|
||||
(defn- wrap-with-pointer-map-context
|
||||
[f]
|
||||
(fn [{:keys [conn] :as cfg} {:keys [id] :as file}]
|
||||
(binding [pmap/*tracked* (atom {})
|
||||
pmap/*load-fn* (partial files/load-pointer conn id)
|
||||
ffeat/*wrap-with-pointer-map-fn* pmap/wrap]
|
||||
(let [result (f cfg file)]
|
||||
(files/persist-pointers! conn id)
|
||||
result))))
|
||||
|
||||
(defn- wrap-with-objects-map-context
|
||||
[f]
|
||||
(fn [cfg file]
|
||||
(binding [ffeat/*wrap-with-objects-map-fn* omap/wrap]
|
||||
(f cfg file))))
|
||||
|
||||
(declare get-lagged-changes)
|
||||
(declare send-notifications!)
|
||||
(declare update-file)
|
||||
(declare update-file*)
|
||||
(declare take-snapshot?)
|
||||
|
||||
;; If features are specified in the params and the final feature
;; set is different from the persisted one, update it in the
;; database.
|
||||
|
||||
(defn webhook-batch-keyfn
|
||||
[props]
|
||||
(str "rpc:update-file:" (:id props)))
|
||||
|
||||
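;; The batch key groups update-file webhook events by file id; together with the
;; 2 second batch timeout declared below, this presumably lets bursts of updates
;; to the same file be delivered as a single webhook call. For example:
(comment
  (webhook-batch-keyfn {:id #uuid "5d3eadc6-0000-0000-0000-000000000000"})
  ;; => "rpc:update-file:5d3eadc6-0000-0000-0000-000000000000"
  )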
(sv/defmethod ::update-file
|
||||
{::climit/queue :update-file
|
||||
::climit/key-fn :id
|
||||
::webhooks/event? true
|
||||
::webhooks/batch-timeout (dt/duration "2s")
|
||||
::webhooks/batch-key webhook-batch-keyfn
|
||||
::doc/added "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [id profile-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(files/check-edition-permissions! conn profile-id id)
|
||||
(db/xact-lock! conn id)
|
||||
|
||||
(let [cfg (assoc cfg :conn conn)
|
||||
tpoint (dt/tpoint)]
|
||||
(-> (update-file cfg params)
|
||||
(rph/with-defer #(let [elapsed (tpoint)]
|
||||
(l/trace :hint "update-file" :time (dt/format-duration elapsed))))))))
|
||||
|
||||
(defn update-file
|
||||
[{:keys [conn metrics] :as cfg} {:keys [id profile-id changes changes-with-metadata] :as params}]
|
||||
(let [file (get-file conn id)
|
||||
features (->> (concat (:features file)
|
||||
(:features params))
|
||||
(into files/default-features)
|
||||
(files/check-features-compatibility!))]
|
||||
|
||||
(files/check-edition-permissions! conn profile-id (:id file))
|
||||
|
||||
(binding [ffeat/*current* features
|
||||
ffeat/*previous* (:features file)]
|
||||
(let [update-fn (cond-> update-file*
|
||||
(contains? features "storage/pointer-map")
|
||||
(wrap-with-pointer-map-context)
|
||||
|
||||
(contains? features "storage/objects-map")
|
||||
(wrap-with-objects-map-context))
|
||||
|
||||
file (assoc file :features features)
|
||||
changes (if changes-with-metadata
|
||||
(->> changes-with-metadata (mapcat :changes) vec)
|
||||
(vec changes))
|
||||
|
||||
params (assoc params :file file :changes changes)]
|
||||
|
||||
(mtx/run! metrics {:id :update-file-changes :inc (count changes)})
|
||||
|
||||
(when (not= features (:features file))
|
||||
(let [features (db/create-array conn "text" features)]
|
||||
(db/update! conn :file
|
||||
{:features features}
|
||||
{:id id})))
|
||||
|
||||
(-> (update-fn cfg params)
|
||||
(vary-meta assoc ::audit/replace-props
|
||||
{:id (:id file)
|
||||
:name (:name file)
|
||||
:features (:features file)
|
||||
:project-id (:project-id file)
|
||||
:team-id (:team-id file)}))))))
|
||||
|
||||
(defn- update-file*
|
||||
[{:keys [conn] :as cfg} {:keys [file changes session-id profile-id] :as params}]
|
||||
(when (> (:revn params)
|
||||
(:revn file))
|
||||
(ex/raise :type :validation
|
||||
:code :revn-conflict
|
||||
:hint "The incoming revision number is greater that stored version."
|
||||
:context {:incoming-revn (:revn params)
|
||||
:stored-revn (:revn file)}))
|
||||
|
||||
(let [ts (dt/now)
|
||||
file (-> file
|
||||
(update :revn inc)
|
||||
(update :data (fn [data]
|
||||
(cond-> data
|
||||
:always
|
||||
(-> (blob/decode)
|
||||
(assoc :id (:id file))
|
||||
(pmg/migrate-data))
|
||||
|
||||
(and (contains? ffeat/*current* "components/v2")
|
||||
(not (contains? ffeat/*previous* "components/v2")))
|
||||
(ctf/migrate-to-components-v2)
|
||||
|
||||
:always
|
||||
(-> (cp/process-changes changes)
|
||||
(blob/encode))))))]
|
||||
(db/insert! conn :file-change
|
||||
{:id (uuid/next)
|
||||
:session-id session-id
|
||||
:profile-id profile-id
|
||||
:created-at ts
|
||||
:file-id (:id file)
|
||||
:revn (:revn file)
|
||||
:features (db/create-array conn "text" (:features file))
|
||||
:data (when (take-snapshot? file)
|
||||
(:data file))
|
||||
:changes (blob/encode changes)})
|
||||
|
||||
(db/update! conn :file
|
||||
{:revn (:revn file)
|
||||
:data (:data file)
|
||||
:data-backend nil
|
||||
:modified-at ts
|
||||
:has-media-trimmed false}
|
||||
{:id (:id file)})
|
||||
|
||||
(db/update! conn :project
|
||||
{:modified-at ts}
|
||||
{:id (:project-id file)})
|
||||
|
||||
(let [params (assoc params :file file)]
|
||||
;; Send asynchronous notifications
|
||||
(send-notifications! cfg params)
|
||||
|
||||
;; Retrieve and return lagged data
|
||||
(get-lagged-changes conn params))))
|
||||
|
||||
(defn- take-snapshot?
|
||||
"Defines the rule when file `data` snapshot should be saved."
|
||||
[{:keys [revn modified-at] :as file}]
|
||||
(let [freq (or (cf/get :file-change-snapshot-every) 20)
|
||||
timeout (or (cf/get :file-change-snapshot-timeout)
|
||||
(dt/duration {:hours 1}))]
|
||||
(or (= 1 freq)
|
||||
(zero? (mod revn freq))
|
||||
(> (inst-ms (dt/diff modified-at (dt/now)))
|
||||
(inst-ms timeout)))))
|
||||
|
||||
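;; An illustrative sketch of the snapshot rule above, assuming the hardcoded
;; fallbacks (a snapshot every 20 revisions, or after one hour without changes):
(comment
  (take-snapshot? {:revn 40 :modified-at (dt/now)})  ; => true, 40 is a multiple of 20
  (take-snapshot? {:revn 41 :modified-at (dt/now)})  ; => false, recent and not a multiple
  ;; a file whose :modified-at is more than one hour old yields true regardless of :revn
  )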
(def ^:private
|
||||
sql:lagged-changes
|
||||
"select s.id, s.revn, s.file_id,
|
||||
s.session_id, s.changes
|
||||
from file_change as s
|
||||
where s.file_id = ?
|
||||
and s.revn > ?
|
||||
order by s.created_at asc")
|
||||
|
||||
(defn- get-lagged-changes
|
||||
[conn params]
|
||||
(->> (db/exec! conn [sql:lagged-changes (:id params) (:revn params)])
|
||||
(into [] (comp (map files/decode-row)
|
||||
(map (fn [row]
|
||||
(cond-> row
|
||||
(= (:revn row) (:revn (:file params)))
|
||||
(assoc :changes []))))))))
|
||||
|
||||
(defn- send-notifications!
|
||||
[{:keys [conn] :as cfg} {:keys [file changes session-id] :as params}]
|
||||
(let [lchanges (filter library-change? changes)
|
||||
msgbus (:msgbus cfg)]
|
||||
|
||||
;; Asynchronously publish message to the msgbus
|
||||
(mbus/pub! msgbus
|
||||
:topic (:id file)
|
||||
:message {:type :file-change
|
||||
:profile-id (:profile-id params)
|
||||
:file-id (:id file)
|
||||
:session-id (:session-id params)
|
||||
:revn (:revn file)
|
||||
:changes changes})
|
||||
|
||||
(when (and (:is-shared file) (seq lchanges))
|
||||
(let [team-id (or (:team-id file)
|
||||
(files/get-team-id conn (:project-id file)))]
|
||||
;; Asynchronously publish message to the msgbus
|
||||
(mbus/pub! msgbus
|
||||
:topic team-id
|
||||
:message {:type :library-change
|
||||
:profile-id (:profile-id params)
|
||||
:file-id (:id file)
|
||||
:session-id session-id
|
||||
:revn (:revn file)
|
||||
:modified-at (dt/now)
|
||||
:changes lchanges})))))
|
||||
@@ -10,11 +10,9 @@
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.spec :as us]
|
||||
[app.db :as db]
|
||||
[app.http.session :as session]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.rpc.commands.auth :as cmd.auth]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.rpc.queries.profile :as profile]
|
||||
[app.util.services :as sv]
|
||||
[clojure.spec.alpha :as s]))
|
||||
@@ -63,16 +61,15 @@
|
||||
:member-id (:id profile)
|
||||
:member-email (:email profile))
|
||||
token (tokens :generate claims)]
|
||||
(with-meta {:invitation-token token}
|
||||
{:transform-response ((:create session) (:id profile))
|
||||
::audit/props (:props profile)
|
||||
::audit/profile-id (:id profile)}))
|
||||
|
||||
(-> {:invitation-token token}
|
||||
(rph/with-transform (session/create-fn session (:id profile)))
|
||||
(rph/with-meta {::audit/props (:props profile)
|
||||
::audit/profile-id (:id profile)})))
|
||||
|
||||
(-> profile
|
||||
(rph/with-transform (session/create-fn session (:id profile)))
|
||||
(rph/with-meta {::audit/props (:props profile)
|
||||
::audit/profile-id (:id profile)}))))))
|
||||
(with-meta profile
|
||||
{:transform-response ((:create session) (:id profile))
|
||||
::audit/props (:props profile)
|
||||
::audit/profile-id (:id profile)})))))
|
||||
|
||||
(defn- login-or-register
|
||||
[{:keys [pool] :as cfg} info]
|
||||
|
||||
@@ -14,13 +14,11 @@
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
[app.rpc.commands.binfile :as binfile]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.mutations.projects :refer [create-project-role create-project]]
|
||||
[app.rpc.queries.projects :as proj]
|
||||
[app.rpc.queries.teams :as teams]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[clojure.spec.alpha :as s]
|
||||
@@ -55,7 +53,7 @@
|
||||
(assoc key (get index (get item key) (get item key)))))
|
||||
|
||||
(defn- process-file
|
||||
[conn {:keys [id] :as file} index]
|
||||
[file index]
|
||||
(letfn [(process-form [form]
|
||||
(cond-> form
|
||||
;; Relink library items
|
||||
@@ -99,25 +97,18 @@
|
||||
res)))
|
||||
media
|
||||
media))]
|
||||
(-> file
|
||||
(update :id #(get index %))
|
||||
(update :data
|
||||
(fn [data]
|
||||
(binding [pmap/*load-fn* (partial files/load-pointer conn id)
|
||||
pmap/*tracked* (atom {})]
|
||||
(let [file-id (get index id)
|
||||
data (-> data
|
||||
(blob/decode)
|
||||
(assoc :id file-id)
|
||||
(pmg/migrate-data)
|
||||
(update :pages-index relink-shapes)
|
||||
(update :components relink-shapes)
|
||||
(update :media relink-media)
|
||||
(d/without-nils)
|
||||
(files/process-pointers pmap/clone)
|
||||
(blob/encode))]
|
||||
(files/persist-pointers! conn file-id)
|
||||
data)))))))
|
||||
|
||||
(update file :data
|
||||
(fn [data]
|
||||
(-> data
|
||||
(blob/decode)
|
||||
(assoc :id (:id file))
|
||||
(pmg/migrate-data)
|
||||
(update :pages-index relink-shapes)
|
||||
(update :components relink-shapes)
|
||||
(update :media relink-media)
|
||||
(d/without-nils)
|
||||
(blob/encode))))))
|
||||
|
||||
(def sql:retrieve-used-libraries
|
||||
"select flr.*
|
||||
@@ -175,9 +166,9 @@
|
||||
file (-> file
|
||||
(assoc :created-at now)
|
||||
(assoc :modified-at now)
|
||||
(assoc :ignore-sync-until ignore))
|
||||
|
||||
file (process-file conn file index)]
|
||||
(assoc :ignore-sync-until ignore)
|
||||
(update :id #(get index %))
|
||||
(process-file index))]
|
||||
|
||||
(db/insert! conn :file file)
|
||||
(db/insert! conn :file-profile-rel
|
||||
@@ -203,8 +194,7 @@
|
||||
(proj/check-edition-permissions! conn profile-id (:project-id file))
|
||||
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
|
||||
(-> (duplicate-file* conn params {:reset-shared-flag true})
|
||||
(update :data blob/decode)
|
||||
(update :features db/decode-pgarray #{}))))
|
||||
(update :data blob/decode))))
|
||||
|
||||
;; --- COMMAND: Duplicate Project
|
||||
|
||||
|
||||
@@ -1,68 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.rpc.commands.search
|
||||
(:require
|
||||
[app.common.spec :as us]
|
||||
[app.db :as db]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.util.services :as sv]
|
||||
[clojure.spec.alpha :as s]))
|
||||
|
||||
(def ^:private sql:search-files
|
||||
"with projects as (
|
||||
select p.*
|
||||
from project as p
|
||||
inner join team_profile_rel as tpr on (tpr.team_id = p.team_id)
|
||||
where tpr.profile_id = ?
|
||||
and p.team_id = ?
|
||||
and (p.deleted_at is null or p.deleted_at > now())
|
||||
and (tpr.is_admin = true or
|
||||
tpr.is_owner = true or
|
||||
tpr.can_edit = true)
|
||||
union
|
||||
select p.*
|
||||
from project as p
|
||||
inner join project_profile_rel as ppr on (ppr.project_id = p.id)
|
||||
where ppr.profile_id = ?
|
||||
and p.team_id = ?
|
||||
and (p.deleted_at is null or p.deleted_at > now())
|
||||
and (ppr.is_admin = true or
|
||||
ppr.is_owner = true or
|
||||
ppr.can_edit = true)
|
||||
)
|
||||
select distinct
|
||||
f.id,
|
||||
f.project_id,
|
||||
f.created_at,
|
||||
f.modified_at,
|
||||
f.name,
|
||||
f.is_shared
|
||||
from file as f
|
||||
inner join projects as pr on (f.project_id = pr.id)
|
||||
where f.name ilike ('%' || ? || '%')
|
||||
order by f.created_at asc")
|
||||
|
||||
(defn search-files
|
||||
[conn {:keys [profile-id team-id search-term] :as params}]
|
||||
(db/exec! conn [sql:search-files
|
||||
profile-id team-id
|
||||
profile-id team-id
|
||||
search-term]))
|
||||
|
||||
(s/def ::profile-id ::us/uuid)
|
||||
(s/def ::team-id ::us/uuid)
|
||||
(s/def ::search-term ::us/string)
|
||||
|
||||
(s/def ::search-files
|
||||
(s/keys :req-un [::profile-id ::team-id]
|
||||
:opt-un [::search-term]))
|
||||
|
||||
(sv/defmethod ::search-files
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [pool]} {:keys [search-term] :as params}]
|
||||
(when search-term
|
||||
(search-files pool params)))
|
||||
@@ -9,10 +9,8 @@
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.spec :as us]
|
||||
[app.db :as db]
|
||||
[app.http.session :as session]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.rpc.mutations.teams :as teams]
|
||||
[app.rpc.queries.profile :as profile]
|
||||
[app.tokens :as tokens]
|
||||
@@ -48,7 +46,7 @@
|
||||
{:email email}
|
||||
{:id profile-id})
|
||||
|
||||
(rph/with-meta claims
|
||||
(with-meta claims
|
||||
{::audit/name "update-profile-email"
|
||||
::audit/props {:email email}
|
||||
::audit/profile-id profile-id}))
|
||||
@@ -68,11 +66,11 @@
|
||||
{:is-active true}
|
||||
{:id (:id profile)}))
|
||||
|
||||
(-> claims
|
||||
(rph/with-transform (session/create-fn session profile-id))
|
||||
(rph/with-meta {::audit/name "verify-profile-email"
|
||||
::audit/props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)}))))
|
||||
(with-meta claims
|
||||
{:transform-response ((:create session) profile-id)
|
||||
::audit/name "verify-profile-email"
|
||||
::audit/props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)})))
|
||||
|
||||
(defmethod process-token :auth
|
||||
[{:keys [conn] :as cfg} _params {:keys [profile-id] :as claims}]
|
||||
@@ -148,13 +146,14 @@
|
||||
;; proceed with accepting the invitation and joining the
|
||||
;; current profile to the invited team.
|
||||
(let [profile (accept-invitation cfg claims invitation profile)]
|
||||
(-> (assoc claims :state :created)
|
||||
(rph/with-meta {::audit/name "accept-team-invitation"
|
||||
::audit/props (merge
|
||||
(audit/profile->props profile)
|
||||
{:team-id (:team-id claims)
|
||||
:role (:role claims)})
|
||||
::audit/profile-id profile-id})))
|
||||
(with-meta
|
||||
(assoc claims :state :created)
|
||||
{::audit/name "accept-team-invitation"
|
||||
::audit/props (merge
|
||||
(audit/profile->props profile)
|
||||
{:team-id (:team-id claims)
|
||||
:role (:role claims)})
|
||||
::audit/profile-id profile-id}))
|
||||
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-token
|
||||
@@ -170,14 +169,15 @@
|
||||
{:email member-email})
|
||||
{:columns [:id :email]})]
|
||||
(let [profile (accept-invitation cfg claims invitation member)]
|
||||
(-> (assoc claims :state :created)
|
||||
(rph/with-transform (session/create-fn session (:id profile)))
|
||||
(rph/with-meta {::audit/name "accept-team-invitation"
|
||||
::audit/props (merge
|
||||
(audit/profile->props profile)
|
||||
{:team-id (:team-id claims)
|
||||
:role (:role claims)})
|
||||
::audit/profile-id member-id})))
|
||||
(with-meta
|
||||
(assoc claims :state :created)
|
||||
{:transform-response ((:create session) (:id profile))
|
||||
::audit/name "accept-team-invitation"
|
||||
::audit/props (merge
|
||||
(audit/profile->props profile)
|
||||
{:team-id (:team-id claims)
|
||||
:role (:role claims)})
|
||||
::audit/profile-id member-id}))
|
||||
|
||||
{:invitation-token token
|
||||
:iss :team-invitation
|
||||
|
||||
@@ -1,88 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.rpc.commands.viewer
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.db :as db]
|
||||
[app.rpc.commands.comments :as comments]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.cond :as-alias cond]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.queries.share-link :as slnk]
|
||||
[app.util.services :as sv]
|
||||
[clojure.spec.alpha :as s]))
|
||||
|
||||
;; --- Query: View Only Bundle
|
||||
|
||||
(defn- get-project
|
||||
[conn id]
|
||||
(db/get-by-id conn :project id {:columns [:id :name :team-id]}))
|
||||
|
||||
(defn- get-bundle
|
||||
[conn file-id profile-id features]
|
||||
(let [file (files/get-file conn file-id features)
|
||||
project (get-project conn (:project-id file))
|
||||
libs (files/get-file-libraries conn file-id features)
|
||||
users (comments/get-file-comments-users conn file-id profile-id)
|
||||
|
||||
links (->> (db/query conn :share-link {:file-id file-id})
|
||||
(mapv slnk/decode-share-link-row))
|
||||
|
||||
fonts (db/query conn :team-font-variant
|
||||
{:team-id (:team-id project)
|
||||
:deleted-at nil})]
|
||||
|
||||
{:file file
|
||||
:users users
|
||||
:fonts fonts
|
||||
:project project
|
||||
:share-links links
|
||||
:libraries libs}))
|
||||
|
||||
(defn- remove-not-allowed-pages
|
||||
[data allowed]
|
||||
(-> data
|
||||
(update :pages (fn [pages] (filterv #(contains? allowed %) pages)))
|
||||
(update :pages-index select-keys allowed)))
|
||||
|
||||
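;; An illustrative sketch of the page filtering above for a share link that only
;; exposes part of a file (real page ids are uuids; small numbers are used here
;; as placeholders):
(comment
  (remove-not-allowed-pages
   {:pages [1 2 3]
    :pages-index {1 {:name "Cover"} 2 {:name "Draft"} 3 {:name "Specs"}}}
   #{1 3})
  ;; => {:pages [1 3], :pages-index {1 {:name "Cover"}, 3 {:name "Specs"}}}
  )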
(defn get-view-only-bundle
|
||||
[conn {:keys [profile-id file-id share-id features] :as params}]
|
||||
(let [perms (files/get-permissions conn profile-id file-id share-id)
|
||||
bundle (-> (get-bundle conn file-id profile-id features)
|
||||
(assoc :permissions perms))]
|
||||
|
||||
;; When we have neither profile nor share, we just return a not
|
||||
;; found response to the user.
|
||||
(when-not perms
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found
|
||||
:hint "object not found"))
|
||||
|
||||
(update bundle :file
|
||||
(fn [file]
|
||||
(cond-> file
|
||||
(= :share-link (:type perms))
|
||||
(update :data remove-not-allowed-pages (:pages perms))
|
||||
|
||||
:always
|
||||
(update :data select-keys [:id :options :pages :pages-index]))))))
|
||||
|
||||
(s/def ::get-view-only-bundle
|
||||
(s/keys :req-un [::files/file-id]
|
||||
:opt-un [::files/profile-id
|
||||
::files/share-id
|
||||
::files/features]))
|
||||
|
||||
(sv/defmethod ::get-view-only-bundle
|
||||
{:auth false
|
||||
::cond/get-object #(files/get-minimal-file %1 (:file-id %2))
|
||||
::cond/key-fn files/get-file-etag
|
||||
::cond/reuse-key? true
|
||||
::doc/added "1.17"}
|
||||
[{:keys [pool]} params]
|
||||
(with-open [conn (db/open pool)]
|
||||
(get-view-only-bundle conn params)))
|
||||
@@ -1,143 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.rpc.commands.webhooks
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.spec :as us]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
[app.http.client :as http]
|
||||
[app.loggers.webhooks :as webhooks]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.queries.teams :refer [check-edition-permissions! check-read-permissions!]]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[promesa.core :as p]))
|
||||
|
||||
;; --- Mutation: Create Webhook
|
||||
|
||||
(s/def ::profile-id ::us/uuid)
|
||||
(s/def ::team-id ::us/uuid)
|
||||
(s/def ::uri ::us/not-empty-string)
|
||||
(s/def ::is-active ::us/boolean)
|
||||
(s/def ::mtype
|
||||
#{"application/json"
|
||||
"application/x-www-form-urlencoded"
|
||||
"application/transit+json"})
|
||||
|
||||
(s/def ::create-webhook
|
||||
(s/keys :req-un [::profile-id ::team-id ::uri ::mtype]
|
||||
:opt-un [::is-active]))
|
||||
|
||||
;; NOTE: for now the quota is hardcoded, but this needs to be solved in
;; a more universal way for properly handling object quotas
|
||||
(def max-hooks-for-team 8)
|
||||
|
||||
(defn- validate-webhook!
|
||||
[cfg whook params]
|
||||
(letfn [(handle-exception [exception]
|
||||
(if-let [hint (webhooks/interpret-exception exception)]
|
||||
(ex/raise :type :validation
|
||||
:code :webhook-validation
|
||||
:hint hint)
|
||||
(ex/raise :type :internal
|
||||
:code :webhook-validation
|
||||
:cause exception)))
|
||||
|
||||
(handle-response [response]
|
||||
(when-let [hint (webhooks/interpret-response response)]
|
||||
(ex/raise :type :validation
|
||||
:code :webhook-validation
|
||||
:hint hint)))]
|
||||
|
||||
(if (not= (:uri whook) (:uri params))
|
||||
(->> (http/req! cfg {:method :head
|
||||
:uri (:uri params)
|
||||
:timeout (dt/duration "3s")})
|
||||
(p/hmap (fn [response exception]
|
||||
(if exception
|
||||
(handle-exception exception)
|
||||
(handle-response response)))))
|
||||
(p/resolved nil))))
|
||||
|
||||
(defn- validate-quotes!
|
||||
[{:keys [::db/pool]} {:keys [team-id]}]
|
||||
(let [sql ["select count(*) as total from webhook where team_id = ?" team-id]
|
||||
total (:total (db/exec-one! pool sql))]
|
||||
(when (>= total max-hooks-for-team)
|
||||
(ex/raise :type :restriction
|
||||
:code :webhooks-quote-reached
|
||||
:hint (str/ffmt "can't create more than % webhooks per team" max-hooks-for-team)))))
|
||||
|
||||
(defn- insert-webhook!
|
||||
[{:keys [::db/pool]} {:keys [team-id uri mtype is-active] :as params}]
|
||||
(db/insert! pool :webhook
|
||||
{:id (uuid/next)
|
||||
:team-id team-id
|
||||
:uri uri
|
||||
:is-active is-active
|
||||
:mtype mtype}))
|
||||
|
||||
(defn- update-webhook!
|
||||
[{:keys [::db/pool] :as cfg} {:keys [id] :as wook} {:keys [uri mtype is-active] :as params}]
|
||||
(db/update! pool :webhook
|
||||
{:uri uri
|
||||
:is-active is-active
|
||||
:mtype mtype
|
||||
:error-code nil
|
||||
:error-count 0}
|
||||
{:id id}))
|
||||
|
||||
(sv/defmethod ::create-webhook
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool ::wrk/executor] :as cfg} {:keys [profile-id team-id] :as params}]
|
||||
(check-edition-permissions! pool profile-id team-id)
|
||||
(->> (validate-quotes! cfg params)
|
||||
(p/fmap executor (fn [_] (validate-webhook! cfg nil params)))
|
||||
(p/fmap executor (fn [_] (insert-webhook! cfg params)))))
|
||||
|
||||
(s/def ::update-webhook
|
||||
(s/keys :req-un [::id ::uri ::mtype ::is-active]))
|
||||
|
||||
(sv/defmethod ::update-webhook
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool ::wrk/executor] :as cfg} {:keys [id profile-id] :as params}]
|
||||
(let [whook (db/get pool :webhook {:id id})]
|
||||
(check-edition-permissions! pool profile-id (:team-id whook))
|
||||
(->> (validate-webhook! cfg whook params)
|
||||
(p/fmap executor (fn [_] (update-webhook! cfg whook params))))))
|
||||
|
||||
(s/def ::delete-webhook
|
||||
(s/keys :req-un [::profile-id ::id]))
|
||||
|
||||
(sv/defmethod ::delete-webhook
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [profile-id id]}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [whook (db/get conn :webhook {:id id})]
|
||||
(check-edition-permissions! conn profile-id (:team-id whook))
|
||||
(db/delete! conn :webhook {:id id})
|
||||
nil)))
|
||||
|
||||
;; --- Query: Webhooks
|
||||
|
||||
(s/def ::team-id ::us/uuid)
|
||||
(s/def ::get-webhooks
|
||||
(s/keys :req-un [::profile-id ::team-id]))
|
||||
|
||||
(def sql:get-webhooks
|
||||
"select id, uri, mtype, is_active, error_code, error_count
|
||||
from webhook where team_id = ? order by uri")
|
||||
|
||||
(sv/defmethod ::get-webhooks
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id team-id]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(check-read-permissions! conn profile-id team-id)
|
||||
(db/exec! conn [sql:get-webhooks team-id])))
|
||||
@@ -1,67 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.rpc.cond
|
||||
"Conditional loading middleware.
|
||||
|
||||
A middleware consists mainly on wrapping a RPC method with
|
||||
conditional logic. It expects to to have some metadata set on the RPC
|
||||
method that will enable this middleware to retrieve the necessary data
|
||||
for process the conditional logic:
|
||||
|
||||
- `::get-object` => should be a function that retrieves the minimum version
|
||||
of the object that will be used for calculate the KEY (etags in terms of
|
||||
the HTTP protocol).
|
||||
- `::key-fn` a function used to generate a string representation
|
||||
of the object. This function can be applied to the object returned by the
|
||||
`get-object` but also to the RPC return value (in case you don't provide
|
||||
the return value calculated key under `::key` metadata prop.
|
||||
- `::reuse-key?` enables reusing the key calculated on first time; usefull
|
||||
when the target object is not retrieved on the RPC (typical on retrieving
|
||||
dependent objects).
|
||||
"
|
||||
(:require
|
||||
[app.common.logging :as l]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.util.services :as-alias sv]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]
|
||||
[yetti.response :as yrs]))
|
||||
|
||||
(def
|
||||
^{:dynamic true
|
||||
:doc "Runtime flag for enable/disable conditional processing of RPC methods."}
|
||||
*enabled* false)
|
||||
|
||||
(defn- fmt-key
|
||||
[s]
|
||||
(when s
|
||||
(str "W/\"" s "\"")))
|
||||
|
||||
(defn wrap
|
||||
[{:keys [executor]} f {:keys [::get-object ::key-fn ::reuse-key?] :as mdata}]
|
||||
(if (and (ifn? get-object) (ifn? key-fn))
|
||||
(do
|
||||
(l/debug :hint "instrumenting method" :service (::sv/name mdata))
|
||||
(fn [cfg {:keys [::key] :as params}]
|
||||
(if *enabled*
|
||||
(->> (if (or key reuse-key?)
|
||||
(->> (px/submit! executor (partial get-object cfg params))
|
||||
(p/map key-fn)
|
||||
(p/map fmt-key))
|
||||
(p/resolved nil))
|
||||
(p/mapcat (fn [key']
|
||||
(if (and (some? key)
|
||||
(= key key'))
|
||||
(p/resolved (fn [_] (yrs/response 304)))
|
||||
(->> (f cfg params)
|
||||
(p/map (fn [result]
|
||||
(->> (or (and reuse-key? key')
|
||||
(-> result meta ::key fmt-key)
|
||||
(-> result key-fn fmt-key))
|
||||
(rph/with-header result "etag")))))))))
|
||||
(f cfg params))))
|
||||
f))
|
||||
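;; A hypothetical consumer of this middleware, mirroring the get-view-only-bundle
;; declaration earlier in this diff; get-minimal-object, get-full-object and
;; ::get-some-object are made-up names used only for illustration:
(comment
  (sv/defmethod ::get-some-object
    {:app.rpc.cond/get-object (fn [cfg params] (get-minimal-object cfg (:id params)))
     :app.rpc.cond/key-fn :modified-at  ;; any fn of the object that yields a stable string
     :app.rpc.cond/reuse-key? true
     :app.rpc.doc/added "1.17"}
    [cfg params]
    (get-full-object cfg (:id params))))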
@@ -6,72 +6,11 @@
|
||||
|
||||
(ns app.rpc.helpers
|
||||
"General purpose RPC helpers."
|
||||
(:refer-clojure :exclude [with-meta])
|
||||
(:require
|
||||
[app.common.data.macros :as dm]
|
||||
[app.http :as-alias http]
|
||||
[app.rpc :as-alias rpc]))
|
||||
(:require [app.common.data.macros :as dm]))
|
||||
|
||||
;; A utility wrapper object for wrapping service responses that do not
;; implement the IObj interface, making it possible to attach metadata
;; to them.
|
||||
|
||||
(deftype MetadataWrapper [obj ^:unsynchronized-mutable metadata]
|
||||
clojure.lang.IDeref
|
||||
(deref [_] obj)
|
||||
|
||||
clojure.lang.IObj
|
||||
(withMeta [_ meta]
|
||||
(MetadataWrapper. obj meta))
|
||||
|
||||
(meta [_] metadata))
|
||||
|
||||
(defn wrap
|
||||
"Conditionally wrap a value into MetadataWrapper instance. If the
|
||||
object already implements IObj interface it will be returned as is."
|
||||
([] (wrap nil))
|
||||
([o]
|
||||
(if (instance? clojure.lang.IObj o)
|
||||
o
|
||||
(MetadataWrapper. o {})))
|
||||
([o m]
|
||||
(MetadataWrapper. o m)))
|
||||
|
||||
(defn wrapped?
|
||||
[o]
|
||||
(instance? MetadataWrapper o))
|
||||
|
||||
(defn unwrap
|
||||
[o]
|
||||
(if (wrapped? o) @o o))
|
||||
|
||||
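;; An illustrative sketch of why the wrapper exists: strings and nil cannot carry
;; Clojure metadata directly, while values that already implement IObj pass
;; through untouched:
(comment
  (let [result (wrap "pong")]
    [(wrapped? result) @result (unwrap result)])      ; => [true "pong" "pong"]
  (wrapped? (wrap {:a 1}))                            ; => false, maps already support metadata
  (meta (vary-meta (wrap "pong") assoc ::demo true))  ; => {::demo true}
  )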
(defn with-header
|
||||
"Add a http header to the RPC result."
|
||||
[mdw key val]
|
||||
(vary-meta mdw update ::http/headers assoc key val))
|
||||
|
||||
(defn with-transform
|
||||
"Adds a http response transform to the RPC result."
|
||||
[mdw transform-fn]
|
||||
(vary-meta mdw update ::rpc/response-transform-fns conj transform-fn))
|
||||
|
||||
(defn with-defer
|
||||
"Defer execution of the function until request is finished."
|
||||
[mdw hook-fn]
|
||||
(vary-meta mdw update ::rpc/before-complete-fns conj hook-fn))
|
||||
|
||||
(defn with-meta
|
||||
[mdw mdata]
|
||||
(vary-meta mdw merge mdata))
|
||||
|
||||
(defn assoc-meta
|
||||
[mdw k v]
|
||||
(vary-meta mdw assoc k v))
|
||||
|
||||
(defn with-http-cache
|
||||
[mdw max-age]
|
||||
(vary-meta mdw update ::rpc/response-transform-fns conj
|
||||
(fn [_ response]
|
||||
(let [exp (if (integer? max-age) max-age (inst-ms max-age))
|
||||
val (dm/fmt "max-age=%" (int (/ exp 1000.0)))]
|
||||
(update response :headers assoc "cache-control" val)))))
|
||||
(defn http-cache
|
||||
[{:keys [max-age]}]
|
||||
(fn [_ response]
|
||||
(let [exp (if (integer? max-age) max-age (inst-ms max-age))
|
||||
val (dm/fmt "max-age=%" (int (/ exp 1000.0)))]
|
||||
(update response :headers assoc "cache-control" val))))
|
||||
|
||||
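;; An illustrative sketch of the cache-control arithmetic above; :max-age may be
;; a number of milliseconds or anything inst-ms understands (the response map is
;; a placeholder):
(comment
  ((http-cache {:max-age 86400000}) nil {:headers {}})
  ;; => {:headers {"cache-control" "max-age=86400"}}
  )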
@@ -10,8 +10,8 @@
|
||||
[app.common.spec :as us]
|
||||
[app.db :as db]
|
||||
[app.rpc.commands.comments :as cmd.comments]
|
||||
[app.rpc.commands.files :as cmd.files]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.queries.files :as files]
|
||||
[app.rpc.retry :as retry]
|
||||
[app.util.services :as sv]
|
||||
[clojure.spec.alpha :as s]))
|
||||
@@ -27,7 +27,7 @@
|
||||
::doc/deprecated "1.15"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id share-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(cmd.files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(cmd.comments/create-comment-thread conn params)))
|
||||
|
||||
;; --- Mutation: Update Comment Thread Status
|
||||
@@ -44,7 +44,7 @@
|
||||
(db/with-atomic [conn pool]
|
||||
(let [cthr (db/get-by-id conn :comment-thread id {:for-update true})]
|
||||
(when-not cthr (ex/raise :type :not-found))
|
||||
(cmd.files/check-comment-permissions! conn profile-id (:file-id cthr) share-id)
|
||||
(files/check-comment-permissions! conn profile-id (:file-id cthr) share-id)
|
||||
(cmd.comments/upsert-comment-thread-status! conn profile-id (:id cthr)))))
|
||||
|
||||
|
||||
@@ -61,7 +61,7 @@
|
||||
(when-not thread
|
||||
(ex/raise :type :not-found))
|
||||
|
||||
(cmd.files/check-comment-permissions! conn profile-id (:file-id thread) share-id)
|
||||
(files/check-comment-permissions! conn profile-id (:file-id thread) share-id)
|
||||
(db/update! conn :comment-thread
|
||||
{:is-resolved is-resolved}
|
||||
{:id id})
|
||||
|
||||
@@ -6,231 +6,571 @@
|
||||
|
||||
(ns app.rpc.mutations.files
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.pages :as cp]
|
||||
[app.common.pages.migrations :as pmg]
|
||||
[app.common.spec :as us]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.rpc.climit :as-alias climit]
|
||||
[app.rpc.commands.files :as cmd.files]
|
||||
[app.rpc.commands.files.create :as cmd.files.create]
|
||||
[app.rpc.commands.files.temp :as cmd.files.temp]
|
||||
[app.rpc.commands.files.update :as cmd.files.update]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.metrics :as mtx]
|
||||
[app.msgbus :as mbus]
|
||||
[app.rpc.permissions :as perms]
|
||||
[app.rpc.queries.files :as files]
|
||||
[app.rpc.queries.projects :as proj]
|
||||
[app.rpc.semaphore :as rsem]
|
||||
[app.storage.impl :as simpl]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[clojure.spec.alpha :as s]))
|
||||
[clojure.spec.alpha :as s]
|
||||
[promesa.core :as p]))
|
||||
|
||||
(declare create-file)
|
||||
(declare retrieve-team-id)
|
||||
|
||||
;; --- Helpers & Specs
|
||||
|
||||
(s/def ::frame-id ::us/uuid)
|
||||
(s/def ::file-id ::us/uuid)
|
||||
(s/def ::id ::us/uuid)
|
||||
(s/def ::name ::us/string)
|
||||
(s/def ::profile-id ::us/uuid)
|
||||
(s/def ::project-id ::us/uuid)
|
||||
(s/def ::url ::us/url)
|
||||
|
||||
;; --- Mutation: Create File
|
||||
|
||||
(s/def ::create-file ::cmd.files.create/create-file)
|
||||
(s/def ::is-shared ::us/boolean)
|
||||
(s/def ::create-file
|
||||
(s/keys :req-un [::profile-id ::name ::project-id]
|
||||
:opt-un [::id ::is-shared ::components-v2]))
|
||||
|
||||
(sv/defmethod ::create-file
|
||||
{::doc/added "1.0"
|
||||
::doc/deprecated "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id project-id features components-v2] :as params}]
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id project-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(proj/check-edition-permissions! conn profile-id project-id)
|
||||
(let [team-id (cmd.files/get-team-id conn project-id)
|
||||
features (cond-> (or features #{})
|
||||
;; BACKWARD COMPATIBILITY with the components-v2 param
|
||||
components-v2 (conj "components/v2"))
|
||||
params (assoc params :features features)]
|
||||
(-> (cmd.files.create/create-file conn params)
|
||||
(vary-meta assoc ::audit/props {:team-id team-id})))))
|
||||
(let [team-id (retrieve-team-id conn project-id)]
|
||||
(proj/check-edition-permissions! conn profile-id project-id)
|
||||
(with-meta
|
||||
(create-file conn params)
|
||||
{::audit/props {:team-id team-id}}))))
|
||||
|
||||
(defn create-file-role
|
||||
[conn {:keys [file-id profile-id role]}]
|
||||
(let [params {:file-id file-id
|
||||
:profile-id profile-id}]
|
||||
(->> (perms/assign-role-flags params role)
|
||||
(db/insert! conn :file-profile-rel))))
|
||||
|
||||
(defn create-file
|
||||
[conn {:keys [id name project-id is-shared data revn
|
||||
modified-at deleted-at ignore-sync-until
|
||||
components-v2]
|
||||
:or {is-shared false revn 0}
|
||||
:as params}]
|
||||
(let [id (or id (:id data) (uuid/next))
|
||||
data (or data (ctf/make-file-data id components-v2))
|
||||
file (db/insert! conn :file
|
||||
(d/without-nils
|
||||
{:id id
|
||||
:project-id project-id
|
||||
:name name
|
||||
:revn revn
|
||||
:is-shared is-shared
|
||||
:data (blob/encode data)
|
||||
:ignore-sync-until ignore-sync-until
|
||||
:modified-at modified-at
|
||||
:deleted-at deleted-at}))]
|
||||
|
||||
(->> (assoc params :file-id id :role :owner)
|
||||
(create-file-role conn))
|
||||
|
||||
(assoc file :data data)))
|
||||
|
||||
;; --- Mutation: Rename File
|
||||
|
||||
(s/def ::rename-file ::cmd.files/rename-file)
|
||||
(declare rename-file)
|
||||
|
||||
(s/def ::rename-file
|
||||
(s/keys :req-un [::profile-id ::name ::id]))
|
||||
|
||||
(sv/defmethod ::rename-file
|
||||
{::doc/added "1.0"
|
||||
::doc/deprecated "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [id profile-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(cmd.files/check-edition-permissions! conn profile-id id)
|
||||
(cmd.files/rename-file conn params)))
|
||||
(files/check-edition-permissions! conn profile-id id)
|
||||
(rename-file conn params)))
|
||||
|
||||
(defn- rename-file
|
||||
[conn {:keys [id name] :as params}]
|
||||
(db/update! conn :file
|
||||
{:name name}
|
||||
{:id id}))
|
||||
|
||||
|
||||
;; --- Mutation: Set File shared
|
||||
|
||||
(s/def ::set-file-shared ::cmd.files/set-file-shared)
|
||||
(declare set-file-shared)
|
||||
(declare unlink-files)
|
||||
(declare absorb-library)
|
||||
|
||||
(s/def ::set-file-shared
|
||||
(s/keys :req-un [::profile-id ::id ::is-shared]))
|
||||
|
||||
(sv/defmethod ::set-file-shared
|
||||
{::doc/added "1.2"
|
||||
::doc/deprecated "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [id profile-id is-shared] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(cmd.files/check-edition-permissions! conn profile-id id)
|
||||
(files/check-edition-permissions! conn profile-id id)
|
||||
(when-not is-shared
|
||||
(cmd.files/absorb-library conn params)
|
||||
(cmd.files/unlink-files conn params))
|
||||
(cmd.files/set-file-shared conn params)))
|
||||
(absorb-library conn params)
|
||||
(unlink-files conn params))
|
||||
(set-file-shared conn params)))
|
||||
|
||||
(defn- unlink-files
|
||||
[conn {:keys [id] :as params}]
|
||||
(db/delete! conn :file-library-rel {:library-file-id id}))
|
||||
|
||||
(defn- set-file-shared
|
||||
[conn {:keys [id is-shared] :as params}]
|
||||
(db/update! conn :file
|
||||
{:is-shared is-shared}
|
||||
{:id id}))
|
||||
|
||||
;; --- Mutation: Delete File
|
||||
|
||||
(s/def ::delete-file ::cmd.files/delete-file)
|
||||
(declare mark-file-deleted)
|
||||
|
||||
(s/def ::delete-file
|
||||
(s/keys :req-un [::id ::profile-id]))
|
||||
|
||||
(sv/defmethod ::delete-file
|
||||
{::doc/added "1.0"
|
||||
::doc/deprecated "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [id profile-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(cmd.files/check-edition-permissions! conn profile-id id)
|
||||
(cmd.files/absorb-library conn params)
|
||||
(cmd.files/mark-file-deleted conn params)))
|
||||
(files/check-edition-permissions! conn profile-id id)
|
||||
(absorb-library conn params)
|
||||
(mark-file-deleted conn params)))
|
||||
|
||||
(defn mark-file-deleted
|
||||
[conn {:keys [id] :as params}]
|
||||
(db/update! conn :file
|
||||
{:deleted-at (dt/now)}
|
||||
{:id id})
|
||||
nil)
|
||||
|
||||
(defn absorb-library
|
||||
"Find all files using a shared library, and absorb all library assets
|
||||
into the file local libraries"
|
||||
[conn {:keys [id] :as params}]
|
||||
(let [library (db/get-by-id conn :file id)]
|
||||
(when (:is-shared library)
|
||||
(let [ldata (-> library files/decode-row pmg/migrate-file :data)]
|
||||
(->> (db/query conn :file-library-rel {:library-file-id id})
|
||||
(keep (fn [{:keys [file-id]}]
|
||||
(some->> (db/get-by-id conn :file file-id {:check-not-found false})
|
||||
(files/decode-row)
|
||||
(pmg/migrate-file))))
|
||||
(run! (fn [{:keys [id data revn] :as file}]
|
||||
(let [data (ctf/absorb-assets data ldata)]
|
||||
(db/update! conn :file
|
||||
{:revn (inc revn)
|
||||
:data (blob/encode data)
|
||||
:modified-at (dt/now)}
|
||||
{:id id})))))))))
|
||||
|
||||
;; --- Mutation: Link file to library
|
||||
|
||||
(s/def ::link-file-to-library ::cmd.files/link-file-to-library)
|
||||
(declare link-file-to-library)
|
||||
|
||||
(s/def ::link-file-to-library
|
||||
(s/keys :req-un [::profile-id ::file-id ::library-id]))
|
||||
|
||||
(sv/defmethod ::link-file-to-library
|
||||
{::doc/added "1.3"
|
||||
::doc/deprecated "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id library-id] :as params}]
|
||||
(when (= file-id library-id)
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-library
|
||||
:hint "A file cannot be linked to itself"))
|
||||
(db/with-atomic [conn pool]
|
||||
(cmd.files/check-edition-permissions! conn profile-id file-id)
|
||||
(cmd.files/check-edition-permissions! conn profile-id library-id)
|
||||
(cmd.files/link-file-to-library conn params)))
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
(files/check-edition-permissions! conn profile-id library-id)
|
||||
(link-file-to-library conn params)))
|
||||
|
||||
(def sql:link-file-to-library
|
||||
"insert into file_library_rel (file_id, library_file_id)
|
||||
values (?, ?)
|
||||
on conflict do nothing;")
|
||||
|
||||
(defn- link-file-to-library
|
||||
[conn {:keys [file-id library-id] :as params}]
|
||||
(db/exec-one! conn [sql:link-file-to-library file-id library-id]))
|
||||
|
||||
|
||||
;; --- Mutation: Unlink file from library
|
||||
|
||||
(s/def ::unlink-file-from-library ::cmd.files/unlink-file-from-library)
|
||||
(declare unlink-file-from-library)
|
||||
|
||||
(s/def ::unlink-file-from-library
|
||||
(s/keys :req-un [::profile-id ::file-id ::library-id]))
|
||||
|
||||
(sv/defmethod ::unlink-file-from-library
|
||||
{::doc/added "1.3"
|
||||
::doc/deprecated "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(cmd.files/check-edition-permissions! conn profile-id file-id)
|
||||
(cmd.files/unlink-file-from-library conn params)))
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
(unlink-file-from-library conn params)))
|
||||
|
||||
(defn- unlink-file-from-library
|
||||
[conn {:keys [file-id library-id] :as params}]
|
||||
(db/delete! conn :file-library-rel
|
||||
{:file-id file-id
|
||||
:library-file-id library-id}))
|
||||
|
||||
|
||||
;; --- Mutation: Update synchronization status of a link
|
||||
|
||||
(s/def ::update-sync ::cmd.files/update-file-library-sync-status)
|
||||
(declare update-sync)
|
||||
|
||||
(s/def ::update-sync
|
||||
(s/keys :req-un [::profile-id ::file-id ::library-id]))
|
||||
|
||||
(sv/defmethod ::update-sync
|
||||
{::doc/added "1.10"
|
||||
::doc/deprecated "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(cmd.files/check-edition-permissions! conn profile-id file-id)
|
||||
(cmd.files/update-sync conn params)))
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
(update-sync conn params)))
|
||||
|
||||
(defn- update-sync
|
||||
[conn {:keys [file-id library-id] :as params}]
|
||||
(db/update! conn :file-library-rel
|
||||
{:synced-at (dt/now)}
|
||||
{:file-id file-id
|
||||
:library-file-id library-id}))
|
||||
|
||||
;; --- Mutation: Ignore updates in linked files
|
||||
|
||||
(declare ignore-sync)
|
||||
|
||||
(s/def ::ignore-sync ::cmd.files/ignore-file-library-sync-status)
|
||||
(s/def ::ignore-sync
|
||||
(s/keys :req-un [::profile-id ::file-id ::date]))
|
||||
|
||||
(sv/defmethod ::ignore-sync
|
||||
{::doc/added "1.10"
|
||||
::doc/deprecated "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(cmd.files/check-edition-permissions! conn profile-id file-id)
|
||||
(cmd.files/ignore-sync conn params)))
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
(ignore-sync conn params)))
|
||||
|
||||
(defn- ignore-sync
|
||||
[conn {:keys [file-id date] :as params}]
|
||||
(db/update! conn :file
|
||||
{:ignore-sync-until date}
|
||||
{:id file-id}))
|
||||
|
||||
|
||||
;; --- MUTATION: update-file
|
||||
|
||||
;; A generic, changes-based (granular) file update method.
|
||||
|
||||
;; File changes that affect the library and must be notified
;; to all clients using it.
|
||||
(defn library-change?
|
||||
[change]
|
||||
(or (#{:add-color :mod-color :del-color
|
||||
:add-media :mod-media :del-media
|
||||
:add-component :mod-component :del-component
|
||||
:add-typography :mod-typography :del-typography} (:type change))
|
||||
(and (#{:add-obj :mod-obj :del-obj
|
||||
:reg-objects :mov-objects} (:type change))
|
||||
(some? (:component-id change)))))
|
||||
|
||||
(declare insert-change)
|
||||
(declare retrieve-lagged-changes)
|
||||
(declare send-notifications)
|
||||
(declare update-file)
|
||||
|
||||
(s/def ::changes
|
||||
(s/coll-of map? :kind vector?))
|
||||
|
||||
(s/def ::hint-origin ::us/keyword)
|
||||
(s/def ::hint-events
|
||||
(s/every ::us/keyword :kind vector?))
|
||||
|
||||
(s/def ::change-with-metadata
|
||||
(s/keys :req-un [::changes]
|
||||
:opt-un [::hint-origin
|
||||
::hint-events]))
|
||||
|
||||
(s/def ::changes-with-metadata
|
||||
(s/every ::change-with-metadata :kind vector?))
|
||||
|
||||
(s/def ::session-id ::us/uuid)
|
||||
(s/def ::revn ::us/integer)
|
||||
(s/def ::components-v2 ::us/boolean)
|
||||
(s/def ::update-file
|
||||
(s/and ::cmd.files.update/update-file
|
||||
(s/keys :opt-un [::components-v2])))
|
||||
(s/and
|
||||
(s/keys :req-un [::id ::session-id ::profile-id ::revn]
|
||||
:opt-un [::changes ::changes-with-metadata ::components-v2])
|
||||
(fn [o]
|
||||
(or (contains? o :changes)
|
||||
(contains? o :changes-with-metadata)))))
|
||||
|
||||
(sv/defmethod ::update-file
|
||||
{::climit/queue :update-file
|
||||
::climit/key-fn :id
|
||||
::doc/added "1.0"
|
||||
::doc/deprecated "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [id profile-id features components-v2] :as params}]
|
||||
{::rsem/queue :update-file}
|
||||
[{:keys [pool] :as cfg} {:keys [id profile-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(db/xact-lock! conn id)
|
||||
(cmd.files/check-edition-permissions! conn profile-id id)
|
||||
(let [{:keys [id] :as file} (db/get-by-id conn :file id {:for-key-share true})
|
||||
team-id (retrieve-team-id conn (:project-id file))]
|
||||
(files/check-edition-permissions! conn profile-id id)
|
||||
(with-meta
|
||||
(update-file (assoc cfg :conn conn)
|
||||
(assoc params :file file))
|
||||
{::audit/props {:project-id (:project-id file)
|
||||
:team-id team-id}}))))
|
||||
|
||||
(let [;; BACKWARD COMPATIBILITY with the components-v2 parameter
|
||||
features (cond-> (or features #{})
|
||||
components-v2 (conj "components/v2"))
|
||||
tpoint (dt/tpoint)
|
||||
params (assoc params :features features)
|
||||
cfg (assoc cfg :conn conn)]
|
||||
(defn- take-snapshot?
|
||||
"Defines the rule when file `data` snapshot should be saved."
|
||||
[{:keys [revn modified-at] :as file}]
|
||||
(let [freq (or (cf/get :file-change-snapshot-every) 20)
|
||||
timeout (or (cf/get :file-change-snapshot-timeout)
|
||||
(dt/duration {:hours 1}))]
|
||||
(or (= 1 freq)
|
||||
(zero? (mod revn freq))
|
||||
(> (inst-ms (dt/diff modified-at (dt/now)))
|
||||
(inst-ms timeout)))))
|
||||
|
||||
(-> (cmd.files.update/update-file cfg params)
|
||||
(rph/with-defer #(let [elapsed (tpoint)]
|
||||
(l/trace :hint "update-file" :time (dt/format-duration elapsed))))))))
|
||||
(defn- delete-from-storage
|
||||
[{:keys [storage] :as cfg} file]
|
||||
(p/do
|
||||
(when-let [backend (simpl/resolve-backend storage (:data-backend file))]
|
||||
(simpl/del-object backend file))))
|
||||
|
||||
(defn- update-file
|
||||
[{:keys [conn metrics] :as cfg}
|
||||
{:keys [file changes changes-with-metadata session-id profile-id components-v2] :as params}]
|
||||
(when (> (:revn params)
|
||||
(:revn file))
|
||||
|
||||
(ex/raise :type :validation
|
||||
:code :revn-conflict
|
||||
:hint "The incoming revision number is greater that stored version."
|
||||
:context {:incoming-revn (:revn params)
|
||||
:stored-revn (:revn file)}))
|
||||
|
||||
(let [changes (if changes-with-metadata
|
||||
(mapcat :changes changes-with-metadata)
|
||||
changes)
|
||||
|
||||
changes (vec changes)
|
||||
|
||||
;; Trace the number of changes processed
|
||||
_ (mtx/run! metrics {:id :update-file-changes :inc (count changes)})
|
||||
|
||||
ts (dt/now)
|
||||
file (-> file
|
||||
(update :revn inc)
|
||||
(update :data (fn [data]
|
||||
;; Trace the number of bytes of processed data
|
||||
(mtx/run! metrics {:id :update-file-bytes-processed :inc (alength data)})
|
||||
(cond-> data
|
||||
:always
|
||||
(-> (blob/decode)
|
||||
(assoc :id (:id file))
|
||||
(pmg/migrate-data))
|
||||
|
||||
components-v2
|
||||
(ctf/migrate-to-components-v2)
|
||||
|
||||
:always
|
||||
(-> (cp/process-changes changes)
|
||||
(blob/encode))))))]
|
||||
;; Insert change to the xlog
|
||||
(db/insert! conn :file-change
|
||||
{:id (uuid/next)
|
||||
:session-id session-id
|
||||
:profile-id profile-id
|
||||
:created-at ts
|
||||
:file-id (:id file)
|
||||
:revn (:revn file)
|
||||
:data (when (take-snapshot? file)
|
||||
(:data file))
|
||||
:changes (blob/encode changes)})
|
||||
|
||||
;; Update file
|
||||
(db/update! conn :file
|
||||
{:revn (:revn file)
|
||||
:data (:data file)
|
||||
:data-backend nil
|
||||
:modified-at ts
|
||||
:has-media-trimmed false}
|
||||
{:id (:id file)})
|
||||
|
||||
;; We need to delete the data from the external storage backend
|
||||
(when-not (nil? (:data-backend file))
|
||||
@(delete-from-storage cfg file))
|
||||
|
||||
(db/update! conn :project
|
||||
{:modified-at ts}
|
||||
{:id (:project-id file)})
|
||||
|
||||
(let [params (assoc params :file file :changes changes)]
|
||||
;; Send asynchronous notifications
|
||||
(send-notifications cfg params)
|
||||
|
||||
;; Retrieve and return lagged data
|
||||
(retrieve-lagged-changes conn params))))
|
||||
|
||||
(def ^:private
|
||||
sql:lagged-changes
|
||||
"select s.id, s.revn, s.file_id,
|
||||
s.session_id, s.changes
|
||||
from file_change as s
|
||||
where s.file_id = ?
|
||||
and s.revn > ?
|
||||
order by s.created_at asc")
|
||||
|
||||
(defn- retrieve-lagged-changes
|
||||
[conn params]
|
||||
(->> (db/exec! conn [sql:lagged-changes (:id params) (:revn params)])
|
||||
(into [] (comp (map files/decode-row)
|
||||
(map (fn [row]
|
||||
(cond-> row
|
||||
(= (:revn row) (:revn (:file params)))
|
||||
(assoc :changes []))))))))
|
||||
|
||||
(defn- send-notifications
  [{:keys [conn] :as cfg} {:keys [file changes session-id] :as params}]
  (let [lchanges (filter library-change? changes)
        msgbus   (:msgbus cfg)]

    ;; Asynchronously publish message to the msgbus
    (mbus/pub! msgbus
               :topic (:id file)
               :message {:type :file-change
                         :profile-id (:profile-id params)
                         :file-id (:id file)
                         :session-id (:session-id params)
                         :revn (:revn file)
                         :changes changes})

    (when (and (:is-shared file) (seq lchanges))
      (let [team-id (retrieve-team-id conn (:project-id file))]
        ;; Asynchronously publish message to the msgbus
        (mbus/pub! msgbus
                   :topic team-id
                   :message {:type :library-change
                             :profile-id (:profile-id params)
                             :file-id (:id file)
                             :session-id session-id
                             :revn (:revn file)
                             :modified-at (dt/now)
                             :changes lchanges})))))

(defn- retrieve-team-id
  [conn project-id]
  (:team-id (db/get-by-id conn :project project-id {:columns [:team-id]})))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; TEMPORARY FILES (behaves differently)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(s/def ::create-temp-file ::create-file)

(sv/defmethod ::create-temp-file
  [{:keys [pool] :as cfg} {:keys [profile-id project-id] :as params}]
  (db/with-atomic [conn pool]
    (proj/check-edition-permissions! conn profile-id project-id)
    (create-file conn (assoc params :deleted-at (dt/in-future {:days 1})))))

(s/def ::update-temp-file
  (s/keys :req-un [::changes ::revn ::session-id ::id]))

(sv/defmethod ::update-temp-file
  [{:keys [pool] :as cfg} {:keys [profile-id session-id id revn changes] :as params}]
  (db/with-atomic [conn pool]
    (db/insert! conn :file-change
                {:id (uuid/next)
                 :session-id session-id
                 :profile-id profile-id
                 :created-at (dt/now)
                 :file-id id
                 :revn revn
                 :data nil
                 :changes (blob/encode changes)})
    nil))

(s/def ::persist-temp-file
  (s/keys :req-un [::id ::profile-id]))

(sv/defmethod ::persist-temp-file
  [{:keys [pool] :as cfg} {:keys [id profile-id] :as params}]
  (db/with-atomic [conn pool]
    (files/check-edition-permissions! conn profile-id id)
    (let [file (db/get-by-id conn :file id)
          revs (db/query conn :file-change
                         {:file-id id}
                         {:order-by [[:revn :asc]]})
          revn (count revs)]

      (when (nil? (:deleted-at file))
        (ex/raise :type :validation
                  :code :cant-persist-already-persisted-file))

      (loop [revs (seq revs)
             data (blob/decode (:data file))]
        (if-let [rev (first revs)]
          (recur (rest revs)
                 (->> rev :changes blob/decode (cp/process-changes data)))
          (db/update! conn :file
                      {:deleted-at nil
                       :revn revn
                       :data (blob/encode data)}
                      {:id id})))
      nil)))

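;; Sketch (not part of the diff): the persistence loop above is
;; equivalent to reducing every stored change set over the decoded file
;; data before writing it back.
(comment
  (reduce (fn [data rev]
            (->> rev :changes blob/decode (cp/process-changes data)))
          (blob/decode (:data file))
          revs))
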
;; --- Mutation: upsert object thumbnail

(s/def ::upsert-file-object-thumbnail ::cmd.files/upsert-file-object-thumbnail)
(def sql:upsert-object-thumbnail
  "insert into file_object_thumbnail(file_id, object_id, data)
   values (?, ?, ?)
       on conflict(file_id, object_id) do
          update set data = ?;")

(s/def ::data (s/nilable ::us/string))
(s/def ::object-id ::us/string)
(s/def ::upsert-file-object-thumbnail
  (s/keys :req-un [::profile-id ::file-id ::object-id ::data]))

(sv/defmethod ::upsert-file-object-thumbnail
  {::doc/added "1.13"
   ::doc/deprecated "1.17"}
  [{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
  [{:keys [pool] :as cfg} {:keys [profile-id file-id object-id data]}]
  (db/with-atomic [conn pool]
    (cmd.files/check-edition-permissions! conn profile-id file-id)
    (cmd.files/upsert-file-object-thumbnail! conn params)
    (files/check-edition-permissions! conn profile-id file-id)
    (if data
      (db/exec-one! conn [sql:upsert-object-thumbnail file-id object-id data data])
      (db/delete! conn :file-object-thumbnail {:file-id file-id :object-id object-id}))
    nil))

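;; Sketch (not part of the diff): the upsert statement above has four
;; placeholders, three for the inserted tuple and one for the conflict
;; update, which is why `data` is passed twice in the exec call.
(comment
  (db/exec-one! conn [sql:upsert-object-thumbnail
                      file-id object-id
                      data    ;; value for the insert
                      data])) ;; value for "on conflict ... update set data = ?"
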
;; --- Mutation: upsert file thumbnail

(s/def ::upsert-file-thumbnail ::cmd.files/upsert-file-thumbnail)
(def sql:upsert-file-thumbnail
  "insert into file_thumbnail (file_id, revn, data, props)
   values (?, ?, ?, ?::jsonb)
       on conflict(file_id, revn) do
          update set data = ?, props = ?, updated_at = now();")

(s/def ::revn ::us/integer)
(s/def ::props map?)
(s/def ::upsert-file-thumbnail
  (s/keys :req-un [::profile-id ::file-id ::revn ::data ::props]))

(sv/defmethod ::upsert-file-thumbnail
  "Creates or updates the file thumbnail. Mainly used for painting the
  grid thumbnails."
  {::doc/added "1.13"
   ::doc/deprecated "1.17"}
  [{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
  [{:keys [pool] :as cfg} {:keys [profile-id file-id revn data props]}]
  (db/with-atomic [conn pool]
    (cmd.files/check-edition-permissions! conn profile-id file-id)
    (cmd.files/upsert-file-thumbnail conn params)
    nil))

;; --- MUTATION COMMAND: create-temp-file

(s/def ::create-temp-file ::cmd.files.temp/create-temp-file)

(sv/defmethod ::create-temp-file
  {::doc/added "1.7"
   ::doc/deprecated "1.17"}
  [{:keys [pool] :as cfg} {:keys [profile-id project-id] :as params}]
  (db/with-atomic [conn pool]
    (proj/check-edition-permissions! conn profile-id project-id)
    (cmd.files.create/create-file conn (assoc params :deleted-at (dt/in-future {:days 1})))))

;; --- MUTATION COMMAND: update-temp-file

(s/def ::update-temp-file ::cmd.files.temp/update-temp-file)

(sv/defmethod ::update-temp-file
  {::doc/added "1.7"
   ::doc/deprecated "1.17"}
  [{:keys [pool] :as cfg} params]
  (db/with-atomic [conn pool]
    (cmd.files.temp/update-temp-file conn params)
    nil))

;; --- MUTATION COMMAND: persist-temp-file

(s/def ::persist-temp-file ::cmd.files.temp/persist-temp-file)

(sv/defmethod ::persist-temp-file
  {::doc/added "1.7"
   ::doc/deprecated "1.17"}
  [{:keys [pool] :as cfg} {:keys [id profile-id] :as params}]
  (db/with-atomic [conn pool]
    (cmd.files/check-edition-permissions! conn profile-id id)
    (cmd.files.temp/persist-temp-file conn params)))
    (files/check-edition-permissions! conn profile-id file-id)
    (let [props (db/tjson (or props {}))]
      (db/exec-one! conn [sql:upsert-file-thumbnail
                          file-id revn data props data props])
      nil)))

@@ -11,19 +11,15 @@
   [app.common.spec :as us]
   [app.common.uuid :as uuid]
   [app.db :as db]
   [app.loggers.audit :as-alias audit]
   [app.loggers.webhooks :as-alias webhooks]
   [app.media :as media]
   [app.rpc.climit :as-alias climit]
   [app.rpc.doc :as-alias doc]
   [app.rpc.helpers :as rph]
   [app.rpc.queries.teams :as teams]
   [app.rpc.semaphore :as rsem]
   [app.storage :as sto]
   [app.util.services :as sv]
   [app.util.time :as dt]
   [clojure.spec.alpha :as s]
   [promesa.core :as p]
   [promesa.exec :as px]))
   [promesa.core :as p]))

(declare create-font-variant)

@@ -44,23 +40,21 @@
     ::font-id ::font-family ::font-weight ::font-style]))

(sv/defmethod ::create-font-variant
  {::doc/added "1.3"
   ::webhooks/event? true}
  [{:keys [pool] :as cfg} {:keys [team-id profile-id] :as params}]
  (let [cfg (update cfg :storage media/configure-assets-storage)]
    (teams/check-edition-permissions! pool profile-id team-id)
    (create-font-variant cfg params)))

(defn create-font-variant
  [{:keys [storage pool executor climit] :as cfg} {:keys [data] :as params}]
  [{:keys [storage pool executor semaphores] :as cfg} {:keys [data] :as params}]
  (letfn [(generate-fonts [data]
            (climit/with-dispatch (:process-font climit)
            (rsem/with-dispatch (:process-font semaphores)
              (media/run {:cmd :generate-fonts :input data})))

          ;; Function responsible for calculating the cryptographic hash
          ;; of the provided data.
          (calculate-hash [data]
            (px/with-dispatch executor
            (rsem/with-dispatch (:process-font semaphores)
              (sto/calculate-hash data)))

          (validate-data [data]
@@ -109,29 +103,28 @@
                :ttf-file-id (:id ttf)}))
          ]

    (->> (generate-fonts data)
         (p/map validate-data)
         (p/mcat executor persist-fonts)
         (p/map executor insert-into-db)
         (p/map (fn [result]
                  (let [params (update params :data (comp vec keys))]
                    (rph/with-meta result {::audit/replace-props params})))))))
    (-> (generate-fonts data)
        (p/then validate-data)
        (p/then persist-fonts executor)
        (p/then insert-into-db executor))))

;; --- UPDATE FONT FAMILY

(s/def ::update-font
  (s/keys :req-un [::profile-id ::team-id ::id ::name]))

(def sql:update-font
  "update team_font_variant
      set font_family = ?
    where team_id = ?
      and font_id = ?")

(sv/defmethod ::update-font
  {::doc/added "1.3"
   ::webhooks/event? true}
  [{:keys [pool] :as cfg} {:keys [team-id profile-id id name] :as params}]
  (db/with-atomic [conn pool]
    (teams/check-edition-permissions! conn profile-id team-id)
    (db/update! conn :team-font-variant
                {:font-family name}
                {:font-id id
                 :team-id team-id})))
    (db/exec-one! conn [sql:update-font name team-id id])
    nil))

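;; Sketch (not part of the diff): whichever promise combinators the final
;; version keeps, the font-variant creation shown just above is the same
;; four-step pipeline.
(comment
  (-> (generate-fonts data)
      (p/then validate-data)
      (p/then persist-fonts)
      (p/then insert-into-db)))
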
;; --- DELETE FONT

@@ -139,11 +132,10 @@
  (s/keys :req-un [::profile-id ::team-id ::id]))

(sv/defmethod ::delete-font
  {::doc/added "1.3"
   ::webhooks/event? true}
  [{:keys [pool] :as cfg} {:keys [id team-id profile-id] :as params}]
  (db/with-atomic [conn pool]
    (teams/check-edition-permissions! conn profile-id team-id)

    (db/update! conn :team-font-variant
                {:deleted-at (dt/now)}
                {:font-id id :team-id team-id})
@@ -155,8 +147,7 @@
  (s/keys :req-un [::profile-id ::team-id ::id]))

(sv/defmethod ::delete-font-variant
  {::doc/added "1.3"
   ::webhooks/event? true}
  {::doc/added "1.3"}
  [{:keys [pool] :as cfg} {:keys [id team-id profile-id] :as params}]
  (db/with-atomic [conn pool]
    (teams/check-edition-permissions! conn profile-id team-id)

@@ -13,10 +13,9 @@
   [app.common.uuid :as uuid]
   [app.config :as cf]
   [app.db :as db]
   [app.http.client :as http]
   [app.media :as media]
   [app.rpc.climit :as climit]
   [app.rpc.queries.teams :as teams]
   [app.rpc.semaphore :as rsem]
   [app.storage :as sto]
   [app.storage.tmp :as tmp]
   [app.util.services :as sv]
@@ -24,8 +23,7 @@
   [clojure.spec.alpha :as s]
   [cuerdas.core :as str]
   [datoteka.io :as io]
   [promesa.core :as p]
   [promesa.exec :as px]))
   [promesa.core :as p]))

(def default-max-file-size (* 1024 1024 10)) ; 10 MiB

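;; Quick check (not part of the diff) of the constant above: 10 MiB
;; expressed in bytes.
(comment
  (* 1024 1024 10)) ;; => 10485760
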
@@ -106,25 +104,25 @@
;; inverse, soft referential integrity).

(defn create-file-media-object
  [{:keys [storage pool climit executor] :as cfg}
  [{:keys [storage pool semaphores] :as cfg}
   {:keys [id file-id is-local name content] :as params}]
  (letfn [;; Function responsible for retrieving the file information; as
          ;; it is a synchronous operation it should be wrapped in the
          ;; with-dispatch macro.
          (get-info [content]
            (climit/with-dispatch (:process-image climit)
            (rsem/with-dispatch (:process-image semaphores)
              (media/run {:cmd :info :input content})))

          ;; Function responsible for calculating the cryptographic hash
          ;; of the provided data.
          (calculate-hash [data]
            (px/with-dispatch executor
            (rsem/with-dispatch (:process-image semaphores)
              (sto/calculate-hash data)))

          ;; Function responsible for generating the thumbnail. As it is a
          ;; synchronous operation, it should be wrapped in the with-dispatch macro.
          (generate-thumbnail [info]
            (climit/with-dispatch (:process-image climit)
            (rsem/with-dispatch (:process-image semaphores)
              (media/run (assoc thumbnail-options
                                :cmd :generic-thumbnail
                                :input info))))
@@ -156,15 +154,14 @@
                                :bucket "file-media-object"})))

          (insert-into-database [info image thumb]
            (px/with-dispatch executor
              (db/exec-one! pool [sql:create-file-media-object
                                  (or id (uuid/next))
                                  file-id is-local name
                                  (:id image)
                                  (:id thumb)
                                  (:width info)
                                  (:height info)
                                  (:mtype info)])))]
            (db/exec-one! pool [sql:create-file-media-object
                                (or id (uuid/next))
                                file-id is-local name
                                (:id image)
                                (:id thumb)
                                (:width info)
                                (:height info)
                                (:mtype info)]))]

    (p/let [info (get-info content)
            thumb (create-thumbnail info)
@@ -187,7 +184,7 @@
    (create-file-media-object-from-url cfg params)))

(defn- create-file-media-object-from-url
  [cfg {:keys [url name] :as params}]
  [{:keys [http-client] :as cfg} {:keys [url name] :as params}]
  (letfn [(parse-and-validate-size [headers]
            (let [size  (some-> (get headers "content-length") d/parse-integer)
                  mtype (get headers "content-type")
@@ -216,7 +213,7 @@
               :format format}))

          (download-media [uri]
            (-> (http/req! cfg {:method :get :uri uri} {:response-type :input-stream})
            (-> (http-client {:method :get :uri uri} {:response-type :input-stream})
                (p/then process-response)))

          (process-response [{:keys [body headers] :as response}]

@@ -12,16 +12,13 @@
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.emails :as eml]
|
||||
[app.http.session :as session]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.media :as media]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.climit :as-alias climit]
|
||||
[app.rpc.commands.auth :as cmd.auth]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.rpc.mutations.teams :as teams]
|
||||
[app.rpc.queries.profile :as profile]
|
||||
[app.rpc.semaphore :as rsem]
|
||||
[app.storage :as sto]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.services :as sv]
|
||||
@@ -49,7 +46,6 @@
|
||||
:opt-un [::lang ::theme]))
|
||||
|
||||
(sv/defmethod ::update-profile
|
||||
{::doc/added "1.0"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id fullname lang theme] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
;; NOTE: we need to retrieve the profile independently if we use
|
||||
@@ -72,11 +68,8 @@
|
||||
:props (db/tjson (:props profile))}
|
||||
{:id profile-id})
|
||||
|
||||
(-> profile
|
||||
profile/strip-private-attrs
|
||||
d/without-nils
|
||||
(rph/with-meta {::audit/props (audit/profile->props profile)})))))
|
||||
|
||||
(with-meta (-> profile profile/strip-private-attrs d/without-nils)
|
||||
{::audit/props (audit/profile->props profile)}))))
|
||||
|
||||
;; --- MUTATION: Update Password
|
||||
|
||||
@@ -88,11 +81,11 @@
|
||||
(s/keys :req-un [::profile-id ::password ::old-password]))
|
||||
|
||||
(sv/defmethod ::update-profile-password
|
||||
{::climit/queue :auth}
|
||||
{::rsem/queue :auth}
|
||||
[{:keys [pool] :as cfg} {:keys [password] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [profile (validate-password! conn params)
|
||||
session-id (::rpc/session-id params)]
|
||||
session-id (:app.rpc/session-id params)]
|
||||
(when (= (str/lower (:email profile))
|
||||
(str/lower (:password params)))
|
||||
(ex/raise :type :validation
|
||||
@@ -138,7 +131,7 @@
|
||||
(update-profile-photo cfg params)))
|
||||
|
||||
(defn update-profile-photo
|
||||
[{:keys [pool storage executor] :as cfg} {:keys [profile-id file] :as params}]
|
||||
[{:keys [pool storage executor] :as cfg} {:keys [profile-id] :as params}]
|
||||
(p/let [profile (px/with-dispatch executor
|
||||
(db/get-by-id pool :profile profile-id))
|
||||
photo (teams/upload-photo cfg params)]
|
||||
@@ -151,13 +144,7 @@
|
||||
(db/update! pool :profile
|
||||
{:photo-id (:id photo)}
|
||||
{:id profile-id})
|
||||
|
||||
(-> (rph/wrap)
|
||||
(rph/with-meta {::audit/replace-props
|
||||
{:file-name (:filename file)
|
||||
:file-size (:size file)
|
||||
:file-path (str (:path file))
|
||||
:file-mtype (:mtype file)}}))))
|
||||
nil))
|
||||
|
||||
;; --- MUTATION: Request Email Change
|
||||
|
||||
@@ -289,7 +276,8 @@
|
||||
{:deleted-at deleted-at}
|
||||
{:id profile-id})
|
||||
|
||||
(rph/with-transform {} (session/delete-fn session)))))
|
||||
(with-meta {}
|
||||
{:transform-response (:delete session)}))))
|
||||
|
||||
(def sql:owned-teams
|
||||
"with owner_teams as (
|
||||
@@ -308,3 +296,77 @@
|
||||
(defn- get-owned-teams-with-participants
|
||||
[conn profile-id]
|
||||
(db/exec! conn [sql:owned-teams profile-id profile-id]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; DEPRECATED METHODS (TO BE REMOVED ON 1.16.x)
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
;; --- MUTATION: Login
|
||||
|
||||
(s/def ::login ::cmd.auth/login-with-password)
|
||||
|
||||
(sv/defmethod ::login
|
||||
{:auth false
|
||||
::rsem/queue :auth
|
||||
::doc/added "1.0"
|
||||
::doc/deprecated "1.15"}
|
||||
[cfg params]
|
||||
(cmd.auth/login-with-password cfg params))
|
||||
|
||||
;; --- MUTATION: Logout
|
||||
|
||||
(s/def ::logout ::cmd.auth/logout)
|
||||
|
||||
(sv/defmethod ::logout
|
||||
{:auth false
|
||||
::doc/added "1.0"
|
||||
::doc/deprecated "1.15"}
|
||||
[{:keys [session] :as cfg} _]
|
||||
(with-meta {}
|
||||
{:transform-response (:delete session)}))
|
||||
|
||||
;; --- MUTATION: Recover Profile
|
||||
|
||||
(s/def ::recover-profile ::cmd.auth/recover-profile)
|
||||
|
||||
(sv/defmethod ::recover-profile
|
||||
{::doc/added "1.0"
|
||||
::doc/deprecated "1.15"}
|
||||
[cfg params]
|
||||
(cmd.auth/recover-profile cfg params))
|
||||
|
||||
;; --- MUTATION: Prepare Register
|
||||
|
||||
(s/def ::prepare-register-profile ::cmd.auth/prepare-register-profile)
|
||||
|
||||
(sv/defmethod ::prepare-register-profile
|
||||
{:auth false
|
||||
::doc/added "1.0"
|
||||
::doc/deprecated "1.15"}
|
||||
[cfg params]
|
||||
(cmd.auth/prepare-register cfg params))
|
||||
|
||||
;; --- MUTATION: Register Profile
|
||||
|
||||
(s/def ::register-profile ::cmd.auth/register-profile)
|
||||
|
||||
(sv/defmethod ::register-profile
|
||||
{:auth false
|
||||
::rsem/queue :auth
|
||||
::doc/added "1.0"
|
||||
::doc/deprecated "1.15"}
|
||||
[{:keys [pool] :as cfg} params]
|
||||
(db/with-atomic [conn pool]
|
||||
(-> (assoc cfg :conn conn)
|
||||
(cmd.auth/register-profile params))))
|
||||
|
||||
;; --- MUTATION: Request Profile Recovery
|
||||
|
||||
(s/def ::request-profile-recovery ::cmd.auth/request-profile-recovery)
|
||||
|
||||
(sv/defmethod ::request-profile-recovery
|
||||
{:auth false
|
||||
::doc/added "1.0"
|
||||
::doc/deprecated "1.15"}
|
||||
[cfg params]
|
||||
(cmd.auth/request-profile-recovery cfg params))
|
||||
|
||||
@@ -9,10 +9,6 @@
|
||||
[app.common.spec :as us]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.rpc.permissions :as perms]
|
||||
[app.rpc.queries.projects :as proj]
|
||||
[app.rpc.queries.teams :as teams]
|
||||
@@ -26,6 +22,7 @@
|
||||
(s/def ::name ::us/string)
|
||||
(s/def ::profile-id ::us/uuid)
|
||||
|
||||
|
||||
;; --- Mutation: Create Project
|
||||
|
||||
(declare create-project)
|
||||
@@ -38,8 +35,6 @@
|
||||
:opt-un [::id]))
|
||||
|
||||
(sv/defmethod ::create-project
|
||||
{::doc/added "1.0"
|
||||
::webhooks/event? true}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id team-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(teams/check-edition-permissions! conn profile-id team-id)
|
||||
@@ -127,13 +122,10 @@
|
||||
;; this is not allowed.
|
||||
|
||||
(sv/defmethod ::delete-project
|
||||
{::doc/added "1.0"
|
||||
::webhooks/event? true}
|
||||
[{:keys [pool] :as cfg} {:keys [id profile-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(proj/check-edition-permissions! conn profile-id id)
|
||||
(let [project (db/update! conn :project
|
||||
{:deleted-at (dt/now)}
|
||||
{:id id :is-default false})]
|
||||
(rph/with-meta (rph/wrap)
|
||||
{::audit/props {:team-id (:team-id project)}}))))
|
||||
(db/update! conn :project
|
||||
{:deleted-at (dt/now)}
|
||||
{:id id :is-default false})
|
||||
nil))
|
||||
|
||||
@@ -10,7 +10,7 @@
|
||||
[app.common.spec :as us]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.queries.files :as files]
|
||||
[app.util.services :as sv]
|
||||
[clojure.spec.alpha :as s]))
|
||||
|
||||
|
||||
@@ -16,12 +16,11 @@
|
||||
[app.emails :as eml]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.media :as media]
|
||||
[app.rpc.climit :as climit]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.rpc.mutations.projects :as projects]
|
||||
[app.rpc.permissions :as perms]
|
||||
[app.rpc.queries.profile :as profile]
|
||||
[app.rpc.queries.teams :as teams]
|
||||
[app.rpc.semaphore :as rsem]
|
||||
[app.storage :as sto]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.services :as sv]
|
||||
@@ -316,13 +315,13 @@
|
||||
(assoc team :photo-id (:id photo))))
|
||||
|
||||
(defn upload-photo
|
||||
[{:keys [storage executor climit] :as cfg} {:keys [file]}]
|
||||
[{:keys [storage semaphores] :as cfg} {:keys [file]}]
|
||||
(letfn [(get-info [content]
|
||||
(climit/with-dispatch (:process-image climit)
|
||||
(rsem/with-dispatch (:process-image semaphores)
|
||||
(media/run {:cmd :info :input content})))
|
||||
|
||||
(generate-thumbnail [info]
|
||||
(climit/with-dispatch (:process-image climit)
|
||||
(rsem/with-dispatch (:process-image semaphores)
|
||||
(media/run {:cmd :profile-thumbnail
|
||||
:format :jpeg
|
||||
:quality 85
|
||||
@@ -333,7 +332,7 @@
|
||||
;; Function responsible for calculating the cryptographic hash
;; of the provided data.
(calculate-hash [data]
|
||||
(px/with-dispatch executor
|
||||
(rsem/with-dispatch (:process-image semaphores)
|
||||
(sto/calculate-hash data)))]
|
||||
|
||||
(p/let [info (get-info file)
|
||||
@@ -341,10 +340,11 @@
|
||||
hash (calculate-hash (:data thumb))
|
||||
content (-> (sto/content (:data thumb) (:size thumb))
|
||||
(sto/wrap-with-hash hash))]
|
||||
(sto/put-object! storage {::sto/content content
|
||||
::sto/deduplicate? true
|
||||
:bucket "profile"
|
||||
:content-type (:mtype thumb)}))))
|
||||
(rsem/with-dispatch (:process-image semaphores)
|
||||
(sto/put-object! storage {::sto/content content
|
||||
::sto/deduplicate? true
|
||||
:bucket "profile"
|
||||
:content-type (:mtype thumb)})))))
|
||||
|
||||
;; --- Mutation: Invite Member
|
||||
|
||||
@@ -474,6 +474,7 @@
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id emails role] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [team (create-team conn params)
|
||||
audit-fn (:audit cfg)
|
||||
profile (db/get-by-id conn :profile profile-id)]
|
||||
|
||||
;; Create invitations for all provided emails.
|
||||
@@ -486,18 +487,18 @@
|
||||
:email email
|
||||
:role role)))
|
||||
|
||||
(-> team
|
||||
(vary-meta assoc ::audit/props {:invitations (count emails)})
|
||||
(rph/with-defer
|
||||
#(when-let [collector (::audit/collector cfg)]
|
||||
(audit/submit! collector
|
||||
{:type "mutation"
|
||||
:name "invite-team-member"
|
||||
:profile-id profile-id
|
||||
:props {:emails emails
|
||||
:role role
|
||||
:profile-id profile-id
|
||||
:invitations (count emails)}})))))))
|
||||
(with-meta team
|
||||
{::audit/props {:invitations (count emails)}
|
||||
|
||||
:before-complete
|
||||
#(audit-fn :cmd :submit
|
||||
:type "mutation"
|
||||
:name "invite-team-member"
|
||||
:profile-id profile-id
|
||||
:props {:emails emails
|
||||
:role role
|
||||
:profile-id profile-id
|
||||
:invitations (count emails)})}))))
|
||||
|
||||
;; --- Mutation: Update invitation role
|
||||
|
||||
|
||||
@@ -8,8 +8,8 @@
|
||||
(:require
|
||||
[app.db :as db]
|
||||
[app.rpc.commands.comments :as cmd.comments]
|
||||
[app.rpc.commands.files :as cmd.files]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.queries.files :as files]
|
||||
[app.rpc.queries.teams :as teams]
|
||||
[app.util.services :as sv]
|
||||
[clojure.spec.alpha :as s]))
|
||||
@@ -52,7 +52,7 @@
|
||||
::doc/deprecated "1.15"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id share-id] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(cmd.files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(cmd.comments/get-comment-thread conn params)))
|
||||
|
||||
;; --- QUERY: Comments
|
||||
@@ -65,7 +65,7 @@
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id thread-id share-id] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(let [thread (db/get-by-id conn :comment-thread thread-id)]
|
||||
(cmd.files/check-comment-permissions! conn profile-id (:file-id thread) share-id))
|
||||
(files/check-comment-permissions! conn profile-id (:file-id thread) share-id))
|
||||
(cmd.comments/get-comments conn thread-id)))
|
||||
|
||||
|
||||
@@ -78,5 +78,5 @@
|
||||
::doc/added "1.13"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id share-id]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(cmd.files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(cmd.comments/get-file-comments-users conn file-id profile-id)))
|
||||
|
||||
@@ -6,62 +6,283 @@
|
||||
|
||||
(ns app.rpc.queries.files
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.geom.shapes :as gsh]
|
||||
[app.common.pages.helpers :as cph]
|
||||
[app.common.pages.migrations :as pmg]
|
||||
[app.common.spec :as us]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.types.shape-tree :as ctt]
|
||||
[app.db :as db]
|
||||
[app.rpc.commands.files :as cmd.files]
|
||||
[app.rpc.commands.search :as cmd.search]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.db.sql :as sql]
|
||||
[app.rpc.helpers :as rpch]
|
||||
[app.rpc.permissions :as perms]
|
||||
[app.rpc.queries.projects :as projects]
|
||||
[app.rpc.queries.share-link :refer [retrieve-share-link]]
|
||||
[app.rpc.queries.teams :as teams]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.services :as sv]
|
||||
[clojure.spec.alpha :as s]))
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
(declare decode-row)
|
||||
|
||||
;; --- Helpers & Specs
|
||||
|
||||
(s/def ::frame-id ::us/uuid)
|
||||
(s/def ::id ::us/uuid)
|
||||
(s/def ::name ::us/string)
|
||||
(s/def ::project-id ::us/uuid)
|
||||
(s/def ::file-id ::us/uuid)
|
||||
(s/def ::profile-id ::us/uuid)
|
||||
(s/def ::team-id ::us/uuid)
|
||||
(s/def ::search-term ::us/string)
|
||||
(s/def ::components-v2 ::us/boolean)
|
||||
|
||||
;; --- Query: File Permissions
|
||||
|
||||
(def ^:private sql:file-permissions
|
||||
"select fpr.is_owner,
|
||||
fpr.is_admin,
|
||||
fpr.can_edit
|
||||
from file_profile_rel as fpr
|
||||
where fpr.file_id = ?
|
||||
and fpr.profile_id = ?
|
||||
union all
|
||||
select tpr.is_owner,
|
||||
tpr.is_admin,
|
||||
tpr.can_edit
|
||||
from team_profile_rel as tpr
|
||||
inner join project as p on (p.team_id = tpr.team_id)
|
||||
inner join file as f on (p.id = f.project_id)
|
||||
where f.id = ?
|
||||
and tpr.profile_id = ?
|
||||
union all
|
||||
select ppr.is_owner,
|
||||
ppr.is_admin,
|
||||
ppr.can_edit
|
||||
from project_profile_rel as ppr
|
||||
inner join file as f on (f.project_id = ppr.project_id)
|
||||
where f.id = ?
|
||||
and ppr.profile_id = ?")
|
||||
|
||||
(defn retrieve-file-permissions
|
||||
[conn profile-id file-id]
|
||||
(when (and profile-id file-id)
|
||||
(db/exec! conn [sql:file-permissions
|
||||
file-id profile-id
|
||||
file-id profile-id
|
||||
file-id profile-id])))
|
||||
|
||||
(defn get-permissions
|
||||
([conn profile-id file-id]
|
||||
(let [rows (retrieve-file-permissions conn profile-id file-id)
|
||||
is-owner (boolean (some :is-owner rows))
|
||||
is-admin (boolean (some :is-admin rows))
|
||||
can-edit (boolean (some :can-edit rows))]
|
||||
(when (seq rows)
|
||||
{:type :membership
|
||||
:is-owner is-owner
|
||||
:is-admin (or is-owner is-admin)
|
||||
:can-edit (or is-owner is-admin can-edit)
|
||||
:can-read true
|
||||
:is-logged (some? profile-id)})))
|
||||
([conn profile-id file-id share-id]
|
||||
(let [perms (get-permissions conn profile-id file-id)
|
||||
ldata (retrieve-share-link conn file-id share-id)]
|
||||
|
||||
;; NOTE: in the future, when share-link becomes more powerful and
;; allows us to specify which parts of the app are available, we
;; will probably need to tweak this function in order to expose
;; these flags to the frontend.
(cond
|
||||
(some? perms) perms
|
||||
(some? ldata) {:type :share-link
|
||||
:can-read true
|
||||
:is-logged (some? profile-id)
|
||||
:who-comment (:who-comment ldata)
|
||||
:who-inspect (:who-inspect ldata)}))))
|
||||
|
||||
(def has-edit-permissions?
|
||||
(perms/make-edition-predicate-fn get-permissions))
|
||||
|
||||
(def has-read-permissions?
|
||||
(perms/make-read-predicate-fn get-permissions))
|
||||
|
||||
(def has-comment-permissions?
|
||||
(perms/make-comment-predicate-fn get-permissions))
|
||||
|
||||
(def check-edition-permissions!
|
||||
(perms/make-check-fn has-edit-permissions?))
|
||||
|
||||
(def check-read-permissions!
|
||||
(perms/make-check-fn has-read-permissions?))
|
||||
|
||||
;; A user has comment permissions if she has read permissions, or comment permissions
|
||||
(defn check-comment-permissions!
|
||||
[conn profile-id file-id share-id]
|
||||
(let [can-read (has-read-permissions? conn profile-id file-id)
|
||||
can-comment (has-comment-permissions? conn profile-id file-id share-id)]
|
||||
(when-not (or can-read can-comment)
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found
|
||||
:hint "not found"))))
|
||||
|
||||
;; --- Query: Files search
|
||||
|
||||
;; TODO: this query needs a good refactor
|
||||
|
||||
(def ^:private sql:search-files
|
||||
"with projects as (
|
||||
select p.*
|
||||
from project as p
|
||||
inner join team_profile_rel as tpr on (tpr.team_id = p.team_id)
|
||||
where tpr.profile_id = ?
|
||||
and p.team_id = ?
|
||||
and p.deleted_at is null
|
||||
and (tpr.is_admin = true or
|
||||
tpr.is_owner = true or
|
||||
tpr.can_edit = true)
|
||||
union
|
||||
select p.*
|
||||
from project as p
|
||||
inner join project_profile_rel as ppr on (ppr.project_id = p.id)
|
||||
where ppr.profile_id = ?
|
||||
and p.team_id = ?
|
||||
and p.deleted_at is null
|
||||
and (ppr.is_admin = true or
|
||||
ppr.is_owner = true or
|
||||
ppr.can_edit = true)
|
||||
)
|
||||
select distinct
|
||||
f.id,
|
||||
f.project_id,
|
||||
f.created_at,
|
||||
f.modified_at,
|
||||
f.name,
|
||||
f.is_shared
|
||||
from file as f
|
||||
inner join projects as pr on (f.project_id = pr.id)
|
||||
where f.name ilike ('%' || ? || '%')
|
||||
and f.deleted_at is null
|
||||
order by f.created_at asc")
|
||||
|
||||
(s/def ::search-files
|
||||
(s/keys :req-un [::profile-id ::team-id]
|
||||
:opt-un [::search-term]))
|
||||
|
||||
(sv/defmethod ::search-files
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id team-id search-term] :as params}]
|
||||
(when search-term
|
||||
(db/exec! pool [sql:search-files
|
||||
profile-id team-id
|
||||
profile-id team-id
|
||||
search-term])))
|
||||
|
||||
;; --- Query: Project Files
|
||||
|
||||
(s/def ::project-files ::cmd.files/get-project-files)
|
||||
(def ^:private sql:project-files
|
||||
"select f.id,
|
||||
f.project_id,
|
||||
f.created_at,
|
||||
f.modified_at,
|
||||
f.name,
|
||||
f.revn,
|
||||
f.is_shared
|
||||
from file as f
|
||||
where f.project_id = ?
|
||||
and f.deleted_at is null
|
||||
order by f.modified_at desc")
|
||||
|
||||
(s/def ::project-files
|
||||
(s/keys :req-un [::profile-id ::project-id]))
|
||||
|
||||
(sv/defmethod ::project-files
|
||||
{::doc/added "1.1"
|
||||
::doc/deprecated "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id project-id] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(projects/check-read-permissions! conn profile-id project-id)
|
||||
(cmd.files/get-project-files conn project-id)))
|
||||
(db/exec! conn [sql:project-files project-id])))
|
||||
|
||||
;; --- Query: File (By ID)
|
||||
|
||||
(s/def ::components-v2 ::us/boolean)
|
||||
(s/def ::file
|
||||
(s/and ::cmd.files/get-file
|
||||
(s/keys :opt-un [::components-v2])))
|
||||
(defn retrieve-object-thumbnails
|
||||
([{:keys [pool]} file-id]
|
||||
(let [sql (str/concat
|
||||
"select object_id, data "
|
||||
" from file_object_thumbnail"
|
||||
" where file_id=?")]
|
||||
(->> (db/exec! pool [sql file-id])
|
||||
(d/index-by :object-id :data))))
|
||||
|
||||
(defn get-file
|
||||
[conn id features]
|
||||
(let [file (cmd.files/get-file conn id features)
|
||||
thumbs (cmd.files/get-object-thumbnails conn id)]
|
||||
(assoc file :thumbnails thumbs)))
|
||||
([{:keys [pool]} file-id object-ids]
|
||||
(with-open [conn (db/open pool)]
|
||||
(let [sql (str/concat
|
||||
"select object_id, data "
|
||||
" from file_object_thumbnail"
|
||||
" where file_id=? and object_id = ANY(?)")
|
||||
ids (db/create-array conn "text" (seq object-ids))]
|
||||
(->> (db/exec! conn [sql file-id ids])
|
||||
(d/index-by :object-id :data))))))
|
||||
|
||||
(defn retrieve-file
|
||||
[{:keys [pool] :as cfg} id components-v2]
|
||||
(let [file (->> (db/get-by-id pool :file id)
|
||||
(decode-row)
|
||||
(pmg/migrate-file))]
|
||||
|
||||
(if components-v2
|
||||
(update file :data ctf/migrate-to-components-v2)
|
||||
(if (get-in file [:data :options :components-v2])
|
||||
(ex/raise :type :restriction
|
||||
:code :feature-disabled
|
||||
:hint "tried to open a components-v2 file with feature disabled")
|
||||
file))))
|
||||
|
||||
(s/def ::file
|
||||
(s/keys :req-un [::profile-id ::id]
|
||||
:opt-un [::components-v2]))
|
||||
|
||||
(sv/defmethod ::file
|
||||
"Retrieve a file by its ID. Only authenticated users."
|
||||
{::doc/added "1.0"
|
||||
::doc/deprecated "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id id features components-v2] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(let [perms (cmd.files/get-permissions pool profile-id id)
|
||||
;; BACKWARD COMPATIBILITY with the components-v2 parameter
|
||||
features (cond-> (or features #{})
|
||||
components-v2 (conj "components/v2"))]
|
||||
|
||||
(cmd.files/check-read-permissions! perms)
|
||||
(-> (get-file conn id features)
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id id components-v2] :as params}]
|
||||
(let [perms (get-permissions pool profile-id id)]
|
||||
(check-read-permissions! perms)
|
||||
(let [file (retrieve-file cfg id components-v2)
|
||||
thumbs (retrieve-object-thumbnails cfg id)]
|
||||
(-> file
|
||||
(assoc :thumbnails thumbs)
|
||||
(assoc :permissions perms)))))
|
||||
|
||||
|
||||
;; --- QUERY: page
|
||||
|
||||
(defn- prune-objects
|
||||
"Given the page data and the object-id returns the page data with all
|
||||
other not needed objects removed from the `:objects` data
|
||||
structure."
|
||||
[{:keys [objects] :as page} object-id]
|
||||
(let [objects (cph/get-children-with-self objects object-id)]
|
||||
(assoc page :objects (d/index-by :id objects))))
|
||||
|
||||
(defn- prune-thumbnails
|
||||
"Given the page data, removes the `:thumbnail` prop from all
|
||||
shapes."
|
||||
[page]
|
||||
(update page :objects d/update-vals #(dissoc % :thumbnail)))
|
||||
|
||||
(s/def ::page-id ::us/uuid)
|
||||
(s/def ::object-id ::us/uuid)
|
||||
|
||||
(s/def ::page
|
||||
(s/and ::cmd.files/get-page
|
||||
(s/keys :opt-un [::components-v2])))
|
||||
(s/and
|
||||
(s/keys :req-un [::profile-id ::file-id]
|
||||
:opt-un [::page-id ::object-id ::components-v2])
|
||||
(fn [obj]
|
||||
(if (contains? obj :object-id)
|
||||
(contains? obj :page-id)
|
||||
true))))
|
||||
|
||||
(sv/defmethod ::page
|
||||
"Retrieves the page data from file and returns it. If no page-id is
|
||||
@@ -73,112 +294,281 @@
|
||||
mandatory.
|
||||
|
||||
Mainly used for rendering purposes."
|
||||
{::doc/added "1.5"
|
||||
::doc/deprecated "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id features components-v2] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(cmd.files/check-read-permissions! conn profile-id file-id)
|
||||
(let [;; BACKWARD COMPATIBILITY with the components-v2 parameter
|
||||
features (cond-> (or features #{})
|
||||
components-v2 (conj "components/v2"))
|
||||
params (assoc params :features features)]
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id page-id object-id components-v2] :as props}]
|
||||
(check-read-permissions! pool profile-id file-id)
|
||||
(let [file (retrieve-file cfg file-id components-v2)
|
||||
page-id (or page-id (-> file :data :pages first))
|
||||
page (get-in file [:data :pages-index page-id])]
|
||||
|
||||
(cmd.files/get-page conn params))))
|
||||
(cond-> (prune-thumbnails page)
|
||||
(uuid? object-id)
|
||||
(prune-objects object-id))))
|
||||
|
||||
;; --- QUERY: file-data-for-thumbnail
|
||||
|
||||
(defn- get-file-thumbnail-data
|
||||
[cfg {:keys [data id] :as file}]
|
||||
(letfn [;; function responsible for finding the frame marked to be
;; used as thumbnail; the returned frame always has
;; the :page-id set to the page it belongs to.
(get-thumbnail-frame [data]
|
||||
(d/seek :use-for-thumbnail?
|
||||
(for [page (-> data :pages-index vals)
|
||||
frame (-> page :objects ctt/get-frames)]
|
||||
(assoc frame :page-id (:id page)))))
|
||||
|
||||
;; function responsible for filtering out of the objects data structure
;; all unneeded shapes when a concrete frame is provided. If no
;; frame is given, the objects are returned untouched.
(filter-objects [objects frame-id]
|
||||
(d/index-by :id (cph/get-children-with-self objects frame-id)))
|
||||
|
||||
;; function responsible for assoc'ing the available thumbnails
;; to frames and removing all children shapes from objects when
;; a thumbnail is available
(assoc-thumbnails [objects page-id thumbnails]
|
||||
(loop [objects objects
|
||||
frames (filter cph/frame-shape? (vals objects))]
|
||||
|
||||
(if-let [frame (-> frames first)]
|
||||
(let [frame-id (:id frame)
|
||||
object-id (str page-id frame-id)
|
||||
frame (if-let [thumb (get thumbnails object-id)]
|
||||
(assoc frame :thumbnail thumb :shapes [])
|
||||
(dissoc frame :thumbnail))
|
||||
|
||||
children-ids
|
||||
(cph/get-children-ids objects frame-id)
|
||||
|
||||
bounds
|
||||
(when (:show-content frame)
|
||||
(gsh/selection-rect (concat [frame] (->> children-ids (map (d/getf objects))))))
|
||||
|
||||
frame
|
||||
(cond-> frame
|
||||
(some? bounds)
|
||||
(assoc :children-bounds bounds))]
|
||||
|
||||
(if (:thumbnail frame)
|
||||
(recur (-> objects
|
||||
(assoc frame-id frame)
|
||||
(d/without-keys children-ids))
|
||||
(rest frames))
|
||||
(recur (assoc objects frame-id frame)
|
||||
(rest frames))))
|
||||
|
||||
objects)))]
|
||||
|
||||
(let [frame (get-thumbnail-frame data)
|
||||
frame-id (:id frame)
|
||||
page-id (or (:page-id frame)
|
||||
(-> data :pages first))
|
||||
|
||||
page (dm/get-in data [:pages-index page-id])
|
||||
frame-ids (if (some? frame) (list frame-id) (map :id (ctt/get-frames (:objects page))))
|
||||
|
||||
obj-ids (map #(str page-id %) frame-ids)
|
||||
thumbs (retrieve-object-thumbnails cfg id obj-ids)]
|
||||
|
||||
(cond-> page
|
||||
;; If we have frame, we need to specify it on the page level
|
||||
;; and remove the all other unrelated objects.
|
||||
(some? frame-id)
|
||||
(-> (assoc :thumbnail-frame-id frame-id)
|
||||
(update :objects filter-objects frame-id))
|
||||
|
||||
;; Assoc the available thumbnails and prune non-visible shapes
;; to avoid transferring unnecessary data.
:always
|
||||
(update :objects assoc-thumbnails page-id thumbs)))))
|
||||
|
||||
(s/def ::file-data-for-thumbnail
|
||||
(s/and ::cmd.files/get-file-data-for-thumbnail
|
||||
(s/keys :opt-un [::components-v2])))
|
||||
(s/keys :req-un [::profile-id ::file-id]
|
||||
:opt-un [::components-v2]))
|
||||
|
||||
(sv/defmethod ::file-data-for-thumbnail
|
||||
"Retrieves the data for generate the thumbnail of the file. Used
|
||||
mainly for render thumbnails on dashboard."
|
||||
{::doc/added "1.11"
|
||||
::doc/deprecated "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id features components-v2] :as props}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(cmd.files/check-read-permissions! conn profile-id file-id)
|
||||
(let [;; BACKWARD COMPATIBILITY with the components-v2 parameter
|
||||
features (cond-> (or features #{})
|
||||
components-v2 (conj "components/v2"))
|
||||
file (cmd.files/get-file conn file-id features)]
|
||||
{:file-id file-id
|
||||
:revn (:revn file)
|
||||
:page (cmd.files/get-file-data-for-thumbnail conn file)})))
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id components-v2] :as props}]
|
||||
(check-read-permissions! pool profile-id file-id)
|
||||
(let [file (retrieve-file cfg file-id components-v2)]
|
||||
{:file-id file-id
|
||||
:revn (:revn file)
|
||||
:page (get-file-thumbnail-data cfg file)}))
|
||||
|
||||
|
||||
;; --- Query: Shared Library Files
|
||||
|
||||
(s/def ::team-shared-files ::cmd.files/get-team-shared-files)
|
||||
(def ^:private sql:team-shared-files
|
||||
"select f.id,
|
||||
f.revn,
|
||||
f.data,
|
||||
f.project_id,
|
||||
f.created_at,
|
||||
f.modified_at,
|
||||
f.name,
|
||||
f.is_shared
|
||||
from file as f
|
||||
inner join project as p on (p.id = f.project_id)
|
||||
where f.is_shared = true
|
||||
and f.deleted_at is null
|
||||
and p.deleted_at is null
|
||||
and p.team_id = ?
|
||||
order by f.modified_at desc")
|
||||
|
||||
(s/def ::team-shared-files
|
||||
(s/keys :req-un [::profile-id ::team-id]))
|
||||
|
||||
(sv/defmethod ::team-shared-files
|
||||
{::doc/added "1.3"
|
||||
::doc/deprecated "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id team-id] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(teams/check-read-permissions! conn profile-id team-id)
|
||||
(cmd.files/get-team-shared-files conn params)))
|
||||
[{:keys [pool] :as cfg} {:keys [team-id] :as params}]
|
||||
(let [assets-sample
|
||||
(fn [assets limit]
|
||||
(let [sorted-assets (->> (vals assets)
|
||||
(sort-by #(str/lower (:name %))))]
|
||||
|
||||
{:count (count sorted-assets)
|
||||
:sample (into [] (take limit sorted-assets))}))
|
||||
|
||||
library-summary
|
||||
(fn [data]
|
||||
{:components (assets-sample (:components data) 4)
|
||||
:colors (assets-sample (:colors data) 3)
|
||||
:typographies (assets-sample (:typographies data) 3)})
|
||||
|
||||
xform (comp
|
||||
(map decode-row)
|
||||
(map #(assoc % :library-summary (library-summary (:data %))))
|
||||
(map #(dissoc % :data)))]
|
||||
|
||||
(into #{} xform (db/exec! pool [sql:team-shared-files team-id]))))
|
||||
|
||||
|
||||
;; --- Query: File Libraries used by a File
|
||||
|
||||
(s/def ::file-libraries ::cmd.files/get-file-libraries)
|
||||
(def ^:private sql:file-libraries
|
||||
"WITH RECURSIVE libs AS (
|
||||
SELECT fl.*, flr.synced_at
|
||||
FROM file AS fl
|
||||
JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
|
||||
WHERE flr.file_id = ?::uuid
|
||||
UNION
|
||||
SELECT fl.*, flr.synced_at
|
||||
FROM file AS fl
|
||||
JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
|
||||
JOIN libs AS l ON (flr.file_id = l.id)
|
||||
)
|
||||
SELECT l.id,
|
||||
l.data,
|
||||
l.project_id,
|
||||
l.created_at,
|
||||
l.modified_at,
|
||||
l.deleted_at,
|
||||
l.name,
|
||||
l.revn,
|
||||
l.synced_at
|
||||
FROM libs AS l
|
||||
WHERE l.deleted_at IS NULL OR l.deleted_at > now();")
|
||||
|
||||
(defn retrieve-file-libraries
|
||||
[{:keys [pool] :as cfg} is-indirect file-id]
|
||||
(let [xform (comp
|
||||
(map #(assoc % :is-indirect is-indirect))
|
||||
(map decode-row))]
|
||||
(into #{} xform (db/exec! pool [sql:file-libraries file-id]))))
|
||||
|
||||
(s/def ::file-libraries
|
||||
(s/keys :req-un [::profile-id ::file-id]))
|
||||
|
||||
(sv/defmethod ::file-libraries
|
||||
{::doc/added "1.3"
|
||||
::doc/deprecated "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id features] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(cmd.files/check-read-permissions! conn profile-id file-id)
|
||||
(cmd.files/get-file-libraries conn file-id features)))
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
|
||||
(check-read-permissions! pool profile-id file-id)
|
||||
(retrieve-file-libraries cfg false file-id))
|
||||
|
||||
|
||||
;; --- Query: Files that use this File library
|
||||
|
||||
(s/def ::library-using-files ::cmd.files/get-library-file-references)
|
||||
(def ^:private sql:library-using-files
|
||||
"SELECT f.id,
|
||||
f.name
|
||||
FROM file_library_rel AS flr
|
||||
JOIN file AS f ON (f.id = flr.file_id)
|
||||
WHERE flr.library_file_id = ?
|
||||
AND (f.deleted_at IS NULL OR f.deleted_at > now())")
|
||||
|
||||
(s/def ::library-using-files
|
||||
(s/keys :req-un [::profile-id ::file-id]))
|
||||
|
||||
(sv/defmethod ::library-using-files
|
||||
{::doc/added "1.13"
|
||||
::doc/deprecated "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(cmd.files/check-read-permissions! conn profile-id file-id)
|
||||
(cmd.files/get-library-file-references conn file-id)))
|
||||
(check-read-permissions! pool profile-id file-id)
|
||||
(db/exec! pool [sql:library-using-files file-id]))
|
||||
|
||||
;; --- QUERY: team-recent-files
|
||||
|
||||
(s/def ::team-recent-files ::cmd.files/get-team-recent-files)
|
||||
(def sql:team-recent-files
|
||||
"with recent_files as (
|
||||
select f.id,
|
||||
f.revn,
|
||||
f.project_id,
|
||||
f.created_at,
|
||||
f.modified_at,
|
||||
f.name,
|
||||
f.is_shared,
|
||||
row_number() over w as row_num
|
||||
from file as f
|
||||
join project as p on (p.id = f.project_id)
|
||||
where p.team_id = ?
|
||||
and p.deleted_at is null
|
||||
and f.deleted_at is null
|
||||
window w as (partition by f.project_id order by f.modified_at desc)
|
||||
order by f.modified_at desc
|
||||
)
|
||||
select * from recent_files where row_num <= 10;")
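;; Sketch (not part of the diff): the window in the query above numbers
;; files per project by modification time, so the final select keeps at
;; most the 10 most recently modified files of each project in the team.
(comment
  (db/exec! pool [sql:team-recent-files team-id]))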
|
||||
|
||||
|
||||
(s/def ::team-recent-files
|
||||
(s/keys :req-un [::profile-id ::team-id]))
|
||||
|
||||
(sv/defmethod ::team-recent-files
|
||||
{::doc/added "1.0"
|
||||
::doc/deprecated "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id team-id]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(teams/check-read-permissions! conn profile-id team-id)
|
||||
(cmd.files/get-team-recent-files conn team-id)))
|
||||
|
||||
(teams/check-read-permissions! pool profile-id team-id)
|
||||
(db/exec! pool [sql:team-recent-files team-id]))
|
||||
|
||||
;; --- QUERY: get file thumbnail
|
||||
|
||||
(s/def ::file-thumbnail ::cmd.files/get-file-thumbnail)
|
||||
(s/def ::revn ::us/integer)
|
||||
|
||||
(s/def ::file-thumbnail
|
||||
(s/keys :req-un [::profile-id ::file-id]
|
||||
:opt-un [::revn]))
|
||||
|
||||
(sv/defmethod ::file-thumbnail
|
||||
{::doc/added "1.13"
|
||||
::doc/deprecated "1.17"}
|
||||
[{:keys [pool]} {:keys [profile-id file-id revn]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(cmd.files/check-read-permissions! conn profile-id file-id)
|
||||
(-> (cmd.files/get-file-thumbnail conn file-id revn)
|
||||
(rph/with-http-cache cmd.files/long-cache-duration))))
|
||||
(check-read-permissions! pool profile-id file-id)
|
||||
(let [sql (sql/select :file-thumbnail
|
||||
(cond-> {:file-id file-id}
|
||||
revn (assoc :revn revn))
|
||||
{:limit 1
|
||||
:order-by [[:revn :desc]]})
|
||||
|
||||
row (db/exec-one! pool sql)]
|
||||
|
||||
;; --- QUERY: search files
|
||||
(when-not row
|
||||
(ex/raise :type :not-found
|
||||
:code :file-thumbnail-not-found))
|
||||
|
||||
(s/def ::search-files ::cmd.search/search-files)
|
||||
(with-meta
|
||||
{:data (:data row)
|
||||
:props (some-> (:props row) db/decode-transit-pgobject)
|
||||
:revn (:revn row)
|
||||
:file-id (:file-id row)}
|
||||
{:transform-response (rpch/http-cache {:max-age (* 1000 60 60)})})))
|
||||
|
||||
(sv/defmethod ::search-files
|
||||
{::doc/added "1.0"
|
||||
::doc/deprecated "1.17"}
|
||||
[{:keys [pool]} {:keys [search-term] :as params}]
|
||||
(when search-term
|
||||
(cmd.search/search-files pool params)))
|
||||
;; --- Helpers
|
||||
|
||||
(defn decode-row
|
||||
[{:keys [data changes] :as row}]
|
||||
(when row
|
||||
(cond-> row
|
||||
changes (assoc :changes (blob/decode changes))
|
||||
data (assoc :data (blob/decode data)))))
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
(:require
|
||||
[app.common.spec :as us]
|
||||
[app.db :as db]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.queries.files :as files]
|
||||
[app.rpc.queries.projects :as projects]
|
||||
[app.rpc.queries.teams :as teams]
|
||||
[app.util.services :as sv]
|
||||
|
||||
@@ -6,26 +6,79 @@
|
||||
|
||||
(ns app.rpc.queries.viewer
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.spec :as us]
|
||||
[app.db :as db]
|
||||
[app.rpc.commands.viewer :as viewer]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.commands.comments :as comments]
|
||||
[app.rpc.queries.files :as files]
|
||||
[app.rpc.queries.share-link :as slnk]
|
||||
[app.util.services :as sv]
|
||||
[clojure.spec.alpha :as s]))
|
||||
[clojure.spec.alpha :as s]
|
||||
[promesa.core :as p]))
|
||||
|
||||
;; --- Query: View Only Bundle
|
||||
|
||||
(defn- retrieve-project
|
||||
[pool id]
|
||||
(db/get-by-id pool :project id {:columns [:id :name :team-id]}))
|
||||
|
||||
(defn- retrieve-bundle
|
||||
[{:keys [pool] :as cfg} file-id profile-id components-v2]
|
||||
(p/let [file (files/retrieve-file cfg file-id components-v2)
|
||||
project (retrieve-project pool (:project-id file))
|
||||
libs (files/retrieve-file-libraries cfg false file-id)
|
||||
users (comments/get-file-comments-users pool file-id profile-id)
|
||||
|
||||
links (->> (db/query pool :share-link {:file-id file-id})
|
||||
(mapv slnk/decode-share-link-row))
|
||||
|
||||
fonts (db/query pool :team-font-variant
|
||||
{:team-id (:team-id project)
|
||||
:deleted-at nil})]
|
||||
{:file file
|
||||
:users users
|
||||
:fonts fonts
|
||||
:project project
|
||||
:share-links links
|
||||
:libraries libs}))
|
||||
|
||||
(s/def ::file-id ::us/uuid)
|
||||
(s/def ::profile-id ::us/uuid)
|
||||
(s/def ::share-id ::us/uuid)
|
||||
(s/def ::components-v2 ::us/boolean)
|
||||
(s/def ::view-only-bundle
|
||||
(s/and ::viewer/get-view-only-bundle
|
||||
(s/keys :opt-un [::components-v2])))
|
||||
|
||||
(sv/defmethod ::view-only-bundle
|
||||
{:auth false
|
||||
::doc/added "1.3"
|
||||
::doc/deprecated "1.17"}
|
||||
[{:keys [pool] :as cfg} {:keys [features components-v2] :as params}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(let [;; BACKWARD COMPATIBILITY with the components-v2 parameter
|
||||
features (cond-> (or features #{})
|
||||
components-v2 (conj "components/v2"))
|
||||
params (assoc params :features features)]
|
||||
(viewer/get-view-only-bundle conn params))))
|
||||
(s/def ::view-only-bundle
|
||||
(s/keys :req-un [::file-id] :opt-un [::profile-id ::share-id ::components-v2]))
|
||||
|
||||
(sv/defmethod ::view-only-bundle {:auth false}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id share-id components-v2] :as params}]
|
||||
(p/let [slink (slnk/retrieve-share-link pool file-id share-id)
|
||||
perms (files/get-permissions pool profile-id file-id share-id)
|
||||
thumbs (files/retrieve-object-thumbnails cfg file-id)
|
||||
bundle (p/-> (retrieve-bundle cfg file-id profile-id components-v2)
|
||||
(assoc :permissions perms)
|
||||
(assoc-in [:file :thumbnails] thumbs))]
|
||||
|
||||
;; When we have neither profile nor share, we just return a not
|
||||
;; found response to the user.
|
||||
(when (and (not profile-id)
|
||||
(not slink))
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found))
|
||||
|
||||
;; When we have only profile, we need to check read permissions
|
||||
;; on file.
|
||||
(when (and profile-id (not slink))
|
||||
(files/check-read-permissions! pool profile-id file-id))
|
||||
|
||||
(cond-> bundle
|
||||
(some? slink)
|
||||
(assoc :share slink)
|
||||
|
||||
(and (some? slink)
|
||||
(not (contains? (:flags slink) "view-all-pages")))
|
||||
(update-in [:file :data] (fn [data]
|
||||
(let [allowed-pages (:pages slink)]
|
||||
(-> data
|
||||
(update :pages (fn [pages] (filterv #(contains? allowed-pages %) pages)))
|
||||
(update :pages-index (fn [index] (select-keys index allowed-pages))))))))))
|
||||
|
||||
@@ -44,6 +44,7 @@
|
||||
"
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
@@ -110,7 +111,7 @@
|
||||
"m" :minutes
|
||||
"s" :seconds
|
||||
"w" :weeks)
|
||||
::key (str "ratelimit.window." (d/name name))
|
||||
::key (dm/str "ratelimit.window." (d/name name))
|
||||
::opts opts})
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-window-limit-opts
|
||||
@@ -131,7 +132,7 @@
|
||||
::interval interval
|
||||
::opts opts
|
||||
::params [(dt/->seconds interval) rate capacity]
|
||||
::key (str "ratelimit.bucket." (d/name name))})
|
||||
::key (dm/str "ratelimit.bucket." (d/name name))})
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-bucket-limit-opts
|
||||
:hint (str/ffmt "looks like '%' does not have a valid format" opts)))))
|
||||
@@ -139,7 +140,7 @@
|
||||
(defmethod process-limit :bucket
|
||||
[redis user-id now {:keys [::key ::params ::service ::capacity ::interval ::rate] :as limit}]
|
||||
(let [script (-> bucket-rate-limit-script
|
||||
(assoc ::rscript/keys [(str key "." service "." user-id)])
|
||||
(assoc ::rscript/keys [(dm/str key "." service "." user-id)])
|
||||
(assoc ::rscript/vals (conj params (dt/->seconds now))))]
|
||||
(-> (redis/eval! redis script)
|
||||
(p/then (fn [result]
|
||||
@@ -164,7 +165,7 @@
|
||||
(let [ts (dt/truncate now unit)
|
||||
ttl (dt/diff now (dt/plus ts {unit 1}))
|
||||
script (-> window-rate-limit-script
|
||||
(assoc ::rscript/keys [(str key "." service "." user-id "." (dt/format-instant ts))])
|
||||
(assoc ::rscript/keys [(dm/str key "." service "." user-id "." (dt/format-instant ts))])
|
||||
(assoc ::rscript/vals [nreq (dt/->seconds ttl)]))]
|
||||
(-> (redis/eval! redis script)
|
||||
(p/then (fn [result]
|
||||
@@ -196,65 +197,67 @@
|
||||
(filter (complement ::lresult/allowed?))
|
||||
(first))]
|
||||
|
||||
(when rejected
|
||||
(when (and rejected (contains? cf/flags :warn-rpc-rate-limits))
|
||||
(l/warn :hint "rejected rate limit"
|
||||
:user-id (str user-id)
|
||||
:user-id (dm/str user-id)
|
||||
:limit-service (-> rejected ::service name)
|
||||
:limit-name (-> rejected ::name name)
|
||||
:limit-strategy (-> rejected ::strategy name)))
|
||||
|
||||
{:enabled? true
|
||||
:allowed? (not (some? rejected))
|
||||
:allowed? (some? rejected)
|
||||
:headers {"x-rate-limit-remaining" remaining
|
||||
"x-rate-limit-reset" reset}})))))
|
||||
|
||||
(defn- handle-response
|
||||
[f cfg params result]
|
||||
(if (:enabled? result)
|
||||
(let [headers (:headers result)]
|
||||
(when-not (:allowed? result)
|
||||
[f cfg params rres]
|
||||
(if (:enabled? rres)
|
||||
(let [headers {"x-rate-limit-remaining" (:remaining rres)
|
||||
"x-rate-limit-reset" (:reset rres)}]
|
||||
(when-not (:allowed? rres)
|
||||
(ex/raise :type :rate-limit
|
||||
:code :request-blocked
|
||||
:hint "rate limit reached"
|
||||
::http/headers headers))
|
||||
(-> (f cfg params)
|
||||
(p/then (fn [response]
|
||||
(vary-meta response update ::http/headers merge headers)))))
|
||||
(with-meta response
|
||||
{::http/headers headers})))))
|
||||
|
||||
(f cfg params)))
|
||||
|
||||
(defn wrap
|
||||
[{:keys [rlimit redis] :as cfg} f mdata]
|
||||
(if rlimit
|
||||
(let [skey (keyword (::rpc/type cfg) (->> mdata ::sv/spec name))
|
||||
sname (str (::rpc/type cfg) "." (->> mdata ::sv/spec name))]
|
||||
(let [skey (keyword (::rpc/type cfg) (->> mdata ::sv/spec name))
|
||||
sname (dm/str (::rpc/type cfg) "." (->> mdata ::sv/spec name))
|
||||
default-rresp (p/resolved {:enabled? false})]
|
||||
(if (or (contains? cf/flags :rpc-rate-limit)
|
||||
(contains? cf/flags :soft-rpc-rate-limit))
|
||||
(fn [cfg {:keys [::http/request] :as params}]
|
||||
(let [uid (or (:profile-id params)
|
||||
(some-> request parse-client-ip)
|
||||
uuid/zero)
|
||||
(let [user-id (or (:profile-id params)
|
||||
(some-> request parse-client-ip)
|
||||
uuid/zero)
|
||||
|
||||
rsp (when (and uid @enabled?)
|
||||
(when-let [limits (or (get-in @rlimit [::limits skey])
|
||||
(get-in @rlimit [::limits :default]))]
|
||||
(let [redis (redis/get-or-connect redis ::rlimit default-options)
|
||||
limits (map #(assoc % ::service sname) limits)
|
||||
resp (-> (process-limits redis uid limits (dt/now))
|
||||
(p/catch (fn [cause]
|
||||
;; If we have an error on processing the rate-limit we just skip
;; it so we do not cause a service interruption because of redis
;; downtime or a similar situation.
|
||||
(l/error :hint "error on processing rate-limit" :cause cause)
|
||||
{:enabled? false})))]
|
||||
rresp (when (and user-id @enabled?)
|
||||
(when-let [limits (get-in @rlimit [::limits skey])]
|
||||
(let [redis (redis/get-or-connect redis ::rlimit default-options)
|
||||
limits (map #(assoc % ::service sname) limits)
|
||||
rresp (-> (process-limits redis user-id limits (dt/now))
|
||||
(p/catch (fn [cause]
|
||||
;; If we have an error on processing the
;; rate-limit we just skip it so we do not cause a
;; service interruption because of redis downtime
;; or a similar situation.
|
||||
(l/error :hint "error on processing rate-limit" :cause cause)
|
||||
{:enabled? false})))]
|
||||
|
||||
;; If soft rate limits are enabled, we process the rate-limit but return an
;; unprotected response.
|
||||
(if (contains? cf/flags :soft-rpc-rlimit)
|
||||
(p/resolved {:enabled? false})
|
||||
resp))))
|
||||
;; If soft rate limits are enabled, we process the rate-limit but return an
;; unprotected response.
|
||||
(and (contains? cf/flags :soft-rpc-rate-limit) rresp))))]
|
||||
|
||||
rsp (or rsp (p/resolved {:enabled? false}))]
|
||||
|
||||
(p/then rsp (partial handle-response f cfg params)))))
|
||||
f))
|
||||
(p/then (or rresp default-rresp)
|
||||
(partial handle-response f cfg params))))
|
||||
f)))
|
||||
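For reference, what an RPC caller observes when `handle-response` above rejects a request, as sketched from the code in this hunk (the header names and error keys are taken from the diff; the layout of the sketch is illustrative):

;; response headers, attached to both allowed and rejected requests:
;;   x-rate-limit-remaining, x-rate-limit-reset
;; rejection raised by handle-response:
;;   {:type :rate-limit, :code :request-blocked, :hint "rate limit reached"}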
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; CONFIG WATCHER
|
||||
@@ -332,7 +335,7 @@
|
||||
::limits limits}))))
|
||||
|
||||
(defn- refresh-config
|
||||
[{:keys [state path executor scheduled-executor] :as params}]
|
||||
[{:keys [state path executor scheduler] :as params}]
|
||||
(letfn [(update-config [{:keys [::updated-at] :as state}]
|
||||
(let [updated-at' (fs/last-modified-time path)]
|
||||
(merge state
|
||||
@@ -347,7 +350,7 @@
|
||||
state)))))
|
||||
|
||||
(schedule-next [state]
|
||||
(px/schedule! scheduled-executor
|
||||
(px/schedule! scheduler
|
||||
(inst-ms (::refresh state))
|
||||
(partial refresh-config params))
|
||||
state)]
|
||||
@@ -358,7 +361,7 @@
|
||||
(defn- on-refresh-error
|
||||
[_ cause]
|
||||
(when-not (instance? java.util.concurrent.RejectedExecutionException cause)
|
||||
(if-let [explain (-> cause ex-data ex/explain)]
|
||||
(if-let [explain (-> cause ex-data us/pretty-explain)]
|
||||
(l/warn ::l/raw (str "unable to refresh config, invalid format:\n" explain)
|
||||
::l/async false)
|
||||
(l/warn :hint "unexpected exception on loading config"
|
||||
@@ -371,22 +374,22 @@
|
||||
(and (fs/exists? path) (fs/regular-file? path) path)))
|
||||
|
||||
(defmethod ig/pre-init-spec :app.rpc/rlimit [_]
|
||||
(s/keys :req-un [::wrk/executor ::wrk/scheduled-executor]))
|
||||
(s/keys :req-un [::wrk/executor ::wrk/scheduler]))
|
||||
|
||||
(defmethod ig/init-key ::rpc/rlimit
|
||||
(defmethod ig/init-key :app.rpc/rlimit
|
||||
[_ {:keys [executor] :as params}]
|
||||
(when (contains? cf/flags :rpc-rlimit)
|
||||
(let [state (agent {})]
|
||||
(set-error-handler! state on-refresh-error)
|
||||
(set-error-mode! state :continue)
|
||||
(let [state (agent {})]
|
||||
|
||||
(when-let [path (get-config-path)]
|
||||
(l/info :hint "initializing rlimit config reader" :path (str path))
|
||||
(set-error-handler! state on-refresh-error)
|
||||
(set-error-mode! state :continue)
|
||||
|
||||
;; Initialize the state with initial refresh value
|
||||
(send-via executor state (constantly {::refresh (dt/duration "5s")}))
|
||||
(when-let [path (get-config-path)]
|
||||
(l/info :hint "initializing rlimit config reader" :path (str path))
|
||||
|
||||
;; Force a refresh
|
||||
(refresh-config (assoc params :path path :state state)))
|
||||
;; Initialize the state with initial refresh value
|
||||
(send-via executor state (constantly {::refresh (dt/duration "5s")}))
|
||||
|
||||
state)))
|
||||
;; Force a refresh
|
||||
(refresh-config (assoc params :path path :state state)))
|
||||
|
||||
state))
|
||||
|
||||
149
backend/src/app/rpc/semaphore.clj
Normal file
@@ -0,0 +1,149 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.rpc.semaphore
|
||||
"Resource usage limits (in other words: semaphores)."
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.config :as cf]
|
||||
[app.metrics :as mtx]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.util.locks :as locks]
|
||||
[app.util.time :as ts]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; ASYNC SEMAPHORE IMPL
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defprotocol IAsyncSemaphore
|
||||
(acquire! [_])
|
||||
(release! [_ tp]))
|
||||
|
||||
(defn create
|
||||
[& {:keys [permits metrics name executor]}]
|
||||
(let [used (volatile! 0)
|
||||
queue (volatile! (d/queue))
|
||||
labels (into-array String [(d/name name)])
|
||||
lock (locks/create)
|
||||
permits (or permits Long/MAX_VALUE)]
|
||||
|
||||
(when (>= permits Long/MAX_VALUE)
|
||||
(l/warn :hint "permits value too high" :permits permits :semaphore name))
|
||||
|
||||
^{::wrk/executor executor
|
||||
::name name}
|
||||
(reify IAsyncSemaphore
|
||||
(acquire! [_]
|
||||
(let [d (p/deferred)]
|
||||
(locks/locking lock
|
||||
(if (< @used permits)
|
||||
(do
|
||||
(vswap! used inc)
|
||||
(p/resolve! d))
|
||||
(vswap! queue conj d)))
|
||||
|
||||
(mtx/run! metrics
|
||||
:id :semaphore-used-permits
|
||||
:val @used
|
||||
:labels labels)
|
||||
(mtx/run! metrics
|
||||
:id :semaphore-queued-submissions
|
||||
:val (count @queue)
|
||||
:labels labels)
|
||||
d))
|
||||
|
||||
(release! [_ tp]
|
||||
(locks/locking lock
|
||||
(if-let [item (peek @queue)]
|
||||
(do
|
||||
(vswap! queue pop)
|
||||
(p/resolve! item))
|
||||
(when (pos? @used)
|
||||
(vswap! used dec))))
|
||||
|
||||
(mtx/run! metrics
|
||||
:id :semaphore-timing
|
||||
:val (inst-ms (tp))
|
||||
:labels labels)
|
||||
(mtx/run! metrics
|
||||
:id :semaphore-used-permits
|
||||
:val @used
|
||||
:labels labels)
|
||||
(mtx/run! metrics
|
||||
:id :semaphore-queued-submissions
|
||||
:val (count @queue)
|
||||
:labels labels)))))
|
||||
|
||||
(defn semaphore?
|
||||
[v]
|
||||
(satisfies? IAsyncSemaphore v))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; PREDEFINED SEMAPHORES
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(s/def ::semaphore semaphore?)
|
||||
(s/def ::semaphores
|
||||
(s/map-of ::us/keyword ::semaphore))
|
||||
|
||||
(defmethod ig/pre-init-spec ::rpc/semaphores [_]
|
||||
(s/keys :req-un [::mtx/metrics]))
|
||||
|
||||
(defn- create-default-semaphores
|
||||
[metrics executor]
|
||||
[(create :permits (cf/get :semaphore-process-font)
|
||||
:metrics metrics
|
||||
:name :process-font
|
||||
:executor executor)
|
||||
(create :permits (cf/get :semaphore-update-file)
|
||||
:metrics metrics
|
||||
:name :update-file
|
||||
:executor executor)
|
||||
(create :permits (cf/get :semaphore-process-image)
|
||||
:metrics metrics
|
||||
:name :process-image
|
||||
:executor executor)
|
||||
(create :permits (cf/get :semaphore-auth)
|
||||
:metrics metrics
|
||||
:name :auth
|
||||
:executor executor)])
|
||||
|
||||
(defmethod ig/init-key ::rpc/semaphores
|
||||
[_ {:keys [metrics executor]}]
|
||||
(->> (create-default-semaphores metrics executor)
|
||||
(d/index-by (comp ::name meta))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; PUBLIC API
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defmacro with-dispatch
|
||||
[queue & body]
|
||||
`(let [tpoint# (ts/tpoint)
|
||||
queue# ~queue
|
||||
executor# (-> queue# meta ::wrk/executor)]
|
||||
(-> (acquire! queue#)
|
||||
(p/then (fn [_#] ~@body) executor#)
|
||||
(p/finally (fn [_# _#]
|
||||
(release! queue# tpoint#))))))
|
||||
|
||||
(defn wrap
|
||||
[{:keys [semaphores]} f {:keys [::queue]}]
|
||||
(let [queue' (get semaphores queue)]
|
||||
(if (semaphore? queue')
|
||||
(fn [cfg params]
|
||||
(with-dispatch queue'
|
||||
(f cfg params)))
|
||||
(do
|
||||
(when (some? queue)
|
||||
(l/warn :hint "undefined semaphore" :name queue))
|
||||
f))))
|
||||
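A minimal usage sketch of the semaphore namespace added above; `my-metrics`, `my-executor`, `process-image!` and `input` are hypothetical placeholders, not names that appear in this diff:

;; illustrative only: cap concurrent work at 2 permits
(def sem
  (create :permits 2
          :metrics my-metrics     ;; hypothetical app.metrics instance
          :name :demo
          :executor my-executor)) ;; hypothetical executor, stored in the semaphore meta

(with-dispatch sem
  (process-image! input))         ;; body runs on the executor once a permit is acquired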
@@ -8,10 +8,8 @@
|
||||
"Initial data setup of instance."
|
||||
(:require
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
[app.main :as-alias main]
|
||||
[app.setup.builtin-templates]
|
||||
[app.setup.keys :as keys]
|
||||
[buddy.core.codecs :as bc]
|
||||
@@ -50,9 +48,6 @@
|
||||
:cause cause))))
|
||||
instance-id)))
|
||||
|
||||
(s/def ::main/props
|
||||
(s/map-of ::us/keyword some?))
|
||||
|
||||
(defmethod ig/pre-init-spec ::props [_]
|
||||
(s/keys :req-un [::db/pool]))
|
||||
|
||||
|
||||
@@ -29,8 +29,10 @@
|
||||
(s/keys :req-un [::id ::name ::thumbnail-uri ::file-uri]
|
||||
:opt-un [::path]))
|
||||
|
||||
(s/def ::http-client ::http/client)
|
||||
|
||||
(defmethod ig/pre-init-spec :app.setup/builtin-templates [_]
|
||||
(s/keys :req [::http/client]))
|
||||
(s/keys :req-un [::http-client]))
|
||||
|
||||
(defmethod ig/init-key :app.setup/builtin-templates
|
||||
[_ cfg]
|
||||
@@ -41,7 +43,7 @@
|
||||
|
||||
(defn- download-preset!
|
||||
[cfg {:keys [path file-uri] :as preset}]
|
||||
(let [response (http/req! cfg
|
||||
(let [response (http/req! (:http-client cfg)
|
||||
{:method :get
|
||||
:uri file-uri}
|
||||
{:response-type :input-stream
|
||||
|
||||
@@ -30,8 +30,6 @@
|
||||
[cuerdas.core :as str]
|
||||
[expound.alpha :as expound]))
|
||||
|
||||
(def ^:dynamic *conn*)
|
||||
|
||||
(defn reset-password!
|
||||
"Reset a password to a specific one for a concrete user or all users
|
||||
if email is `:all` keyword."
|
||||
@@ -68,21 +66,17 @@
|
||||
(db/with-atomic [conn (:app.db/pool system)]
|
||||
(let [file (db/get-by-id conn :file id {:for-update true})
|
||||
file (-> file
|
||||
(update :features db/decode-pgarray #{})
|
||||
(update :data blob/decode)
|
||||
(cond-> migrate? (update :data pmg/migrate-data)))
|
||||
file (binding [*conn* conn]
|
||||
(-> (update-fn file)
|
||||
(cond-> inc-revn? (update :revn inc))))]
|
||||
(cond-> migrate? (update :data pmg/migrate-data))
|
||||
(update :data update-fn)
|
||||
(update :data blob/encode)
|
||||
(cond-> inc-revn? (update :revn inc)))]
|
||||
(when save?
|
||||
(let [features (db/create-array conn "text" (:features file))
|
||||
data (blob/encode (:data file))]
|
||||
(db/update! conn :file
|
||||
{:data data
|
||||
:revn (:revn file)
|
||||
:features features}
|
||||
{:id id})))
|
||||
file)))
|
||||
(db/update! conn :file
|
||||
{:data (:data file)
|
||||
:revn (:revn file)}
|
||||
{:id (:id file)}))
|
||||
(update file :data blob/decode))))
|
||||
|
||||
(def ^:private sql:retrieve-files-chunk
|
||||
"SELECT id, name, created_at, data FROM file
|
||||
@@ -128,12 +122,27 @@
|
||||
(on-end state)
|
||||
state))))))
|
||||
|
||||
|
||||
(defn analyze-file-data
|
||||
[system & {:keys [id on-form on-data]}]
|
||||
(let [file (get-file system id)]
|
||||
(cond
|
||||
(fn? on-data)
|
||||
(on-data (:data file))
|
||||
|
||||
(fn? on-form)
|
||||
(walk/postwalk (fn [form]
|
||||
(on-form form)
|
||||
form)
|
||||
(:data file)))
|
||||
nil))
|
||||
|
||||
(defn update-pages
|
||||
"Apply a function to all pages of one file. The function receives a page and returns an updated page."
|
||||
[data f]
|
||||
(update data :pages-index update-vals f))
|
||||
(update data :pages-index d/update-vals f))
|
||||
|
||||
(defn update-shapes
|
||||
"Apply a function to all shapes of one page The function receives a shape and returns an updated shape"
|
||||
[page f]
|
||||
(update page :objects update-vals f))
|
||||
(update page :objects d/update-vals f))
|
||||
|
||||
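A short sketch composing the two helpers above; `file-data` is a hypothetical binding holding a file's :data map, and :blocked is only an example of a per-shape key:

;; unlock every shape in every page of a file's data
(update-pages file-data
              (fn [page]
                (update-shapes page #(dissoc % :blocked))))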
@@ -16,11 +16,7 @@
|
||||
[app.rpc.queries.profile :as profile]
|
||||
[app.srepl.fixes :as f]
|
||||
[app.srepl.helpers :as h]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.objects-map :as omap]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.pprint :refer [pprint]]
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
@@ -38,16 +34,6 @@
|
||||
(task-fn params)
|
||||
(println (format "no task '%s' found" name))))))
|
||||
|
||||
(defn schedule-task!
|
||||
([system name]
|
||||
(schedule-task! system name {}))
|
||||
([system name props]
|
||||
(let [pool (:app.db/pool system)]
|
||||
(wrk/submit!
|
||||
::wrk/conn pool
|
||||
::wrk/task name
|
||||
::wrk/props props))))
|
||||
|
||||
(defn send-test-email!
|
||||
[system destination]
|
||||
(us/verify!
|
||||
@@ -76,18 +62,31 @@
|
||||
(cmd.auth/send-email-verification! pool sprops profile)
|
||||
:email-sent))
|
||||
|
||||
(defn mark-profile-as-active!
|
||||
"Mark the profile blocked and removes all the http sessiones
|
||||
associated with the profile-id."
|
||||
[system email]
|
||||
(db/with-atomic [conn (:app.db/pool system)]
|
||||
(when-let [profile (db/get-by-params conn :profile
|
||||
{:email (str/lower email)}
|
||||
{:columns [:id :email]
|
||||
:check-not-found false})]
|
||||
(when-not (:is-blocked profile)
|
||||
(db/update! conn :profile {:is-active true} {:id (:id profile)})
|
||||
:activated))))
|
||||
(defn update-profile!
|
||||
"Update a limited set of profile attrs."
|
||||
[system & {:keys [email id active? deleted? blocked?]}]
|
||||
|
||||
(us/verify!
|
||||
:expr (some? system)
|
||||
:hint "system should be provided")
|
||||
|
||||
(us/verify!
|
||||
:expr (or (string? email) (uuid? id))
|
||||
:hint "email or id should be provided")
|
||||
|
||||
(let [params (cond-> {}
|
||||
(true? active?) (assoc :is-active true)
|
||||
(false? active?) (assoc :is-active false)
|
||||
(true? deleted?) (assoc :deleted-at (dt/now))
|
||||
(true? blocked?) (assoc :is-blocked true)
|
||||
(false? blocked?) (assoc :is-blocked false))
|
||||
opts (cond-> {}
|
||||
(some? email) (assoc :email (str/lower email))
|
||||
(some? id) (assoc :id id))]
|
||||
|
||||
(db/with-atomic [conn (:app.db/pool system)]
|
||||
(some-> (db/update! conn :profile params opts)
|
||||
(profile/decode-profile-row)))))
|
||||
|
||||
(defn mark-profile-as-blocked!
|
||||
"Mark the profile blocked and removes all the http sessiones
|
||||
@@ -102,54 +101,3 @@
|
||||
(db/update! conn :profile {:is-blocked true} {:id (:id profile)})
|
||||
(db/delete! conn :http-session {:profile-id (:id profile)})
|
||||
:blocked))))
|
||||
|
||||
|
||||
(defn enable-objects-map-feature-on-file!
|
||||
[system & {:keys [save? id]}]
|
||||
(letfn [(update-file [{:keys [features] :as file}]
|
||||
(if (contains? features "storage/objects-map")
|
||||
file
|
||||
(-> file
|
||||
(update :data migrate-to-omap)
|
||||
(update :features conj "storage/objects-map"))))
|
||||
|
||||
(migrate-to-omap [data]
|
||||
(-> data
|
||||
(update :pages-index update-vals #(update % :objects omap/wrap))
|
||||
(update :components update-vals #(update % :objects omap/wrap))))]
|
||||
|
||||
(h/update-file! system
|
||||
:id id
|
||||
:update-fn update-file
|
||||
:save? save?)))
|
||||
|
||||
(defn enable-pointer-map-feature-on-file!
|
||||
[system & {:keys [save? id]}]
|
||||
(letfn [(update-file [{:keys [features id] :as file}]
|
||||
(if (contains? features "storage/pointer-map")
|
||||
file
|
||||
(-> file
|
||||
(update :data migrate-to-omap id)
|
||||
(update :features conj "storage/pointer-map"))))
|
||||
|
||||
(migrate-to-omap [data file-id]
|
||||
(binding [pmap/*tracked* (atom {})]
|
||||
(let [data (-> data
|
||||
(update :pages-index update-vals pmap/wrap)
|
||||
(update :components pmap/wrap))]
|
||||
(doseq [[id item] @pmap/*tracked*]
|
||||
(db/insert! h/*conn* :file-data-fragment
|
||||
{:id id
|
||||
:file-id file-id
|
||||
:content (-> item deref blob/encode)}))
|
||||
data)))]
|
||||
|
||||
(h/update-file! system
|
||||
:id id
|
||||
:update-fn update-file
|
||||
:save? save?)))
|
||||
|
||||
(defn enable-storage-features-on-file!
|
||||
[system & {:as params}]
|
||||
(enable-objects-map-feature-on-file! system params)
|
||||
(enable-pointer-map-feature-on-file! system params))
|
||||
|
||||
@@ -75,10 +75,8 @@
|
||||
(defmethod impl/get-object-bytes :fs
|
||||
[backend object]
|
||||
(p/let [input (impl/get-object-data backend object)]
|
||||
(try
|
||||
(io/read-as-bytes input)
|
||||
(finally
|
||||
(io/close! input)))))
|
||||
(ex/with-always (io/close! input)
|
||||
(io/read-as-bytes input))))
|
||||
|
||||
(defmethod impl/get-object-url :fs
|
||||
[{:keys [uri executor] :as backend} {:keys [id] :as object} _]
|
||||
|
||||
@@ -12,7 +12,6 @@
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.logging :as l]
|
||||
[app.storage :as-alias sto]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.core.async :as a]
|
||||
@@ -24,43 +23,43 @@
|
||||
(declare remove-temp-file)
|
||||
(defonce queue (a/chan 128))
|
||||
|
||||
(s/def ::min-age ::dt/duration)
|
||||
|
||||
(defmethod ig/pre-init-spec ::cleaner [_]
|
||||
(s/keys :req [::sto/min-age ::wrk/scheduled-executor]))
|
||||
(s/keys :req-un [::min-age ::wrk/scheduler ::wrk/executor]))
|
||||
|
||||
(defmethod ig/prep-key ::cleaner
|
||||
[_ cfg]
|
||||
(merge {::sto/min-age (dt/duration "30m")}
|
||||
(merge {:min-age (dt/duration {:minutes 30})}
|
||||
(d/without-nils cfg)))
|
||||
|
||||
(defmethod ig/init-key ::cleaner
|
||||
[_ {:keys [::sto/min-age ::wrk/scheduled-executor] :as cfg}]
|
||||
(px/thread
|
||||
{:name "penpot/storage-tmp-cleaner"}
|
||||
(try
|
||||
(l/info :hint "started tmp file cleaner")
|
||||
(loop []
|
||||
(when-let [path (a/<!! queue)]
|
||||
[_ {:keys [scheduler executor min-age] :as cfg}]
|
||||
(l/info :hint "starting tempfile cleaner service")
|
||||
(let [cch (a/chan)]
|
||||
(a/go-loop []
|
||||
(let [[path port] (a/alts! [queue cch])]
|
||||
(when (not= port cch)
|
||||
(l/trace :hint "schedule tempfile deletion" :path path
|
||||
:expires-at (dt/plus (dt/now) min-age))
|
||||
(px/schedule! scheduled-executor
|
||||
(px/schedule! scheduler
|
||||
(inst-ms min-age)
|
||||
(partial remove-temp-file path))
|
||||
(recur)))
|
||||
(catch InterruptedException _
|
||||
(l/debug :hint "interrupted"))
|
||||
(finally
|
||||
(l/info :hint "terminated tmp file cleaner")))))
|
||||
(partial remove-temp-file executor path))
|
||||
(recur))))
|
||||
cch))
|
||||
|
||||
(defmethod ig/halt-key! ::cleaner
|
||||
[_ thread]
|
||||
(px/interrupt! thread))
|
||||
[_ close-ch]
|
||||
(l/info :hint "stopping tempfile cleaner service")
|
||||
(some-> close-ch a/close!))
|
||||
|
||||
(defn- remove-temp-file
|
||||
"Permanently delete tempfile"
|
||||
[path]
|
||||
(l/trace :hint "permanently delete tempfile" :path path)
|
||||
(when (fs/exists? path)
|
||||
(fs/delete path)))
|
||||
[executor path]
|
||||
(px/with-dispatch executor
|
||||
(l/trace :hint "permanently delete tempfile" :path path)
|
||||
(when (fs/exists? path)
|
||||
(fs/delete path))))
|
||||
|
||||
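In both the old and the new variant of the cleaner above, work arrives through the `queue` channel; a hedged sketch of how a path would be enqueued (the path itself is made up):

(require '[clojure.core.async :as a])

;; the file is scheduled for deletion roughly `min-age` after being enqueued,
;; and is only removed if it still exists at that point
(a/>!! queue "/tmp/penpot/upload-1234.tmp")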
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; API
|
||||
|
||||
@@ -17,12 +17,11 @@
|
||||
[app.common.types.shape-tree :as ctt]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.time :as dt]
|
||||
[clojure.set :as set]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]))
|
||||
|
||||
(declare ^:private retrieve-candidates)
|
||||
@@ -44,10 +43,10 @@
|
||||
|
||||
(defmethod ig/init-key ::handler
|
||||
[_ {:keys [pool] :as cfg}]
|
||||
(fn [{:keys [file-id] :as params}]
|
||||
(fn [{:keys [id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [min-age (or (:min-age params) (:min-age cfg))
|
||||
cfg (assoc cfg :min-age min-age :conn conn :file-id file-id)]
|
||||
cfg (assoc cfg :min-age min-age :conn conn :id id)]
|
||||
(loop [total 0
|
||||
files (retrieve-candidates cfg)]
|
||||
(if-let [file (first files)]
|
||||
@@ -56,7 +55,7 @@
|
||||
(recur (inc total)
|
||||
(rest files)))
|
||||
(do
|
||||
(l/info :hint "task finished" :min-age (dt/format-duration min-age) :processed total)
|
||||
(l/info :hint "task finished" :min-age (dt/format-duration min-age) :total total)
|
||||
|
||||
;; Allow optional rollback passed by params
|
||||
(when (:rollback? params)
|
||||
@@ -73,7 +72,6 @@
|
||||
"select f.id,
|
||||
f.data,
|
||||
f.revn,
|
||||
f.features,
|
||||
f.modified_at
|
||||
from file as f
|
||||
where f.has_media_trimmed is false
|
||||
@@ -84,26 +82,21 @@
|
||||
for update skip locked")
|
||||
|
||||
(defn- retrieve-candidates
|
||||
[{:keys [conn min-age file-id] :as cfg}]
|
||||
(if (uuid? file-id)
|
||||
[{:keys [conn min-age id] :as cfg}]
|
||||
(if id
|
||||
(do
|
||||
(l/warn :hint "explicit file id passed on params" :file-id file-id)
|
||||
(->> (db/query conn :file {:id file-id})
|
||||
(map #(update % :features db/decode-pgarray #{}))))
|
||||
(l/warn :hint "explicit file id passed on params" :id id)
|
||||
(db/query conn :file {:id id}))
|
||||
(let [interval (db/interval min-age)
|
||||
get-chunk (fn [cursor]
|
||||
(let [rows (db/exec! conn [sql:retrieve-candidates-chunk interval cursor])]
|
||||
[(some->> rows peek :modified-at)
|
||||
(map #(update % :features db/decode-pgarray #{}) rows)]))]
|
||||
|
||||
[(some->> rows peek :modified-at) (seq rows)]))]
|
||||
(d/iteration get-chunk
|
||||
:vf second
|
||||
:kf first
|
||||
:initk (dt/now)))))
|
||||
|
||||
(defn collect-used-media
|
||||
"Analyzes the file data and collects all references to external
|
||||
assets. Returns a set of ids."
|
||||
[data]
|
||||
(let [xform (comp
|
||||
(map :objects)
|
||||
@@ -159,8 +152,9 @@
|
||||
unused (set/difference stored using)]
|
||||
|
||||
(when (seq unused)
|
||||
(let [sql (str "delete from file_object_thumbnail "
|
||||
" where file_id=? and object_id=ANY(?)")
|
||||
(let [sql (str/concat
|
||||
"delete from file_object_thumbnail "
|
||||
" where file_id=? and object_id=ANY(?)")
|
||||
res (db/exec-one! conn [sql file-id (db/create-array conn "text" unused)])]
|
||||
(l/debug :hint "delete file object thumbnails" :file-id file-id :total (:next.jdbc/update-count res))))))
|
||||
|
||||
@@ -239,41 +233,21 @@
|
||||
{:data new-data}
|
||||
{:id library-id})))))
|
||||
|
||||
(def ^:private sql:get-unused-fragments
|
||||
"SELECT id FROM file_data_fragment
|
||||
WHERE file_id = ? AND id != ALL(?::uuid[])")
|
||||
|
||||
(defn- clean-data-fragments!
|
||||
[conn file-id data]
|
||||
(let [used (->> (concat (vals data)
|
||||
(vals (:pages-index data)))
|
||||
(into #{} (comp (filter pmap/pointer-map?)
|
||||
(map pmap/get-id)))
|
||||
(db/create-array conn "uuid"))
|
||||
rows (db/exec! conn [sql:get-unused-fragments file-id used])]
|
||||
(doseq [fragment-id (map :id rows)]
|
||||
(l/trace :hint "remove unused file data fragment" :id (str fragment-id))
|
||||
(db/delete! conn :file-data-fragment {:id fragment-id :file-id file-id}))))
|
||||
|
||||
(defn- process-file
|
||||
[{:keys [conn] :as cfg} {:keys [id data revn modified-at features] :as file}]
|
||||
[{:keys [conn] :as cfg} {:keys [id data revn modified-at] :as file}]
|
||||
(l/debug :hint "processing file" :id id :modified-at modified-at)
|
||||
|
||||
(binding [pmap/*load-fn* (partial files/load-pointer conn id)]
|
||||
(let [data (-> (blob/decode data)
|
||||
(assoc :id id)
|
||||
(pmg/migrate-data))]
|
||||
(let [data (-> (blob/decode data)
|
||||
(assoc :id id)
|
||||
(pmg/migrate-data))]
|
||||
|
||||
(clean-file-media! conn id data)
|
||||
(clean-file-frame-thumbnails! conn id data)
|
||||
(clean-file-thumbnails! conn id revn)
|
||||
(clean-deleted-components! conn id data)
|
||||
(clean-file-media! conn id data)
|
||||
(clean-file-frame-thumbnails! conn id data)
|
||||
(clean-file-thumbnails! conn id revn)
|
||||
(clean-deleted-components! conn id data)
|
||||
|
||||
(when (contains? features "storage/pointer-map")
|
||||
(clean-data-fragments! conn id data))
|
||||
|
||||
;; Mark file as trimmed
|
||||
(db/update! conn :file
|
||||
;; Mark file as trimmed
|
||||
(db/update! conn :file
|
||||
{:has-media-trimmed true}
|
||||
{:id id})
|
||||
nil)))
|
||||
nil))
|
||||
|
||||
@@ -11,14 +11,13 @@
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.spec :as us]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.http.client :as http]
|
||||
[app.main :as-alias main]
|
||||
[app.util.async :refer [thread-sleep]]
|
||||
[app.util.json :as json]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[promesa.exec :as px]))
|
||||
[integrant.core :as ig]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; TASK ENTRY POINT
|
||||
@@ -29,13 +28,18 @@
|
||||
(declare get-subscriptions-newsletter-updates)
|
||||
(declare get-subscriptions-newsletter-news)
|
||||
|
||||
(s/def ::http-client fn?)
|
||||
(s/def ::version ::us/string)
|
||||
(s/def ::uri ::us/string)
|
||||
(s/def ::instance-id ::us/uuid)
|
||||
(s/def ::sprops
|
||||
(s/keys :req-un [::instance-id]))
|
||||
|
||||
(defmethod ig/pre-init-spec ::handler [_]
|
||||
(s/keys :req [::http/client
|
||||
::db/pool
|
||||
::main/props]))
|
||||
(s/keys :req-un [::db/pool ::http-client ::version ::uri ::sprops]))
|
||||
|
||||
(defmethod ig/init-key ::handler
|
||||
[_ {:keys [::db/pool ::main/props] :as cfg}]
|
||||
[_ {:keys [pool sprops version] :as cfg}]
|
||||
(fn [{:keys [send? enabled?] :or {send? true enabled? false}}]
|
||||
(let [subs {:newsletter-updates (get-subscriptions-newsletter-updates pool)
|
||||
:newsletter-news (get-subscriptions-newsletter-news pool)}
|
||||
@@ -44,15 +48,15 @@
|
||||
(cf/get :telemetry-enabled))
|
||||
|
||||
data {:subscriptions subs
|
||||
:version (:full cf/version)
|
||||
:instance-id (:instance-id props)}]
|
||||
:version version
|
||||
:instance-id (:instance-id sprops)}]
|
||||
(cond
|
||||
;; If we have telemetry enabled, then proceed with the normal
;; operation.
|
||||
enabled?
|
||||
(let [data (merge data (get-stats pool))]
|
||||
(when send?
|
||||
(px/sleep (rand-int 10000))
|
||||
(thread-sleep (rand-int 10000))
|
||||
(send! cfg data))
|
||||
data)
|
||||
|
||||
@@ -64,7 +68,7 @@
|
||||
(seq subs)
|
||||
(do
|
||||
(when send?
|
||||
(px/sleep (rand-int 10000))
|
||||
(thread-sleep (rand-int 10000))
|
||||
(send! cfg data))
|
||||
data)
|
||||
|
||||
@@ -76,13 +80,12 @@
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn- send!
|
||||
[cfg data]
|
||||
(let [response (http/req! cfg
|
||||
{:method :post
|
||||
:uri (cf/get :telemetry-uri)
|
||||
:headers {"content-type" "application/json"}
|
||||
:body (json/encode-str data)}
|
||||
{:sync? true})]
|
||||
[{:keys [http-client uri] :as cfg} data]
|
||||
(let [response (http-client {:method :post
|
||||
:uri uri
|
||||
:headers {"content-type" "application/json"}
|
||||
:body (json/write-str data)}
|
||||
{:sync? true})]
|
||||
(when (> (:status response) 206)
|
||||
(ex/raise :type :internal
|
||||
:code :invalid-response
|
||||
|
||||
@@ -110,4 +110,4 @@
|
||||
|
||||
(defn thread-sleep
|
||||
[ms]
|
||||
(Thread/sleep (long ms)))
|
||||
(Thread/sleep ms))
|
||||
|
||||
@@ -12,18 +12,14 @@
|
||||
[app.config :as cf]
|
||||
[app.util.fressian :as fres])
|
||||
(:import
|
||||
com.github.luben.zstd.Zstd
|
||||
java.io.ByteArrayInputStream
|
||||
java.io.ByteArrayOutputStream
|
||||
java.io.DataInputStream
|
||||
java.io.DataOutputStream
|
||||
java.io.InputStream
|
||||
java.io.OutputStream
|
||||
net.jpountz.lz4.LZ4Compressor
|
||||
com.github.luben.zstd.Zstd
|
||||
net.jpountz.lz4.LZ4Factory
|
||||
net.jpountz.lz4.LZ4FastDecompressor
|
||||
net.jpountz.lz4.LZ4FrameInputStream
|
||||
net.jpountz.lz4.LZ4FrameOutputStream))
|
||||
net.jpountz.lz4.LZ4Compressor))
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
@@ -32,21 +28,18 @@
|
||||
(declare decode-v1)
|
||||
(declare decode-v3)
|
||||
(declare decode-v4)
|
||||
(declare decode-v5)
|
||||
(declare encode-v1)
|
||||
(declare encode-v3)
|
||||
(declare encode-v4)
|
||||
(declare encode-v5)
|
||||
|
||||
(defn encode
|
||||
([data] (encode data nil))
|
||||
([data {:keys [version]}]
|
||||
(let [version (or version (cf/get :default-blob-version 5))]
|
||||
(let [version (or version (cf/get :default-blob-version 4))]
|
||||
(case (long version)
|
||||
1 (encode-v1 data)
|
||||
3 (encode-v3 data)
|
||||
4 (encode-v4 data)
|
||||
5 (encode-v5 data)
|
||||
(throw (ex-info "unsupported version" {:version version}))))))
|
||||
|
||||
(defn decode
|
||||
@@ -60,7 +53,6 @@
|
||||
1 (decode-v1 data ulen)
|
||||
3 (decode-v3 data ulen)
|
||||
4 (decode-v4 data ulen)
|
||||
5 (decode-v5 data)
|
||||
(throw (ex-info "unsupported version" {:version version}))))))
|
||||
|
||||
;; --- IMPL
|
||||
@@ -116,17 +108,15 @@
|
||||
dlen (alength ^bytes data)
|
||||
mlen (Zstd/compressBound dlen)
|
||||
cdata (byte-array mlen)
|
||||
cdlen (alength ^bytes cdata)
|
||||
cmlen (Zstd/compressByteArray ^bytes cdata 0 mlen
|
||||
clen (Zstd/compressByteArray ^bytes cdata 0 mlen
|
||||
^bytes data 0 dlen
|
||||
0)]
|
||||
|
||||
(with-open [^ByteArrayOutputStream output (ByteArrayOutputStream. (+ cdlen 2 4))]
|
||||
(with-open [^DataOutputStream output (DataOutputStream. output)]
|
||||
(.writeShort output (short 4)) ;; version number
|
||||
(.writeInt output (int dlen))
|
||||
(.write output ^bytes cdata (int 0) (int cmlen)))
|
||||
(.toByteArray output))))
|
||||
(with-open [^ByteArrayOutputStream baos (ByteArrayOutputStream. (+ (alength cdata) 2 4))
|
||||
^DataOutputStream dos (DataOutputStream. baos)]
|
||||
(.writeShort dos (short 4)) ;; version number
|
||||
(.writeInt dos (int dlen))
|
||||
(.write dos ^bytes cdata (int 0) clen)
|
||||
(.toByteArray baos))))
|
||||
|
||||
(defn- decode-v4
|
||||
[^bytes cdata ^long ulen]
|
||||
@@ -134,21 +124,3 @@
|
||||
(Zstd/decompressByteArray ^bytes udata 0 ulen
|
||||
^bytes cdata 6 (- (alength cdata) 6))
|
||||
(fres/decode udata)))
|
||||
|
||||
(defn- encode-v5
|
||||
[data]
|
||||
(with-open [^ByteArrayOutputStream output (ByteArrayOutputStream.)]
|
||||
(with-open [^DataOutputStream output (DataOutputStream. output)]
|
||||
(.writeShort output (short 5)) ;; version number
|
||||
(.writeInt output (int -1))
|
||||
(with-open [^OutputStream output (LZ4FrameOutputStream. output)]
|
||||
(-> (fres/writer output)
|
||||
(fres/write! data))))
|
||||
(.toByteArray output)))
|
||||
|
||||
(defn- decode-v5
|
||||
[^bytes cdata]
|
||||
(with-open [^InputStream input (ByteArrayInputStream. cdata)]
|
||||
(.skip input 6)
|
||||
(with-open [^InputStream input (LZ4FrameInputStream. input)]
|
||||
(-> input fres/reader fres/read!))))
|
||||
|
||||
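As a reading aid for the encode-v4/decode-v4 pair above (the v5 LZ4 variant is being removed in this diff), the byte layout implied by the writes and by the 6-byte skip on decode is roughly:

;; bytes 0-1 : version number written as a short (4)
;; bytes 2-5 : uncompressed payload length (int), used to size the buffer on decode
;; bytes 6-  : Zstd-compressed fressian payload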
31
backend/src/app/util/closeable.clj
Normal file
@@ -0,0 +1,31 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.util.closeable
|
||||
"A closeable abstraction. A drop in replacement for
|
||||
clojure builtin `with-open` syntax abstraction."
|
||||
(:refer-clojure :exclude [with-open]))
|
||||
|
||||
(defprotocol ICloseable
|
||||
(-close [_] "Close the resource."))
|
||||
|
||||
(defmacro with-open
|
||||
[bindings & body]
|
||||
{:pre [(vector? bindings)
|
||||
(even? (count bindings))
|
||||
(pos? (count bindings))]}
|
||||
(reduce (fn [acc bindings]
|
||||
`(let ~(vec bindings)
|
||||
(try
|
||||
~acc
|
||||
(finally
|
||||
(-close ~(first bindings))))))
|
||||
`(do ~@body)
|
||||
(reverse (partition 2 bindings))))
|
||||
|
||||
(extend-protocol ICloseable
|
||||
java.lang.AutoCloseable
|
||||
(-close [this] (.close this)))
|
||||
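A hedged usage sketch for the macro above: because `java.lang.AutoCloseable` is extended to `ICloseable`, plain JDK streams work, and bindings are closed in reverse order when the body exits. The alias for `clojure.java.io` and the file names are assumptions:

(require '[app.util.closeable :as closeable]
         '[clojure.java.io :as io])

(closeable/with-open [in  (io/input-stream "in.bin")    ;; closed last
                      out (io/output-stream "out.bin")] ;; closed first
  (io/copy in out))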
@@ -6,7 +6,6 @@
|
||||
|
||||
(ns app.util.fressian
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.geom.matrix :as gmt]
|
||||
[app.common.geom.point :as gpt]
|
||||
[clojure.data.fressian :as fres])
|
||||
@@ -18,13 +17,14 @@
|
||||
java.io.ByteArrayOutputStream
|
||||
java.time.Instant
|
||||
java.time.OffsetDateTime
|
||||
java.util.List
|
||||
org.fressian.Reader
|
||||
org.fressian.StreamingWriter
|
||||
org.fressian.Writer
|
||||
org.fressian.handlers.ReadHandler
|
||||
org.fressian.handlers.WriteHandler))
|
||||
|
||||
;; --- MISC
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
(defn str->bytes
|
||||
@@ -33,250 +33,231 @@
|
||||
([^String s, ^String encoding]
|
||||
(.getBytes s encoding)))
|
||||
|
||||
;; --- LOW LEVEL FRESSIAN API
|
||||
|
||||
(defn write-object!
|
||||
([^Writer w ^Object o]
|
||||
(.writeObject w o))
|
||||
([^Writer w ^Object o ^Boolean cache?]
|
||||
(.writeObject w o cache?)))
|
||||
|
||||
(defn read-object!
|
||||
[^Reader r]
|
||||
(.readObject r))
|
||||
|
||||
(defn write-tag!
|
||||
([^Writer w ^String n]
|
||||
(.writeTag w n 1))
|
||||
([^Writer w ^String n ^long ni]
|
||||
(.writeTag w n ni)))
|
||||
|
||||
(defn write-bytes!
|
||||
[^Writer w ^bytes data]
|
||||
(.writeBytes w data))
|
||||
|
||||
(defn write-int!
|
||||
[^Writer w ^long val]
|
||||
(.writeInt w val))
|
||||
|
||||
(defn write-list!
|
||||
[^Writer w ^List val]
|
||||
(.writeList w val))
|
||||
|
||||
;; --- READ AND WRITE HANDLERS
|
||||
|
||||
(defn read-symbol
|
||||
[r]
|
||||
(symbol (read-object! r)
|
||||
(read-object! r)))
|
||||
|
||||
(defn read-keyword
|
||||
[r]
|
||||
(keyword (read-object! r)
|
||||
(read-object! r)))
|
||||
|
||||
(defn write-named
|
||||
[tag ^Writer w s]
|
||||
(write-tag! w tag 2)
|
||||
(write-object! w (namespace s) true)
|
||||
(write-object! w (name s) true))
|
||||
(.writeTag w tag 2)
|
||||
(.writeObject w (namespace s) true)
|
||||
(.writeObject w (name s) true))
|
||||
|
||||
(defn write-list-like
|
||||
[tag ^Writer w o]
|
||||
(write-tag! w tag 1)
|
||||
(write-list! w o))
|
||||
([^Writer w tag o]
|
||||
(.writeTag w tag 1)
|
||||
(.writeList w o)))
|
||||
|
||||
(defn begin-closed-list!
|
||||
[^StreamingWriter w]
|
||||
(.beginClosedList w))
|
||||
|
||||
(defn end-list!
|
||||
[^StreamingWriter w]
|
||||
(.endList w))
|
||||
(defn read-list-like
|
||||
[^Reader rdr build-fn]
|
||||
(build-fn (.readObject rdr)))
|
||||
|
||||
(defn write-map-like
|
||||
"Writes a map as Fressian with the tag 'map' and all keys cached."
|
||||
[tag ^Writer w m]
|
||||
(write-tag! w tag 1)
|
||||
(begin-closed-list! w)
|
||||
[^Writer w tag m]
|
||||
(.writeTag w tag 1)
|
||||
(.beginClosedList ^StreamingWriter w)
|
||||
(loop [items (seq m)]
|
||||
(when-let [^clojure.lang.MapEntry item (first items)]
|
||||
(write-object! w (.key item) true)
|
||||
(write-object! w (.val item))
|
||||
(.writeObject w (.key item) true)
|
||||
(.writeObject w (.val item))
|
||||
(recur (rest items))))
|
||||
(end-list! w))
|
||||
(.endList ^StreamingWriter w))
|
||||
|
||||
(defn read-map-like
|
||||
[^Reader rdr]
|
||||
(let [kvs ^java.util.List (read-object! rdr)]
|
||||
(let [kvs ^java.util.List (.readObject rdr)]
|
||||
(if (< (.size kvs) 16)
|
||||
(clojure.lang.PersistentArrayMap. (.toArray kvs))
|
||||
(clojure.lang.PersistentHashMap/create (seq kvs)))))
|
||||
|
||||
(def ^:dynamic *write-handler-lookup* nil)
|
||||
(def ^:dynamic *read-handler-lookup* nil)
|
||||
(def write-handlers
|
||||
{ Character
|
||||
{"char"
|
||||
(reify WriteHandler
|
||||
(write [_ w ch]
|
||||
(.writeTag w "char" 1)
|
||||
(.writeInt w (int ch))))}
|
||||
|
||||
(def write-handlers (atom {}))
|
||||
(def read-handlers (atom {}))
|
||||
app.common.geom.point.Point
|
||||
{"penpot/point"
|
||||
(reify WriteHandler
|
||||
(write [_ w o]
|
||||
(.writeTag ^Writer w "penpot/point" 1)
|
||||
(.writeList ^Writer w (java.util.List/of (.-x ^Point o) (.-y ^Point o)))))}
|
||||
|
||||
(defn add-handlers!
|
||||
[& handlers]
|
||||
(letfn [(adapt-write-handler [{:keys [name class wfn]}]
|
||||
[class {name (reify WriteHandler
|
||||
(write [_ w o]
|
||||
(wfn name w o)))}])
|
||||
app.common.geom.matrix.Matrix
|
||||
{"penpot/matrix"
|
||||
(reify WriteHandler
|
||||
(write [_ w o]
|
||||
(.writeTag ^Writer w "penpot/matrix" 1)
|
||||
(.writeList ^Writer w (java.util.List/of (.-a ^Matrix o)
|
||||
(.-b ^Matrix o)
|
||||
(.-c ^Matrix o)
|
||||
(.-d ^Matrix o)
|
||||
(.-e ^Matrix o)
|
||||
(.-f ^Matrix o)))))}
|
||||
|
||||
(adapt-read-handler [{:keys [name rfn]}]
|
||||
[name (reify ReadHandler
|
||||
(read [_ rdr _ _]
|
||||
(rfn rdr)))])
|
||||
Instant
|
||||
{"java/instant"
|
||||
(reify WriteHandler
|
||||
(write [_ w ch]
|
||||
(.writeTag w "java/instant" 1)
|
||||
(.writeInt w (.toEpochMilli ^Instant ch))))}
|
||||
|
||||
(merge-and-clean [m1 m2]
|
||||
(-> (merge m1 m2)
|
||||
(d/without-nils)))]
|
||||
OffsetDateTime
|
||||
{"java/instant"
|
||||
(reify WriteHandler
|
||||
(write [_ w ch]
|
||||
(.writeTag w "java/instant" 1)
|
||||
(.writeInt w (.toEpochMilli ^Instant (.toInstant ^OffsetDateTime ch)))))}
|
||||
|
||||
(let [whs (into {}
|
||||
(comp
|
||||
(filter :wfn)
|
||||
(map adapt-write-handler))
|
||||
handlers)
|
||||
rhs (into {}
|
||||
(comp
|
||||
(filter :rfn)
|
||||
(map adapt-read-handler))
|
||||
handlers)
|
||||
cwh (swap! write-handlers merge-and-clean whs)
|
||||
crh (swap! read-handlers merge-and-clean rhs)]
|
||||
Ratio
|
||||
{"ratio"
|
||||
(reify WriteHandler
|
||||
(write [_ w n]
|
||||
(.writeTag w "ratio" 2)
|
||||
(.writeObject w (.numerator ^Ratio n))
|
||||
(.writeObject w (.denominator ^Ratio n))))}
|
||||
|
||||
(alter-var-root #'*write-handler-lookup* (constantly (-> cwh fres/associative-lookup fres/inheritance-lookup)))
|
||||
(alter-var-root #'*read-handler-lookup* (constantly (-> crh fres/associative-lookup)))
|
||||
nil)))
|
||||
clojure.lang.IPersistentMap
|
||||
{"clj/map"
|
||||
(reify WriteHandler
|
||||
(write [_ w d]
|
||||
(write-map-like w "clj/map" d)))}
|
||||
|
||||
(defn write-char
|
||||
[n w o]
|
||||
(write-tag! w n 1)
|
||||
(write-int! w o))
|
||||
clojure.lang.Keyword
|
||||
{"clj/keyword"
|
||||
(reify WriteHandler
|
||||
(write [_ w s]
|
||||
(write-named "clj/keyword" w s)))}
|
||||
|
||||
(defn read-char
|
||||
[rdr]
|
||||
(char (read-object! rdr)))
|
||||
clojure.lang.BigInt
|
||||
{"bigint"
|
||||
(reify WriteHandler
|
||||
(write [_ w d]
|
||||
(let [^BigInteger bi (if (instance? clojure.lang.BigInt d)
|
||||
(.toBigInteger ^clojure.lang.BigInt d)
|
||||
d)]
|
||||
(.writeTag w "bigint" 1)
|
||||
(.writeBytes w (.toByteArray bi)))))}
|
||||
|
||||
(defn write-instant
|
||||
[n w o]
|
||||
(write-tag! w n 1)
|
||||
(write-int! w (.toEpochMilli ^Instant o)))
|
||||
;; Persistent set
|
||||
clojure.lang.IPersistentSet
|
||||
{"clj/set"
|
||||
(reify WriteHandler
|
||||
(write [_ w o]
|
||||
(write-list-like w "clj/set" o)))}
|
||||
|
||||
(defn write-offset-date-time
|
||||
[n w o]
|
||||
(write-tag! w n 1)
|
||||
(write-int! w (.toEpochMilli ^Instant (.toInstant ^OffsetDateTime o))))
|
||||
;; Persistent vector
|
||||
clojure.lang.IPersistentVector
|
||||
{"clj/vector"
|
||||
(reify WriteHandler
|
||||
(write [_ w o]
|
||||
(write-list-like w "clj/vector" o)))}
|
||||
|
||||
(defn read-instant
|
||||
[rdr]
|
||||
(Instant/ofEpochMilli (.readInt ^Reader rdr)))
|
||||
;; Persistent list
|
||||
clojure.lang.IPersistentList
|
||||
{"clj/list"
|
||||
(reify WriteHandler
|
||||
(write [_ w o]
|
||||
(write-list-like w "clj/list" o)))}
|
||||
|
||||
(defn write-ratio
|
||||
[n w o]
|
||||
(write-tag! w n 2)
|
||||
(write-object! w (.numerator ^Ratio o))
|
||||
(write-object! w (.denominator ^Ratio o)))
|
||||
;; Persistent seq & lazy seqs
|
||||
clojure.lang.ISeq
|
||||
{"clj/seq"
|
||||
(reify WriteHandler
|
||||
(write [_ w o]
|
||||
(write-list-like w "clj/seq" o)))}
|
||||
})
|
||||
|
||||
(defn read-ratio
|
||||
[rdr]
|
||||
(Ratio. (biginteger (read-object! rdr))
|
||||
(biginteger (read-object! rdr))))
|
||||
|
||||
(defn write-bigint
|
||||
[n w o]
|
||||
(let [^BigInteger bi (if (instance? clojure.lang.BigInt o)
|
||||
(.toBigInteger ^clojure.lang.BigInt o)
|
||||
o)]
|
||||
(write-tag! w n 1)
|
||||
(write-bytes! w (.toByteArray bi))))
|
||||
(def read-handlers
|
||||
{"bigint"
|
||||
(reify ReadHandler
|
||||
(read [_ rdr _ _]
|
||||
(let [^bytes bibytes (.readObject rdr)]
|
||||
(bigint (BigInteger. bibytes)))))
|
||||
|
||||
(defn read-bigint
|
||||
[rdr]
|
||||
(let [^bytes bibytes (read-object! rdr)]
|
||||
(bigint (BigInteger. bibytes))))
|
||||
"byte"
|
||||
(reify ReadHandler
|
||||
(read [_ rdr _ _]
|
||||
(byte (.readObject rdr))))
|
||||
|
||||
(add-handlers!
|
||||
{:name "char"
|
||||
:class Character
|
||||
:wfn write-char
|
||||
:rfn read-char}
|
||||
"penpot/matrix"
|
||||
(reify ReadHandler
|
||||
(read [_ rdr _ _]
|
||||
(let [^java.util.List x (.readObject rdr)]
|
||||
(Matrix. (.get x 0) (.get x 1) (.get x 2) (.get x 3) (.get x 4) (.get x 5)))))
|
||||
|
||||
{:name "java/instant"
|
||||
:class Instant
|
||||
:wfn write-instant
|
||||
:rfn read-instant}
|
||||
"penpot/point"
|
||||
(reify ReadHandler
|
||||
(read [_ rdr _ _]
|
||||
(let [^java.util.List x (.readObject rdr)]
|
||||
(Point. (.get x 0) (.get x 1)))))
|
||||
|
||||
{:name "java/instant"
|
||||
:class OffsetDateTime
|
||||
:wfn write-offset-date-time
|
||||
:rfn read-instant}
|
||||
"char"
|
||||
(reify ReadHandler
|
||||
(read [_ rdr _ _]
|
||||
(char (.readObject rdr))))
|
||||
|
||||
;; LEGACY
|
||||
{:name "ratio"
|
||||
:rfn read-ratio}
|
||||
"java/instant"
|
||||
(reify ReadHandler
|
||||
(read [_ rdr _ _]
|
||||
(Instant/ofEpochMilli (.readInt rdr))))
|
||||
|
||||
{:name "clj/ratio"
|
||||
:class Ratio
|
||||
:wfn write-ratio
|
||||
:rfn read-ratio}
|
||||
|
||||
{:name "clj/map"
|
||||
:class clojure.lang.IPersistentMap
|
||||
:wfn write-map-like
|
||||
:rfn read-map-like}
|
||||
"ratio"
|
||||
(reify ReadHandler
|
||||
(read [_ rdr _ _]
|
||||
(Ratio. (biginteger (.readObject rdr))
|
||||
(biginteger (.readObject rdr)))))
|
||||
|
||||
{:name "clj/keyword"
|
||||
:class clojure.lang.Keyword
|
||||
:wfn write-named
|
||||
:rfn read-keyword}
|
||||
"clj/keyword"
|
||||
(reify ReadHandler
|
||||
(read [_ rdr _ _]
|
||||
(keyword (.readObject rdr) (.readObject rdr))))
|
||||
|
||||
{:name "clj/symbol"
|
||||
:class clojure.lang.Symbol
|
||||
:wfn write-named
|
||||
:rfn read-symbol}
|
||||
"clj/map"
|
||||
(reify ReadHandler
|
||||
(read [_ rdr _ _]
|
||||
(read-map-like rdr)))
|
||||
|
||||
;; LEGACY
|
||||
{:name "bigint"
|
||||
:rfn read-bigint}
|
||||
"clj/set"
|
||||
(reify ReadHandler
|
||||
(read [_ rdr _ _]
|
||||
(read-list-like rdr set)))
|
||||
|
||||
{:name "clj/bigint"
|
||||
:class clojure.lang.BigInt
|
||||
:wfn write-bigint
|
||||
:rfn read-bigint}
|
||||
"clj/vector"
|
||||
(reify ReadHandler
|
||||
(read [_ rdr _ _]
|
||||
(read-list-like rdr vec)))
|
||||
|
||||
{:name "clj/set"
|
||||
:class clojure.lang.IPersistentSet
|
||||
:wfn write-list-like
|
||||
:rfn (comp set read-object!)}
|
||||
"clj/list"
|
||||
(reify ReadHandler
|
||||
(read [_ rdr _ _]
|
||||
(read-list-like rdr #(apply list %))))
|
||||
|
||||
{:name "clj/vector"
|
||||
:class clojure.lang.IPersistentVector
|
||||
:wfn write-list-like
|
||||
:rfn (comp vec read-object!)}
|
||||
"clj/seq"
|
||||
(reify ReadHandler
|
||||
(read [_ rdr _ _]
|
||||
(read-list-like rdr sequence)))
|
||||
})
|
||||
|
||||
{:name "clj/list"
|
||||
:class clojure.lang.IPersistentList
|
||||
:wfn write-list-like
|
||||
:rfn #(apply list (read-object! %))}
|
||||
(def write-handler-lookup
|
||||
(-> write-handlers
|
||||
fres/associative-lookup
|
||||
fres/inheritance-lookup))
|
||||
|
||||
{:name "clj/seq"
|
||||
:class clojure.lang.ISeq
|
||||
:wfn write-list-like
|
||||
:rfn (comp sequence read-object!)})
|
||||
(def read-handler-lookup
|
||||
(-> read-handlers
|
||||
(fres/associative-lookup)))
|
||||
|
||||
;; --- PUBLIC API
|
||||
;; --- Low-Level Api
|
||||
|
||||
(defn reader
|
||||
[istream]
|
||||
(fres/create-reader istream :handlers *read-handler-lookup*))
|
||||
(fres/create-reader istream :handlers read-handler-lookup))
|
||||
|
||||
(defn writer
|
||||
[ostream]
|
||||
(fres/create-writer ostream :handlers *write-handler-lookup*))
|
||||
(fres/create-writer ostream :handlers write-handler-lookup))
|
||||
|
||||
(defn read!
|
||||
[reader]
|
||||
@@ -286,40 +267,15 @@
|
||||
[writer data]
|
||||
(fres/write-object writer data))
|
||||
|
||||
;; --- High-Level Api
|
||||
|
||||
(defn encode
|
||||
[data]
|
||||
(with-open [^ByteArrayOutputStream output (ByteArrayOutputStream.)]
|
||||
(-> (writer output)
|
||||
(write! data))
|
||||
(.toByteArray output)))
|
||||
(with-open [out (ByteArrayOutputStream.)]
|
||||
(write! (writer out) data)
|
||||
(.toByteArray out)))
|
||||
|
||||
(defn decode
|
||||
[data]
|
||||
(with-open [^ByteArrayInputStream input (ByteArrayInputStream. ^bytes data)]
|
||||
(-> input reader read!)))
|
||||
|
||||
;; --- ADDITIONAL
|
||||
|
||||
(add-handlers!
|
||||
{:name "penpot/point"
|
||||
:class app.common.geom.point.Point
|
||||
:wfn (fn [n w ^Point o]
|
||||
(write-tag! w n 1)
|
||||
(write-list! w (List/of (.-x o) (.-y o))))
|
||||
:rfn (fn [^Reader rdr]
|
||||
(let [^List x (read-object! rdr)]
|
||||
(Point. (.get x 0) (.get x 1))))}
|
||||
|
||||
{:name "penpot/matrix"
|
||||
:class app.common.geom.matrix.Matrix
|
||||
:wfn (fn [^String n ^Writer w o]
|
||||
(write-tag! w n 1)
|
||||
(write-list! w (List/of (.-a ^Matrix o)
|
||||
(.-b ^Matrix o)
|
||||
(.-c ^Matrix o)
|
||||
(.-d ^Matrix o)
|
||||
(.-e ^Matrix o)
|
||||
(.-f ^Matrix o))))
|
||||
:rfn (fn [^Reader rdr]
|
||||
(let [^List x (read-object! rdr)]
|
||||
(Matrix. (.get x 0) (.get x 1) (.get x 2) (.get x 3) (.get x 4) (.get x 5))))})
|
||||
(with-open [input (ByteArrayInputStream. ^bytes data)]
|
||||
(read! (reader input))))
|
||||
|
||||
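A hedged round-trip sketch with the high-level encode/decode pair kept by this change; it relies on the clj/* handlers registered above and uses only plain data, so the expected result is the same value back:

(let [value {:name "rect-1"          ;; strings and numbers use fressian built-ins
             :tags #{:a :b}          ;; "clj/set" handler
             :size [320 200]}]       ;; "clj/vector" handler
  (decode (encode value)))
;; expected => {:name "rect-1", :tags #{:a :b}, :size [320 200]}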
@@ -5,6 +5,7 @@
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.util.json
|
||||
(:refer-clojure :exclude [read])
|
||||
(:require
|
||||
[jsonista.core :as j]))
|
||||
|
||||
@@ -12,23 +13,23 @@
|
||||
[params]
|
||||
(j/object-mapper params))
|
||||
|
||||
(defn read!
|
||||
([from] (j/read-value from j/keyword-keys-object-mapper))
|
||||
([from mapper] (j/read-value from mapper)))
|
||||
|
||||
(defn write!
|
||||
([to v] (j/write-value to v j/keyword-keys-object-mapper))
|
||||
([to v mapper] (j/write-value to v mapper)))
|
||||
|
||||
(defn encode
|
||||
(defn write
|
||||
([v] (j/write-value-as-bytes v j/keyword-keys-object-mapper))
|
||||
([v mapper] (j/write-value-as-bytes v mapper)))
|
||||
|
||||
(defn decode
|
||||
([v] (j/read-value v j/keyword-keys-object-mapper))
|
||||
([v mapper] (j/read-value v mapper)))
|
||||
|
||||
(defn encode-str
|
||||
(defn write-str
|
||||
([v] (j/write-value-as-string v j/keyword-keys-object-mapper))
|
||||
([v mapper] (j/write-value-as-string v mapper)))
|
||||
|
||||
(defn read
|
||||
([v] (j/read-value v j/keyword-keys-object-mapper))
|
||||
([v mapper] (j/read-value v mapper)))
|
||||
|
||||
(defn encode
|
||||
[v]
|
||||
(j/write-value-as-bytes v j/keyword-keys-object-mapper))
|
||||
|
||||
(defn decode
|
||||
[v]
|
||||
(j/read-value v j/keyword-keys-object-mapper))
|
||||
|
||||
|
||||
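A small usage sketch of the helpers defined above; the return values shown are expectations, not output captured from the diff:

(write-str {:a 1 :b "x"})  ;; => "{\"a\":1,\"b\":\"x\"}"
(read (write-str {:a 1}))  ;; => {:a 1}   (keyword keys by default)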
@@ -1,380 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.util.objects-map
|
||||
"Implements a specialized map-like data structure for store an UUID =>
|
||||
OBJECT mappings. The main purpose of this data structure is be able
|
||||
to serialize it on fressian as byte-array and have the ability to
|
||||
decode each field separatelly without the need to decode the whole
|
||||
map from the byte-array.
|
||||
|
||||
It works transparently, so no aditional dynamic vars are needed. It
|
||||
only works by reference equality and the hash-code is calculated
|
||||
properly from each value."
|
||||
|
||||
(:require
|
||||
;; [app.common.logging :as l]
|
||||
[app.common.transit :as t]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.util.fressian :as fres]
|
||||
[clojure.core :as c])
|
||||
(:import
|
||||
clojure.lang.Counted
|
||||
clojure.lang.IHashEq
|
||||
clojure.lang.IMapEntry
|
||||
clojure.lang.IObj
|
||||
clojure.lang.IPersistentCollection
|
||||
clojure.lang.IPersistentMap
|
||||
clojure.lang.Murmur3
|
||||
clojure.lang.RT
|
||||
clojure.lang.Seqable
|
||||
java.nio.ByteBuffer
|
||||
java.util.Iterator
|
||||
java.util.UUID))
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
(def ^:dynamic *lazy* true)
|
||||
|
||||
(def RECORD-SIZE (+ 16 8))
|
||||
|
||||
(declare create)
|
||||
|
||||
(defprotocol IObjectsMap
|
||||
(load! [_])
|
||||
(modified? [_])
|
||||
(compact! [_])
|
||||
(clone [_])
|
||||
(-get-key-hash [_ key])
|
||||
(-force-modified! [_]))
|
||||
|
||||
(deftype ObjectsMapEntry [^UUID key cmap]
|
||||
IMapEntry
|
||||
(key [_] key)
|
||||
(getKey [_] key)
|
||||
|
||||
(val [_]
|
||||
(get cmap key))
|
||||
(getValue [_]
|
||||
(get cmap key))
|
||||
|
||||
IHashEq
|
||||
(hasheq [_]
|
||||
(-get-key-hash cmap key)))
|
||||
|
||||
(deftype ObjectsMapIterator [^Iterator iterator cmap]
|
||||
Iterator
|
||||
(hasNext [_]
|
||||
(.hasNext iterator))
|
||||
|
||||
(next [_]
|
||||
(let [entry (.next iterator)]
|
||||
(ObjectsMapEntry. (key entry) cmap))))
|
||||
|
||||
(deftype ObjectsMap [^:unsynchronized-mutable metadata
|
||||
^:unsynchronized-mutable hash
|
||||
^:unsynchronized-mutable positions
|
||||
^:unsynchronized-mutable cache
|
||||
^:unsynchronized-mutable blob
|
||||
^:unsynchronized-mutable header
|
||||
^:unsynchronized-mutable content
|
||||
^:unsynchronized-mutable loaded?
|
||||
^:unsynchronized-mutable modified?]
|
||||
|
||||
IHashEq
|
||||
(hasheq [this]
|
||||
(when-not hash
|
||||
(set! hash (Murmur3/hashUnordered this)))
|
||||
hash)
|
||||
|
||||
Object
|
||||
(hashCode [this]
|
||||
(.hasheq ^IHashEq this))
|
||||
|
||||
IObjectsMap
|
||||
(modified? [_] modified?)
|
||||
|
||||
(load! [this]
|
||||
(let [hsize (.getInt ^ByteBuffer blob 0)
|
||||
header' (.slice ^ByteBuffer blob 4 hsize)
|
||||
content' (.slice ^ByteBuffer blob
|
||||
(int (+ 4 hsize))
|
||||
(int (- (.remaining ^ByteBuffer blob)
|
||||
(+ 4 hsize))))
|
||||
|
||||
nitems (long (/ (.remaining ^ByteBuffer header') RECORD-SIZE))
|
||||
positions' (reduce (fn [positions i]
|
||||
(let [hb (.slice ^ByteBuffer header'
|
||||
(int (* i RECORD-SIZE))
|
||||
(int RECORD-SIZE))
|
||||
msb (.getLong ^ByteBuffer hb)
|
||||
lsb (.getLong ^ByteBuffer hb)
|
||||
size (.getInt ^ByteBuffer hb)
|
||||
pos (.getInt ^ByteBuffer hb)
|
||||
key (uuid/custom msb lsb)
|
||||
val [size pos]]
|
||||
(assoc! positions key val)))
|
||||
(transient {})
|
||||
(range nitems))]
|
||||
(set! positions (persistent! positions'))
|
||||
(if *lazy*
|
||||
(set! cache {})
|
||||
(loop [cache' (transient {})
|
||||
entries (seq positions)]
|
||||
(if-let [[key [size pos]] (first entries)]
|
||||
(let [tmp (byte-array (- size 4))]
|
||||
(.get ^ByteBuffer content' (int (+ pos 4)) ^bytes tmp (int 0) (int (- size 4)))
|
||||
(recur (assoc! cache' key (fres/decode tmp))
|
||||
(rest entries)))
|
||||
|
||||
(set! cache (persistent! cache')))))
|
||||
|
||||
(set! header header')
|
||||
(set! content content')
|
||||
(set! loaded? true))
|
||||
this)
|
||||
|
||||
(-get-key-hash [this key]
|
||||
(when-not loaded? (load! this))
|
||||
(if (contains? cache key)
|
||||
      (c/hash (get cache key))
      (let [[_ pos] (get positions key)]
        (.getInt ^ByteBuffer content (int pos)))))

  (-force-modified! [this]
    (set! modified? true)
    (doseq [key (keys positions)]
      (let [val (get this key)]
        (set! positions (assoc positions key nil))
        (set! cache (assoc cache key val)))))

  (compact! [this]
    (when modified?
      (let [[total-items total-size new-items new-hashes]
            (loop [entries (seq positions)
                   total-size 0
                   total-items 0
                   new-items {}
                   new-hashes {}]
              (if-let [[key [size _ :as entry]] (first entries)]
                (if (nil? entry)
                  (let [oval (get cache key)
                        bval (fres/encode oval)
                        size (+ (alength ^bytes bval) 4)]
                    (recur (rest entries)
                           (+ total-size size)
                           (inc total-items)
                           (assoc new-items key bval)
                           (assoc new-hashes key (c/hash oval))))
                  (recur (rest entries)
                         (long (+ total-size size))
                         (inc total-items)
                         new-items
                         new-hashes))
                [total-items total-size new-items new-hashes]))

            hsize    (* total-items RECORD-SIZE)
            blob'    (doto (ByteBuffer/allocate (+ hsize total-size 4))
                       (.putInt 0 (int hsize)))
            header'  (.slice ^ByteBuffer blob' 4 (int hsize))
            content' (.slice ^ByteBuffer blob' (int (+ 4 hsize)) (int total-size))
            rbuf     (ByteBuffer/allocate RECORD-SIZE)

            positions'
            (loop [position 0
                   entries  (seq positions)
                   positions {}]
              (if-let [[key [size prev-pos :as entry]] (first entries)]
                (do
                  (doto ^ByteBuffer rbuf
                    (.clear)
                    (.putLong ^long (uuid/get-word-high key))
                    (.putLong ^long (uuid/get-word-low key)))

                  (if (nil? entry)
                    (let [bval (get new-items key)
                          hval (get new-hashes key)
                          size (+ (alength ^bytes bval) 4)]

                      (.putInt ^ByteBuffer rbuf (int size))
                      (.putInt ^ByteBuffer rbuf (int position))
                      (.rewind ^ByteBuffer rbuf)

                      (.put ^ByteBuffer header' ^ByteBuffer rbuf)
                      (.putInt ^ByteBuffer content' (int hval))
                      (.put ^ByteBuffer content' ^bytes bval)
                      (recur (+ position size)
                             (rest entries)
                             (assoc positions key [size position])))

                    (let [cbuf (.slice ^ByteBuffer content (int prev-pos) (int size))]
                      (.putInt ^ByteBuffer rbuf (int size))
                      (.putInt ^ByteBuffer rbuf (int position))
                      (.rewind ^ByteBuffer rbuf)

                      (.put ^ByteBuffer header' ^ByteBuffer rbuf)
                      (.put ^ByteBuffer content' ^ByteBuffer cbuf)
                      (recur (long (+ position size))
                             (rest entries)
                             (assoc positions key [size position])))))

                positions))]

        (.rewind ^ByteBuffer header')
        (.rewind ^ByteBuffer content')
        (.rewind ^ByteBuffer blob')

        (set! positions positions')
        (set! modified? false)
        (set! blob blob')
        (set! header header')
        (set! content content')))
    this)

  (clone [_]
    (if loaded?
      (ObjectsMap. metadata hash positions cache blob header content loaded? modified?)
      (ObjectsMap. metadata nil nil nil blob nil nil false false)))

  clojure.lang.IDeref
  (deref [this]
    (compact! this)
    (.array ^ByteBuffer blob))

  IObj
  (meta [_] metadata)
  (withMeta [_ metadata]
    (ObjectsMap. metadata hash positions cache blob header content loaded? modified?))

  Seqable
  (seq [this]
    (when-not loaded? (load! this))
    (RT/chunkIteratorSeq (.iterator ^Iterable this)))

  IPersistentCollection
  (equiv [this other]
    (identical? this other))

  IPersistentMap
  (cons [this o]
    (when-not loaded? (load! this))
    (if (map-entry? o)
      (assoc this (key o) (val o))
      (if (vector? o)
        (assoc this (nth o 0) (nth o 1))
        (throw (UnsupportedOperationException. "invalid arguments to cons")))))

  (empty [_]
    (create))

  (containsKey [this key]
    (when-not loaded? (load! this))
    (contains? positions key))

  (entryAt [this key]
    (when-not loaded? (load! this))
    (ObjectsMapEntry. this key))

  (valAt [this key]
    (when-not loaded? (load! this))
    (if (contains? cache key)
      (get cache key)
      (if (contains? positions key)
        (let [[size pos] (get positions key)
              tmp (byte-array (- size 4))]
          (.get ^ByteBuffer content (int (+ pos 4)) ^bytes tmp (int 0) (int (- size 4)))
          (let [val (fres/decode tmp)]
            (set! cache (assoc cache key val))
            val))
        (do
          (set! cache (assoc cache key nil))
          nil))))

  (valAt [this key not-found]
    (when-not loaded? (load! this))
    (if (.containsKey ^IPersistentMap positions key)
      (.valAt this key)
      not-found))

  (assoc [this key val]
    (when-not loaded? (load! this))
    (when-not (instance? UUID key)
      (throw (IllegalArgumentException. "key should be an instance of UUID")))
    (ObjectsMap. metadata
                 nil
                 (assoc positions key nil)
                 (assoc cache key val)
                 blob
                 header
                 content
                 loaded?
                 true))

  (assocEx [_ _ _]
    (throw (UnsupportedOperationException. "method not implemented")))

  (without [this key]
    (when-not loaded? (load! this))
    (ObjectsMap. metadata
                 nil
                 (dissoc positions key)
                 (dissoc cache key)
                 blob
                 header
                 content
                 loaded?
                 true))

  Counted
  (count [this]
    (when-not loaded? (load! this))
    (count positions))

  Iterable
  (iterator [this]
    (when-not loaded? (load! this))
    (ObjectsMapIterator. (.iterator ^Iterable positions) this)))

(defn create
  ([]
   (let [buf (ByteBuffer/allocate 4)]
     (.putInt ^ByteBuffer buf 0 0)
     (create buf)))
  ([buf]
   (cond
     (bytes? buf)
     (create (ByteBuffer/wrap ^bytes buf))

     (instance? ByteBuffer buf)
     (ObjectsMap. {} nil {} {} buf nil nil false false)

     :else
     (throw (UnsupportedOperationException. "invalid arguments")))))

(defn wrap
  [objects]
  (if (instance? ObjectsMap objects)
    objects
    (into (create) objects)))

(defn objects-map?
  [o]
  (instance? ObjectsMap o))

(fres/add-handlers!
 {:name "penpot/objects-map/v1"
  :class ObjectsMap
  :wfn (fn [n w o]
         (fres/write-tag! w n)
         (fres/write-bytes! w (deref o)))
  :rfn (fn [r]
         (-> r fres/read-object! create))})

(t/add-handlers!
 {:id "map"
  :class ObjectsMap
  :wfn #(into {} %)})
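A minimal usage sketch of the objects-map API defined above (the require path and the aliases `omap` and `uuid` are illustrative, not confirmed by this diff; keys must be UUIDs, and deref yields the compacted byte-array blob that `create` can read back):

;; Rich-comment sketch; namespace path and aliases are assumptions.
(comment
  (require '[app.util.objects-map :as omap]   ;; assumed location of the file above
           '[app.common.uuid :as uuid])

  (let [id  (uuid/next)
        obj (-> (omap/create)                 ;; empty ObjectsMap
                (assoc id {:type :rect}))]    ;; keys must be UUID instances
    (get obj id)                              ;; => {:type :rect}, served from the cache
    (omap/objects-map? obj)                   ;; => true
    ;; deref compacts the map and returns the serialized byte array,
    ;; which can be stored as a blob and later re-wrapped with `create`.
    (-> obj deref omap/create (get id))))     ;; => {:type :rect}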
@@ -1,254 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC

(ns app.util.pointer-map
  "Implements a map-like data structure that provides an entry point for
  storing its content in a separate blob storage.

  By default it is not coupled to any storage mode; this is
  externalized through the provided entry points for inspecting the
  currently available objects and hooking in the data-loading function.

  Each object is identified by a UUID and a metadata hashmap which can
  be used to locate it in the storage layer. The hash code of the
  object is always the hash of the UUID, and equality is always
  performed by reference (you cannot deep-compare two pointer maps
  without completely loading their contents and comparing them).

  Each time the map passes from the not-modified to the modified state,
  the id is regenerated, and it is the responsibility of the user of
  the hashmap to properly garbage collect the unused/unreferenced
  objects.

  To work properly it requires some dynamic vars bound to appropriate
  values:

  - *load-fn*: when you instantiate an object by ID, the first access
    to the contents of the hash-map will try to load the real data by
    calling the function bound to this dynamic var.

  - *tracked*: when you instantiate an object or modify it, it is updated
    in the atom (holding a hashmap) bound to this var; if no binding is
    available, no tracking is performed. Tracking is needed to finally
    persist all modified objects to external storage.
  "
  (:require
   [app.common.logging :as l]
   [app.common.transit :as t]
   [app.common.uuid :as uuid]
   [app.util.fressian :as fres]
   [app.util.time :as dt]
   [clojure.core :as c])
  (:import
   clojure.lang.Counted
   clojure.lang.IDeref
   clojure.lang.IHashEq
   clojure.lang.IObj
   clojure.lang.IPersistentCollection
   clojure.lang.IPersistentMap
   clojure.lang.PersistentArrayMap
   clojure.lang.PersistentHashMap
   clojure.lang.Seqable
   java.util.List))

(def ^:dynamic *load-fn* nil)
(def ^:dynamic *tracked* nil)
(def ^:dynamic *metadata* {})

(declare create)

(defprotocol IPointerMap
  (get-id [_])
  (load! [_])
  (modified? [_])
  (clone [_]))

(deftype PointerMap [id mdata
                     ^:unsynchronized-mutable odata
                     ^:unsynchronized-mutable modified?
                     ^:unsynchronized-mutable loaded?]

  IPointerMap
  (load! [_]
    (l/trace :hint "pointer-map:load" :id id)
    (set! loaded? true)

    (when-not *load-fn*
      (throw (UnsupportedOperationException. "load is not supported when *load-fn* is not bound")))

    (when-let [data (*load-fn* id)]
      (set! odata data))
    (or odata {}))

  (modified? [_] modified?)
  (get-id [_] id)

  (clone [this]
    (when-not loaded? (load! this))
    (let [mdata (assoc mdata :created-at (dt/now))
          id    (uuid/next)
          pmap  (PointerMap. id
                             mdata
                             odata
                             true
                             true)]
      (some-> *tracked* (swap! assoc id pmap))
      pmap))

  IDeref
  (deref [this]
    (when-not loaded? (load! this))
    (or odata {}))

  ;; We don't need to load the data to calculate the hash because this
  ;; map has specific behavior
  IHashEq
  (hasheq [_] (hash id))

  Object
  (hashCode [this]
    (.hasheq ^IHashEq this))

  IObj
  (meta [_]
    (or mdata {}))

  (withMeta [_ mdata']
    (let [pmap (PointerMap. id mdata' odata (not= mdata mdata') loaded?)]
      (some-> *tracked* (swap! assoc id pmap))
      pmap))

  Seqable
  (seq [this]
    (when-not loaded? (load! this))
    (.seq ^Seqable odata))

  IPersistentCollection
  (equiv [this other]
    (identical? this other))

  IPersistentMap
  (cons [this o]
    (when-not loaded? (load! this))
    (if (map-entry? o)
      (assoc this (key o) (val o))
      (if (vector? o)
        (assoc this (nth o 0) (nth o 1))
        (throw (UnsupportedOperationException. "invalid arguments to cons")))))

  (empty [_]
    (create))

  (containsKey [this key]
    (when-not loaded? (load! this))
    (contains? odata key))

  (entryAt [this key]
    (when-not loaded? (load! this))
    (.entryAt ^IPersistentMap odata key))

  (valAt [this key]
    (when-not loaded? (load! this))
    (.valAt ^IPersistentMap odata key))

  (valAt [this key not-found]
    (when-not loaded? (load! this))
    (.valAt ^IPersistentMap odata key not-found))

  (assoc [this key val]
    (when-not loaded? (load! this))
    (let [odata (assoc odata key val)
          mdata (assoc mdata :created-at (dt/now))
          id    (if modified? id (uuid/next))
          pmap  (PointerMap. id
                             mdata
                             odata
                             true
                             true)]
      (some-> *tracked* (swap! assoc id pmap))
      pmap))

  (assocEx [_ _ _]
    (throw (UnsupportedOperationException. "method not implemented")))

  (without [this key]
    (when-not loaded? (load! this))
    (let [odata (dissoc odata key)
          mdata (assoc mdata :created-at (dt/now))
          id    (if modified? id (uuid/next))
          pmap  (PointerMap. id
                             mdata
                             odata
                             true
                             true)]
      (some-> *tracked* (swap! assoc id pmap))
      pmap))

  Counted
  (count [this]
    (when-not loaded? (load! this))
    (count odata))

  Iterable
  (iterator [this]
    (when-not loaded? (load! this))
    (.iterator ^Iterable odata)))

(defn create
  ([]
   (let [id    (uuid/next)
         mdata (assoc *metadata* :created-at (dt/now))
         pmap  (PointerMap. id mdata {} true true)]
     (some-> *tracked* (swap! assoc id pmap))
     pmap))
  ([id mdata]
   (let [pmap (PointerMap. id mdata {} false false)]
     (some-> *tracked* (swap! assoc id pmap))
     pmap)))

(defn pointer-map?
  [o]
  (instance? PointerMap o))

(defn wrap
  [data]
  (if (pointer-map? data)
    (do
      (some-> *tracked* (swap! assoc (get-id data) data))
      data)
    (into (create) data)))

(fres/add-handlers!
 {:name "penpot/pointer-map/v1"
  :class PointerMap
  :wfn (fn [n w o]
         (fres/write-tag! w n 3)
         (let [id (get-id o)]
           (fres/write-int! w (uuid/get-word-high id))
           (fres/write-int! w (uuid/get-word-low id)))
         (fres/begin-closed-list! w)
         (loop [items (-> o meta seq)]
           (when-let [^clojure.lang.MapEntry item (first items)]
             (fres/write-object! w (.key item) true)
             (fres/write-object! w (.val item))
             (recur (rest items))))
         (fres/end-list! w))
  :rfn (fn [r]
         (let [msb (fres/read-object! r)
               lsb (fres/read-object! r)
               kvs (fres/read-object! r)]
           (create (uuid/custom msb lsb)
                   (if (< (.size ^List kvs) 16)
                     (PersistentArrayMap. (.toArray ^List kvs))
                     (PersistentHashMap/create (seq kvs))))))})

(t/add-handlers!
 {:id "penpot/pointer"
  :class PointerMap
  :wfn (fn [val]
         [(get-id val) (meta val)])})
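A minimal usage sketch of the *load-fn* / *tracked* contract described in the docstring above (the storage lookup function and the alias `pmap` are illustrative, not part of the codebase):

;; Rich-comment sketch; `lookup-blob!` and the alias `pmap` are assumptions.
(comment
  (require '[app.util.pointer-map :as pmap])

  (defn lookup-blob!
    "Hypothetical storage lookup used as *load-fn*: fetch and decode the
    blob identified by `id` from wherever it is stored."
    [id]
    {})

  (binding [pmap/*load-fn* lookup-blob!
            pmap/*tracked* (atom {})]
    (let [m (-> (pmap/create)          ;; fresh pointer map, already marked as modified
                (assoc :a 1))]         ;; each update returns a new tracked instance
      [(pmap/get-id m)
       (pmap/modified? m)              ;; => true
       @pmap/*tracked*])))             ;; modified maps collected here for later persistence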
@@ -11,6 +11,20 @@
   [app.common.data :as d]
   [cuerdas.core :as str]))

(defrecord WrappedValue [obj]
  clojure.lang.IDeref
  (deref [_] obj))

(defn wrap
  ([]
   (WrappedValue. nil))
  ([o]
   (WrappedValue. o)))

(defn wrapped?
  [o]
  (instance? WrappedValue o))

(defmacro defmethod
  [sname & body]
  (let [[docs body] (if (string? (first body))
@@ -39,7 +39,9 @@
    :seconds ChronoUnit/SECONDS
    :minutes ChronoUnit/MINUTES
    :hours ChronoUnit/HOURS
    :days ChronoUnit/DAYS)))
    :days ChronoUnit/DAYS
    :weeks ChronoUnit/WEEKS
    :months ChronoUnit/MONTHS)))

;; --- DURATION

backend/src/app/util/transit.clj (new file, 179 lines)
@@ -0,0 +1,179 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC

(ns app.util.transit
  (:require
   [app.common.geom.matrix :as gmt]
   [app.common.geom.point :as gpt]
   [cognitect.transit :as t]
   [linked.core :as lk])
  (:import
   app.common.geom.matrix.Matrix
   app.common.geom.point.Point
   java.io.ByteArrayInputStream
   java.io.ByteArrayOutputStream
   java.io.File
   java.time.Instant
   java.time.OffsetDateTime
   linked.set.LinkedSet))

;; --- Handlers

(def ^:private file-write-handler
  (t/write-handler
   (constantly "file")
   (fn [v] (str v))))

;; --- GEOM

(def point-write-handler
  (t/write-handler
   (constantly "point")
   (fn [v] (into {} v))))

(def point-read-handler
  (t/read-handler gpt/map->Point))

(def matrix-write-handler
  (t/write-handler
   (constantly "matrix")
   (fn [v] (into {} v))))

(def matrix-read-handler
  (t/read-handler gmt/map->Matrix))

;; --- Ordered Set

(def ordered-set-write-handler
  (t/write-handler
   (constantly "ordered-set")
   (fn [v] (vec v))))

(def ordered-set-read-handler
  (t/read-handler #(into (lk/set) %)))


;; --- TIME

(def ^:private instant-read-handler
  (t/read-handler
   (fn [v] (-> (Long/parseLong v)
               (Instant/ofEpochMilli)))))

(def ^:private instant-write-handler
  (t/write-handler
   (constantly "m")
   (fn [v] (str (.toEpochMilli ^Instant v)))))

(def ^:private offset-datetime-write-handler
  (t/write-handler
   (constantly "m")
   (fn [v] (str (.toEpochMilli (.toInstant ^OffsetDateTime v))))))

(def +read-handlers+
  {"matrix"      matrix-read-handler
   "ordered-set" ordered-set-read-handler
   "point"       point-read-handler
   "m"           instant-read-handler
   "instant"     instant-read-handler})

(def +write-handlers+
  {File           file-write-handler
   LinkedSet      ordered-set-write-handler
   Matrix         matrix-write-handler
   Point          point-write-handler
   Instant        instant-write-handler
   OffsetDateTime offset-datetime-write-handler})

;; --- Low-Level Api

(defn reader
  ([istream]
   (reader istream nil))
  ([istream {:keys [type] :or {type :json}}]
   (t/reader istream type {:handlers +read-handlers+})))

(defn read!
  "Read value from streamed transit reader."
  [reader]
  (t/read reader))

(defn writer
  ([ostream]
   (writer ostream nil))
  ([ostream {:keys [type] :or {type :json}}]
   (t/writer ostream type {:handlers +write-handlers+})))

(defn write!
  [writer data]
  (t/write writer data))

;; --- High-Level Api

(declare str->bytes)
(declare bytes->str)

(defn decode-stream
  ([input]
   (decode-stream input nil))
  ([input opts]
   (read! (reader input opts))))

(defn decode
  ([data]
   (decode data nil))
  ([data opts]
   (with-open [input (ByteArrayInputStream. ^bytes data)]
     (decode-stream input opts))))

(defn encode-stream
  ([data out]
   (encode-stream data out nil))
  ([data out opts]
   (let [w (writer out opts)]
     (write! w data))))

(defn encode
  ([data]
   (encode data nil))
  ([data opts]
   (with-open [out (ByteArrayOutputStream.)]
     (encode-stream data out opts)
     (.toByteArray out))))

(defn decode-str
  [message]
  (->> (str->bytes message)
       (decode)))

(defn encode-str
  ([message]
   (->> (encode message)
        (bytes->str)))
  ([message opts]
   (->> (encode message opts)
        (bytes->str))))

(defn encode-verbose-str
  [message]
  (->> (encode message {:type :json-verbose})
       (bytes->str)))

;; --- Helpers

(defn str->bytes
  "Convert string to byte array."
  ([^String s]
   (str->bytes s "UTF-8"))
  ([^String s, ^String encoding]
   (.getBytes s encoding)))

(defn bytes->str
  "Convert byte array to String."
  ([^bytes data]
   (bytes->str data "UTF-8"))
  ([^bytes data, ^String encoding]
   (String. data encoding)))
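A roundtrip sketch for the handlers registered in the namespace above (the alias `ut` is illustrative; Point, Matrix, ordered sets and instants are encoded with the custom tags shown):

;; Rich-comment sketch; aliases are assumptions.
(comment
  (require '[app.util.transit :as ut]
           '[app.common.geom.point :as gpt])

  (let [data {:point   (gpt/map->Point {:x 1 :y 2})
              :created (java.time.Instant/now)}
        s    (ut/encode-str data)]   ;; transit JSON string built with +write-handlers+
    (ut/decode-str s)))              ;; => original map; Point restored via map->Point,
                                     ;;    Instant restored at millisecond precision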
File diff suppressed because it is too large
@@ -4,12 +4,12 @@
;;
;; Copyright (c) KALEIDOS INC

(ns backend-tests.bounce-handling-test
(ns app.bounce-handling-test
  (:require
   [backend-tests.helpers :as th]
   [app.db :as db]
   [app.emails :as emails]
   [app.http.awsns :as awsns]
   [app.test-helpers :as th]
   [app.tokens :as tokens]
   [app.util.time :as dt]
   [clojure.pprint :refer [pprint]]
@@ -101,9 +101,9 @@

(t/deftest test-parse-bounce-report
  (let [profile (th/create-profile* 1)
        props   (:app.setup/props th/*system*)
        cfg     {:app.main/props props}
        report  (bounce-report {:token (tokens/generate props
        sprops  (:app.setup/props th/*system*)
        cfg     {:sprops sprops}
        report  (bounce-report {:token (tokens/generate sprops
                                         {:iss :profile-identity
                                          :profile-id (:id profile)})})
        result  (#'awsns/parse-notification cfg report)]
@@ -118,9 +118,9 @@

(t/deftest test-parse-complaint-report
  (let [profile (th/create-profile* 1)
        props   (:app.setup/props th/*system*)
        cfg     {:app.main/props props}
        report  (complaint-report {:token (tokens/generate props
        sprops  (:app.setup/props th/*system*)
        cfg     {:sprops sprops}
        report  (complaint-report {:token (tokens/generate sprops
                                            {:iss :profile-identity
                                             :profile-id (:id profile)})})
        result  (#'awsns/parse-notification cfg report)]
@@ -133,8 +133,8 @@
    ))

(t/deftest test-parse-complaint-report-without-token
  (let [props  (:app.setup/props th/*system*)
        cfg    {:app.main/props props}
  (let [sprops (:app.setup/props th/*system*)
        cfg    {:sprops sprops}
        report (complaint-report {:token ""})
        result (#'awsns/parse-notification cfg report)]
    (t/is (= "complaint" (:type result)))
@@ -146,10 +146,10 @@

(t/deftest test-process-bounce-report
  (let [profile (th/create-profile* 1)
        props   (:app.setup/props th/*system*)
        sprops  (:app.setup/props th/*system*)
        pool    (:app.db/pool th/*system*)
        cfg     {:app.main/props props :app.db/pool pool}
        report  (bounce-report {:token (tokens/generate props
        cfg     {:sprops sprops :pool pool}
        report  (bounce-report {:token (tokens/generate sprops
                                         {:iss :profile-identity
                                          :profile-id (:id profile)})})
        report  (#'awsns/parse-notification cfg report)]
@@ -175,11 +175,10 @@

(t/deftest test-process-complaint-report
  (let [profile (th/create-profile* 1)
        props   (:app.setup/props th/*system*)
        sprops  (:app.setup/props th/*system*)
        pool    (:app.db/pool th/*system*)
        cfg     {:app.main/props props
                 :app.db/pool pool}
        report  (complaint-report {:token (tokens/generate props
        cfg     {:sprops sprops :pool pool}
        report  (complaint-report {:token (tokens/generate sprops
                                            {:iss :profile-identity
                                             :profile-id (:id profile)})})
        report  (#'awsns/parse-notification cfg report)]
@@ -207,11 +206,11 @@

(t/deftest test-process-bounce-report-to-self
  (let [profile (th/create-profile* 1)
        props   (:app.setup/props th/*system*)
        sprops  (:app.setup/props th/*system*)
        pool    (:app.db/pool th/*system*)
        cfg     {:app.main/props props :app.db/pool pool}
        cfg     {:sprops sprops :pool pool}
        report  (bounce-report {:email (:email profile)
                                :token (tokens/generate props
                                :token (tokens/generate sprops
                                         {:iss :profile-identity
                                          :profile-id (:id profile)})})
        report  (#'awsns/parse-notification cfg report)]
@@ -229,11 +228,11 @@

(t/deftest test-process-complaint-report-to-self
  (let [profile (th/create-profile* 1)
        props   (:app.setup/props th/*system*)
        sprops  (:app.setup/props th/*system*)
        pool    (:app.db/pool th/*system*)
        cfg     {:app.main/props props :app.db/pool pool}
        cfg     {:sprops sprops :pool pool}
        report  (complaint-report {:email (:email profile)
                                   :token (tokens/generate props
                                   :token (tokens/generate sprops
                                            {:iss :profile-identity
                                             :profile-id (:id profile)})})
        report  (#'awsns/parse-notification cfg report)]
@@ -4,13 +4,13 @@
;;
;; Copyright (c) KALEIDOS INC

(ns backend-tests.email-sending-test
(ns app.emails-test
  (:require
   [backend-tests.helpers :as th]
   [clojure.test :as t]
   [promesa.core :as p]
   [app.db :as db]
   [app.emails :as emails]
   [clojure.test :as t]
   [promesa.core :as p]))
   [app.test-helpers :as th]))

(t/use-fixtures :once th/state-init)
(t/use-fixtures :each th/database-reset)
@@ -4,14 +4,14 @@
;;
;; Copyright (c) KALEIDOS INC

(ns backend-tests.rpc-file-test
(ns app.services-files-test
  (:require
   [backend-tests.helpers :as th]
   [app.common.uuid :as uuid]
   [app.db :as db]
   [app.db.sql :as sql]
   [app.http :as http]
   [app.storage :as sto]
   [app.test-helpers :as th]
   [app.util.time :as dt]
   [clojure.test :as t]
   [datoteka.core :as fs]))
@@ -124,7 +124,7 @@
(t/deftest file-gc-task
  (letfn [(create-file-media-object [{:keys [profile-id file-id]}]
            (let [mfile {:filename "sample.jpg"
                         :path (th/tempfile "backend_tests/test_files/sample.jpg")
                         :path (th/tempfile "app/test_files/sample.jpg")
                         :mtype "image/jpeg"
                         :size 312043}
                  params {::th/type :upload-file-media-object
@@ -496,8 +496,7 @@
        (t/is (contains? (:objects result) shape1-id))
        (t/is (contains? (:objects result) frame2-id))
        (t/is (contains? (:objects result) shape2-id))
        (t/is (contains? (:objects result) uuid/zero))
        )
        (t/is (contains? (:objects result) uuid/zero)))

      ;; Query :page RPC method with page-id
      (let [data {::th/type :page
@@ -524,7 +523,6 @@
                  :components-v2 true}
            {:keys [error result] :as out} (th/query! data)]
        ;; (th/print-result! out)
        (t/is (nil? error))
        (t/is (map? result))
        (t/is (contains? result :objects))
        (t/is (contains? (:objects result) frame1-id))
@@ -544,9 +542,7 @@
        (t/is (not (th/success? out)))
        (let [{:keys [type code]} (-> out :error ex-data)]
          (t/is (= :validation type))
          (t/is (= :spec-validation code))))

      )
          (t/is (= :spec-validation code)))))

    (t/testing "RPC :file-data-for-thumbnail"
      ;; Insert a thumbnail data for the frame-id
@@ -4,13 +4,13 @@
;;
;; Copyright (c) KALEIDOS INC

(ns backend-tests.rpc-font-test
(ns app.services-fonts-test
  (:require
   [backend-tests.helpers :as th]
   [app.common.uuid :as uuid]
   [app.db :as db]
   [app.http :as http]
   [app.storage :as sto]
   [app.test-helpers :as th]
   [clojure.test :as t]
   [datoteka.fs :as fs]
   [datoteka.io :as io]))
@@ -24,7 +24,7 @@
        proj-id (:default-project-id prof)
        font-id (uuid/custom 10 1)

        ttfdata (-> (io/resource "backend_tests/test_files/font-1.ttf")
        ttfdata (-> (io/resource "app/test_files/font-1.ttf")
                    io/input-stream
                    io/read-as-bytes)

@@ -59,7 +59,7 @@
        proj-id (:default-project-id prof)
        font-id (uuid/custom 10 1)

        data (-> (io/resource "backend_tests/test_files/font-1.woff")
        data (-> (io/resource "app/test_files/font-1.woff")
                 io/input-stream
                 io/read-as-bytes)

@@ -4,16 +4,16 @@
;;
;; Copyright (c) KALEIDOS INC

(ns backend-tests.rpc-management-test
(ns app.services-management-test
  (:require
   [backend-tests.storage-test :refer [configure-storage-backend]]
   [backend-tests.helpers :as th]
   [app.common.uuid :as uuid]
   [app.db :as db]
   [app.http :as http]
   [app.storage :as sto]
   [buddy.core.bytes :as b]
   [app.test-helpers :as th]
   [app.storage-test :refer [configure-storage-backend]]
   [clojure.test :as t]
   [buddy.core.bytes :as b]
   [datoteka.core :as fs]))

(t/use-fixtures :once th/state-init)
Some files were not shown because too many files have changed in this diff.