Compare commits

..

5 Commits

Author SHA1 Message Date
Alejandro Alonso
ed5875f29a Merge pull request #7154 from penpot/niwinz-staging-bug-1
🐛 Fix incorrect show request-access dialog on not-found on viewer
2025-08-22 09:19:47 +02:00
Andrey Antukh
ad38a21053 🐛 Fix incorrect show request-access dialog on not-found on viewer
When a user is not-authenticated
2025-08-20 13:35:20 +02:00
Andrey Antukh
adffac4eec Merge remote-tracking branch 'origin/main' into staging 2025-08-20 12:49:31 +02:00
Yamila Moreno
73dfe12ec9 📚 Update k8s documentation 2025-08-20 09:04:25 +02:00
Eva Marco
ff2e845f2c 🐛 Fix double click on set name input (#7096) 2025-08-13 09:23:53 +02:00
444 changed files with 7954 additions and 24872 deletions

View File

@@ -45,16 +45,10 @@
:potok/reify-type
{:level :error}
:missing-protocol-method
{:level :off}
:unresolved-namespace
{:level :warning
:exclude [data_readers]}
:unused-value
{:level :off}
:single-key-in
{:level :warning}
@@ -70,9 +64,6 @@
:redundant-nested-call
{:level :off}
:redundant-str-call
{:level :off}
:earmuffed-var-not-dynamic
{:level :off}

View File

@@ -1,29 +1,5 @@
# CHANGELOG
## 2.10.0 (Unreleased)
### :rocket: Epics and highlights
### :boom: Breaking changes & Deprecations
### :heart: Community contributions (Thank you!)
### :sparkles: New features & Enhancements
- Add defaults for artboard drawing [Taiga #494](https://tree.taiga.io/project/penpot/us/494?milestone=465047)
- Continuous display of distances between elements when moving a layer with the keyboard [Taiga #1780](https://tree.taiga.io/project/penpot/us/1780)
### :bug: Bugs fixed
- Display strokes information in inspect tab [Taiga #11154](https://tree.taiga.io/project/penpot/issue/11154)
- Fix problem with booleans selection [Taiga #11627](https://tree.taiga.io/project/penpot/issue/11627)
- Fix missing font when copy&paste a chunk of text [Taiga #11522](https://tree.taiga.io/project/penpot/issue/11522)
- Fix bad swap slot after two swaps [Taiga #11659](https://tree.taiga.io/project/penpot/issue/11659)
- Fix missing package for the `penpot_exporter` Docker image [GitHub #7025](https://github.com/penpot/penpot/issues/7025)
- Fix issue where multiple dropdown menus could be opened simultaneously on the dashboard page [Taiga #11500](https://tree.taiga.io/project/penpot/issue/11500)
- Fix font size/variant not updated when editing a text [Taiga #11552](https://tree.taiga.io/project/penpot/issue/11552)
- Fix issue where Alt + arrow keys shortcut interferes with letter-spacing when moving text layers [Taiga #11771](https://tree.taiga.io/project/penpot/issue/11771)
- Fix consistency issues on how font variants are visualized [Taiga #11499](https://tree.taiga.io/project/penpot/us/11499)
## 2.9.0 (Unreleased)
### :rocket: Epics and highlights
@@ -52,6 +28,8 @@
- Add the ability to show login dialog on profile settings [Github #6871](https://github.com/penpot/penpot/pull/6871)
- Improve the application of tokens with object specific tokens [Taiga #10209](https://tree.taiga.io/project/penpot/us/10209)
- Add info to apply-token event [Taiga #11710](https://tree.taiga.io/project/penpot/task/11710)
- Fix double click on set name input [Taiga #11747](https://tree.taiga.io/project/penpot/issue/11747)
### :bug: Bugs fixed

View File

@@ -30,7 +30,7 @@
[app.srepl.helpers :as srepl.helpers]
[app.srepl.main :as srepl]
[app.util.blob :as blob]
[app.common.time :as ct]
[app.util.time :as dt]
[clj-async-profiler.core :as prof]
[clojure.contrib.humanize :as hum]
[clojure.java.io :as io]

View File

@@ -12,7 +12,7 @@ export PENPOT_FLAGS="\
enable-login-with-gitlab \
enable-backend-worker \
enable-backend-asserts \
disable-feature-fdata-pointer-map \
enable-feature-fdata-pointer-map \
enable-feature-fdata-objects-map \
enable-audit-log \
enable-transit-readable-response \
@@ -28,11 +28,11 @@ export PENPOT_FLAGS="\
enable-auto-file-snapshot \
enable-webhooks \
enable-access-tokens \
disable-tiered-file-data-storage \
enable-tiered-file-data-storage \
enable-file-validation \
enable-file-schema-validation \
enable-subscriptions \
disable-subscriptions-old";
enable-subscriptions-old";
# Default deletion delay for devenv
export PENPOT_DELETION_DELAY="24h"

View File

@@ -13,7 +13,7 @@ export PENPOT_FLAGS="\
enable-login-with-ldap \
enable-transit-readable-response \
enable-demo-users \
disable-feature-fdata-pointer-map \
enable-feature-fdata-pointer-map \
enable-feature-fdata-objects-map \
disable-secure-session-cookies \
enable-rpc-climit \
@@ -21,11 +21,11 @@ export PENPOT_FLAGS="\
enable-quotes \
enable-file-snapshot \
enable-access-tokens \
disable-tiered-file-data-storage \
enable-tiered-file-data-storage \
enable-file-validation \
enable-file-schema-validation \
enable-subscriptions \
disable-subscriptions-old";
enable-subscriptions-old ";
# Default deletion delay for devenv
export PENPOT_DELETION_DELAY="24h"

View File

@@ -13,7 +13,6 @@
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uri :as u]
[app.config :as cf]
[app.db :as db]
@@ -29,6 +28,7 @@
[app.tokens :as tokens]
[app.util.inet :as inet]
[app.util.json :as json]
[app.util.time :as dt]
[buddy.sign.jwk :as jwk]
[buddy.sign.jwt :as jwt]
[clojure.set :as set]
@@ -514,7 +514,7 @@
[cfg info request]
(let [info (assoc info
:iss :prepared-register
:exp (ct/in-future {:hours 48}))
:exp (dt/in-future {:hours 48}))
params {:token (tokens/generate (::setup/props cfg) info)
:provider (:provider (:path-params request))
@@ -571,7 +571,7 @@
token (or (:invitation-token info)
(tokens/generate (::setup/props cfg)
{:iss :auth
:exp (ct/in-future "15m")
:exp (dt/in-future "15m")
:profile-id (:id profile)}))
props (audit/profile->props profile)
context (d/without-nils {:external-session-id (:external-session-id info)})]
@@ -619,7 +619,7 @@
:invitation-token (:invitation-token params)
:external-session-id esid
:props props
:exp (ct/in-future "4h")}
:exp (dt/in-future "4h")}
state (tokens/generate (::setup/props cfg)
(d/without-nils params))
uri (build-auth-uri cfg state)]

View File

@@ -15,32 +15,29 @@
[app.common.files.migrations :as fmg]
[app.common.files.validate :as fval]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.types.file :as ctf]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.db.sql :as sql]
[app.features.fdata :as fdata]
[app.features.file-migrations :as fmigr]
[app.features.fdata :as feat.fdata]
[app.features.file-migrations :as feat.fmigr]
[app.loggers.audit :as-alias audit]
[app.loggers.webhooks :as-alias webhooks]
[app.storage :as sto]
[app.util.blob :as blob]
[app.util.pointer-map :as pmap]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[clojure.set :as set]
[cuerdas.core :as str]
[datoteka.fs :as fs]
[datoteka.io :as io]
[promesa.exec :as px]))
[datoteka.io :as io]))
(set! *warn-on-reflection* true)
(def ^:dynamic *state* nil)
(def ^:dynamic *options* nil)
(def ^:dynamic *reference-file* nil)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; DEFAULTS
@@ -56,12 +53,17 @@
(* 1024 1024 100))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(declare get-resolved-file-libraries)
(declare update-file!)
(def file-attrs
(sm/keys ctf/schema:file))
#{:id
:name
:migrations
:features
:project-id
:is-shared
:version
:data})
(defn parse-file-format
[template]
@@ -141,157 +143,33 @@
([index coll attr]
(reduce #(index-object %1 %2 attr) index coll)))
(defn- decode-row-features
[{:keys [features] :as row}]
(defn decode-row
[{:keys [data changes features] :as row}]
(when row
(cond-> row
(db/pgarray? features) (assoc :features (db/decode-pgarray features #{})))))
features (assoc :features (db/decode-pgarray features #{}))
changes (assoc :changes (blob/decode changes))
data (assoc :data (blob/decode data)))))
(def sql:get-minimal-file
"SELECT f.id,
f.revn,
f.modified_at,
f.deleted_at
FROM file AS f
WHERE f.id = ?")
(defn get-minimal-file
[cfg id & {:as opts}]
(db/get-with-sql cfg [sql:get-minimal-file id] opts))
;; DEPRECATED
(defn decode-file
"A general purpose file decoding function that resolves all external
pointers, run migrations and return plain vanilla file map"
[cfg {:keys [id] :as file} & {:keys [migrate?] :or {migrate? true}}]
(binding [pmap/*load-fn* (partial fdata/load-pointer cfg id)]
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
(let [file (->> file
(fmigr/resolve-applied-migrations cfg)
(fdata/resolve-file-data cfg)
(fdata/decode-file-data cfg))
(feat.fmigr/resolve-applied-migrations cfg)
(feat.fdata/resolve-file-data cfg))
libs (delay (get-resolved-file-libraries cfg file))]
(-> file
(update :features db/decode-pgarray #{})
(update :data blob/decode)
(update :data fdata/process-pointers deref)
(update :data fdata/process-objects (partial into {}))
(update :data feat.fdata/process-pointers deref)
(update :data feat.fdata/process-objects (partial into {}))
(update :data assoc :id id)
(cond-> migrate? (fmg/migrate-file libs))))))
(def sql:get-file
"SELECT f.id,
f.project_id,
f.created_at,
f.modified_at,
f.deleted_at,
f.name,
f.is_shared,
f.has_media_trimmed,
f.revn,
f.data AS legacy_data,
f.ignore_sync_until,
f.comment_thread_seqn,
f.features,
f.version,
f.vern,
p.team_id,
coalesce(fd.backend, 'db') AS backend,
fd.metadata AS metadata,
fd.data AS data
FROM file AS f
LEFT JOIN file_data AS fd ON (fd.file_id = f.id AND fd.id = f.id)
INNER JOIN project AS p ON (p.id = f.project_id)
WHERE f.id = ?")
(defn- migrate-file
[{:keys [::db/conn] :as cfg} {:keys [read-only?]} {:keys [id] :as file}]
(binding [pmap/*load-fn* (partial fdata/load-pointer cfg id)
pmap/*tracked* (pmap/create-tracked)]
(let [libs (delay (get-resolved-file-libraries cfg file))
;; For avoid unnecesary overhead of creating multiple
;; pointers and handly internally with objects map in their
;; worst case (when probably all shapes and all pointers
;; will be readed in any case), we just realize/resolve them
;; before applying the migration to the file.
file (-> (fdata/realize cfg file)
(fmg/migrate-file libs))]
(if (or read-only? (db/read-only? conn))
file
(do ;; When file is migrated, we break the rule of no
;; perform mutations on get operations and update the
;; file with all migrations applied
(update-file! cfg file)
(fmigr/resolve-applied-migrations cfg file))))))
;; FIXME: filter by project-id
(defn- get-file*
[{:keys [::db/conn] :as cfg} id
{:keys [#_project-id
migrate?
realize?
decode?
skip-locked?
include-deleted?
throw-if-not-exists?
lock-for-update?]
:or {lock-for-update? false
migrate? true
decode? true
include-deleted? false
throw-if-not-exists? true
realize? false}
:as options}]
(assert (db/connection? conn) "expected cfg with valid connection")
(let [sql
(if lock-for-update?
(str sql:get-file " FOR UPDATE of f")
sql:get-file)
sql
(if skip-locked?
(str sql " SKIP LOCKED")
sql)
file
(db/get-with-sql conn [sql id]
{::db/throw-if-not-exists false
::db/remove-deleted (not include-deleted?)})
file
(-> file
(d/update-when :features db/decode-pgarray #{})
(d/update-when :metadata fdata/decode-metadata))]
(if file
(let [file
(->> file
(fmigr/resolve-applied-migrations cfg)
(fdata/resolve-file-data cfg))
will-migrate?
(and migrate? (fmg/need-migration? file))]
(if decode?
(cond->> (fdata/decode-file-data cfg file)
(and realize? (not will-migrate?))
(fdata/realize cfg)
will-migrate?
(migrate-file cfg options))
file))
(when-not (or skip-locked? (not throw-if-not-exists?))
(ex/raise :type :not-found
:code :object-not-found
:hint "database object not found"
:table :file
:file-id id)))))
(defn get-file
"Get file, resolve all features and apply migrations.
@@ -299,7 +177,10 @@
operations on file, because it removes the ovehead of lazy fetching
and decoding."
[cfg file-id & {:as opts}]
(db/run! cfg get-file* file-id opts))
(db/run! cfg (fn [{:keys [::db/conn] :as cfg}]
(some->> (db/get* conn :file {:id file-id}
(assoc opts ::db/remove-deleted false))
(decode-file cfg)))))
(defn clean-file-features
[file]
@@ -323,12 +204,12 @@
(let [conn (db/get-connection cfg)
ids (db/create-array conn "uuid" ids)]
(->> (db/exec! conn [sql:get-teams ids])
(map decode-row-features))))
(map decode-row))))
(defn get-team
[cfg team-id]
(-> (db/get cfg :team {:id team-id})
(decode-row-features)))
(decode-row)))
(defn get-fonts
[cfg team-id]
@@ -540,27 +421,6 @@
(db/exec-one! conn ["SET LOCAL idle_in_transaction_session_timeout = 0"])
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])))
(defn invalidate-thumbnails
[cfg file-id]
(let [storage (sto/resolve cfg)
sql-1
(str "update file_tagged_object_thumbnail "
" set deleted_at = now() "
" where file_id=? returning media_id")
sql-2
(str "update file_thumbnail "
" set deleted_at = now() "
" where file_id=? returning media_id")]
(run! #(sto/touch-object! storage %)
(sequence
(keep :media-id)
(concat
(db/exec! cfg [sql-1 file-id])
(db/exec! cfg [sql-2 file-id]))))))
(defn process-file
[cfg {:keys [id] :as file}]
(let [libs (delay (get-resolved-file-libraries cfg file))]
@@ -585,104 +445,77 @@
(vary-meta dissoc ::fmg/migrated))))
(defn encode-file
[{:keys [::wrk/executor] :as cfg} {:keys [id features] :as file}]
(let [file (if (and (contains? features "fdata/objects-map")
(:data file))
(fdata/enable-objects-map file)
[{:keys [::db/conn] :as cfg} {:keys [id features] :as file}]
(let [file (if (contains? features "fdata/objects-map")
(feat.fdata/enable-objects-map file)
file)
file (if (and (contains? features "fdata/pointer-map")
(:data file))
(binding [pmap/*tracked* (pmap/create-tracked :inherit true)]
(let [file (fdata/enable-pointer-map file)]
(fdata/persist-pointers! cfg id)
file (if (contains? features "fdata/pointer-map")
(binding [pmap/*tracked* (pmap/create-tracked)]
(let [file (feat.fdata/enable-pointer-map file)]
(feat.fdata/persist-pointers! cfg id)
file))
file)]
(-> file
(d/update-when :features into-array)
(d/update-when :data (fn [data] (px/invoke! executor #(blob/encode data)))))))
(update :features db/encode-pgarray conn "text")
(update :data blob/encode))))
(defn- file->params
(defn get-params-from-file
[file]
(-> (select-keys file file-attrs)
(assoc :data nil)
(dissoc :team-id)
(dissoc :migrations)))
(let [params {:has-media-trimmed (:has-media-trimmed file)
:ignore-sync-until (:ignore-sync-until file)
:project-id (:project-id file)
:features (:features file)
:name (:name file)
:is-shared (:is-shared file)
:version (:version file)
:data (:data file)
:id (:id file)
:deleted-at (:deleted-at file)
:created-at (:created-at file)
:modified-at (:modified-at file)
:revn (:revn file)
:vern (:vern file)}]
(defn file->file-data-params
[{:keys [id backend] :as file} & {:as opts}]
(let [created-at (or (:created-at file) (ct/now))
modified-at (or (:modified-at file) created-at)
backend (if (and (::overwrite-storage-backend opts) backend)
backend
(cf/get :file-storage-backend))]
(d/without-nils
{:id id
:type "main"
:file-id id
:data (:data file)
:metadata (:metadata file)
:backend backend
:created-at created-at
:modified-at modified-at})))
(-> (d/without-nils params)
(assoc :data-backend nil)
(assoc :data-ref-id nil))))
(defn insert-file!
"Insert a new file into the database table. Expectes a not-encoded file.
Returns nil."
"Insert a new file into the database table"
[{:keys [::db/conn] :as cfg} file & {:as opts}]
(when (:migrations file)
(fmigr/upsert-migrations! conn file))
(let [file (encode-file cfg file)]
(db/insert! conn :file
(file->params file)
(assoc opts ::db/return-keys false))
(->> (file->file-data-params file)
(fdata/update! cfg))
nil))
(feat.fmigr/upsert-migrations! conn file)
(let [params (-> (encode-file cfg file)
(get-params-from-file))]
(db/insert! conn :file params opts)))
(defn update-file!
"Update an existing file on the database. Expects not encoded file."
[{:keys [::db/conn] :as cfg} {:keys [id] :as file} & {:as opts}]
"Update an existing file on the database."
[{:keys [::db/conn ::sto/storage] :as cfg} {:keys [id] :as file} & {:as opts}]
(let [file (encode-file cfg file)
params (-> (get-params-from-file file)
(dissoc :id))]
(if (::reset-migrations opts false)
(fmigr/reset-migrations! conn file)
(fmigr/upsert-migrations! conn file))
;; If file was already offloaded, we touch the underlying storage
;; object for properly trigger storage-gc-touched task
(when (feat.fdata/offloaded? file)
(some->> (:data-ref-id file) (sto/touch-object! storage)))
(let [file
(encode-file cfg file)
file-params
(file->params (dissoc file :id))
file-data-params
(file->file-data-params file)]
(db/update! conn :file file-params
{:id id}
{::db/return-keys false})
(fdata/update! cfg file-data-params)
nil))
(feat.fmigr/upsert-migrations! conn file)
(db/update! conn :file params {:id id} opts)))
(defn save-file!
"Applies all the final validations and perist the file, binfile
specific, should not be used outside of binfile domain.
Returns nil"
specific, should not be used outside of binfile domain"
[{:keys [::timestamp] :as cfg} file & {:as opts}]
(assert (ct/inst? timestamp) "expected valid timestamp")
(assert (dt/instant? timestamp) "expected valid timestamp")
(let [file (-> file
(assoc :created-at timestamp)
(assoc :modified-at timestamp)
(cond-> (not (::overwrite cfg))
(assoc :ignore-sync-until (ct/plus timestamp (ct/duration {:seconds 5}))))
(assoc :ignore-sync-until (dt/plus timestamp (dt/duration {:seconds 5})))
(update :features
(fn [features]
(-> (::features cfg #{})
@@ -699,9 +532,8 @@
(when (ex/exception? result)
(l/error :hint "file schema validation error" :cause result))))
(if (::overwrite cfg)
(update-file! cfg file (assoc opts ::reset-migrations true))
(insert-file! cfg file opts))))
(insert-file! cfg file opts)))
(def ^:private sql:get-file-libraries
"WITH RECURSIVE libs AS (
@@ -726,8 +558,7 @@
l.revn,
l.vern,
l.synced_at,
l.is_shared,
l.version
l.is_shared
FROM libs AS l
INNER JOIN project AS p ON (p.id = l.project_id)
WHERE l.deleted_at IS NULL OR l.deleted_at > now();")
@@ -739,11 +570,9 @@
;; FIXME: :is-indirect set to false to all rows looks
;; completly useless
(map #(assoc % :is-indirect false))
(map decode-row-features))
(map decode-row))
(db/exec! conn [sql:get-file-libraries file-id])))
;; FIXME: this will use a lot of memory if file uses too many big
;; libraries, we should load required libraries on demand
(defn get-resolved-file-libraries
"A helper for preload file libraries"
[{:keys [::db/conn] :as cfg} file]

View File

@@ -17,7 +17,6 @@
[app.common.fressian :as fres]
[app.common.logging :as l]
[app.common.spec :as us]
[app.common.time :as ct]
[app.common.types.file :as ctf]
[app.common.uuid :as uuid]
[app.config :as cf]
@@ -31,6 +30,7 @@
[app.storage.tmp :as tmp]
[app.tasks.file-gc]
[app.util.events :as events]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[clojure.java.io :as jio]
[clojure.set :as set]
@@ -346,7 +346,7 @@
thumbnails (->> (bfc/get-file-object-thumbnails cfg file-id)
(mapv #(dissoc % :file-id)))
file (cond-> (bfc/get-file cfg file-id :realize? true)
file (cond-> (bfc/get-file cfg file-id)
detach?
(-> (ctf/detach-external-references file-id)
(dissoc :libraries))
@@ -434,7 +434,7 @@
(defn read-import!
"Do the importation of the specified resource in penpot custom binary
format."
[{:keys [::bfc/input ::bfc/timestamp] :or {timestamp (ct/now)} :as options}]
[{:keys [::bfc/input ::bfc/timestamp] :or {timestamp (dt/now)} :as options}]
(dm/assert!
"expected input stream"
@@ -442,7 +442,7 @@
(dm/assert!
"expected valid instant"
(ct/inst? timestamp))
(dt/instant? timestamp))
(let [version (read-header! input)]
(read-import (assoc options ::version version ::bfc/timestamp timestamp))))
@@ -682,7 +682,7 @@
(io/coercible? output))
(let [id (uuid/next)
tp (ct/tpoint)
tp (dt/tpoint)
ab (volatile! false)
cs (volatile! nil)]
(try
@@ -720,7 +720,7 @@
(satisfies? jio/IOFactory input))
(let [id (uuid/next)
tp (ct/tpoint)
tp (dt/tpoint)
cs (volatile! nil)]
(l/info :hint "import: started" :id (str id))
@@ -742,6 +742,6 @@
(finally
(l/info :hint "import: terminated"
:id (str id)
:elapsed (ct/format-duration (tp))
:elapsed (dt/format-duration (tp))
:error? (some? @cs))))))

View File

@@ -13,7 +13,6 @@
[app.common.data :as d]
[app.common.features :as cfeat]
[app.common.logging :as l]
[app.common.time :as ct]
[app.common.transit :as t]
[app.common.uuid :as uuid]
[app.config :as cf]
@@ -24,6 +23,7 @@
[app.storage :as sto]
[app.storage.tmp :as tmp]
[app.util.events :as events]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[clojure.set :as set]
[cuerdas.core :as str]
@@ -153,7 +153,7 @@
(defn- write-file!
[cfg file-id]
(let [file (bfc/get-file cfg file-id :realize? true)
(let [file (bfc/get-file cfg file-id)
thumbs (bfc/get-file-object-thumbnails cfg file-id)
media (bfc/get-file-media cfg file)
rels (bfc/get-files-rels cfg #{file-id})]
@@ -344,7 +344,7 @@
(defn export-team!
[cfg team-id]
(let [id (uuid/next)
tp (ct/tpoint)
tp (dt/tpoint)
cfg (create-database cfg)]
(l/inf :hint "start"
@@ -378,15 +378,15 @@
(l/inf :hint "end"
:operation "export"
:id (str id)
:elapsed (ct/format-duration elapsed)))))))
:elapsed (dt/format-duration elapsed)))))))
(defn import-team!
[cfg path]
(let [id (uuid/next)
tp (ct/tpoint)
tp (dt/tpoint)
cfg (-> (create-database cfg path)
(assoc ::bfc/timestamp (ct/now)))]
(assoc ::bfc/timestamp (dt/now)))]
(l/inf :hint "start"
:operation "import"
@@ -434,4 +434,4 @@
(l/inf :hint "end"
:operation "import"
:id (str id)
:elapsed (ct/format-duration elapsed)))))))
:elapsed (dt/format-duration elapsed)))))))

View File

@@ -20,7 +20,6 @@
[app.common.media :as cmedia]
[app.common.schema :as sm]
[app.common.thumbnails :as cth]
[app.common.time :as ct]
[app.common.types.color :as ctcl]
[app.common.types.component :as ctc]
[app.common.types.file :as ctf]
@@ -36,6 +35,7 @@
[app.storage :as sto]
[app.storage.impl :as sto.impl]
[app.util.events :as events]
[app.util.time :as dt]
[clojure.java.io :as jio]
[cuerdas.core :as str]
[datoteka.fs :as fs]
@@ -92,7 +92,7 @@
(defn- default-now
[o]
(or o (ct/now)))
(or o (dt/now)))
;; --- ENCODERS
@@ -222,11 +222,9 @@
(throw (IllegalArgumentException.
"the `include-libraries` and `embed-assets` are mutally excluding options")))
(let [detach? (and (not embed-assets) (not include-libraries))]
(let [detach? (and (not embed-assets) (not include-libraries))]
(db/tx-run! cfg (fn [cfg]
(cond-> (bfc/get-file cfg file-id
{:realize? true
:lock-for-update? true})
(cond-> (bfc/get-file cfg file-id {::sql/for-update true})
detach?
(-> (ctf/detach-external-references file-id)
(dissoc :libraries))
@@ -286,12 +284,10 @@
(assoc :options (:options data))
:always
(dissoc :data))
(dissoc :data)
file (cond-> file
:always
(encode-file))
path (str "files/" file-id ".json")]
(write-entry! output path file))
@@ -548,18 +544,15 @@
(json/read reader)))
(defn- read-file
[{:keys [::bfc/input ::bfc/timestamp]} file-id]
[{:keys [::bfc/input ::file-id]}]
(let [path (str "files/" file-id ".json")
entry (get-zip-entry input path)]
(-> (read-entry input entry)
(decode-file)
(update :revn d/nilv 1)
(update :created-at d/nilv timestamp)
(update :modified-at d/nilv timestamp)
(validate-file))))
(defn- read-file-plugin-data
[{:keys [::bfc/input]} file-id]
[{:keys [::bfc/input ::file-id]}]
(let [path (str "files/" file-id "/plugin-data.json")
entry (get-zip-entry* input path)]
(some->> entry
@@ -568,7 +561,7 @@
(validate-plugin-data))))
(defn- read-file-media
[{:keys [::bfc/input ::entries]} file-id]
[{:keys [::bfc/input ::file-id ::entries]}]
(->> (keep (match-media-entry-fn file-id) entries)
(reduce (fn [result {:keys [id entry]}]
(let [object (->> (read-entry input entry)
@@ -588,7 +581,7 @@
(not-empty)))
(defn- read-file-colors
[{:keys [::bfc/input ::entries]} file-id]
[{:keys [::bfc/input ::file-id ::entries]}]
(->> (keep (match-color-entry-fn file-id) entries)
(reduce (fn [result {:keys [id entry]}]
(let [object (->> (read-entry input entry)
@@ -601,7 +594,7 @@
(not-empty)))
(defn- read-file-components
[{:keys [::bfc/input ::entries]} file-id]
[{:keys [::bfc/input ::file-id ::entries]}]
(let [clean-component-post-decode
(fn [component]
(d/update-when component :objects
@@ -632,7 +625,7 @@
(not-empty))))
(defn- read-file-typographies
[{:keys [::bfc/input ::entries]} file-id]
[{:keys [::bfc/input ::file-id ::entries]}]
(->> (keep (match-typography-entry-fn file-id) entries)
(reduce (fn [result {:keys [id entry]}]
(let [object (->> (read-entry input entry)
@@ -645,14 +638,14 @@
(not-empty)))
(defn- read-file-tokens-lib
[{:keys [::bfc/input ::entries]} file-id]
[{:keys [::bfc/input ::file-id ::entries]}]
(when-let [entry (d/seek (match-tokens-lib-entry-fn file-id) entries)]
(->> (read-plain-entry input entry)
(decode-tokens-lib)
(validate-tokens-lib))))
(defn- read-file-shapes
[{:keys [::bfc/input ::entries] :as cfg} file-id page-id]
[{:keys [::bfc/input ::file-id ::page-id ::entries] :as cfg}]
(->> (keep (match-shape-entry-fn file-id page-id) entries)
(reduce (fn [result {:keys [id entry]}]
(let [object (->> (read-entry input entry)
@@ -666,14 +659,15 @@
(not-empty)))
(defn- read-file-pages
[{:keys [::bfc/input ::entries] :as cfg} file-id]
[{:keys [::bfc/input ::file-id ::entries] :as cfg}]
(->> (keep (match-page-entry-fn file-id) entries)
(keep (fn [{:keys [id entry]}]
(let [page (->> (read-entry input entry)
(decode-page))
page (dissoc page :options)]
(when (= id (:id page))
(let [objects (read-file-shapes cfg file-id id)]
(let [objects (-> (assoc cfg ::page-id id)
(read-file-shapes))]
(assoc page :objects objects))))))
(sort-by :index)
(reduce (fn [result {:keys [id] :as page}]
@@ -681,7 +675,7 @@
(d/ordered-map))))
(defn- read-file-thumbnails
[{:keys [::bfc/input ::entries] :as cfg} file-id]
[{:keys [::bfc/input ::file-id ::entries] :as cfg}]
(->> (keep (match-thumbnail-entry-fn file-id) entries)
(reduce (fn [result {:keys [page-id frame-id tag entry]}]
(let [object (->> (read-entry input entry)
@@ -696,13 +690,13 @@
(not-empty)))
(defn- read-file-data
[cfg file-id]
(let [colors (read-file-colors cfg file-id)
typographies (read-file-typographies cfg file-id)
tokens-lib (read-file-tokens-lib cfg file-id)
components (read-file-components cfg file-id)
plugin-data (read-file-plugin-data cfg file-id)
pages (read-file-pages cfg file-id)]
[cfg]
(let [colors (read-file-colors cfg)
typographies (read-file-typographies cfg)
tokens-lib (read-file-tokens-lib cfg)
components (read-file-components cfg)
plugin-data (read-file-plugin-data cfg)
pages (read-file-pages cfg)]
{:pages (-> pages keys vec)
:pages-index (into {} pages)
:colors colors
@@ -712,11 +706,11 @@
:plugin-data plugin-data}))
(defn- import-file
[{:keys [::bfc/project-id] :as cfg} {file-id :id file-name :name}]
[{:keys [::bfc/project-id ::file-id ::file-name] :as cfg}]
(let [file-id' (bfc/lookup-index file-id)
file (read-file cfg file-id)
media (read-file-media cfg file-id)
thumbnails (read-file-thumbnails cfg file-id)]
file (read-file cfg)
media (read-file-media cfg)
thumbnails (read-file-thumbnails cfg)]
(l/dbg :hint "processing file"
:id (str file-id')
@@ -746,7 +740,7 @@
(vswap! bfc/*state* update :index bfc/update-index (map :media-id thumbnails))
(vswap! bfc/*state* update :thumbnails into thumbnails))
(let [data (-> (read-file-data cfg file-id)
(let [data (-> (read-file-data cfg)
(d/without-nils)
(assoc :id file-id')
(cond-> (:options file)
@@ -763,7 +757,7 @@
file (ctf/check-file file)]
(bfm/register-pending-migrations! cfg file)
(bfc/save-file! cfg file)
(bfc/save-file! cfg file ::db/return-keys false)
file-id')))
@@ -859,8 +853,7 @@
:file-id (str (:file-id params))
::l/sync? true)
(db/insert! conn :file-media-object params
::db/on-conflict-do-nothing? (::bfc/overwrite cfg)))))
(db/insert! conn :file-media-object params))))
(defn- import-file-thumbnails
[{:keys [::db/conn] :as cfg}]
@@ -880,77 +873,17 @@
:media-id (str media-id)
::l/sync? true)
(db/insert! conn :file-tagged-object-thumbnail params
{::db/on-conflict-do-nothing? true}))))
(defn- import-files*
[{:keys [::manifest] :as cfg}]
(bfc/disable-database-timeouts! cfg)
(vswap! bfc/*state* update :index bfc/update-index (:files manifest) :id)
(let [files (get manifest :files)
result (reduce (fn [result {:keys [id] :as file}]
(let [name' (get file :name)
name' (if (map? name)
(get name id)
name')
file (assoc file :name name')]
(conj result (import-file cfg file))))
[]
files)]
(import-file-relations cfg)
(import-storage-objects cfg)
(import-file-media cfg)
(import-file-thumbnails cfg)
(bfm/apply-pending-migrations! cfg)
result))
(defn- import-file-and-overwrite*
[{:keys [::manifest ::bfc/file-id] :as cfg}]
(when (not= 1 (count (:files manifest)))
(ex/raise :type :validation
:code :invalid-condition
:hint "unable to perform in-place update with binfile containing more than 1 file"
:manifest manifest))
(bfc/disable-database-timeouts! cfg)
(let [ref-file (bfc/get-minimal-file cfg file-id ::db/for-update true)
file (first (get manifest :files))
cfg (assoc cfg ::bfc/overwrite true)]
(vswap! bfc/*state* update :index assoc (:id file) file-id)
(binding [bfc/*options* cfg
bfc/*reference-file* ref-file]
(import-file cfg file)
(import-storage-objects cfg)
(import-file-media cfg)
(bfc/invalidate-thumbnails cfg file-id)
(bfm/apply-pending-migrations! cfg)
[file-id])))
(db/insert! conn :file-tagged-object-thumbnail params))))
(defn- import-files
[{:keys [::bfc/timestamp ::bfc/input] :or {timestamp (ct/now)} :as cfg}]
[{:keys [::bfc/timestamp ::bfc/input ::bfc/name] :or {timestamp (dt/now)} :as cfg}]
(assert (instance? ZipFile input) "expected zip file")
(assert (ct/inst? timestamp) "expected valid instant")
(assert (dt/instant? timestamp) "expected valid instant")
(let [manifest (-> (read-manifest input)
(validate-manifest))
entries (read-zip-entries input)
cfg (-> cfg
(assoc ::entries entries)
(assoc ::manifest manifest)
(assoc ::bfc/timestamp timestamp))]
entries (read-zip-entries input)]
(when-not (= "penpot/export-files" (:type manifest))
(ex/raise :type :validation
@@ -958,6 +891,7 @@
:hint "unexpected type on manifest"
:manifest manifest))
;; Check if all files referenced on manifest are present
(doseq [{file-id :id features :features} (:files manifest)]
(let [path (str "files/" file-id ".json")]
@@ -973,10 +907,35 @@
(events/tap :progress {:section :manifest})
(binding [bfc/*state* (volatile! {:media [] :index {}})]
(if (::bfc/file-id cfg)
(db/tx-run! cfg import-file-and-overwrite*)
(db/tx-run! cfg import-files*)))))
(let [index (bfc/update-index (map :id (:files manifest)))
state {:media [] :index index}
cfg (-> cfg
(assoc ::entries entries)
(assoc ::manifest manifest)
(assoc ::bfc/timestamp timestamp))]
(binding [bfc/*state* (volatile! state)]
(db/tx-run! cfg (fn [cfg]
(bfc/disable-database-timeouts! cfg)
(let [ids (->> (:files manifest)
(reduce (fn [result {:keys [id] :as file}]
(let [name' (get file :name)
name' (if (map? name)
(get name id)
name')]
(conj result (-> cfg
(assoc ::file-id id)
(assoc ::file-name name')
(import-file)))))
[]))]
(import-file-relations cfg)
(import-storage-objects cfg)
(import-file-media cfg)
(import-file-thumbnails cfg)
(bfm/apply-pending-migrations! cfg)
ids)))))))
;; --- PUBLIC API
@@ -1002,7 +961,7 @@
"expected instance of jio/IOFactory for `input`")
(let [id (uuid/next)
tp (ct/tpoint)
tp (dt/tpoint)
ab (volatile! false)
cs (volatile! nil)]
(try
@@ -1048,7 +1007,7 @@
"expected instance of jio/IOFactory for `input`")
(let [id (uuid/next)
tp (ct/tpoint)
tp (dt/tpoint)
cs (volatile! nil)]
(l/info :hint "import: started" :id (str id))
@@ -1063,7 +1022,7 @@
(finally
(l/info :hint "import: terminated"
:id (str id)
:elapsed (ct/format-duration (tp))
:elapsed (dt/format-duration (tp))
:error? (some? @cs))))))
(defn get-manifest

View File

@@ -12,10 +12,10 @@
[app.common.exceptions :as ex]
[app.common.flags :as flags]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uri :as u]
[app.common.version :as v]
[app.util.overrides]
[app.util.time :as dt]
[clojure.core :as c]
[clojure.java.io :as io]
[cuerdas.core :as str]
@@ -52,8 +52,6 @@
:redis-uri "redis://redis/0"
:file-storage-backend "db"
:objects-storage-backend "fs"
:objects-storage-fs-directory "assets"
@@ -61,10 +59,10 @@
:smtp-default-reply-to "Penpot <no-reply@example.com>"
:smtp-default-from "Penpot <no-reply@example.com>"
:profile-complaint-max-age (ct/duration {:days 7})
:profile-complaint-max-age (dt/duration {:days 7})
:profile-complaint-threshold 2
:profile-bounce-max-age (ct/duration {:days 7})
:profile-bounce-max-age (dt/duration {:days 7})
:profile-bounce-threshold 10
:telemetry-uri "https://telemetry.penpot.app/"
@@ -104,11 +102,10 @@
[:telemetry-with-taiga {:optional true} ::sm/boolean] ;; DELETE
[:auto-file-snapshot-every {:optional true} ::sm/int]
[:auto-file-snapshot-timeout {:optional true} ::ct/duration]
[:auto-file-snapshot-timeout {:optional true} ::dt/duration]
[:media-max-file-size {:optional true} ::sm/int]
[:deletion-delay {:optional true} ::ct/duration]
[:file-clean-delay {:optional true} ::ct/duration]
[:deletion-delay {:optional true} ::dt/duration] ;; REVIEW
[:telemetry-enabled {:optional true} ::sm/boolean]
[:default-blob-version {:optional true} ::sm/int]
[:allow-demo-users {:optional true} ::sm/boolean]
@@ -151,10 +148,10 @@
[:auth-data-cookie-domain {:optional true} :string]
[:auth-token-cookie-name {:optional true} :string]
[:auth-token-cookie-max-age {:optional true} ::ct/duration]
[:auth-token-cookie-max-age {:optional true} ::dt/duration]
[:registration-domain-whitelist {:optional true} [::sm/set :string]]
[:email-verify-threshold {:optional true} ::ct/duration]
[:email-verify-threshold {:optional true} ::dt/duration]
[:github-client-id {:optional true} :string]
[:github-client-secret {:optional true} :string]
@@ -189,9 +186,9 @@
[:ldap-starttls {:optional true} ::sm/boolean]
[:ldap-user-query {:optional true} :string]
[:profile-bounce-max-age {:optional true} ::ct/duration]
[:profile-bounce-max-age {:optional true} ::dt/duration]
[:profile-bounce-threshold {:optional true} ::sm/int]
[:profile-complaint-max-age {:optional true} ::ct/duration]
[:profile-complaint-max-age {:optional true} ::dt/duration]
[:profile-complaint-threshold {:optional true} ::sm/int]
[:redis-uri {:optional true} ::sm/uri]
@@ -213,8 +210,6 @@
[:prepl-host {:optional true} :string]
[:prepl-port {:optional true} ::sm/int]
[:file-storage-backend :string]
[:media-directory {:optional true} :string] ;; REVIEW
[:media-uri {:optional true} :string]
[:assets-path {:optional true} :string]
@@ -303,12 +298,7 @@
(defn get-deletion-delay
[]
(or (c/get config :deletion-delay)
(ct/duration {:days 7})))
(defn get-file-clean-delay
[]
(or (c/get config :file-clean-delay)
(ct/duration {:days 2})))
(dt/duration {:days 7})))
(defn get
"A configuration getter. Helps code be more testable."

View File

@@ -10,20 +10,19 @@
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.geom.point :as gpt]
[app.common.json :as json]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.transit :as t]
[app.common.uuid :as uuid]
[app.db.sql :as sql]
[app.metrics :as mtx]
[app.util.json :as json]
[app.util.time :as dt]
[clojure.java.io :as io]
[clojure.set :as set]
[integrant.core :as ig]
[next.jdbc :as jdbc]
[next.jdbc.date-time :as jdbc-dt]
[next.jdbc.prepare :as jdbc.prepare]
[next.jdbc.transaction])
(:import
com.zaxxer.hikari.HikariConfig
@@ -34,7 +33,6 @@
java.io.InputStream
java.io.OutputStream
java.sql.Connection
java.sql.PreparedStatement
java.sql.Savepoint
org.postgresql.PGConnection
org.postgresql.geometric.PGpoint
@@ -379,9 +377,9 @@
(defn is-row-deleted?
[{:keys [deleted-at]}]
(and (ct/inst? deleted-at)
(and (dt/instant? deleted-at)
(< (inst-ms deleted-at)
(inst-ms (ct/now)))))
(inst-ms (dt/now)))))
(defn get*
"Retrieve a single row from database that matches a simple filters. Do
@@ -406,24 +404,6 @@
:hint "database object not found"))
row))
(defn get-with-sql
[ds sql & {:as opts}]
(let [rows (cond->> (exec! ds sql opts)
(::remove-deleted opts true)
(remove is-row-deleted?)
:always
(not-empty))]
(when (and (not rows) (::throw-if-not-exists opts true))
(ex/raise :type :not-found
:code :object-not-found
:hint "database object not found"))
(first rows)))
(def ^:private default-plan-opts
(-> default-opts
(assoc :fetch-size 1000)
@@ -605,7 +585,7 @@
(string? o)
(pginterval o)
(ct/duration? o)
(dt/duration? o)
(interval (inst-ms o))
:else
@@ -619,7 +599,7 @@
val (.getValue o)]
(if (or (= typ "json")
(= typ "jsonb"))
(json/decode val :key-fn keyword)
(json/decode val)
val))))
(defn decode-transit-pgobject
@@ -660,7 +640,7 @@
(when data
(doto (org.postgresql.util.PGobject.)
(.setType "jsonb")
(.setValue (json/encode data)))))
(.setValue (json/encode-str data)))))
;; --- Locks
@@ -706,8 +686,3 @@
[cause]
(and (sql-exception? cause)
(= "40001" (.getSQLState ^java.sql.SQLException cause))))
(extend-protocol jdbc.prepare/SettableParameter
clojure.lang.Keyword
(set-parameter [^clojure.lang.Keyword v ^PreparedStatement s ^long i]
(.setObject s i ^String (d/name v))))

View File

@@ -12,18 +12,21 @@
[app.common.files.helpers :as cfh]
[app.common.files.migrations :as fmg]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.types.path :as path]
[app.config :as cf]
[app.db :as db]
[app.db.sql :as-alias sql]
[app.storage :as sto]
[app.util.blob :as blob]
[app.util.objects-map :as omap]
[app.util.pointer-map :as pmap]
[app.worker :as wrk]
[promesa.exec :as px]))
[app.util.pointer-map :as pmap]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; OFFLOAD
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn offloaded?
[file]
(= "objects-storage" (:data-backend file)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; OBJECTS-MAP
@@ -60,25 +63,30 @@
objects)))))
fdata))
(defn realize-objects
"Process a file and remove all instances of objects mao realizing them
to a plain data. Used in operation where is more efficient have the
whole file loaded in memory or we going to persist it in an
alterantive storage."
[_cfg file]
(update file :data process-objects (partial into {})))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; POINTER-MAP
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn get-file-data
"Get file data given a file instance."
[system file]
(if (offloaded? file)
(let [storage (sto/resolve system ::db/reuse-conn true)]
(->> (sto/get-object storage (:data-ref-id file))
(sto/get-object-bytes storage)))
(:data file)))
(defn resolve-file-data
[system file]
(let [data (get-file-data system file)]
(assoc file :data data)))
(defn load-pointer
"A database loader pointer helper"
[cfg file-id id]
(let [fragment (db/get* cfg :file-data
{:id id :file-id file-id :type "fragment"}
{::sql/columns [:content :backend :id]})]
[system file-id id]
(let [fragment (db/get* system :file-data-fragment
{:id id :file-id file-id}
{::sql/columns [:data :data-backend :data-ref-id :id]})]
(l/trc :hint "load pointer"
:file-id (str file-id)
@@ -92,22 +100,22 @@
:file-id file-id
:fragment-id id))
;; FIXME: conditional thread scheduling for decoding big objects
(blob/decode (:data fragment))))
(let [data (get-file-data system fragment)]
;; FIXME: conditional thread scheduling for decoding big objects
(blob/decode data))))
(defn persist-pointers!
"Persist all currently tracked pointer objects"
[cfg file-id]
(let [conn (db/get-connection cfg)]
[system file-id]
(let [conn (db/get-connection system)]
(doseq [[id item] @pmap/*tracked*]
(when (pmap/modified? item)
(l/trc :hint "persist pointer" :file-id (str file-id) :id (str id))
(let [content (-> item deref blob/encode)]
(db/insert! conn :file-data
(db/insert! conn :file-data-fragment
{:id id
:file-id file-id
:type "fragment"
:content content}))))))
:data content}))))))
(defn process-pointers
"Apply a function to all pointers on the file. Usuly used for
@@ -121,14 +129,6 @@
(d/update-vals update-fn')
(update :pages-index d/update-vals update-fn'))))
(defn realize-pointers
"Process a file and remove all instances of pointers realizing them to
a plain data. Used in operation where is more efficient have the
whole file loaded in memory."
[cfg {:keys [id] :as file}]
(binding [pmap/*load-fn* (partial load-pointer cfg id)]
(update file :data process-pointers deref)))
(defn get-used-pointer-ids
"Given a file, return all pointer ids used in the data."
[fdata]
@@ -192,314 +192,3 @@
(update :features disj "fdata/path-data")
(update :migrations disj "0003-convert-path-content")
(vary-meta update ::fmg/migrated disj "0003-convert-path-content"))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; GENERAL PURPOSE HELPERS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn realize
"A helper that combines realize-pointers and realize-objects"
[cfg file]
(->> file
(realize-pointers cfg)
(realize-objects cfg)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; STORAGE
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defmulti resolve-file-data
(fn [_cfg file] (or (get file :backend) "db")))
(defmethod resolve-file-data "db"
[_cfg {:keys [legacy-data data] :as file}]
(if (and (some? legacy-data) (not data))
(-> file
(assoc :data legacy-data)
(dissoc :legacy-data))
(dissoc file :legacy-data)))
(defmethod resolve-file-data "storage"
[cfg object]
(let [storage (sto/resolve cfg ::db/reuse-conn true)
ref-id (-> object :metadata :storage-ref-id)
data (->> (sto/get-object storage ref-id)
(sto/get-object-bytes storage))]
(-> object
(assoc :data data)
(dissoc :legacy-data))))
(defn decode-file-data
[{:keys [::wrk/executor]} {:keys [data] :as file}]
(cond-> file
(bytes? data)
(assoc :data (px/invoke! executor #(blob/decode data)))))
(def ^:private sql:insert-file-data
"INSERT INTO file_data (file_id, id, created_at, modified_at,
type, backend, metadata, data)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)")
(def ^:private sql:upsert-file-data
(str sql:insert-file-data
" ON CONFLICT (file_id, id)
DO UPDATE SET modified_at=?,
backend=?,
metadata=?,
data=?;"))
(defn- create-in-database
[cfg {:keys [id file-id created-at modified-at type backend data metadata]}]
(let [metadata (some-> metadata db/json)
created-at (or created-at (ct/now))
modified-at (or modified-at created-at)]
(db/exec-one! cfg [sql:insert-file-data
file-id id
created-at
modified-at
type
backend
metadata
data])))
(defn- upsert-in-database
[cfg {:keys [id file-id created-at modified-at type backend data metadata]}]
(let [metadata (some-> metadata db/json)
created-at (or created-at (ct/now))
modified-at (or modified-at created-at)]
(db/exec-one! cfg [sql:upsert-file-data
file-id id
created-at
modified-at
type
backend
metadata
data
modified-at
backend
metadata
data])))
(defmulti ^:private handle-persistence
(fn [_cfg params] (:backend params)))
(defmethod handle-persistence "db"
[_ params]
(dissoc params :metadata))
(defmethod handle-persistence "storage"
[{:keys [::sto/storage] :as cfg}
{:keys [id file-id data] :as params}]
(let [content (sto/content data)
sobject (sto/put-object! storage
{::sto/content content
::sto/touch true
:bucket "file-data"
:content-type "application/octet-stream"
:file-id file-id
:id id})
metadata {:storage-ref-id (:id sobject)}]
(-> params
(assoc :metadata metadata)
(assoc :data nil))))
(defn- process-metadata
[cfg metadata]
(when-let [storage-id (:storage-ref-id metadata)]
(let [storage (sto/resolve cfg ::db/reuse-conn true)]
(sto/touch-object! storage storage-id))))
(defn- default-backend
[backend]
(or backend (cf/get :file-storage-backend "db")))
(def ^:private schema:metadata
[:map {:title "Metadata"}
[:storage-ref-id {:optional true} ::sm/uuid]])
(def decode-metadata-with-schema
(sm/decoder schema:metadata sm/json-transformer))
(defn decode-metadata
[metadata]
(some-> metadata
(db/decode-json-pgobject)
(decode-metadata-with-schema)))
(def ^:private schema:update-params
[:map {:closed true}
[:id ::sm/uuid]
[:type [:enum "main" "snapshot"]]
[:file-id ::sm/uuid]
[:backend {:optional true} [:enum "db" "storage"]]
[:metadata {:optional true} [:maybe schema:metadata]]
[:data {:optional true} bytes?]
[:created-at {:optional true} ::ct/inst]
[:modified-at {:optional true} ::ct/inst]])
(def ^:private check-update-params
(sm/check-fn schema:update-params :hint "invalid params received for update"))
(defn update!
[cfg params & {:keys [throw-if-not-exists?]}]
(let [params (-> (check-update-params params)
(update :backend default-backend))]
(some->> (:metadata params) (process-metadata cfg))
(let [result (handle-persistence cfg params)
result (if throw-if-not-exists?
(create-in-database cfg result)
(upsert-in-database cfg result))]
(-> result db/get-update-count pos?))))
(defn create!
[cfg params]
(update! cfg params :throw-on-conflict? true))
(def ^:private schema:delete-params
[:map {:closed true}
[:id ::sm/uuid]
[:type [:enum "main" "snapshot"]]
[:file-id ::sm/uuid]])
(def check-delete-params
(sm/check-fn schema:delete-params :hint "invalid params received for delete"))
(defn delete!
[cfg params]
(when-let [fdata (db/get* cfg :file-data
(check-delete-params params))]
(some->> (get fdata :metadata)
(decode-metadata)
(process-metadata cfg))
(-> (db/delete! cfg :file-data params)
(db/get-update-count)
(pos?))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; SCRIPTS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def ^:private sql:get-unmigrated-files
"SELECT f.id, f.data, f.created_at, f.modified_at
FROM file AS f
WHERE f.data IS NOT NULL
ORDER BY f.modified_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn migrate-files-to-storage
"Migrate the current existing files to store data in new storage
tables."
[system & {:keys [chunk-size] :or {chunk-size 100}}]
(db/tx-run! system
(fn [{:keys [::db/conn]}]
(reduce (fn [total {:keys [id data index created-at modified-at]}]
(l/dbg :hint "migrating file" :file-id (str id))
(db/update! conn :file {:data nil} {:id id} ::db/return-keys false)
(db/insert! conn :file-data
{:backend "db"
:metadata nil
:type "main"
:data data
:created-at created-at
:modified-at modified-at
:file-id id
:id id}
{::db/return-keys false})
(inc total))
0
(db/plan conn [sql:get-unmigrated-files chunk-size]
{:fetch-size 1})))))
(def ^:private sql:get-migrated-files
"SELECT f.id, f.data
FROM file_data AS f
WHERE f.data IS NOT NULL
AND f.id = f.file_id
ORDER BY f.id ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn rollback-files-from-storage
"Migrate back to the file table storage."
[system & {:keys [chunk-size] :or {chunk-size 100}}]
(db/tx-run! system
(fn [{:keys [::db/conn]}]
(reduce (fn [total {:keys [id data]}]
(l/dbg :hint "rollback file" :file-id (str id))
(db/update! conn :file {:data data} {:id id} ::db/return-keys false)
(db/delete! conn :file-data {:id id} ::db/return-keys false)
(inc total))
0
(db/plan conn [sql:get-migrated-files chunk-size]
{:fetch-size 1})))))
(def ^:private sql:get-unmigrated-snapshots
"SELECT fc.id, fc.data, fc.file_id, fc.created_at, fc.updated_at AS modified_at
FROM file_change AS fc
WHERE fc.data IS NOT NULL
AND f.label IS NOT NULL
ORDER BY f.id ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn migrate-snapshots-to-storage
"Migrate the current existing files to store data in new storage
tables."
[system & {:keys [chunk-size] :or {chunk-size 100}}]
(db/tx-run! system
(fn [{:keys [::db/conn]}]
(reduce (fn [total {:keys [id file-id data created-at modified-at]}]
(l/dbg :hint "migrating snapshot" :file-id (str file-id) :id (str id))
(db/update! conn :file-change {:data nil} {:id id :file-id file-id} ::db/return-keys false)
(db/insert! conn :file-data
{:backend "db"
:metadata nil
:type "snapshot"
:data data
:created-at created-at
:modified-at modified-at
:file-id file-id
:id id}
{::db/return-keys false})
(inc total))
0
(db/plan conn [sql:get-unmigrated-snapshots chunk-size]
{:fetch-size 1})))))
(def ^:private sql:get-migrated-snapshots
"SELECT f.id, f.data, f.file_id
FROM file_data AS f
WHERE f.data IS NOT NULL
AND f.type = 'snapshot'
AND f.id != f.file_id
ORDER BY f.id ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn rollback-snapshots-from-storage
"Migrate back to the file table storage."
[system & {:keys [chunk-size] :or {chunk-size 100}}]
(db/tx-run! system
(fn [{:keys [::db/conn]}]
(db/exec! conn ["SET statement_timeout = 0"])
(db/exec! conn ["SET idle_in_transaction_session_timeout = 0"])
(reduce (fn [total {:keys [id file-id data]}]
(l/dbg :hint "rollback snapshot" :file-id (str id) :id (str id))
(db/update! conn :file-change {:data data} {:id id :file-id file-id} ::db/return-keys false)
(db/delete! conn :file-data {:id id :file-id file-id} ::db/return-keys false)
(inc total))
0
(db/plan conn [sql:get-migrated-snapshots chunk-size]
{:fetch-size 1})))))

View File

@@ -8,7 +8,6 @@
"Backend specific code for file migrations. Implemented as permanent feature of files."
(:require
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.files.migrations :as fmg :refer [xf:map-name]]
[app.db :as db]
[app.db.sql :as-alias sql]))
@@ -27,19 +26,12 @@
(defn upsert-migrations!
"Persist or update file migrations. Return the updated/inserted number
of rows"
[cfg {:keys [id] :as file}]
(let [conn (db/get-connection cfg)
migrations (or (-> file meta ::fmg/migrated)
(-> file :migrations))
[conn {:keys [id] :as file}]
(let [migrations (or (-> file meta ::fmg/migrated)
(-> file :migrations not-empty)
fmg/available-migrations)
columns [:file-id :name]
rows (->> migrations
(mapv (fn [name] [id name]))
(not-empty))]
(when-not rows
(ex/raise :type :internal
:code :missing-migrations
:hint "no migrations available on file"))
rows (mapv (fn [name] [id name]) migrations)]
(-> (db/insert-many! conn :file-migration columns rows
{::db/return-keys false
@@ -48,6 +40,6 @@
(defn reset-migrations!
"Replace file migrations"
[cfg {:keys [id] :as file}]
(db/delete! cfg :file-migration {:file-id id})
(upsert-migrations! cfg file))
[conn {:keys [id] :as file}]
(db/delete! conn :file-migration {:file-id id})
(upsert-migrations! conn file))

View File

@@ -1,373 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.features.file-snapshots
(:require
[app.binfile.common :as bfc]
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.features :as-alias cfeat]
[app.common.files.migrations :as fmg]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.db.sql :as-alias sql]
[app.features.fdata :as fdata]
[app.storage :as sto]
[app.util.blob :as blob]
[app.worker :as wrk]
[cuerdas.core :as str]
[promesa.exec :as px]))
(def sql:snapshots
"SELECT c.id,
c.label,
c.created_at,
c.updated_at AS modified_at,
c.deleted_at,
c.profile_id,
c.created_by,
c.locked_by,
c.revn,
c.features,
c.migrations,
c.version,
c.file_id,
c.data AS legacy_data,
fd.data AS data,
coalesce(fd.backend, 'db') AS backend,
fd.metadata AS metadata
FROM file_change AS c
LEFT JOIN file_data AS fd ON (fd.file_id = c.file_id
AND fd.id = c.id
AND fd.type = 'snapshot')
WHERE c.label IS NOT NULL")
(def ^:private sql:get-snapshot
(str sql:snapshots " AND c.file_id = ? AND c.id = ?"))
(def ^:private sql:get-snapshots
(str sql:snapshots " AND c.file_id = ?"))
(def ^:private sql:get-snapshot-without-data
(str "WITH snapshots AS (" sql:snapshots ")"
"SELECT c.id,
c.label,
c.revn,
c.created_at,
c.modified_at,
c.deleted_at,
c.profile_id,
c.created_by,
c.features,
c.metadata,
c.migrations,
c.version,
c.file_id
FROM snapshots AS c
WHERE c.id = ?"))
(defn- decode-snapshot
[snapshot]
(some-> snapshot (-> (d/update-when :metadata fdata/decode-metadata)
(d/update-when :migrations db/decode-pgarray [])
(d/update-when :features db/decode-pgarray #{}))))
(def sql:get-minimal-file
"SELECT f.id,
f.revn,
f.modified_at,
f.deleted_at,
fd.backend AS backend,
fd.metadata AS metadata
FROM file AS f
LEFT JOIN file_data AS fd ON (fd.file_id = f.id AND fd.id = f.id)
WHERE f.id = ?")
(defn get-minimal-file
[cfg id & {:as opts}]
(-> (db/get-with-sql cfg [sql:get-minimal-file id] opts)
(d/update-when :metadata fdata/decode-metadata)))
(defn get-minimal-snapshot
[cfg snapshot-id]
(-> (db/get-with-sql cfg [sql:get-snapshot-without-data snapshot-id])
(decode-snapshot)))
(defn get-snapshot
"Get snapshot with decoded data"
[cfg file-id snapshot-id]
(->> (db/get-with-sql cfg [sql:get-snapshot file-id snapshot-id])
(decode-snapshot)
(fdata/resolve-file-data cfg)
(fdata/decode-file-data cfg)))
(def ^:private sql:get-visible-snapshots
(str "WITH "
"snapshots1 AS ( " sql:snapshots "),"
"snapshots2 AS (
SELECT c.id,
c.label,
c.version,
c.created_at,
c.modified_at,
c.created_by,
c.locked_by,
c.profile_id
FROM snapshots1 AS c
WHERE c.file_id = ?
AND (c.deleted_at IS NULL OR deleted_at > now())
), snapshots3 AS (
(SELECT * FROM snapshots2 WHERE created_by = 'system' LIMIT 1000)
UNION ALL
(SELECT * FROM snapshots2 WHERE created_by != 'system' LIMIT 1000)
)
SELECT * FROM snapshots3
ORDER BY created_at DESC;"))
(defn get-visible-snapshots
"Return a list of snapshots fecheable from the API, it has a limited
set of fields and applies big but safe limits over all available
snapshots. It return a ordered vector by the snapshot date of
creation."
[cfg file-id]
(->> (db/exec! cfg [sql:get-visible-snapshots file-id])
(mapv decode-snapshot)))
(def ^:private schema:decoded-file
[:map {:title "DecodedFile"}
[:id ::sm/uuid]
[:revn :int]
[:vern :int]
[:data :map]
[:version :int]
[:features ::cfeat/features]
[:migrations [::sm/set :string]]])
(def ^:private schema:snapshot
[:map {:title "Snapshot"}
[:id ::sm/uuid]
[:revn [::sm/int {:min 0}]]
[:version [::sm/int {:min 0}]]
[:features ::cfeat/features]
[:migrations [::sm/set ::sm/text]]
[:profile-id {:optional true} ::sm/uuid]
[:label ::sm/text]
[:file-id ::sm/uuid]
[:created-by [:enum "system" "user" "admin"]]
[:deleted-at {:optional true} ::ct/inst]
[:modified-at ::ct/inst]
[:created-at ::ct/inst]])
(def ^:private schema:snapshot-params
[:map {:title "SnapshotParams"}
[:id ::sm/uuid]
[:file-id ::sm/uuid]
[:label ::sm/text]
[:modified-at {:optional true} ::ct/inst]])
(def ^:private check-snapshot
(sm/check-fn schema:snapshot))
(def ^:private check-snapshot-params
(sm/check-fn schema:snapshot-params))
(def ^:private check-decoded-file
(sm/check-fn schema:decoded-file))
(defn- generate-snapshot-label
[]
(let [ts (-> (ct/now)
(ct/format-inst)
(str/replace #"[T:\.]" "-")
(str/rtrim "Z"))]
(str "snapshot-" ts)))
(defn create!
"Create a file snapshot; expects a non-encoded file."
[cfg file & {:keys [label created-by deleted-at profile-id session-id]
:or {deleted-at :default
created-by "system"}}]
(let [file (check-decoded-file file)
snapshot-id (uuid/next)
created-at (ct/now)
deleted-at (cond
(= deleted-at :default)
(ct/plus (ct/now) (cf/get-deletion-delay))
(ct/inst? deleted-at)
deleted-at
:else
nil)
label (or label (generate-snapshot-label))
data (px/invoke! (::wrk/executor cfg) #(blob/encode (:data file)))
features (:features file)
migrations (:migrations file)
snapshot {:id snapshot-id
:revn (:revn file)
:version (:version file)
:file-id (:id file)
:features features
:migrations migrations
:label label
:created-at created-at
:modified-at created-at
:created-by created-by}
snapshot (cond-> snapshot
deleted-at
(assoc :deleted-at deleted-at)
:always
(check-snapshot))]
(db/insert! cfg :file-change
(-> snapshot
(update :features into-array)
(update :migrations into-array)
(assoc :updated-at created-at)
(assoc :profile-id profile-id)
(assoc :session-id session-id)
(dissoc :modified-at))
{::db/return-keys false})
(fdata/create! cfg
{:id snapshot-id
:file-id (:id file)
:type "snapshot"
:data data
:created-at created-at
:modified-at created-at})
snapshot))
(defn update!
[cfg params]
(let [{:keys [id file-id label modified-at]}
(check-snapshot-params params)
modified-at
(or modified-at (ct/now))]
(-> (db/update! cfg :file-change
{:label label
:created-by "user"
:updated-at modified-at
:deleted-at nil}
{:file-id file-id
:id id}
{::db/return-keys false})
(db/get-update-count)
(pos?))))
(defn restore!
[{:keys [::db/conn] :as cfg} file-id snapshot-id]
(let [file (get-minimal-file conn file-id {::db/for-update true})
vern (rand-int Integer/MAX_VALUE)
storage
(sto/resolve cfg {::db/reuse-conn true})
snapshot
(get-snapshot cfg file-id snapshot-id)]
(when-not snapshot
(ex/raise :type :not-found
:code :snapshot-not-found
:hint "unable to find snapshot with the provided label"
:snapshot-id snapshot-id
:file-id file-id))
(when-not (:data snapshot)
(ex/raise :type :internal
:code :snapshot-without-data
:hint "snapshot has no data"
:label (:label snapshot)
:file-id file-id))
(let [;; If the snapshot has applied migrations stored, we reuse
;; them, if not, we take a safest set of migrations as
;; starting point. This is because, at the time of
;; implementing snapshots, migrations were not taken into
;; account so we need to make this backward compatible in
;; some way.
migrations
(or (:migrations snapshot)
(fmg/generate-migrations-from-version 67))
file
(-> file
(update :revn inc)
(assoc :migrations migrations)
(assoc :data (:data snapshot))
(assoc :vern vern)
(assoc :version (:version snapshot))
(assoc :has-media-trimmed false)
(assoc :modified-at (:modified-at snapshot))
(assoc :features (:features snapshot)))]
(l/dbg :hint "restoring snapshot"
:file-id (str file-id)
:label (:label snapshot)
:snapshot-id (str (:id snapshot)))
;; In the same way, on reseting the file data, we need to restore
;; the applied migrations on the moment of taking the snapshot
(bfc/update-file! cfg file ::bfc/reset-migrations true)
;; FIXME: this should be separated functions, we should not have
;; inline sql here.
;; clean object thumbnails
(let [sql (str "update file_tagged_object_thumbnail "
" set deleted_at = now() "
" where file_id=? returning media_id")
res (db/exec! conn [sql file-id])]
(doseq [media-id (into #{} (keep :media-id) res)]
(sto/touch-object! storage media-id)))
;; clean file thumbnails
(let [sql (str "update file_thumbnail "
" set deleted_at = now() "
" where file_id=? returning media_id")
res (db/exec! conn [sql file-id])]
(doseq [media-id (into #{} (keep :media-id) res)]
(sto/touch-object! storage media-id)))
vern)))
(defn delete!
[cfg {:keys [id file-id]}]
(let [deleted-at (ct/now)]
(db/update! cfg :file-change
{:deleted-at deleted-at}
{:id id :file-id file-id}
{::db/return-keys false})
true))
(defn reduce-snapshots
"Process the file snapshots using efficient reduction."
[cfg file-id xform f init]
(let [conn (db/get-connection cfg)
xform (comp
(map (partial fdata/resolve-file-data cfg))
(map (partial fdata/decode-file-data cfg))
xform)]
(->> (db/plan conn [sql:get-snapshots file-id] {:fetch-size 1})
(transduce xform f init))))

View File

@@ -7,8 +7,8 @@
(ns app.features.logical-deletion
"A code related to handle logical deletion mechanism"
(:require
[app.common.time :as ct]
[app.config :as cf]))
[app.config :as cf]
[app.util.time :as dt]))
(def ^:private canceled-status
#{"canceled" "unpaid"})
@@ -20,10 +20,10 @@
(if-let [{:keys [type status]} (get team :subscription)]
(cond
(and (= "unlimited" type) (not (contains? canceled-status status)))
(ct/duration {:days 30})
(dt/duration {:days 30})
(and (= "enterprise" type) (not (contains? canceled-status status)))
(ct/duration {:days 90})
(dt/duration {:days 90})
:else
(cf/get-deletion-delay))

View File

@@ -9,18 +9,18 @@
(:require
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.time :as ct]
[app.common.uri :as u]
[app.db :as db]
[app.storage :as sto]
[app.util.time :as dt]
[integrant.core :as ig]
[yetti.response :as-alias yres]))
(def ^:private cache-max-age
(ct/duration {:hours 24}))
(dt/duration {:hours 24}))
(def ^:private signature-max-age
(ct/duration {:hours 24 :minutes 15}))
(dt/duration {:hours 24 :minutes 15}))
(defn get-id
[{:keys [path-params]}]

View File

@@ -15,7 +15,6 @@
[app.common.features :as cfeat]
[app.common.logging :as l]
[app.common.pprint :as pp]
[app.common.time :as ct]
[app.common.transit :as t]
[app.common.uuid :as uuid]
[app.config :as cf]
@@ -32,6 +31,7 @@
[app.storage.tmp :as tmp]
[app.util.blob :as blob]
[app.util.template :as tmpl]
[app.util.time :as dt]
[cuerdas.core :as str]
[datoteka.io :as io]
[emoji.core :as emj]
@@ -137,7 +137,7 @@
file (some-> params :file :path io/read* t/decode)]
(if (and file project-id)
(let [fname (str "Imported: " (:name file) "(" (ct/now) ")")
(let [fname (str "Imported: " (:name file) "(" (dt/now) ")")
reuse-id? (contains? params :reuseid)
file-id (or (and reuse-id? (ex/ignoring (-> params :file :filename parse-uuid)))
(uuid/next))]
@@ -222,7 +222,7 @@
(-> (io/resource "app/templates/error-report.v3.tmpl")
(tmpl/render (-> content
(assoc :id id)
(assoc :created-at (ct/format-inst created-at :rfc1123))))))]
(assoc :created-at (dt/format-instant created-at :rfc1123))))))]
(if-let [report (get-report request)]
(let [result (case (:version report)
@@ -246,7 +246,7 @@
(defn error-list-handler
[{:keys [::db/pool]} _request]
(let [items (->> (db/exec! pool [sql:error-reports])
(map #(update % :created-at ct/format-inst :rfc1123)))]
(map #(update % :created-at dt/format-instant :rfc1123)))]
{::yres/status 200
::yres/body (-> (io/resource "app/templates/error-list.tmpl")
(tmpl/render {:items items}))

View File

@@ -10,7 +10,6 @@
[app.common.data :as d]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uri :as u]
[app.config :as cf]
[app.db :as db]
@@ -19,6 +18,7 @@
[app.main :as-alias main]
[app.setup :as-alias setup]
[app.tokens :as tokens]
[app.util.time :as dt]
[cuerdas.core :as str]
[integrant.core :as ig]
[yetti.request :as yreq]))
@@ -35,10 +35,10 @@
(def default-auth-data-cookie-name "auth-data")
;; Default value for cookie max-age
(def default-cookie-max-age (ct/duration {:days 7}))
(def default-cookie-max-age (dt/duration {:days 7}))
;; Default age for automatic session renewal
(def default-renewal-max-age (ct/duration {:hours 6}))
(def default-renewal-max-age (dt/duration {:hours 6}))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; PROTOCOLS
@@ -66,7 +66,7 @@
[:map {:title "session-params"}
[:user-agent ::sm/text]
[:profile-id ::sm/uuid]
[:created-at ::ct/inst]])
[:created-at ::sm/inst]])
(def ^:private valid-params?
(sm/validator schema:params))
@@ -95,7 +95,7 @@
params))
(update! [_ params]
(let [updated-at (ct/now)]
(let [updated-at (dt/now)]
(db/update! pool :http-session
{:updated-at updated-at}
{:id (:id params)})
@@ -118,7 +118,7 @@
params))
(update! [_ params]
(let [updated-at (ct/now)]
(let [updated-at (dt/now)]
(swap! cache update (:id params) assoc :updated-at updated-at)
(assoc params :updated-at updated-at)))
@@ -158,7 +158,7 @@
(let [uagent (yreq/get-header request "user-agent")
params {:profile-id profile-id
:user-agent uagent
:created-at (ct/now)}
:created-at (dt/now)}
token (gen-token props params)
session (write! manager token params)]
(l/trace :hint "create" :profile-id (str profile-id))
@@ -203,8 +203,8 @@
(defn- renew-session?
[{:keys [updated-at] :as session}]
(and (ct/inst? updated-at)
(let [elapsed (ct/diff updated-at (ct/now))]
(and (dt/instant? updated-at)
(let [elapsed (dt/diff updated-at (dt/now))]
(neg? (compare default-renewal-max-age elapsed)))))
(defn- wrap-soft-auth
@@ -256,14 +256,14 @@
(defn- assign-auth-token-cookie
[response {token :id updated-at :updated-at}]
(let [max-age (cf/get :auth-token-cookie-max-age default-cookie-max-age)
created-at (or updated-at (ct/now))
renewal (ct/plus created-at default-renewal-max-age)
expires (ct/plus created-at max-age)
created-at (or updated-at (dt/now))
renewal (dt/plus created-at default-renewal-max-age)
expires (dt/plus created-at max-age)
secure? (contains? cf/flags :secure-session-cookies)
strict? (contains? cf/flags :strict-session-cookies)
cors? (contains? cf/flags :cors)
name (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
comment (str "Renewal at: " (ct/format-inst renewal :rfc1123))
comment (str "Renewal at: " (dt/format-instant renewal :rfc1123))
cookie {:path "/"
:http-only true
:expires expires
@@ -279,11 +279,11 @@
domain (cf/get :auth-data-cookie-domain)
cname default-auth-data-cookie-name
created-at (or updated-at (ct/now))
renewal (ct/plus created-at default-renewal-max-age)
expires (ct/plus created-at max-age)
created-at (or updated-at (dt/now))
renewal (dt/plus created-at default-renewal-max-age)
expires (dt/plus created-at max-age)
comment (str "Renewal at: " (ct/format-inst renewal :rfc1123))
comment (str "Renewal at: " (dt/format-instant renewal :rfc1123))
secure? (contains? cf/flags :secure-session-cookies)
strict? (contains? cf/flags :strict-session-cookies)
cors? (contains? cf/flags :cors)
@@ -323,7 +323,7 @@
(defmethod ig/assert-key ::tasks/gc
[_ params]
(assert (db/pool? (::db/pool params)) "expected valid database pool")
(assert (ct/duration? (::tasks/max-age params))))
(assert (dt/duration? (::tasks/max-age params))))
(defmethod ig/expand-key ::tasks/gc
[k v]

View File

@@ -11,12 +11,12 @@
[app.common.logging :as l]
[app.common.pprint :as pp]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.db :as db]
[app.http.session :as session]
[app.metrics :as mtx]
[app.msgbus :as mbus]
[app.util.time :as dt]
[app.util.websocket :as ws]
[integrant.core :as ig]
[promesa.exec.csp :as sp]
@@ -239,7 +239,7 @@
(defn- on-connect
[{:keys [::mtx/metrics]} {:keys [::ws/id] :as wsp}]
(let [created-at (ct/now)]
(let [created-at (dt/now)]
(l/trace :fn "on-connect" :conn-id id)
(swap! state assoc id wsp)
(mtx/run! metrics
@@ -253,7 +253,7 @@
(mtx/run! metrics :id :websocket-active-connections :dec 1)
(mtx/run! metrics
:id :websocket-session-timing
:val (/ (inst-ms (ct/diff created-at (ct/now))) 1000.0))))))
:val (/ (inst-ms (dt/diff created-at (dt/now))) 1000.0))))))
(defn- on-rcv-message
[{:keys [::mtx/metrics ::profile-id ::session-id]} message]

View File

@@ -11,7 +11,6 @@
[app.common.data.macros :as dm]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
@@ -24,6 +23,7 @@
[app.setup :as-alias setup]
[app.util.inet :as inet]
[app.util.services :as-alias sv]
[app.util.time :as dt]
[app.worker :as wrk]
[cuerdas.core :as str]))
@@ -108,9 +108,9 @@
[::ip-addr {:optional true} ::sm/text]
[::props {:optional true} [:map-of :keyword :any]]
[::context {:optional true} [:map-of :keyword :any]]
[::tracked-at {:optional true} ::ct/inst]
[::tracked-at {:optional true} ::sm/inst]
[::webhooks/event? {:optional true} ::sm/boolean]
[::webhooks/batch-timeout {:optional true} ::ct/duration]
[::webhooks/batch-timeout {:optional true} ::dt/duration]
[::webhooks/batch-key {:optional true}
[:or ::sm/fn ::sm/text :keyword]]])
@@ -199,7 +199,7 @@
(defn- handle-event!
[cfg event]
(let [params (event->params event)
tnow (ct/now)]
tnow (dt/now)]
(when (contains? cf/flags :audit-log)
;; NOTE: this operation may cause primary key conflicts on inserts
@@ -273,7 +273,7 @@
(let [event (-> (d/without-nils event)
(check-event))]
(db/run! cfg (fn [cfg]
(let [tnow (ct/now)
(let [tnow (dt/now)
params (-> (event->params event)
(assoc :created-at tnow)
(update :tracked-at #(or % tnow)))]

View File

@@ -9,7 +9,6 @@
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.transit :as t]
[app.common.uuid :as uuid]
[app.config :as cf]
@@ -17,6 +16,7 @@
[app.http.client :as http]
[app.setup :as-alias setup]
[app.tokens :as tokens]
[app.util.time :as dt]
[integrant.core :as ig]
[lambdaisland.uri :as u]
[promesa.exec :as px]))
@@ -55,7 +55,7 @@
[{:keys [::uri] :as cfg} events]
(let [token (tokens/generate (::setup/props cfg)
{:iss "authentication"
:iat (ct/now)
:iat (dt/now)
:uid uuid/zero})
body (t/encode {:events events})
headers {"content-type" "application/transit+json"

View File

@@ -10,13 +10,13 @@
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.logging :as l]
[app.common.time :as ct]
[app.common.transit :as t]
[app.common.uri :as uri]
[app.config :as cf]
[app.db :as db]
[app.http.client :as http]
[app.loggers.audit :as audit]
[app.util.time :as dt]
[app.worker :as wrk]
[clojure.data.json :as json]
[cuerdas.core :as str]
@@ -124,7 +124,7 @@
{:id (:id whook)})))
(db/update! pool :webhook
{:updated-at (ct/now)
{:updated-at (dt/now)
:error-code nil
:error-count 0}
{:id (:id whook)})))
@@ -132,7 +132,7 @@
(report-delivery! [whook req rsp err]
(db/insert! pool :webhook-delivery
{:webhook-id (:id whook)
:created-at (ct/now)
:created-at (dt/now)
:error-code err
:req-data (db/tjson req)
:rsp-data (db/tjson rsp)}))]
@@ -155,7 +155,7 @@
(let [req {:uri (:uri whook)
:headers {"content-type" (:mtype whook)
"user-agent" (str/ffmt "penpot/%" (:main cf/version))}
:timeout (ct/duration "4s")
:timeout (dt/duration "4s")
:method :post
:body body}]
(try

View File

@@ -11,7 +11,6 @@
[app.auth.oidc.providers :as-alias oidc.providers]
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.time :as ct]
[app.config :as cf]
[app.db :as-alias db]
[app.email :as-alias email]
@@ -39,7 +38,7 @@
[app.storage.gc-touched :as-alias sto.gc-touched]
[app.storage.s3 :as-alias sto.s3]
[app.svgo :as-alias svgo]
[app.util.cron]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[clojure.test :as test]
[clojure.tools.namespace.repl :as repl]
@@ -300,8 +299,8 @@
:app.http.assets/routes
{::http.assets/path (cf/get :assets-path)
::http.assets/cache-max-age (ct/duration {:hours 24})
::http.assets/cache-max-agesignature-max-age (ct/duration {:hours 24 :minutes 5})
::http.assets/cache-max-age (dt/duration {:hours 24})
::http.assets/cache-max-agesignature-max-age (dt/duration {:hours 24 :minutes 5})
::sto/storage (ig/ref ::sto/storage)}
::rpc/climit
@@ -482,33 +481,33 @@
{::wrk/registry (ig/ref ::wrk/registry)
::db/pool (ig/ref ::db/pool)
::wrk/entries
[{:cron #penpot/cron "0 0 0 * * ?" ;; daily
[{:cron #app/cron "0 0 0 * * ?" ;; daily
:task :session-gc}
{:cron #penpot/cron "0 0 0 * * ?" ;; daily
{:cron #app/cron "0 0 0 * * ?" ;; daily
:task :objects-gc}
{:cron #penpot/cron "0 0 0 * * ?" ;; daily
{:cron #app/cron "0 0 0 * * ?" ;; daily
:task :storage-gc-deleted}
{:cron #penpot/cron "0 0 0 * * ?" ;; daily
{:cron #app/cron "0 0 0 * * ?" ;; daily
:task :storage-gc-touched}
{:cron #penpot/cron "0 0 0 * * ?" ;; daily
{:cron #app/cron "0 0 0 * * ?" ;; daily
:task :tasks-gc}
{:cron #penpot/cron "0 0 2 * * ?" ;; daily
{:cron #app/cron "0 0 2 * * ?" ;; daily
:task :file-gc-scheduler}
{:cron #penpot/cron "0 30 */3,23 * * ?"
{:cron #app/cron "0 30 */3,23 * * ?"
:task :telemetry}
(when (contains? cf/flags :audit-log-archive)
{:cron #penpot/cron "0 */5 * * * ?" ;; every 5m
{:cron #app/cron "0 */5 * * * ?" ;; every 5m
:task :audit-log-archive})
(when (contains? cf/flags :audit-log-gc)
{:cron #penpot/cron "30 */5 * * * ?" ;; every 5m
{:cron #app/cron "30 */5 * * * ?" ;; every 5m
:task :audit-log-gc})]}
::wrk/dispatcher

View File

@@ -14,11 +14,11 @@
[app.common.media :as cm]
[app.common.schema :as sm]
[app.common.schema.openapi :as-alias oapi]
[app.common.time :as ct]
[app.config :as cf]
[app.db :as-alias db]
[app.storage :as-alias sto]
[app.storage.tmp :as tmp]
[app.util.time :as dt]
[buddy.core.bytes :as bb]
[buddy.core.codecs :as bc]
[clojure.java.shell :as sh]
@@ -243,7 +243,7 @@
(ex/raise :type :validation
:code :invalid-svg-file
:hint "uploaded svg does not provides dimensions"))
(merge input info {:ts (ct/now)}))
(merge input info {:ts (dt/now)}))
(let [instance (Info. (str path))
mtype' (.getProperty instance "Mime type")]
@@ -263,7 +263,7 @@
(assoc input
:width width
:height height
:ts (ct/now)))))))
:ts (dt/now)))))))
(defmethod process-error org.im4java.core.InfoException
[error]

View File

@@ -441,13 +441,7 @@
:fn (mg/resource "app/migrations/sql/0139-mod-file-change-table.sql")}
{:name "0140-mod-file-change-table.sql"
:fn (mg/resource "app/migrations/sql/0140-mod-file-change-table.sql")}
{:name "0140-add-locked-by-column-to-file-change-table"
:fn (mg/resource "app/migrations/sql/0140-add-locked-by-column-to-file-change-table.sql")}
{:name "0141-add-file-data-table.sql"
:fn (mg/resource "app/migrations/sql/0141-add-file-data-table.sql")}])
:fn (mg/resource "app/migrations/sql/0140-mod-file-change-table.sql")}])
(defn apply-migrations!
[pool name migrations]

View File

@@ -1,11 +0,0 @@
-- Add locked_by column to file_change table for version locking feature
-- This allows users to lock their own saved versions to prevent deletion by others
ALTER TABLE file_change
ADD COLUMN locked_by uuid NULL REFERENCES profile(id) ON DELETE SET NULL DEFERRABLE;
-- Create index for locked versions queries
CREATE INDEX file_change__locked_by__idx ON file_change (locked_by) WHERE locked_by IS NOT NULL;
-- Add comment for documentation
COMMENT ON COLUMN file_change.locked_by IS 'Profile ID of user who has locked this version. Only the creator can lock/unlock their own versions. Locked versions cannot be deleted by others.';

View File

@@ -1,33 +0,0 @@
CREATE TABLE file_data (
file_id uuid NOT NULL REFERENCES file(id) DEFERRABLE,
id uuid NOT NULL,
created_at timestamptz NOT NULL DEFAULT now(),
modified_at timestamptz NOT NULL DEFAULT now(),
type text NULL,
backend text NULL,
metadata jsonb NULL,
data bytea NULL,
PRIMARY KEY (file_id, id)
) PARTITION BY HASH (file_id, id);
CREATE TABLE file_data_00 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 0);
CREATE TABLE file_data_01 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 1);
CREATE TABLE file_data_02 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 2);
CREATE TABLE file_data_03 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 3);
CREATE TABLE file_data_04 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 4);
CREATE TABLE file_data_05 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 5);
CREATE TABLE file_data_06 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 6);
CREATE TABLE file_data_07 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 7);
CREATE TABLE file_data_08 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 8);
CREATE TABLE file_data_09 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 9);
CREATE TABLE file_data_10 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 10);
CREATE TABLE file_data_11 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 11);
CREATE TABLE file_data_12 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 12);
CREATE TABLE file_data_13 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 13);
CREATE TABLE file_data_14 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 14);
CREATE TABLE file_data_15 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 15);

View File

@@ -10,10 +10,10 @@
[app.common.data :as d]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.transit :as t]
[app.config :as cfg]
[app.redis :as rds]
[app.util.time :as dt]
[app.worker :as wrk]
[integrant.core :as ig]
[promesa.core :as p]
@@ -56,7 +56,7 @@
[k v]
{k (-> (d/without-nils v)
(assoc ::buffer-size 128)
(assoc ::timeout (ct/duration {:seconds 30})))})
(assoc ::timeout (dt/duration {:seconds 30})))})
(def ^:private schema:params
[:map ::rds/redis ::wrk/executor])

View File

@@ -12,10 +12,10 @@
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.metrics :as mtx]
[app.redis.script :as-alias rscript]
[app.util.cache :as cache]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[clojure.core :as c]
[clojure.java.io :as io]
@@ -114,7 +114,7 @@
(let [cpus (px/get-available-processors)
threads (max 1 (int (* cpus 0.2)))]
{k (-> (d/without-nils v)
(assoc ::timeout (ct/duration "10s"))
(assoc ::timeout (dt/duration "10s"))
(assoc ::io-threads (max 3 threads))
(assoc ::worker-threads (max 3 threads)))}))
@@ -125,7 +125,7 @@
[::uri ::sm/uri]
[::worker-threads ::sm/int]
[::io-threads ::sm/int]
[::timeout ::ct/duration]])
[::timeout ::dt/duration]])
(defmethod ig/assert-key ::redis
[_ params]
@@ -331,7 +331,7 @@
(p/rejected cause))))
(eval-script [sha]
(let [tpoint (ct/tpoint)]
(let [tpoint (dt/tpoint)]
(->> (.evalsha ^RedisScriptingAsyncCommands cmd
^String sha
^ScriptOutputType ScriptOutputType/MULTI
@@ -346,7 +346,7 @@
:name (name sname)
:sha sha
:params (str/join "," (::rscript/vals script))
:elapsed (ct/format-duration elapsed))
:elapsed (dt/format-duration elapsed))
result)))
(p/merr on-error))))

View File

@@ -12,7 +12,6 @@
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.spec :as us]
[app.common.time :as ct]
[app.config :as cf]
[app.db :as db]
[app.http :as-alias http]
@@ -32,6 +31,7 @@
[app.storage :as-alias sto]
[app.util.inet :as inet]
[app.util.services :as sv]
[app.util.time :as dt]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[integrant.core :as ig]
@@ -103,7 +103,7 @@
data (-> params
(assoc ::handler-name handler-name)
(assoc ::ip-addr ip-addr)
(assoc ::request-at (ct/now))
(assoc ::request-at (dt/now))
(assoc ::external-session-id session-id)
(assoc ::external-event-origin event-origin)
(assoc ::session/id (::session/id request))
@@ -130,7 +130,7 @@
[{:keys [::mtx/metrics ::metrics-id]} f mdata]
(let [labels (into-array String [(::sv/name mdata)])]
(fn [cfg params]
(let [tp (ct/tpoint)]
(let [tp (dt/tpoint)]
(try
(f cfg params)
(finally
@@ -239,6 +239,7 @@
'app.rpc.commands.files
'app.rpc.commands.files-create
'app.rpc.commands.files-share
'app.rpc.commands.files-temp
'app.rpc.commands.files-update
'app.rpc.commands.files-snapshot
'app.rpc.commands.files-thumbnails

View File

@@ -11,11 +11,11 @@
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.metrics :as mtx]
[app.rpc :as-alias rpc]
[app.util.cache :as cache]
[app.util.services :as-alias sv]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[clojure.edn :as edn]
[clojure.set :as set]
@@ -154,7 +154,7 @@
:id limit-id
:label limit-label
:queue queue
:elapsed (some-> elapsed ct/format-duration)
:elapsed (some-> elapsed dt/format-duration)
:params @limit-params)))
(def ^:private idseq (AtomicLong. 0))
@@ -171,7 +171,7 @@
mlabels (into-array String [(id->str limit-id)])
limit-id (id->str limit-id limit-key)
limiter (cache/get cache limit-id (partial create-limiter config))
tpoint (ct/tpoint)
tpoint (dt/tpoint)
req-id (.incrementAndGet ^AtomicLong idseq)]
(try
(let [stats (pbh/get-stats limiter)]

View File

@@ -7,7 +7,6 @@
(ns app.rpc.commands.access-token
(:require
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.db :as db]
[app.main :as-alias main]
@@ -16,7 +15,8 @@
[app.rpc.quotes :as quotes]
[app.setup :as-alias setup]
[app.tokens :as tokens]
[app.util.services :as sv]))
[app.util.services :as sv]
[app.util.time :as dt]))
(defn- decode-row
[row]
@@ -24,13 +24,13 @@
(defn create-access-token
[{:keys [::db/conn ::setup/props]} profile-id name expiration]
(let [created-at (ct/now)
(let [created-at (dt/now)
token-id (uuid/next)
token (tokens/generate props {:iss "access-token"
:tid token-id
:iat created-at})
expires-at (some-> expiration ct/in-future)
expires-at (some-> expiration dt/in-future)
token (db/insert! conn :access-token
{:id token-id
:name name
@@ -49,7 +49,7 @@
(def ^:private schema:create-access-token
[:map {:title "create-access-token"}
[:name [:string {:max 250 :min 1}]]
[:expiration {:optional true} ::ct/duration]])
[:expiration {:optional true} ::dt/duration]])
(sv/defmethod ::create-access-token
{::doc/added "1.18"

View File

@@ -10,7 +10,6 @@
[app.common.data :as d]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
@@ -21,7 +20,8 @@
[app.rpc.doc :as-alias doc]
[app.rpc.helpers :as rph]
[app.util.inet :as inet]
[app.util.services :as sv]))
[app.util.services :as sv]
[app.util.time :as dt]))
(def ^:private event-columns
[:id
@@ -49,7 +49,7 @@
(defn- adjust-timestamp
[{:keys [timestamp created-at] :as event}]
(let [margin (inst-ms (ct/diff timestamp created-at))]
(let [margin (inst-ms (dt/diff timestamp created-at))]
(if (or (neg? margin)
(> margin 3600000))
;; If event is in future or lags more than 1 hour, we reasign
@@ -63,7 +63,7 @@
[{:keys [::db/pool]} {:keys [::rpc/profile-id events] :as params}]
(let [request (-> params meta ::http/request)
ip-addr (inet/parse-request request)
tnow (ct/now)
tnow (dt/now)
xform (comp
(map (fn [event]
(-> event

View File

@@ -12,7 +12,6 @@
[app.common.features :as cfeat]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
@@ -31,6 +30,7 @@
[app.setup.welcome-file :refer [create-welcome-file]]
[app.tokens :as tokens]
[app.util.services :as sv]
[app.util.time :as dt]
[app.worker :as wrk]
[cuerdas.core :as str]))
@@ -42,7 +42,7 @@
(defn- elapsed-verify-threshold?
[profile]
(let [elapsed (ct/diff (:modified-at profile) (ct/now))
(let [elapsed (dt/diff (:modified-at profile) (dt/now))
verify-threshold (cf/get :email-verify-threshold)]
(pos? (compare elapsed verify-threshold))))
@@ -85,7 +85,7 @@
(ex/raise :type :validation
:code :wrong-credentials))
(when-let [deleted-at (:deleted-at profile)]
(when (ct/is-after? (ct/now) deleted-at)
(when (dt/is-after? (dt/now) deleted-at)
(ex/raise :type :validation
:code :wrong-credentials)))
@@ -244,7 +244,7 @@
:backend "penpot"
:iss :prepared-register
:profile-id (:id profile)
:exp (ct/in-future {:days 7})
:exp (dt/in-future {:days 7})
:props {:newsletter-updates (or accept-newsletter-updates false)}}
params (d/without-nils params)
@@ -344,7 +344,7 @@
[{:keys [::db/conn] :as cfg} profile]
(let [vtoken (tokens/generate (::setup/props cfg)
{:iss :verify-email
:exp (ct/in-future "72h")
:exp (dt/in-future "72h")
:profile-id (:id profile)
:email (:email profile)})
;; NOTE: this token is mainly used for possible complains
@@ -352,7 +352,7 @@
ptoken (tokens/generate (::setup/props cfg)
{:iss :profile-identity
:profile-id (:id profile)
:exp (ct/in-future {:days 30})})]
:exp (dt/in-future {:days 30})})]
(eml/send! {::eml/conn conn
::eml/factory eml/register
:public-uri (cf/get :public-uri)
@@ -466,7 +466,7 @@
(when (= action "resend-email-verification")
(db/update! conn :profile
{:modified-at (ct/now)}
{:modified-at (dt/now)}
{:id (:id profile)})
(send-email-verification! cfg profile))
@@ -495,7 +495,7 @@
(letfn [(create-recovery-token [{:keys [id] :as profile}]
(let [token (tokens/generate (::setup/props cfg)
{:iss :password-recovery
:exp (ct/in-future "15m")
:exp (dt/in-future "15m")
:profile-id id})]
(assoc profile :token token)))
@@ -503,7 +503,7 @@
(let [ptoken (tokens/generate (::setup/props cfg)
{:iss :profile-identity
:profile-id (:id profile)
:exp (ct/in-future {:days 30})})]
:exp (dt/in-future {:days 30})})]
(eml/send! {::eml/conn conn
::eml/factory eml/password-recovery
:public-uri (cf/get :public-uri)
@@ -544,7 +544,7 @@
:else
(do
(db/update! conn :profile
{:modified-at (ct/now)}
{:modified-at (dt/now)}
{:id (:id profile)})
(->> profile
(create-recovery-token)

View File

@@ -13,7 +13,6 @@
[app.common.features :as cfeat]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.config :as cf]
[app.db :as db]
[app.http.sse :as sse]
@@ -27,6 +26,7 @@
[app.rpc.doc :as-alias doc]
[app.tasks.file-gc]
[app.util.services :as sv]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[promesa.exec :as px]
[yetti.response :as yres]))
@@ -114,7 +114,7 @@
3 (px/invoke! executor (partial bf.v3/import-files! cfg)))]
(db/update! pool :project
{:modified-at (ct/now)}
{:modified-at (dt/now)}
{:id project-id}
{::db/return-keys false})
@@ -125,35 +125,21 @@
[:name [:or [:string {:max 250}]
[:map-of ::sm/uuid [:string {:max 250}]]]]
[:project-id ::sm/uuid]
[:file-id {:optional true} ::sm/uuid]
[:version {:optional true} ::sm/int]
[:file ::media/upload]])
(sv/defmethod ::import-binfile
"Import a penpot file in a binary format. If `file-id` is provided,
an in-place import will be performed instead of creating a new file.
The in-place imports are only supported for binfile-v3 and when a
.penpot file only contains one penpot file.
"
"Import a penpot file in a binary format."
{::doc/added "1.15"
::doc/changes ["1.20" "Add file-id param for in-place import"
"1.20" "Set default version to 3"]
::webhooks/event? true
::sse/stream? true
::sm/params schema:import-binfile}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id project-id version file-id file] :as params}]
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id project-id version file] :as params}]
(projects/check-edition-permissions! pool profile-id project-id)
(let [version (or version 3)
(let [version (or version 1)
params (-> params
(assoc :profile-id profile-id)
(assoc :version version))
cfg (cond-> cfg
(uuid? file-id)
(assoc ::bfc/file-id file-id))
manifest (case (int version)
1 nil
3 (bf.v3/get-manifest (:path file)))]
@@ -161,6 +147,5 @@
(with-meta
(sse/response (partial import-binfile cfg params))
{::audit/props {:file nil
:file-id file-id
:generated-by (:generated-by manifest)
:referer (:referer manifest)}})))

View File

@@ -11,7 +11,6 @@
[app.common.exceptions :as ex]
[app.common.geom.point :as gpt]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uri :as uri]
[app.common.uuid :as uuid]
[app.config :as cf]
@@ -30,6 +29,7 @@
[app.rpc.retry :as rtry]
[app.util.pointer-map :as pmap]
[app.util.services :as sv]
[app.util.time :as dt]
[clojure.set :as set]
[cuerdas.core :as str]))
@@ -184,8 +184,8 @@
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)]
(let [file (->> file
(feat.fdata/resolve-file-data cfg)
(feat.fdata/decode-file-data cfg))
(files/decode-row)
(feat.fdata/resolve-file-data cfg))
data (get file :data)]
(-> file
(assoc :page-name (dm/get-in data [:pages-index page-id :name]))
@@ -222,7 +222,7 @@
(defn upsert-comment-thread-status!
([conn profile-id thread-id]
(upsert-comment-thread-status! conn profile-id thread-id (ct/in-future "1s")))
(upsert-comment-thread-status! conn profile-id thread-id (dt/in-future "1s")))
([conn profile-id thread-id mod-at]
(db/exec-one! conn [sql:upsert-comment-thread-status thread-id profile-id mod-at mod-at])))

View File

@@ -8,7 +8,6 @@
"A demo specific mutations."
(:require
[app.common.exceptions :as ex]
[app.common.time :as ct]
[app.config :as cf]
[app.db :as db]
[app.loggers.audit :as audit]
@@ -17,6 +16,7 @@
[app.rpc.commands.profile :as profile]
[app.rpc.doc :as-alias doc]
[app.util.services :as sv]
[app.util.time :as dt]
[buddy.core.codecs :as bc]
[buddy.core.nonce :as bn]))
@@ -45,13 +45,15 @@
params {:email email
:fullname fullname
:is-active true
:deleted-at (ct/in-future (cf/get-deletion-delay))
:deleted-at (dt/in-future (cf/get-deletion-delay))
:password (profile/derive-password cfg password)
:props {}}
profile (db/tx-run! cfg (fn [{:keys [::db/conn]}]
(->> (auth/create-profile! conn params)
(auth/create-profile-rels! conn))))]
(with-meta {:email email
:password password}
{::audit/profile-id (:id profile)})))
:props {}}]
(let [profile (db/tx-run! cfg (fn [{:keys [::db/conn]}]
(->> (auth/create-profile! conn params)
(auth/create-profile-rels! conn))))]
(with-meta {:email email
:password password}
{::audit/profile-id (:id profile)}))))

View File

@@ -16,7 +16,6 @@
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.schema.desc-js-like :as-alias smdj]
[app.common.time :as ct]
[app.common.types.components-list :as ctkl]
[app.common.types.file :as ctf]
[app.common.uri :as uri]
@@ -24,6 +23,7 @@
[app.db :as db]
[app.db.sql :as-alias sql]
[app.features.fdata :as feat.fdata]
[app.features.file-migrations :as feat.fmigr]
[app.features.logical-deletion :as ldel]
[app.loggers.audit :as-alias audit]
[app.loggers.webhooks :as-alias webhooks]
@@ -37,8 +37,10 @@
[app.util.blob :as blob]
[app.util.pointer-map :as pmap]
[app.util.services :as sv]
[app.util.time :as dt]
[app.worker :as wrk]
[cuerdas.core :as str]))
[cuerdas.core :as str]
[promesa.exec :as px]))
;; --- FEATURES
@@ -50,13 +52,15 @@
;; --- HELPERS
(def long-cache-duration
(ct/duration {:days 7}))
(dt/duration {:days 7}))
(defn decode-row
[{:keys [features] :as row}]
[{:keys [data changes features] :as row}]
(when row
(cond-> row
(db/pgarray? features) (assoc :features (db/decode-pgarray features #{})))))
features (assoc :features (db/decode-pgarray features #{}))
changes (assoc :changes (blob/decode changes))
data (assoc :data (blob/decode data)))))
(defn check-version!
[file]
@@ -183,10 +187,10 @@
[:name [:string {:max 250}]]
[:revn [::sm/int {:min 0}]]
[:vern [::sm/int {:min 0}]]
[:modified-at ::ct/inst]
[:modified-at ::dt/instant]
[:is-shared ::sm/boolean]
[:project-id ::sm/uuid]
[:created-at ::ct/inst]
[:created-at ::dt/instant]
[:data {:optional true} ::sm/any]])
(def schema:permissions-mixin
@@ -205,9 +209,90 @@
[:id ::sm/uuid]
[:project-id {:optional true} ::sm/uuid]])
(defn- migrate-file
[{:keys [::db/conn] :as cfg} {:keys [id] :as file} {:keys [read-only?]}]
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)
pmap/*tracked* (pmap/create-tracked)]
(let [libs (delay (bfc/get-resolved-file-libraries cfg file))
;; For avoid unnecesary overhead of creating multiple pointers and
;; handly internally with objects map in their worst case (when
;; probably all shapes and all pointers will be readed in any
;; case), we just realize/resolve them before applying the
;; migration to the file
file (-> file
(update :data feat.fdata/process-pointers deref)
(update :data feat.fdata/process-objects (partial into {}))
(fmg/migrate-file libs))]
(if (or read-only? (db/read-only? conn))
file
(let [;; When file is migrated, we break the rule of no perform
;; mutations on get operations and update the file with all
;; migrations applied
file (if (contains? (:features file) "fdata/objects-map")
(feat.fdata/enable-objects-map file)
file)
file (if (contains? (:features file) "fdata/pointer-map")
(feat.fdata/enable-pointer-map file)
file)]
(db/update! conn :file
{:data (blob/encode (:data file))
:version (:version file)
:features (db/create-array conn "text" (:features file))}
{:id id}
{::db/return-keys false})
(when (contains? (:features file) "fdata/pointer-map")
(feat.fdata/persist-pointers! cfg id))
(feat.fmigr/upsert-migrations! conn file)
(feat.fmigr/resolve-applied-migrations cfg file))))))
(defn get-file
[{:keys [::db/conn ::wrk/executor] :as cfg} id
& {:keys [project-id
migrate?
include-deleted?
lock-for-update?
preload-pointers?]
:or {include-deleted? false
lock-for-update? false
migrate? true
preload-pointers? false}
:as options}]
(assert (db/connection? conn) "expected cfg with valid connection")
(let [params (merge {:id id}
(when (some? project-id)
{:project-id project-id}))
file (->> (db/get conn :file params
{::db/check-deleted (not include-deleted?)
::db/remove-deleted (not include-deleted?)
::sql/for-update lock-for-update?})
(feat.fmigr/resolve-applied-migrations cfg)
(feat.fdata/resolve-file-data cfg))
;; NOTE: we perform the file decoding in a separate thread
;; because it has heavy and synchronous operations for
;; decoding file body that are not very friendly with virtual
;; threads.
file (px/invoke! executor #(decode-row file))
file (if (and migrate? (fmg/need-migration? file))
(migrate-file cfg file options)
file)]
(if preload-pointers?
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
(update file :data feat.fdata/process-pointers deref))
file)))
(defn get-minimal-file
[cfg id & {:as opts}]
(let [opts (assoc opts ::sql/columns [:id :modified-at :deleted-at :revn :vern])]
(let [opts (assoc opts ::sql/columns [:id :modified-at :deleted-at :revn :vern :data-ref-id :data-backend])]
(db/get cfg :file {:id id} opts)))
(defn- get-minimal-file-with-perms
@@ -219,7 +304,7 @@
(defn get-file-etag
[{:keys [::rpc/profile-id]} {:keys [modified-at revn vern permissions]}]
(str profile-id "/" revn "/" vern "/" (hash fmg/available-migrations) "/"
(ct/format-inst modified-at :iso)
(dt/format-instant modified-at :iso)
"/"
(uri/map->query-string permissions)))
@@ -247,9 +332,9 @@
:project-id project-id
:file-id id)
file (-> (bfc/get-file cfg id
:project-id project-id)
file (-> (get-file cfg id :project-id project-id)
(assoc :permissions perms)
(assoc :team-id (:id team))
(check-version!))]
(-> (cfeat/get-team-enabled-features cf/flags team)
@@ -261,7 +346,8 @@
;; pointers on backend and return a complete file.
(if (and (contains? (:features file) "fdata/pointer-map")
(not (contains? (:features params) "fdata/pointer-map")))
(feat.fdata/realize-pointers cfg file)
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
(update file :data feat.fdata/process-pointers deref))
file))))
;; --- COMMAND QUERY: get-file-fragment (by id)
@@ -270,8 +356,8 @@
[:map {:title "FileFragment"}
[:id ::sm/uuid]
[:file-id ::sm/uuid]
[:created-at ::ct/inst]
[:data any?]])
[:created-at ::dt/instant]
[:content any?]])
(def schema:get-file-fragment
[:map {:title "get-file-fragment"}
@@ -281,8 +367,10 @@
(defn- get-file-fragment
[cfg file-id fragment-id]
(some-> (db/get cfg :file-data {:file-id file-id :id fragment-id :type "fragment"})
(update :data blob/decode)))
(let [resolve-file-data (partial feat.fdata/resolve-file-data cfg)]
(some-> (db/get cfg :file-data-fragment {:file-id file-id :id fragment-id})
(resolve-file-data)
(update :data blob/decode))))
(sv/defmethod ::get-file-fragment
"Retrieve a file fragment by its ID. Only authenticated users."
@@ -407,7 +495,7 @@
(let [perms (get-permissions conn profile-id file-id share-id)
file (bfc/get-file cfg file-id :read-only? true)
file (get-file cfg file-id :read-only? true)
proj (db/get conn :project {:id (:project-id file)})
@@ -633,9 +721,9 @@
:project-id project-id
:file-id id)
file (bfc/get-file cfg id
:project-id project-id
:read-only? true)]
file (get-file cfg id
:project-id project-id
:read-only? true)]
(-> (cfeat/get-team-enabled-features cf/flags team)
(cfeat/check-client-features! (:features params))
@@ -682,7 +770,7 @@
[conn {:keys [id name]}]
(db/update! conn :file
{:name name
:modified-at (ct/now)}
:modified-at (dt/now)}
{:id id}
{::db/return-keys true}))
@@ -695,8 +783,8 @@
[:id ::sm/uuid]
[:project-id ::sm/uuid]
[:name [:string {:max 250}]]
[:created-at ::ct/inst]
[:modified-at ::ct/inst]]
[:created-at ::dt/instant]
[:modified-at ::dt/instant]]
::sm/params
[:map {:title "RenameFileParams"}
@@ -707,8 +795,8 @@
[:map {:title "SimplifiedFile"}
[:id ::sm/uuid]
[:name [:string {:max 250}]]
[:created-at ::ct/inst]
[:modified-at ::ct/inst]]
[:created-at ::dt/instant]
[:modified-at ::dt/instant]]
::db/transaction true}
[{:keys [::db/conn] :as cfg} {:keys [::rpc/profile-id id] :as params}]
@@ -722,7 +810,7 @@
;; --- MUTATION COMMAND: set-file-shared
(def ^:private sql:get-referenced-files
(def sql:get-referenced-files
"SELECT f.id
FROM file_library_rel AS flr
INNER JOIN file AS f ON (f.id = flr.file_id)
@@ -733,51 +821,56 @@
(defn- absorb-library-by-file!
[cfg ldata file-id]
(assert (db/connection-map? cfg)
"expected cfg with valid connection")
(dm/assert!
"expected cfg with valid connection"
(db/connection-map? cfg))
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)
pmap/*tracked* (pmap/create-tracked)]
(let [file (-> (bfc/get-file cfg file-id
:include-deleted? true
:lock-for-update? true)
(let [file (-> (get-file cfg file-id
:include-deleted? true
:lock-for-update? true)
(update :data ctf/absorb-assets ldata))]
(l/trc :hint "library absorbed"
:library-id (str (:id ldata))
:file-id (str file-id))
(bfc/update-file! cfg {:id file-id
:migrations (:migrations file)
:revn (inc (:revn file))
:data (:data file)
:modified-at (ct/now)
:has-media-trimmed false}))))
(db/update! cfg :file
{:revn (inc (:revn file))
:data (blob/encode (:data file))
:modified-at (dt/now)
:has-media-trimmed false}
{:id file-id})
(feat.fdata/persist-pointers! cfg file-id))))
(defn- absorb-library
"Find all files using a shared library, and absorb all library assets
into the file local libraries"
[cfg {:keys [id data] :as library}]
[cfg {:keys [id] :as library}]
(assert (db/connection-map? cfg)
"expected cfg with valid connection")
(dm/assert!
"expected cfg with valid connection"
(db/connection-map? cfg))
(let [ids (->> (db/exec! cfg [sql:get-referenced-files id])
(sequence bfc/xf-map-id))]
(let [ldata (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
(-> library :data (feat.fdata/process-pointers deref)))
ids (->> (db/exec! cfg [sql:get-referenced-files id])
(map :id))]
(l/trc :hint "absorbing library"
:library-id (str id)
:files (str/join "," (map str ids)))
(run! (partial absorb-library-by-file! cfg data) ids)
(run! (partial absorb-library-by-file! cfg ldata) ids)
library))
(defn absorb-library!
[{:keys [::db/conn] :as cfg} id]
(let [file (-> (bfc/get-file cfg id
:realize? true
:lock-for-update? true
:include-deleted? true)
(let [file (-> (get-file cfg id
:lock-for-update? true
:include-deleted? true)
(check-version!))
proj (db/get* conn :project {:id (:project-id file)}
@@ -807,7 +900,7 @@
(db/delete! conn :file-library-rel {:library-file-id id})
(db/update! conn :file
{:is-shared false
:modified-at (ct/now)}
:modified-at (dt/now)}
{:id id})
(select-keys file [:id :name :is-shared]))
@@ -816,7 +909,7 @@
(let [file (assoc file :is-shared true)]
(db/update! conn :file
{:is-shared true
:modified-at (ct/now)}
:modified-at (dt/now)}
{:id id})
file)
@@ -852,7 +945,7 @@
[conn team file-id]
(let [delay (ldel/get-deletion-delay team)
file (db/update! conn :file
{:deleted-at (ct/in-future delay)}
{:deleted-at (dt/in-future delay)}
{:id file-id}
{::db/return-keys [:id :name :is-shared :deleted-at
:project-id :created-at :modified-at]})]
@@ -950,7 +1043,7 @@
(defn update-sync
[conn {:keys [file-id library-id] :as params}]
(db/update! conn :file-library-rel
{:synced-at (ct/now)}
{:synced-at (dt/now)}
{:file-id file-id
:library-file-id library-id}
{::db/return-keys true}))
@@ -975,14 +1068,14 @@
[conn {:keys [file-id date] :as params}]
(db/update! conn :file
{:ignore-sync-until date
:modified-at (ct/now)}
:modified-at (dt/now)}
{:id file-id}
{::db/return-keys true}))
(def ^:private schema:ignore-file-library-sync-status
[:map {:title "ignore-file-library-sync-status"}
[:file-id ::sm/uuid]
[:date ::ct/inst]])
[:date ::dt/instant]])
;; TODO: improve naming
(sv/defmethod ::ignore-file-library-sync-status

View File

@@ -8,9 +8,7 @@
(:require
[app.binfile.common :as bfc]
[app.common.features :as cfeat]
[app.common.files.migrations :as fmg]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.types.file :as ctf]
[app.config :as cf]
[app.db :as db]
@@ -24,13 +22,13 @@
[app.rpc.quotes :as quotes]
[app.util.pointer-map :as pmap]
[app.util.services :as sv]
[app.util.time :as dt]
[clojure.set :as set]))
(defn create-file-role!
[conn {:keys [file-id profile-id role]}]
(let [params {:file-id file-id
:profile-id profile-id}]
(->> (perms/assign-role-flags params role)
(db/insert! conn :file-profile-rel))))
@@ -52,23 +50,22 @@
:revn revn
:is-shared is-shared
:features features
:migrations fmg/available-migrations
:ignore-sync-until ignore-sync-until
:created-at modified-at
:modified-at modified-at
:deleted-at deleted-at}
{:create-page create-page
:page-id page-id})]
(bfc/insert-file! cfg file)
:page-id page-id})
file (-> (bfc/insert-file! cfg file)
(bfc/decode-row))]
(->> (assoc params :file-id (:id file) :role :owner)
(create-file-role! conn))
(db/update! conn :project
{:modified-at (ct/now)}
{:modified-at (dt/now)}
{:id project-id})
(bfc/get-file cfg (:id file)))))
file)))
(def ^:private schema:create-file
[:map {:title "create-file"}

View File

@@ -8,17 +8,52 @@
(:require
[app.binfile.common :as bfc]
[app.common.exceptions :as ex]
[app.common.files.migrations :as fmg]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.db.sql :as-alias sql]
[app.features.file-snapshots :as fsnap]
[app.features.fdata :as feat.fdata]
[app.features.file-migrations :refer [reset-migrations!]]
[app.main :as-alias main]
[app.msgbus :as mbus]
[app.rpc :as-alias rpc]
[app.rpc.commands.files :as files]
[app.rpc.doc :as-alias doc]
[app.rpc.quotes :as quotes]
[app.util.services :as sv]))
[app.storage :as sto]
[app.util.blob :as blob]
[app.util.services :as sv]
[app.util.time :as dt]
[cuerdas.core :as str]))
(defn decode-row
  "Post-process a `file` row fetched from the database: decodes the
  `migrations` pgarray column into a Clojure collection when present.
  Passes a nil row through untouched."
  [row]
  (when row
    (let [migrations (:migrations row)]
      (if (some? migrations)
        (update row :migrations db/decode-pgarray)
        row))))
;; Lists snapshot rows (file_change entries that still carry data and
;; are not logically deleted) for a single file. System-created and
;; user-created snapshots are selected separately, each group capped
;; at 1000 rows, then merged and ordered newest first.
(def sql:get-file-snapshots
  "WITH changes AS (
     SELECT id, label, revn, created_at, created_by, profile_id
       FROM file_change
      WHERE file_id = ?
        AND data IS NOT NULL
        AND (deleted_at IS NULL OR deleted_at > now())
   ), versions AS (
     (SELECT * FROM changes WHERE created_by = 'system' LIMIT 1000)
     UNION ALL
     (SELECT * FROM changes WHERE created_by != 'system' LIMIT 1000)
   )
   SELECT * FROM versions
    ORDER BY created_at DESC;")
(defn get-file-snapshots
  "Return the visible snapshot rows for the file identified by
  `file-id`, most recent first."
  [conn file-id]
  (->> [sql:get-file-snapshots file-id]
       (db/exec! conn)))
(def ^:private schema:get-file-snapshots
[:map {:title "get-file-snapshots"}
@@ -30,7 +65,73 @@
[cfg {:keys [::rpc/profile-id file-id] :as params}]
(db/run! cfg (fn [{:keys [::db/conn]}]
(files/check-read-permissions! conn profile-id file-id)
(fsnap/get-visible-snapshots conn file-id))))
(get-file-snapshots conn file-id))))
(defn- generate-snapshot-label
  "Build a default snapshot label from the current instant; the
  timestamp is sanitized so it contains only dashes as separators,
  e.g. \"snapshot-2024-01-31-12-00-00-000\"."
  []
  (let [formatted (dt/format-instant (dt/now))
        sanitized (-> formatted
                      (str/replace #"[T:\.]" "-")
                      (str/rtrim "Z"))]
    (str "snapshot-" sanitized)))
(defn create-file-snapshot!
  "Insert a snapshot row (a `file_change` record carrying the full
  encoded file data) for the given `file`.

  Options:
  - `:label`       snapshot label; autogenerated when not provided
  - `:created-by`  one of #{:system :user :admin}; defaults to :system
  - `:deleted-at`  expiration instant; `:default` (the default)
                   derives it from the configured deletion delay,
                   any non-instant value disables expiration
  - `:profile-id`  profile originating the snapshot (may be nil)

  Returns a map with the `:id` and `:label` of the created snapshot."
  [cfg file & {:keys [label created-by deleted-at profile-id]
               :or {deleted-at :default
                    created-by :system}}]
  (assert (#{:system :user :admin} created-by)
          "expected valid keyword for created-by")
  (let [;; Persisted as a plain string in the database
        created-by
        (name created-by)

        ;; Resolve expiration: :default -> now + configured delay; an
        ;; explicit instant is used as-is; anything else -> no expiry
        deleted-at
        (cond
          (= deleted-at :default)
          (dt/plus (dt/now) (cf/get-deletion-delay))

          (dt/instant? deleted-at)
          deleted-at

          :else
          nil)

        label
        (or label (generate-snapshot-label))

        snapshot-id
        (uuid/next)

        data
        (blob/encode (:data file))

        ;; features/migrations are persisted as SQL arrays
        features
        (into-array (:features file))

        migrations
        (into-array (:migrations file))]

    (l/dbg :hint "creating file snapshot"
           :file-id (str (:id file))
           :id (str snapshot-id)
           :label label)

    (db/insert! cfg :file-change
                {:id snapshot-id
                 :revn (:revn file)
                 :data data
                 :version (:version file)
                 :features features
                 :migrations migrations
                 :profile-id profile-id
                 :file-id (:id file)
                 :label label
                 :deleted-at deleted-at
                 :created-by created-by}
                {::db/return-keys false})

    {:id snapshot-id :label label}))
(def ^:private schema:create-file-snapshot
[:map
@@ -43,7 +144,7 @@
::db/transaction true}
[{:keys [::db/conn] :as cfg} {:keys [::rpc/profile-id file-id label]}]
(files/check-edition-permissions! conn profile-id file-id)
(let [file (bfc/get-file cfg file-id :realize? true)
(let [file (bfc/get-file cfg file-id)
project (db/get-by-id cfg :project (:project-id file))]
(-> cfg
@@ -54,10 +155,96 @@
(quotes/check! {::quotes/id ::quotes/snapshots-per-file}
{::quotes/id ::quotes/snapshots-per-team}))
(fsnap/create! cfg file
{:label label
:profile-id profile-id
:created-by "user"})))
(create-file-snapshot! cfg file
{:label label
:profile-id profile-id
:created-by :user})))
(defn restore-file-snapshot!
[{:keys [::db/conn ::mbus/msgbus] :as cfg} file-id snapshot-id]
(let [storage (sto/resolve cfg {::db/reuse-conn true})
file (files/get-minimal-file conn file-id {::db/for-update true})
vern (rand-int Integer/MAX_VALUE)
snapshot (some->> (db/get* conn :file-change
{:file-id file-id
:id snapshot-id}
{::db/for-share true})
(feat.fdata/resolve-file-data cfg)
(decode-row))
;; If the snapshot has tracked applied migrations, we reuse them;
;; if not, we take the safest set of migrations as a starting
;; point. This is because, at the time of implementing
;; snapshots, migrations were not taken into account, so we
;; need to make this backward compatible in some way.
file (assoc file :migrations
(or (:migrations snapshot)
(fmg/generate-migrations-from-version 67)))]
(when-not snapshot
(ex/raise :type :not-found
:code :snapshot-not-found
:hint "unable to find snapshot with the provided label"
:snapshot-id snapshot-id
:file-id file-id))
(when-not (:data snapshot)
(ex/raise :type :validation
:code :snapshot-without-data
:hint "snapshot has no data"
:label (:label snapshot)
:file-id file-id))
(l/dbg :hint "restoring snapshot"
:file-id (str file-id)
:label (:label snapshot)
:snapshot-id (str (:id snapshot)))
;; If the file was already offloaded, on restoring the snapshot we
;; are going to replace the file data, so we need to touch the old
;; referenced storage object and avoid possible leaks
(when (feat.fdata/offloaded? file)
(sto/touch-object! storage (:data-ref-id file)))
;; In the same way, on resetting the file data, we need to restore
;; the applied migrations on the moment of taking the snapshot
(reset-migrations! conn file)
(db/update! conn :file
{:data (:data snapshot)
:revn (inc (:revn file))
:vern vern
:version (:version snapshot)
:data-backend nil
:data-ref-id nil
:has-media-trimmed false
:features (:features snapshot)}
{:id file-id})
;; clean object thumbnails
(let [sql (str "update file_tagged_object_thumbnail "
" set deleted_at = now() "
" where file_id=? returning media_id")
res (db/exec! conn [sql file-id])]
(doseq [media-id (into #{} (keep :media-id) res)]
(sto/touch-object! storage media-id)))
;; clean file thumbnails
(let [sql (str "update file_thumbnail "
" set deleted_at = now() "
" where file_id=? returning media_id")
res (db/exec! conn [sql file-id])]
(doseq [media-id (into #{} (keep :media-id) res)]
(sto/touch-object! storage media-id)))
;; Send to the clients a notification to reload the file
(mbus/pub! msgbus
:topic (:id file)
:message {:type :file-restore
:file-id (:id file)
:vern vern})
{:id (:id snapshot)
:label (:label snapshot)}))
(def ^:private schema:restore-file-snapshot
[:map {:title "restore-file-snapshot"}
@@ -66,151 +253,75 @@
(sv/defmethod ::restore-file-snapshot
{::doc/added "1.20"
::sm/params schema:restore-file-snapshot
::db/transaction true}
[{:keys [::db/conn ::mbus/msgbus] :as cfg} {:keys [::rpc/profile-id file-id id] :as params}]
(files/check-edition-permissions! conn profile-id file-id)
(let [file (bfc/get-file cfg file-id)]
(fsnap/create! cfg file
{:profile-id profile-id
:created-by "system"})
(let [vern (fsnap/restore! cfg file-id id)]
;; Send to the clients a notification to reload the file
(mbus/pub! msgbus
:topic (:id file)
:message {:type :file-restore
:file-id (:id file)
:vern vern})
nil)))
::sm/params schema:restore-file-snapshot}
[cfg {:keys [::rpc/profile-id file-id id] :as params}]
(db/tx-run! cfg
(fn [{:keys [::db/conn] :as cfg}]
(files/check-edition-permissions! conn profile-id file-id)
(let [file (bfc/get-file cfg file-id)]
(create-file-snapshot! cfg file
{:profile-id profile-id
:created-by :system})
(restore-file-snapshot! cfg file-id id)))))
;; Params schema for the ::update-file-snapshot RPC method
(def ^:private schema:update-file-snapshot
  [:map {:title "update-file-snapshot"}
   [:id ::sm/uuid]
   [:label ::sm/text]])
(defn- update-file-snapshot!
  "Relabel a snapshot and promote it to a user-created one, clearing
  any scheduled deletion. Returns the updated row without the heavy
  columns (data/features/migrations)."
  [conn snapshot-id label]
  (-> (db/update! conn :file-change
                  {:label label
                   :created-by "user"
                   :deleted-at nil}
                  {:id snapshot-id}
                  {::db/return-keys true})
      (dissoc :data :features :migrations)))
(defn- get-snapshot
  "Get a minimal snapshot row (id, file-id, created-by, deleted-at)
  from the database, locking it for update."
  [conn id]
  (db/get conn :file-change
          {:id id}
          {::sql/columns [:id :file-id :created-by :deleted-at]
           ::db/for-update true}))
;; RPC: relabel an existing snapshot; requires edition permission on
;; the snapshot's file.
(sv/defmethod ::update-file-snapshot
  {::doc/added "1.20"
   ::sm/params schema:update-file-snapshot
   ::db/transaction true}
  [{:keys [::db/conn]} {:keys [::rpc/profile-id id label]}]
  (let [snapshot (fsnap/get-minimal-snapshot conn id)]
    (files/check-edition-permissions! conn profile-id (:file-id snapshot))
    (fsnap/update! conn (assoc snapshot :label label))))
::sm/params schema:update-file-snapshot}
[cfg {:keys [::rpc/profile-id id label]}]
(db/tx-run! cfg
(fn [{:keys [::db/conn]}]
(let [snapshot (get-snapshot conn id)]
(files/check-edition-permissions! conn profile-id (:file-id snapshot))
(update-file-snapshot! conn id label)))))
;; Params schema for the ::delete-file-snapshot RPC method
(def ^:private schema:remove-file-snapshot
  [:map {:title "remove-file-snapshot"}
   [:id ::sm/uuid]])
;; RPC: delete a snapshot. Only user-created snapshots can be removed;
;; system snapshots are managed automatically and are protected here.
(sv/defmethod ::delete-file-snapshot
  {::doc/added "1.20"
   ::sm/params schema:remove-file-snapshot
   ::db/transaction true}
  [{:keys [::db/conn]} {:keys [::rpc/profile-id id]}]
  (let [snapshot (fsnap/get-minimal-snapshot conn id)]
    (files/check-edition-permissions! conn profile-id (:file-id snapshot))
    ;; Reject deletion of automatically created (system) snapshots
    (when (not= (:created-by snapshot) "user")
      (ex/raise :type :validation
                :code :system-snapshots-cant-be-deleted
                :file-id (:file-id snapshot)
                :snapshot-id id
                :profile-id profile-id))
    (fsnap/delete! conn snapshot)))
;;; Lock/unlock version endpoints
;; Params schema for the ::lock-file-snapshot RPC method
(def ^:private schema:lock-file-snapshot
  [:map {:title "lock-file-snapshot"}
   [:id ::sm/uuid]])
;; MOVE to fsnap
(defn- lock-file-snapshot!
  "Mark the snapshot row as locked by the given profile. Returns nil."
  [conn snapshot-id profile-id]
  (db/update! conn :file-change
              {:locked-by profile-id}
              {:id snapshot-id}
              {::db/return-keys false})
  nil)
;; RPC: lock a user-created snapshot so it cannot be modified. Only
;; the snapshot's creator may lock it, and only when it is not
;; already locked.
(sv/defmethod ::lock-file-snapshot
  {::doc/added "1.20"
   ::sm/params schema:lock-file-snapshot
   ::db/transaction true}
  [{:keys [::db/conn]} {:keys [::rpc/profile-id id]}]
  (let [snapshot (fsnap/get-minimal-snapshot conn id)]
    (files/check-edition-permissions! conn profile-id (:file-id snapshot))

    ;; System snapshots are managed automatically and cannot be locked
    (when (not= (:created-by snapshot) "user")
      (ex/raise :type :validation
                :code :system-snapshots-cant-be-locked
                :hint "Only user-created versions can be locked"
                :snapshot-id id
                :profile-id profile-id))

    ;; Only the creator can lock their own version
    (when (not= (:profile-id snapshot) profile-id)
      (ex/raise :type :validation
                :code :only-creator-can-lock
                :hint "Only the version creator can lock it"
                :snapshot-id id
                :profile-id profile-id
                :creator-id (:profile-id snapshot)))

    ;; Check if already locked
    (when (:locked-by snapshot)
      (ex/raise :type :validation
                :code :snapshot-already-locked
                :hint "Version is already locked"
                :snapshot-id id
                :profile-id profile-id
                :locked-by (:locked-by snapshot)))

    (lock-file-snapshot! conn id profile-id)))
(def ^:private schema:unlock-file-snapshot
[:map {:title "unlock-file-snapshot"}
[:id ::sm/uuid]])
;; MOVE to fsnap
(defn- unlock-file-snapshot!
(defn- delete-file-snapshot!
[conn snapshot-id]
(db/update! conn :file-change
{:locked-by nil}
{:deleted-at (dt/now)}
{:id snapshot-id}
{::db/return-keys false})
nil)
(sv/defmethod ::unlock-file-snapshot
(sv/defmethod ::delete-file-snapshot
{::doc/added "1.20"
::sm/params schema:unlock-file-snapshot
::db/transaction true}
[{:keys [::db/conn]} {:keys [::rpc/profile-id id]}]
(let [snapshot (fsnap/get-minimal-snapshot conn id)]
(files/check-edition-permissions! conn profile-id (:file-id snapshot))
::sm/params schema:remove-file-snapshot}
[cfg {:keys [::rpc/profile-id id]}]
(db/tx-run! cfg
(fn [{:keys [::db/conn]}]
(let [snapshot (get-snapshot conn id)]
(files/check-edition-permissions! conn profile-id (:file-id snapshot))
(when (not= (:created-by snapshot) "user")
(ex/raise :type :validation
:code :system-snapshots-cant-be-unlocked
:hint "Only user-created versions can be unlocked"
:snapshot-id id
:profile-id profile-id))
(when (not= (:created-by snapshot) "user")
(ex/raise :type :validation
:code :system-snapshots-cant-be-deleted
:snapshot-id id
:profile-id profile-id))
;; Only the creator can unlock their own version
(when (not= (:profile-id snapshot) profile-id)
(ex/raise :type :validation
:code :only-creator-can-unlock
:hint "Only the version creator can unlock it"
:snapshot-id id
:profile-id profile-id
:creator-id (:profile-id snapshot)))
;; Check if not locked
(when (not (:locked-by snapshot))
(ex/raise :type :validation
:code :snapshot-not-locked
:hint "Version is not locked"
:snapshot-id id
:profile-id profile-id))
(unlock-file-snapshot! conn id)))
(delete-file-snapshot! conn id)))))

View File

@@ -0,0 +1,161 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.rpc.commands.files-temp
(:require
[app.common.exceptions :as ex]
[app.common.features :as cfeat]
[app.common.files.changes :as cpc]
[app.common.schema :as sm]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.db.sql :as sql]
[app.features.fdata :as fdata]
[app.loggers.audit :as audit]
[app.rpc :as-alias rpc]
[app.rpc.commands.files :as files]
[app.rpc.commands.files-create :as files.create]
[app.rpc.commands.files-update :as-alias files.update]
[app.rpc.commands.projects :as projects]
[app.rpc.commands.teams :as teams]
[app.rpc.doc :as-alias doc]
[app.rpc.helpers :as rph]
[app.util.blob :as blob]
[app.util.pointer-map :as pmap]
[app.util.services :as sv]
[app.util.time :as dt]
[clojure.set :as set]))
;; --- MUTATION COMMAND: create-temp-file
;; Params schema for the ::create-temp-file RPC method
(def ^:private schema:create-temp-file
  [:map {:title "create-temp-file"}
   [:name [:string {:max 250}]]
   [:project-id ::sm/uuid]
   [:id {:optional true} ::sm/uuid]
   [:is-shared ::sm/boolean]
   [:features ::cfeat/features]
   [:create-page ::sm/boolean]])
;; RPC: create a temporary file (used by import flows). The file is
;; created like a regular one but with `deleted-at` set one day in the
;; future, so it is garbage-collected unless it is later made
;; permanent via ::persist-temp-file.
(sv/defmethod ::create-temp-file
  {::doc/added "1.17"
   ::doc/module :files
   ::sm/params schema:create-temp-file
   ::db/transaction true}
  [{:keys [::db/conn] :as cfg} {:keys [::rpc/profile-id project-id] :as params}]
  (projects/check-edition-permissions! conn profile-id project-id)
  (let [team (teams/get-team conn :profile-id profile-id :project-id project-id)

        ;; When we create files, we only need to respect the team
        ;; features, because some features can be enabled globally,
        ;; while the team is still not migrated properly.
        input-features
        (:features params #{})

        ;; If the imported file doesn't declare components/v2 we must
        ;; not inherit it from the team
        team-features
        (cond-> (cfeat/get-team-enabled-features cf/flags team)
          (not (contains? input-features "components/v2"))
          (disj "components/v2"))

        ;; We also include all no-migration features declared by the
        ;; client; that allows enabling a runtime feature on the
        ;; frontend and making it permanent on the file
        features
        (-> input-features
            (set/intersection cfeat/no-migration-features)
            (set/union team-features))

        params
        (-> params
            (assoc :profile-id profile-id)
            ;; Mark the file for near-future deletion; persisting the
            ;; temp file later clears this mark
            (assoc :deleted-at (dt/in-future {:days 1}))
            (assoc :features features))]
    (files.create/create-file cfg params)))
;; --- MUTATION COMMAND: update-temp-file
;; Params schema for the ::update-temp-file RPC method
(def ^:private schema:update-temp-file
  [:map {:title "update-temp-file"}
   [:changes [:vector ::cpc/change]]
   [:revn [::sm/int {:min 0}]]
   [:session-id ::sm/uuid]
   [:id ::sm/uuid]])
;; RPC: append a batch of changes to a temporary file. The changes are
;; only stored in the file_change log (with `data` nil); they are
;; applied to the file data later, by ::persist-temp-file.
(sv/defmethod ::update-temp-file
  {::doc/added "1.17"
   ::doc/module :files
   ::sm/params schema:update-temp-file}
  [cfg {:keys [::rpc/profile-id session-id id revn changes] :as params}]
  (db/tx-run! cfg (fn [{:keys [::db/conn]}]
                    (db/insert! conn :file-change
                                {:id (uuid/next)
                                 :session-id session-id
                                 :profile-id profile-id
                                 :created-at (dt/now)
                                 :file-id id
                                 :revn revn
                                 :data nil
                                 :changes (blob/encode changes)})
                    ;; Return nil but expose file-id/revn to the audit log
                    (rph/with-meta (rph/wrap nil)
                      {::audit/replace-props {:file-id id
                                              :revn revn}}))))
;; --- MUTATION COMMAND: persist-temp-file
(defn persist-temp-file
  "Make a temporary file permanent: replay all pending changes from the
  file_change log onto the file data, apply the fdata storage features
  (objects-map / pointer-map) when enabled, clear the change log and
  remove the scheduled deletion mark.

  Raises :validation/:cant-persist-already-persisted-file when the
  file has no `deleted-at` (i.e. it is not a temporary file)."
  [{:keys [::db/conn] :as cfg} {:keys [id] :as params}]
  (let [file (files/get-file cfg id
                             :migrate? false
                             :lock-for-update? true)]
    ;; Only files still marked for deletion are temporary
    (when (nil? (:deleted-at file))
      (ex/raise :type :validation
                :code :cant-persist-already-persisted-file))

    ;; Stream the pending changes in revn order and apply them all
    (let [changes (->> (db/cursor conn
                                  (sql/select :file-change {:file-id id}
                                              {:order-by [[:revn :asc]]})
                                  {:chunk-size 10})
                       (sequence (mapcat (comp blob/decode :changes))))
          file (update file :data cpc/process-changes changes)
          file (if (contains? (:features file) "fdata/objects-map")
                 (fdata/enable-objects-map file)
                 file)
          ;; pointer-map needs a tracked context so the generated
          ;; pointers can be persisted alongside the file
          file (if (contains? (:features file) "fdata/pointer-map")
                 (binding [pmap/*tracked* (pmap/create-tracked)]
                   (let [file (fdata/enable-pointer-map file)]
                     (fdata/persist-pointers! cfg id)
                     file))
                 file)]

      ;; Delete changes from the changes history
      (db/delete! conn :file-change {:file-id id})

      ;; Clear the deletion mark and store the consolidated data
      (db/update! conn :file
                  {:deleted-at nil
                   :revn 1
                   :data (blob/encode (:data file))}
                  {:id id})
      nil)))
;; Params schema for the ::persist-temp-file RPC method
(def ^:private schema:persist-temp-file
  [:map {:title "persist-temp-file"}
   [:id ::sm/uuid]])

;; RPC: consolidate a temporary file; requires edition permissions
(sv/defmethod ::persist-temp-file
  {::doc/added "1.17"
   ::doc/module :files
   ::sm/params schema:persist-temp-file}
  [cfg {:keys [::rpc/profile-id id] :as params}]
  (db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
                    (files/check-edition-permissions! conn profile-id id)
                    (persist-temp-file cfg params))))

View File

@@ -6,7 +6,6 @@
(ns app.rpc.commands.files-thumbnails
(:require
[app.binfile.common :as bfc]
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.features :as cfeat]
@@ -14,7 +13,6 @@
[app.common.geom.shapes :as gsh]
[app.common.schema :as sm]
[app.common.thumbnails :as thc]
[app.common.time :as ct]
[app.common.types.shape-tree :as ctt]
[app.config :as cf]
[app.db :as db]
@@ -32,12 +30,13 @@
[app.storage :as sto]
[app.util.pointer-map :as pmap]
[app.util.services :as sv]
[app.util.time :as dt]
[cuerdas.core :as str]))
;; --- FEATURES
(def long-cache-duration
(ct/duration {:days 7}))
(dt/duration {:days 7}))
;; --- COMMAND QUERY: get-file-object-thumbnails
@@ -203,9 +202,9 @@
:profile-id profile-id
:file-id file-id)
file (bfc/get-file cfg file-id
:realize? true
:read-only? true)]
file (files/get-file cfg file-id
:preload-pointers? true
:read-only? true)]
(-> (cfeat/get-team-enabled-features cf/flags team)
(cfeat/check-file-features! (:features file)))
@@ -248,7 +247,7 @@
(defn- create-file-object-thumbnail!
[{:keys [::sto/storage] :as cfg} file object-id media tag]
(let [file-id (:id file)
timestamp (ct/now)
timestamp (dt/now)
media (persist-thumbnail! storage media timestamp)
[th1 th2] (db/tx-run! cfg (fn [{:keys [::db/conn]}]
(let [th1 (db/exec-one! conn [sql:get-file-object-thumbnail file-id object-id tag])
@@ -303,7 +302,7 @@
{::sql/for-update true})]
(sto/touch-object! storage media-id)
(db/update! conn :file-tagged-object-thumbnail
{:deleted-at (ct/now)}
{:deleted-at (dt/now)}
{:file-id file-id
:object-id object-id
:tag tag})))
@@ -339,8 +338,7 @@
hash (sto/calculate-hash path)
data (-> (sto/content path)
(sto/wrap-with-hash hash))
tnow (ct/now)
tnow (dt/now)
media (sto/put-object! storage
{::sto/content data
::sto/deduplicate? true

View File

@@ -15,13 +15,11 @@
[app.common.files.validate :as val]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.features.fdata :as fdata]
[app.features.fdata :as feat.fdata]
[app.features.file-migrations :as feat.fmigr]
[app.features.file-snapshots :as fsnap]
[app.features.logical-deletion :as ldel]
[app.http.errors :as errors]
[app.loggers.audit :as audit]
@@ -34,9 +32,11 @@
[app.rpc.commands.teams :as teams]
[app.rpc.doc :as-alias doc]
[app.rpc.helpers :as rph]
[app.storage :as sto]
[app.util.blob :as blob]
[app.util.pointer-map :as pmap]
[app.util.services :as sv]
[app.util.time :as dt]
[app.worker :as wrk]
[clojure.set :as set]
[promesa.exec :as px]))
@@ -123,84 +123,83 @@
[:update-file/global]]
::webhooks/event? true
::webhooks/batch-timeout (ct/duration "2m")
::webhooks/batch-timeout (dt/duration "2m")
::webhooks/batch-key (webhooks/key-fn ::rpc/profile-id :id)
::sm/params schema:update-file
::sm/result schema:update-file-result
::doc/module :files
::doc/added "1.17"
::db/transaction true}
[{:keys [::mtx/metrics ::db/conn] :as cfg}
::doc/added "1.17"}
[{:keys [::mtx/metrics] :as cfg}
{:keys [::rpc/profile-id id changes changes-with-metadata] :as params}]
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
(files/check-edition-permissions! conn profile-id id)
(db/xact-lock! conn id)
(files/check-edition-permissions! conn profile-id id)
(db/xact-lock! conn id)
(let [file (get-file conn id)
team (teams/get-team conn
:profile-id profile-id
:team-id (:team-id file))
(let [file (get-file cfg id)
team (teams/get-team conn
:profile-id profile-id
:team-id (:team-id file))
features (-> (cfeat/get-team-enabled-features cf/flags team)
(cfeat/check-client-features! (:features params))
(cfeat/check-file-features! (:features file)))
features (-> (cfeat/get-team-enabled-features cf/flags team)
(cfeat/check-client-features! (:features params))
(cfeat/check-file-features! (:features file)))
changes (if changes-with-metadata
(->> changes-with-metadata (mapcat :changes) vec)
(vec changes))
changes (if changes-with-metadata
(->> changes-with-metadata (mapcat :changes) vec)
(vec changes))
params (-> params
(assoc :profile-id profile-id)
(assoc :features (set/difference features cfeat/frontend-only-features))
(assoc :team team)
(assoc :file file)
(assoc :changes changes))
params (-> params
(assoc :profile-id profile-id)
(assoc :features (set/difference features cfeat/frontend-only-features))
(assoc :team team)
(assoc :file file)
(assoc :changes changes))
cfg (assoc cfg ::timestamp (dt/now))
cfg (assoc cfg ::timestamp (ct/now))
tpoint (ct/tpoint)]
tpoint (dt/tpoint)]
(when (not= (:vern params)
(:vern file))
(ex/raise :type :validation
:code :vern-conflict
:hint "A different version has been restored for the file."
:context {:incoming-revn (:revn params)
:stored-revn (:revn file)}))
(when (not= (:vern params)
(:vern file))
(ex/raise :type :validation
:code :vern-conflict
:hint "A different version has been restored for the file."
:context {:incoming-revn (:revn params)
:stored-revn (:revn file)}))
(when (> (:revn params)
(:revn file))
(ex/raise :type :validation
:code :revn-conflict
:hint "The incoming revision number is greater that stored version."
:context {:incoming-revn (:revn params)
:stored-revn (:revn file)}))
(when (> (:revn params)
(:revn file))
(ex/raise :type :validation
:code :revn-conflict
:hint "The incoming revision number is greater that stored version."
:context {:incoming-revn (:revn params)
:stored-revn (:revn file)}))
;; When newly computed features does not match exactly with
;; the features defined on team row, we update it
(when-let [features (-> features
(set/difference (:features team))
(set/difference cfeat/no-team-inheritable-features)
(not-empty))]
(let [features (->> features
(set/union (:features team))
(db/create-array conn "text"))]
(db/update! conn :team
{:features features}
{:id (:id team)}
{::db/return-keys false})))
;; When newly computed features does not match exactly with
;; the features defined on team row, we update it
(when-let [features (-> features
(set/difference (:features team))
(set/difference cfeat/no-team-inheritable-features)
(not-empty))]
(let [features (->> features
(set/union (:features team))
(db/create-array conn "text"))]
(db/update! conn :team
{:features features}
{:id (:id team)}
{::db/return-keys false})))
(mtx/run! metrics {:id :update-file-changes :inc (count changes)})
(mtx/run! metrics {:id :update-file-changes :inc (count changes)})
(binding [l/*context* (some-> (meta params)
(get :app.http/request)
(errors/request->context))]
(-> (update-file* cfg params)
(rph/with-defer #(let [elapsed (tpoint)]
(l/trace :hint "update-file" :time (ct/format-duration elapsed))))))))
(binding [l/*context* (some-> (meta params)
(get :app.http/request)
(errors/request->context))]
(-> (update-file* cfg params)
(rph/with-defer #(let [elapsed (tpoint)]
(l/trace :hint "update-file" :time (dt/format-duration elapsed))))))))))
(defn- update-file*
"Internal function, part of the update-file process, that encapsulates
@@ -213,44 +212,28 @@
[{:keys [::db/conn ::wrk/executor ::timestamp] :as cfg}
{:keys [profile-id file team features changes session-id skip-validate] :as params}]
(binding [pmap/*tracked* (pmap/create-tracked)
pmap/*load-fn* (partial fdata/load-pointer cfg (:id file))]
(let [;; Retrieve the file data
file (feat.fmigr/resolve-applied-migrations cfg file)
file (feat.fdata/resolve-file-data cfg file)
file (assoc file :features
(-> features
(set/difference cfeat/frontend-only-features)
(set/union (:features file))))]
(let [file (assoc file :features
(-> features
(set/difference cfeat/frontend-only-features)
(set/union (:features file))))
;; We create a new lexical scope to clearly delimit the result of
;; executing this update file operation and all its side effects
(let [file (px/invoke! executor
(fn []
;; Process the file data on a separate thread to avoid doing
;; the CPU-intensive operation on a vthread.
(binding [cfeat/*current* features
cfeat/*previous* (:features file)]
(update-file-data! cfg file
process-changes-and-validate
changes skip-validate))))]
;; We need to preserve the original revn for the response
revn
(get file :revn)
;; We create a new lexical scope to clearly delimit the result of
;; executing this update file operation and all its side effects
file
(px/invoke! executor
(fn []
;; Process the file data on a separate thread
;; to avoid doing the CPU-intensive operation
;; on a vthread.
(binding [cfeat/*current* features
cfeat/*previous* (:features file)]
(update-file-data! cfg file
process-changes-and-validate
changes skip-validate))))
deleted-at
(ct/plus timestamp (ct/duration {:hours 1}))]
(when-let [file (::snapshot file)]
(let [deleted-at (ct/plus timestamp (ldel/get-deletion-delay team))
label (str "internal/snapshot/" revn)]
(fsnap/create! cfg file
{:label label
:deleted-at deleted-at
:profile-id profile-id
:session-id session-id})))
(feat.fmigr/upsert-migrations! conn file)
(persist-file! cfg file)
;; Insert change (xlog) with deleted_at set to a future date to
;; make them automatically eligible for GC once they expire
@@ -260,28 +243,34 @@
:profile-id profile-id
:created-at timestamp
:updated-at timestamp
:deleted-at deleted-at
:deleted-at (if (::snapshot-data file)
(dt/plus timestamp (ldel/get-deletion-delay team))
(dt/plus timestamp (dt/duration {:hours 1})))
:file-id (:id file)
:revn (:revn file)
:version (:version file)
:features (into-array (:features file))
:features (:features file)
:label (::snapshot-label file)
:data (::snapshot-data file)
:changes (blob/encode changes)}
{::db/return-keys false})
(persist-file! cfg file)
;; Send asynchronous notifications
(send-notifications! cfg params file)
(send-notifications! cfg params file))
(with-meta {:revn revn :lagged (get-lagged-changes conn params)}
{::audit/replace-props
{:id (:id file)
:name (:name file)
:features (:features file)
:project-id (:project-id file)
:team-id (:team-id file)}}))))
(when (feat.fdata/offloaded? file)
(let [storage (sto/resolve cfg ::db/reuse-conn true)]
(some->> (:data-ref-id file) (sto/touch-object! storage))))
(let [response {:revn (:revn file)
:lagged (get-lagged-changes conn params)}]
(vary-meta response assoc ::audit/replace-props
{:id (:id file)
:name (:name file)
:features (:features file)
:project-id (:project-id file)
:team-id (:team-id file)}))))
;; FIXME: DEPRECATED
(defn update-file!
"A public api that allows apply a transformation to a file with all context setup."
[{:keys [::db/conn] :as cfg} file-id update-fn & args]
@@ -290,42 +279,51 @@
(feat.fmigr/upsert-migrations! conn file)
(persist-file! cfg file)))
(def ^:private sql:get-file
"SELECT f.*, p.team_id
FROM file AS f
JOIN project AS p ON (p.id = f.project_id)
WHERE f.id = ?
AND (f.deleted_at IS NULL OR
f.deleted_at > now())
FOR KEY SHARE")
(defn get-file
"Get not-decoded file, only decodes the features set."
[cfg id]
;; FIXME: lock for share
(bfc/get-file cfg id :decode? false :lock-for-update? true))
[conn id]
(let [file (db/exec-one! conn [sql:get-file id])]
(when-not file
(ex/raise :type :not-found
:code :object-not-found
:hint (format "file with id '%s' does not exists" id)))
(update file :features db/decode-pgarray #{})))
(defn persist-file!
"Function responsible of persisting already encoded file. Should be
used together with `get-file` and `update-file-data!`.
It also updates the project modified-at attr."
[{:keys [::db/conn ::timestamp] :as cfg} file]
[{:keys [::db/conn ::timestamp]} file]
(let [;; The timestamp can be nil because this function is also
;; intended to be used outside of this module
modified-at
(or timestamp (ct/now))
file
(-> file
(dissoc ::snapshot)
(assoc :modified-at modified-at)
(assoc :has-media-trimmed false))]
modified-at (or timestamp (dt/now))]
(db/update! conn :project
{:modified-at modified-at}
{:id (:project-id file)}
{::db/return-keys false})
(bfc/update-file! cfg file)))
(defn- attach-snapshot
  "Attach snapshot data to the file under the ::snapshot key. This
  should be called before the upcoming file operations are applied to
  the file. When the file was just migrated its data is already
  realized; otherwise the data is realized here before snapshotting."
  [file migrated? cfg]
  (let [snapshot (if migrated? file (update file :data (partial fdata/realize cfg)))]
    (assoc file ::snapshot snapshot)))
(db/update! conn :file
{:revn (:revn file)
:data (:data file)
:version (:version file)
:features (:features file)
:data-backend nil
:data-ref-id nil
:modified-at modified-at
:has-media-trimmed false}
{:id (:id file)}
{::db/return-keys false})))
(defn- update-file-data!
"Perform a file data transformation in with all update context setup.
@@ -337,35 +335,52 @@
fdata/pointer-map modified fragments."
[cfg {:keys [id] :as file} update-fn & args]
(let [file (update file :data (fn [data]
(-> data
(blob/decode)
(assoc :id id))))
libs (delay (bfc/get-resolved-file-libraries cfg file))
(binding [pmap/*tracked* (pmap/create-tracked)
pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
(let [file (update file :data (fn [data]
(-> data
(blob/decode)
(assoc :id (:id file)))))
libs (delay (bfc/get-resolved-file-libraries cfg file))
need-migration?
(fmg/need-migration? file)
;; For avoid unnecesary overhead of creating multiple pointers
;; and handly internally with objects map in their worst
;; case (when probably all shapes and all pointers will be
;; readed in any case), we just realize/resolve them before
;; applying the migration to the file
file (if (fmg/need-migration? file)
(-> file
(update :data feat.fdata/process-pointers deref)
(update :data feat.fdata/process-objects (partial into {}))
(fmg/migrate-file libs))
file)
take-snapshot?
(take-snapshot? file)
file (apply update-fn cfg file args)
;; For avoid unnecesary overhead of creating multiple
;; pointers and handly internally with objects map in their
;; worst case (when probably all shapes and all pointers
;; will be readed in any case), we just realize/resolve them
;; before applying the migration to the file
file
(cond-> file
need-migration?
(->> (fdata/realize cfg))
;; TODO: reuse operations if file is migrated
;; TODO: move encoding to a separated thread
file (if (take-snapshot? file)
(let [tpoint (dt/tpoint)
snapshot (-> (:data file)
(feat.fdata/process-pointers deref)
(feat.fdata/process-objects (partial into {}))
(blob/encode))
elapsed (tpoint)
label (str "internal/snapshot/" (:revn file))]
need-migration?
(fmg/migrate-file libs)
(l/trc :hint "take snapshot"
:file-id (str (:id file))
:revn (:revn file)
:label label
:elapsed (dt/format-duration elapsed))
take-snapshot?
(attach-snapshot need-migration? cfg))]
(-> file
(assoc ::snapshot-data snapshot)
(assoc ::snapshot-label label)))
file)]
(bfc/encode-file cfg file))))
(apply update-fn cfg file args)))
(defn- soft-validate-file-schema!
[file]
@@ -437,11 +452,11 @@
(when (contains? cf/flags :auto-file-snapshot)
(let [freq (or (cf/get :auto-file-snapshot-every) 20)
timeout (or (cf/get :auto-file-snapshot-timeout)
(ct/duration {:hours 1}))]
(dt/duration {:hours 1}))]
(or (= 1 freq)
(zero? (mod revn freq))
(> (inst-ms (ct/diff modified-at (ct/now)))
(> (inst-ms (dt/diff modified-at (dt/now)))
(inst-ms timeout))))))
(def ^:private sql:lagged-changes
@@ -455,9 +470,8 @@
(defn- get-lagged-changes
[conn {:keys [id revn] :as params}]
(->> (db/exec! conn [sql:lagged-changes id revn])
(filter :changes)
(mapv (fn [row]
(update row :changes blob/decode)))))
(map files/decode-row)
(vec)))
(defn- send-notifications!
[cfg {:keys [team changes session-id] :as params} file]
@@ -482,5 +496,5 @@
:file-id (:id file)
:session-id session-id
:revn (:revn file)
:modified-at (ct/now)
:modified-at (dt/now)
:changes lchanges}))))

View File

@@ -9,7 +9,6 @@
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.db :as db]
[app.db.sql :as-alias sql]
@@ -27,6 +26,7 @@
[app.rpc.quotes :as quotes]
[app.storage :as sto]
[app.util.services :as sv]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[promesa.exec :as px]))
@@ -124,7 +124,7 @@
content (-> (sto/content resource)
(sto/wrap-with-hash hash))]
{::sto/content content
::sto/touched-at (ct/now)
::sto/touched-at (dt/now)
::sto/deduplicate? true
:content-type mtype
:bucket "team-font-variant"})))
@@ -217,7 +217,7 @@
{::sql/for-update true})
delay (ldel/get-deletion-delay team)
tnow (ct/in-future delay)]
tnow (dt/in-future delay)]
(teams/check-edition-permissions! (:permissions team))
@@ -261,7 +261,7 @@
(teams/check-edition-permissions! (:permissions team))
(db/update! conn :team-font-variant
{:deleted-at (ct/in-future delay)}
{:deleted-at (dt/in-future delay)}
{:id (:id variant)}
{::db/return-keys false})

View File

@@ -13,7 +13,6 @@
[app.common.exceptions :as ex]
[app.common.features :as cfeat]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
@@ -29,6 +28,7 @@
[app.setup.templates :as tmpl]
[app.storage.tmp :as tmp]
[app.util.services :as sv]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[promesa.exec :as px]))
@@ -104,7 +104,7 @@
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
(binding [bfc/*state* (volatile! {:index {file-id (uuid/next)}})]
(duplicate-file (assoc cfg ::bfc/timestamp (ct/now))
(duplicate-file (assoc cfg ::bfc/timestamp (dt/now))
(-> params
(assoc :profile-id profile-id)
(assoc :reset-shared-flag true)))))))
@@ -164,7 +164,7 @@
(db/tx-run! cfg (fn [cfg]
;; Defer all constraints
(db/exec-one! cfg ["SET CONSTRAINTS ALL DEFERRED"])
(-> (assoc cfg ::bfc/timestamp (ct/now))
(-> (assoc cfg ::bfc/timestamp (dt/now))
(duplicate-project (assoc params :profile-id profile-id))))))
(defn duplicate-team
@@ -320,7 +320,7 @@
;; trully different modification date to each file.
(px/sleep 10)
(db/update! conn :project
{:modified-at (ct/now)}
{:modified-at (dt/now)}
{:id project-id}))
nil))
@@ -425,7 +425,7 @@
(db/tx-run! cfg
(fn [{:keys [::db/conn] :as cfg}]
(db/update! conn :project
{:modified-at (ct/now)}
{:modified-at (dt/now)}
{:id project-id}
{::db/return-keys false})

View File

@@ -10,7 +10,6 @@
[app.common.exceptions :as ex]
[app.common.media :as cm]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
@@ -24,6 +23,7 @@
[app.storage :as sto]
[app.storage.tmp :as tmp]
[app.util.services :as sv]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[cuerdas.core :as str]
[datoteka.io :as io]
@@ -67,7 +67,7 @@
mobj (create-file-media-object cfg params)]
(db/update! conn :file
{:modified-at (ct/now)
{:modified-at (dt/now)
:has-media-trimmed false}
{:id file-id}
{::db/return-keys false})
@@ -192,7 +192,7 @@
mobj (create-file-media-object-from-url cfg (assoc params :profile-id profile-id))]
(db/update! pool :file
{:modified-at (ct/now)
{:modified-at (dt/now)
:has-media-trimmed false}
{:id file-id}
{::db/return-keys false})

View File

@@ -10,7 +10,6 @@
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.types.plugins :refer [schema:plugin-registry]]
[app.common.uuid :as uuid]
[app.config :as cf]
@@ -29,6 +28,7 @@
[app.storage :as sto]
[app.tokens :as tokens]
[app.util.services :as sv]
[app.util.time :as dt]
[app.worker :as wrk]
[cuerdas.core :as str]
[promesa.exec :as px]))
@@ -70,8 +70,8 @@
[:is-blocked {:optional true} ::sm/boolean]
[:is-demo {:optional true} ::sm/boolean]
[:is-muted {:optional true} ::sm/boolean]
[:created-at {:optional true} ::ct/inst]
[:modified-at {:optional true} ::ct/inst]
[:created-at {:optional true} ::sm/inst]
[:modified-at {:optional true} ::sm/inst]
[:default-project-id {:optional true} ::sm/uuid]
[:default-team-id {:optional true} ::sm/uuid]
[:props {:optional true} schema:props]])
@@ -352,13 +352,13 @@
[{:keys [::db/conn] :as cfg} {:keys [profile email] :as params}]
(let [token (tokens/generate (::setup/props cfg)
{:iss :change-email
:exp (ct/in-future "15m")
:exp (dt/in-future "15m")
:profile-id (:id profile)
:email email})
ptoken (tokens/generate (::setup/props cfg)
{:iss :profile-identity
:profile-id (:id profile)
:exp (ct/in-future {:days 30})})]
:exp (dt/in-future {:days 30})})]
(when (not= email (:email profile))
(check-profile-existence! conn params))
@@ -444,7 +444,7 @@
::db/transaction true}
[{:keys [::db/conn] :as cfg} {:keys [::rpc/profile-id] :as params}]
(let [teams (get-owned-teams conn profile-id)
deleted-at (ct/now)]
deleted-at (dt/now)]
;; If we found owned teams with participants, we don't allow
;; delete profile until the user properly transfer ownership or

View File

@@ -9,7 +9,6 @@
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.db :as db]
[app.db.sql :as-alias sql]
[app.features.logical-deletion :as ldel]
@@ -22,6 +21,7 @@
[app.rpc.permissions :as perms]
[app.rpc.quotes :as quotes]
[app.util.services :as sv]
[app.util.time :as dt]
[app.worker :as wrk]))
;; --- Check Project Permissions
@@ -218,7 +218,7 @@
(sv/defmethod ::update-project-pin
{::doc/added "1.18"
::sm/params schema:update-project-pin
::webhooks/batch-timeout (ct/duration "5s")
::webhooks/batch-timeout (dt/duration "5s")
::webhooks/batch-key (webhooks/key-fn ::rpc/profile-id :id)
::webhooks/event? true
::db/transaction true}
@@ -257,7 +257,7 @@
[conn team project-id]
(let [delay (ldel/get-deletion-delay team)
project (db/update! conn :project
{:deleted-at (ct/in-future delay)}
{:deleted-at (dt/in-future delay)}
{:id project-id}
{::db/return-keys true})]

View File

@@ -11,7 +11,6 @@
[app.common.exceptions :as ex]
[app.common.features :as cfeat]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.types.team :as tt]
[app.common.uuid :as uuid]
[app.config :as cf]
@@ -31,6 +30,7 @@
[app.setup :as-alias setup]
[app.storage :as sto]
[app.util.services :as sv]
[app.util.time :as dt]
[app.worker :as wrk]
[clojure.set :as set]))
@@ -666,7 +666,7 @@
(let [delay (ldel/get-deletion-delay team)
team (db/update! conn :team
{:deleted-at (ct/in-future delay)}
{:deleted-at (dt/in-future delay)}
{:id id}
{::db/return-keys true})]

View File

@@ -6,14 +6,12 @@
(ns app.rpc.commands.teams-invitations
(:require
[app.binfile.common :as bfc]
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.features :as cfeat]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.types.team :as types.team]
[app.common.uuid :as uuid]
[app.config :as cf]
@@ -22,6 +20,7 @@
[app.loggers.audit :as audit]
[app.main :as-alias main]
[app.rpc :as-alias rpc]
[app.rpc.commands.files :as files]
[app.rpc.commands.profile :as profile]
[app.rpc.commands.teams :as teams]
[app.rpc.doc :as-alias doc]
@@ -30,6 +29,7 @@
[app.setup :as-alias setup]
[app.tokens :as tokens]
[app.util.services :as sv]
[app.util.time :as dt]
[cuerdas.core :as str]))
;; --- Mutation: Create Team Invitation
@@ -62,7 +62,7 @@
(tokens/generate (::setup/props cfg)
{:iss :profile-identity
:profile-id profile-id
:exp (ct/in-future {:days 30})}))
:exp (dt/in-future {:days 30})}))
(def ^:private schema:create-invitation
[:map {:title "params:create-invitation"}
@@ -126,7 +126,7 @@
(teams/check-email-spam conn email true)
(let [id (uuid/next)
expire (ct/in-future "168h") ;; 7 days
expire (dt/in-future "168h") ;; 7 days
invitation (db/exec-one! conn [sql:upsert-team-invitation id
(:id team) (str/lower email)
(:id profile)
@@ -418,7 +418,7 @@
:code :insufficient-permissions))
(db/update! conn :team-invitation
{:role (name role) :updated-at (ct/now)}
{:role (name role) :updated-at (dt/now)}
{:team-id team-id :email-to (profile/clean-email email)})
nil))
@@ -471,7 +471,7 @@
(when-let [request (db/get* conn :team-access-request
{:team-id team-id
:requester-id profile-id})]
(when (ct/is-after? (:valid-until request) (ct/now))
(when (dt/is-after? (:valid-until request) (dt/now))
(ex/raise :type :validation
:code :request-already-sent
:hint "you have already made a request to join this team less than 24 hours ago"))))
@@ -487,8 +487,8 @@
"Create or update team access request for provided team and profile-id"
[conn team-id requester-id]
(check-existing-team-access-request conn team-id requester-id)
(let [valid-until (ct/in-future {:hours 24})
auto-join-until (ct/in-future {:days 7})
(let [valid-until (dt/in-future {:hours 24})
auto-join-until (dt/in-future {:days 7})
request-id (uuid/next)]
(db/exec-one! conn [sql:upsert-team-access-request
request-id team-id requester-id
@@ -499,7 +499,7 @@
"A specific method for obtain a file with name and page-id used for
team request access procediment"
[cfg file-id]
(let [file (bfc/get-file cfg file-id :migrate? false)]
(let [file (files/get-file cfg file-id :migrate? false)]
(-> file
(dissoc :data)
(dissoc :deleted-at)

View File

@@ -8,7 +8,6 @@
(:require
[app.common.exceptions :as ex]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.types.team :as types.team]
[app.config :as cf]
[app.db :as db]
@@ -24,7 +23,8 @@
[app.setup :as-alias setup]
[app.tokens :as tokens]
[app.tokens.spec.team-invitation :as-alias spec.team-invitation]
[app.util.services :as sv]))
[app.util.services :as sv]
[app.util.time :as dt]))
(defmulti process-token (fn [_ _ claims] (:iss claims)))
@@ -126,7 +126,7 @@
(def schema:team-invitation-claims
[:map {:title "TeamInvitationClaims"}
[:iss :keyword]
[:exp ::ct/inst]
[:exp ::dt/instant]
[:profile-id ::sm/uuid]
[:role ::types.team/role]
[:team-id ::sm/uuid]

View File

@@ -51,7 +51,7 @@
(defn- get-view-only-bundle
[{:keys [::db/conn] :as cfg} {:keys [profile-id file-id ::perms] :as params}]
(let [file (bfc/get-file cfg file-id)
(let [file (files/get-file cfg file-id)
project (db/get conn :project
{:id (:project-id file)}
@@ -81,7 +81,7 @@
libs (->> (bfc/get-file-libraries conn file-id)
(mapv (fn [{:keys [id] :as lib}]
(merge lib (bfc/get-file cfg id)))))
(merge lib (files/get-file cfg id)))))
links (->> (db/query conn :share-link {:file-id file-id})
(mapv (fn [row]

View File

@@ -9,7 +9,6 @@
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uri :as u]
[app.common.uuid :as uuid]
[app.db :as db]
@@ -20,6 +19,7 @@
[app.rpc.doc :as-alias doc]
[app.rpc.permissions :as perms]
[app.util.services :as sv]
[app.util.time :as dt]
[cuerdas.core :as str]))
(defn get-webhooks-permissions
@@ -54,7 +54,7 @@
(http/req! cfg
{:method :head
:uri (str (:uri params))
:timeout (ct/duration "3s")}
:timeout (dt/duration "3s")}
{:sync? true}))]
(if (ex/exception? response)
(if-let [hint (webhooks/interpret-exception response)]

View File

@@ -10,9 +10,9 @@
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.config :as cf]
[app.db :as db]
[app.util.time :as dt]
[app.worker :as wrk]
[cuerdas.core :as str]))
@@ -95,7 +95,7 @@
"- Total: ~(::total params) (INCR ~(::incr params 1))\n")]
(wrk/submit! {::db/conn conn
::wrk/task :sendmail
::wrk/delay (ct/duration "30s")
::wrk/delay (dt/duration "30s")
::wrk/max-retries 4
::wrk/priority 200
::wrk/dedupe true

View File

@@ -47,7 +47,6 @@
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uri :as uri]
[app.common.uuid :as uuid]
[app.config :as cf]
@@ -59,6 +58,7 @@
[app.rpc.rlimit.result :as-alias lresult]
[app.util.inet :as inet]
[app.util.services :as-alias sv]
[app.util.time :as dt]
[app.worker :as wrk]
[clojure.edn :as edn]
[cuerdas.core :as str]
@@ -67,7 +67,7 @@
[promesa.exec :as px]))
(def ^:private default-timeout
(ct/duration 400))
(dt/duration 400))
(def ^:private default-options
{:codec rds/string-codec
@@ -94,10 +94,6 @@
(defmulti parse-limit (fn [[_ strategy _]] strategy))
(defmulti process-limit (fn [_ _ _ o] (::strategy o)))
(defn- ->seconds
[d]
(-> d inst-ms (/ 1000) int))
(sm/register!
{:type ::rpc/rlimit
:pred #(instance? clojure.lang.Agent %)})
@@ -119,7 +115,7 @@
[:map
[::capacity ::sm/int]
[::rate ::sm/int]
[::internal ::ct/duration]
[::internal ::dt/duration]
[::params [::sm/vec :any]]]
[:map
[::nreq ::sm/int]
@@ -161,7 +157,7 @@
(assert (valid-limit-tuple? vlimit) "expected valid limit tuple")
(if-let [[_ capacity rate interval] (re-find bucket-opts-re opts)]
(let [interval (ct/duration interval)
(let [interval (dt/duration interval)
rate (parse-long rate)
capacity (parse-long capacity)]
{::name name
@@ -170,7 +166,7 @@
::rate rate
::interval interval
::opts opts
::params [(->seconds interval) rate capacity]
::params [(dt/->seconds interval) rate capacity]
::key (str "ratelimit.bucket." (d/name name))})
(ex/raise :type :validation
:code :invalid-bucket-limit-opts
@@ -180,7 +176,7 @@
[redis user-id now {:keys [::key ::params ::service ::capacity ::interval ::rate] :as limit}]
(let [script (-> bucket-rate-limit-script
(assoc ::rscript/keys [(str key "." service "." user-id)])
(assoc ::rscript/vals (conj params (->seconds now))))
(assoc ::rscript/vals (conj params (dt/->seconds now))))
result (rds/eval redis script)
allowed? (boolean (nth result 0))
remaining (nth result 1)
@@ -195,16 +191,16 @@
:remaining remaining)
(-> limit
(assoc ::lresult/allowed allowed?)
(assoc ::lresult/reset (ct/plus now reset))
(assoc ::lresult/reset (dt/plus now reset))
(assoc ::lresult/remaining remaining))))
(defmethod process-limit :window
[redis user-id now {:keys [::nreq ::unit ::key ::service] :as limit}]
(let [ts (ct/truncate now unit)
ttl (ct/diff now (ct/plus ts {unit 1}))
(let [ts (dt/truncate now unit)
ttl (dt/diff now (dt/plus ts {unit 1}))
script (-> window-rate-limit-script
(assoc ::rscript/keys [(str key "." service "." user-id "." (ct/format-inst ts))])
(assoc ::rscript/vals [nreq (->seconds ttl)]))
(assoc ::rscript/keys [(str key "." service "." user-id "." (dt/format-instant ts))])
(assoc ::rscript/vals [nreq (dt/->seconds ttl)]))
result (rds/eval redis script)
allowed? (boolean (nth result 0))
remaining (nth result 1)]
@@ -218,7 +214,7 @@
(-> limit
(assoc ::lresult/allowed allowed?)
(assoc ::lresult/remaining remaining)
(assoc ::lresult/reset (ct/plus ts {unit 1})))))
(assoc ::lresult/reset (dt/plus ts {unit 1})))))
(defn- process-limits!
[redis user-id limits now]
@@ -227,7 +223,7 @@
(d/index-by ::name ::lresult/remaining)
(uri/map->query-string))
reset (->> results
(d/index-by ::name (comp ->seconds ::lresult/reset))
(d/index-by ::name (comp dt/->seconds ::lresult/reset))
(uri/map->query-string))
rejected (d/seek (complement ::lresult/allowed) results)]
@@ -265,7 +261,7 @@
(let [redis (rds/get-or-connect redis ::rpc/rlimit default-options)
uid (get-uid params)
;; FIXME: why not clasic try/catch?
result (ex/try! (process-limits! redis uid limits (ct/now)))]
result (ex/try! (process-limits! redis uid limits (dt/now)))]
(l/trc :hint "process-limits"
:service sname
@@ -325,7 +321,7 @@
(sm/check-fn schema:config))
(def ^:private check-refresh
(sm/check-fn ::ct/duration))
(sm/check-fn ::dt/duration))
(def ^:private check-limits
(sm/check-fn schema:limits))
@@ -355,7 +351,7 @@
config)))]
(when-let [config (some->> path slurp edn/read-string check-config)]
(let [refresh (->> config meta :refresh ct/duration check-refresh)
(let [refresh (->> config meta :refresh dt/duration check-refresh)
limits (->> config compile-pass-1 compile-pass-2 check-limits)]
{::refresh refresh
@@ -414,7 +410,7 @@
(l/info :hint "initializing rlimit config reader" :path (str path))
;; Initialize the state with initial refresh value
(send-via executor state (constantly {::refresh (ct/duration "5s")}))
(send-via executor state (constantly {::refresh (dt/duration "5s")}))
;; Force a refresh
(refresh-config (assoc cfg ::path path ::state state)))

View File

@@ -11,11 +11,11 @@
[app.common.exceptions :as ex]
[app.common.json :as json]
[app.common.logging :as l]
[app.common.time :as ct]
[app.config :as cf]
[app.srepl.cli :as cli]
[app.srepl.main]
[app.util.locks :as locks]
[app.util.time :as dt]
[clojure.core :as c]
[clojure.core.server :as ccs]
[clojure.main :as cm]
@@ -77,7 +77,7 @@
(loop []
(when (try
(let [data (read-line)
tpoint (ct/tpoint)]
tpoint (dt/tpoint)]
(l/dbg :hint "received" :data (if (= data ::eof) "EOF" data))

View File

@@ -10,14 +10,13 @@
[app.auth :as auth]
[app.common.exceptions :as ex]
[app.common.schema :as sm]
[app.common.schema.generators :as sg]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.db :as db]
[app.rpc.commands.auth :as cmd.auth]
[app.rpc.commands.profile :as cmd.profile]
[app.setup :as-alias setup]
[app.tokens :as tokens]
[app.util.time :as dt]
[cuerdas.core :as str]))
(defn coercer
@@ -102,7 +101,7 @@
(fn [{:keys [::db/conn] :as system}]
(let [res (if soft
(db/update! conn :profile
{:deleted-at (ct/now)}
{:deleted-at (dt/now)}
{:email email :deleted-at nil})
(db/delete! conn :profile
{:email email}))]
@@ -174,21 +173,6 @@
:num-editors (get-customer-slots system id)
:subscription (get props :subscription)})))
(def ^:private schema:timestamp
(sm/type-schema
{:type ::timestamp
:pred ct/inst?
:type-properties
{:title "inst"
:description "The same as :app.common.time/inst but encodes to epoch"
:error/message "should be an instant"
:gen/gen (->> (sg/small-int)
(sg/fmap (fn [v] (ct/inst v))))
:decode/string ct/inst
:encode/string inst-ms
:decode/json ct/inst
:encode/json inst-ms}}))
(def ^:private schema:customer-subscription
[:map {:title "CustomerSubscription"}
[:id ::sm/text]
@@ -214,15 +198,15 @@
"year"]]
[:quantity :int]
[:description [:maybe ::sm/text]]
[:created-at schema:timestamp]
[:start-date [:maybe schema:timestamp]]
[:ended-at [:maybe schema:timestamp]]
[:trial-end [:maybe schema:timestamp]]
[:trial-start [:maybe schema:timestamp]]
[:cancel-at [:maybe schema:timestamp]]
[:canceled-at [:maybe schema:timestamp]]
[:current-period-end [:maybe schema:timestamp]]
[:current-period-start [:maybe schema:timestamp]]
[:created-at ::sm/timestamp]
[:start-date [:maybe ::sm/timestamp]]
[:ended-at [:maybe ::sm/timestamp]]
[:trial-end [:maybe ::sm/timestamp]]
[:trial-start [:maybe ::sm/timestamp]]
[:cancel-at [:maybe ::sm/timestamp]]
[:canceled-at [:maybe ::sm/timestamp]]
[:current-period-end [:maybe ::sm/timestamp]]
[:current-period-start [:maybe ::sm/timestamp]]
[:cancel-at-period-end :boolean]
[:cancellation-details

View File

@@ -12,10 +12,11 @@
[app.common.data :as d]
[app.common.files.migrations :as fmg]
[app.common.files.validate :as cfv]
[app.common.time :as ct]
[app.db :as db]
[app.features.file-snapshots :as fsnap]
[app.main :as main]))
[app.main :as main]
[app.rpc.commands.files :as files]
[app.rpc.commands.files-snapshot :as fsnap]
[app.util.time :as dt]))
(def ^:dynamic *system* nil)
@@ -47,7 +48,7 @@
([system id]
(db/run! system
(fn [system]
(bfc/get-file system id :decode? false)))))
(files/get-file system id :migrate? false)))))
(defn update-team!
[system {:keys [id] :as team}]
@@ -117,10 +118,10 @@
(let [conn (db/get-connection system)]
(->> (get-and-lock-team-files conn team-id)
(reduce (fn [result file-id]
(let [file (bfc/get-file system file-id :realize? true :lock-for-update? true)]
(fsnap/create! system file
{:label label
:created-by "admin"})
(let [file (fsnap/get-file-snapshots system file-id)]
(fsnap/create-file-snapshot! system file
{:label label
:created-by :admin})
(inc result)))
0))))
@@ -131,23 +132,21 @@
(into #{}))
snap (search-file-snapshots conn ids label)
ids' (into #{} (map :file-id) snap)]
(when (not= ids ids')
(throw (RuntimeException. "no uniform snapshot available")))
(reduce (fn [result {:keys [file-id id]}]
(fsnap/restore! system file-id id)
(fsnap/restore-file-snapshot! system file-id id)
(inc result))
0
snap)))
(defn process-file!
[system file-id update-fn & {:keys [label validate? with-libraries?] :or {validate? true} :as opts}]
(let [file (bfc/get-file system file-id
:lock-for-update? true
:realize? true)
(let [file (bfc/get-file system file-id ::db/for-update true)
libs (when with-libraries?
(bfc/get-resolved-file-libraries system file))
@@ -164,10 +163,10 @@
(cfv/validate-file-schema! file'))
(when (string? label)
(fsnap/create! system file
{:label label
:deleted-at (ct/in-future {:days 30})
:created-by "admin"}))
(fsnap/create-file-snapshot! system file
{:label label
:deleted-at (dt/in-future {:days 30})
:created-by :admin}))
(let [file' (update file' :revn inc)]
(bfc/update-file! system file')

View File

@@ -19,18 +19,17 @@
[app.common.pprint :as p]
[app.common.schema :as sm]
[app.common.spec :as us]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.db.sql :as-alias sql]
[app.features.fdata :as fdata]
[app.features.file-snapshots :as fsnap]
[app.features.fdata :as feat.fdata]
[app.loggers.audit :as audit]
[app.main :as main]
[app.msgbus :as mbus]
[app.rpc.commands.auth :as auth]
[app.rpc.commands.files :as files]
[app.rpc.commands.files-snapshot :as fsnap]
[app.rpc.commands.management :as mgmt]
[app.rpc.commands.profile :as profile]
[app.rpc.commands.projects :as projects]
@@ -39,6 +38,7 @@
[app.srepl.helpers :as h]
[app.util.blob :as blob]
[app.util.pointer-map :as pmap]
[app.util.time :as dt]
[app.worker :as wrk]
[clojure.java.io :as io]
[clojure.pprint :refer [print-table]]
@@ -150,15 +150,15 @@
(defn enable-objects-map-feature-on-file!
[file-id & {:as opts}]
(process-file! file-id fdata/enable-objects-map opts))
(process-file! file-id feat.fdata/enable-objects-map opts))
(defn enable-pointer-map-feature-on-file!
[file-id & {:as opts}]
(process-file! file-id fdata/enable-pointer-map opts))
(process-file! file-id feat.fdata/enable-pointer-map opts))
(defn enable-path-data-feature-on-file!
[file-id & {:as opts}]
(process-file! file-id fdata/enable-path-data opts))
(process-file! file-id feat.fdata/enable-path-data opts))
(defn enable-storage-features-on-file!
[file-id & {:as opts}]
@@ -338,10 +338,7 @@
collectable file-changes entry."
[& {:keys [file-id label]}]
(let [file-id (h/parse-uuid file-id)]
(db/tx-run! main/system
(fn [cfg]
(let [file (bfc/get-file cfg file-id :realize? true)]
(fsnap/create! cfg file {:label label :created-by "admin"}))))))
(db/tx-run! main/system fsnap/create-file-snapshot! {:file-id file-id :label label})))
(defn restore-file-snapshot!
[file-id & {:keys [label id]}]
@@ -351,13 +348,13 @@
(fn [{:keys [::db/conn] :as system}]
(cond
(uuid? snapshot-id)
(fsnap/restore! system file-id snapshot-id)
(fsnap/restore-file-snapshot! system file-id snapshot-id)
(string? label)
(->> (h/search-file-snapshots conn #{file-id} label)
(map :id)
(first)
(fsnap/restore! system file-id))
(fsnap/restore-file-snapshot! system file-id))
:else
(throw (ex-info "snapshot id or label should be provided" {})))))))
@@ -366,9 +363,9 @@
[file-id & {:as _}]
(let [file-id (h/parse-uuid file-id)]
(db/tx-run! main/system
(fn [cfg]
(->> (fsnap/get-visible-snapshots cfg file-id)
(print-table [:label :id :revn :created-at :created-by]))))))
(fn [{:keys [::db/conn]}]
(->> (fsnap/get-file-snapshots conn file-id)
(print-table [:label :id :revn :created-at]))))))
(defn take-team-snapshot!
[team-id & {:keys [label rollback?] :or {rollback? true}}]
@@ -479,7 +476,7 @@
:max-jobs max-jobs
:max-items max-items)
(let [tpoint (ct/tpoint)
(let [tpoint (dt/tpoint)
factory (px/thread-factory :virtual false :prefix "penpot/file-process/")
executor (px/cached-executor :factory factory)
sjobs (ps/create :permits max-jobs)
@@ -509,7 +506,7 @@
(Thread/sleep (int pause)))
(ps/release! sjobs)
(let [elapsed (ct/format-duration (tpoint))]
(let [elapsed (dt/format-duration (tpoint))]
(l/trc :hint "process:file:end"
:tid thread-id
:file-id (str file-id)
@@ -519,7 +516,7 @@
process-file*
(fn [idx file-id]
(ps/acquire! sjobs)
(px/run! executor (partial process-file file-id idx (ct/tpoint)))
(px/run! executor (partial process-file file-id idx (dt/tpoint)))
(inc idx))
process-files
@@ -545,73 +542,11 @@
(l/dbg :hint "process:error" :cause cause))
(finally
(let [elapsed (ct/format-duration (tpoint))]
(let [elapsed (dt/format-duration (tpoint))]
(l/dbg :hint "process:end"
:rollback rollback?
:elapsed elapsed))))))
(defn process!
"Apply a function to all files in the database"
[& {:keys [max-jobs
rollback?
max-items
chunk-size
proc-fn]
:or {max-items Long/MAX_VALUE
chunk-size 100
rollback? true}
:as opts}]
(let [tpoint (ct/tpoint)
max-jobs (or max-jobs (px/get-available-processors))
processed (atom 0)
opts (-> opts
(assoc :chunk-size chunk-size)
(dissoc :rollback?)
(dissoc :proc-fn)
(dissoc :max-jobs)
(dissoc :max-items))
start-job
(fn [jid]
(l/dbg :hint "start job thread" :jid jid)
(px/sleep 1000)
(loop []
(let [result (-> main/system
(assoc ::db/rollback rollback?)
(proc-fn opts))]
(let [total (swap! processed + result)]
(l/dbg :hint "chunk processed" :jid jid :total total :chunk result ::l/sync? true)
(when (and (pos? result)
(< total max-items))
(recur))))))]
(l/dbg :hint "process:start"
:rollback rollback?
:max-jobs max-jobs
:max-items max-items)
(try
(let [jobs (->> (range max-jobs)
(map (fn [jid] (px/fn->thread (partial start-job jid))))
(doall))]
(doseq [job jobs]
(.join ^java.lang.Thread job)))
(catch Throwable cause
(l/dbg :hint "process:error" :cause cause))
(finally
(let [elapsed (ct/format-duration (tpoint))]
(l/dbg :hint "process:end"
:processed @processed
:rollback rollback?
:elapsed elapsed))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; DELETE/RESTORE OBJECTS (WITH CASCADE, SOFT)
@@ -621,7 +556,7 @@
"Mark a project for deletion"
[file-id]
(let [file-id (h/parse-uuid file-id)
tnow (ct/now)]
tnow (dt/now)]
(audit/insert! main/system
{::audit/name "delete-file"
@@ -671,10 +606,11 @@
(let [file-id (h/parse-uuid file-id)]
(db/tx-run! main/system
(fn [system]
(when-let [file (db/get* system :file
{:id file-id}
{::db/remove-deleted false
::sql/columns [:id :name]})]
(when-let [file (some-> (db/get* system :file
{:id file-id}
{::db/remove-deleted false
::sql/columns [:id :name]})
(files/decode-row))]
(audit/insert! system
{::audit/name "restore-file"
::audit/type "action"
@@ -682,7 +618,7 @@
::audit/props file
::audit/context {:triggered-by "srepl"
:cause "explicit call to restore-file!"}
::audit/tracked-at (ct/now)})
::audit/tracked-at (dt/now)})
(restore-file* system file-id))))))
@@ -690,7 +626,7 @@
"Mark a project for deletion"
[project-id]
(let [project-id (h/parse-uuid project-id)
tnow (ct/now)]
tnow (dt/now)]
(audit/insert! main/system
{::audit/name "delete-project"
@@ -737,7 +673,7 @@
::audit/props project
::audit/context {:triggered-by "srepl"
:cause "explicit call to restore-team!"}
::audit/tracked-at (ct/now)})
::audit/tracked-at (dt/now)})
(restore-project* system project-id))))))
@@ -745,7 +681,7 @@
"Mark a team for deletion"
[team-id]
(let [team-id (h/parse-uuid team-id)
tnow (ct/now)]
tnow (dt/now)]
(audit/insert! main/system
{::audit/name "delete-team"
@@ -797,7 +733,7 @@
::audit/props team
::audit/context {:triggered-by "srepl"
:cause "explicit call to restore-team!"}
::audit/tracked-at (ct/now)})
::audit/tracked-at (dt/now)})
(restore-team* system team-id))))))
@@ -805,7 +741,7 @@
"Mark a profile for deletion."
[profile-id]
(let [profile-id (h/parse-uuid profile-id)
tnow (ct/now)]
tnow (dt/now)]
(audit/insert! main/system
{::audit/name "delete-profile"
@@ -839,7 +775,7 @@
::audit/props (audit/profile->props profile)
::audit/context {:triggered-by "srepl"
:cause "explicit call to restore-profile!"}
::audit/tracked-at (ct/now)})
::audit/tracked-at (dt/now)})
(db/update! system :profile
{:deleted-at nil}
@@ -885,7 +821,7 @@
{:deleted deleted :total total})))]
(let [path (fs/path path)
deleted-at (ct/minus (ct/now) (cf/get-deletion-delay))]
deleted-at (dt/minus (dt/now) (cf/get-deletion-delay))]
(when-not (fs/exists? path)
(throw (ex-info "path does not exists" {:path path})))
@@ -895,19 +831,6 @@
(with-open [reader (io/reader path)]
(process-data! system deleted-at (line-seq reader))))))))
(defn process-chunks
"A generic function that executes the specified proc iterativelly
until 0 results is returned"
[cfg proc-fn & params]
(loop [total 0]
(let [result (apply proc-fn cfg params)]
(if (pos? result)
(do
(l/trc :hint "chunk processed" :size result :total total)
(recur (+ total result)))
total))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; CASCADE FIXING
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -982,7 +905,7 @@
(db/tx-run! main/system
(fn [{:keys [::db/conn] :as cfg}]
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
(let [team (-> (assoc cfg ::bfc/timestamp (ct/now))
(let [team (-> (assoc cfg ::bfc/timestamp (dt/now))
(mgmt/duplicate-team :team-id team-id :name name))
rels (db/query conn :team-profile-rel {:team-id team-id})]

View File

@@ -12,13 +12,13 @@
[app.common.data.macros :as dm]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.storage.fs :as sfs]
[app.storage.impl :as impl]
[app.storage.s3 :as ss3]
[app.util.time :as dt]
[cuerdas.core :as str]
[datoteka.fs :as fs]
[integrant.core :as ig])
@@ -113,13 +113,16 @@
(defn- create-database-object
[{:keys [::backend ::db/connectable]} {:keys [::content ::expired-at ::touched-at ::touch] :as params}]
(let [id (or (::id params) (uuid/random))
(let [id (or (:id params) (uuid/random))
mdata (cond-> (get-metadata params)
(satisfies? impl/IContentHash content)
(assoc :hash (impl/get-hash content)))
(assoc :hash (impl/get-hash content))
:always
(dissoc :id))
touched-at (if touch
(or touched-at (ct/now))
(or touched-at (dt/now))
touched-at)
;; NOTE: for now we don't reuse the deleted objects, but in
@@ -221,7 +224,7 @@
(assert (valid-storage? storage))
(let [id (if (impl/object? object-or-id) (:id object-or-id) object-or-id)]
(-> (db/update! connectable :storage-object
{:touched-at (ct/now)}
{:touched-at (dt/now)}
{:id id})
(db/get-update-count)
(pos?))))
@@ -232,7 +235,7 @@
[storage object]
(assert (valid-storage? storage))
(when (or (nil? (:expired-at object))
(ct/is-after? (:expired-at object) (ct/now)))
(dt/is-after? (:expired-at object) (dt/now)))
(-> (impl/resolve-backend storage (:backend object))
(impl/get-object-data object))))
@@ -241,7 +244,7 @@
[storage object]
(assert (valid-storage? storage))
(when (or (nil? (:expired-at object))
(ct/is-after? (:expired-at object) (ct/now)))
(dt/is-after? (:expired-at object) (dt/now)))
(-> (impl/resolve-backend storage (:backend object))
(impl/get-object-bytes object))))
@@ -251,7 +254,7 @@
([storage object options]
(assert (valid-storage? storage))
(when (or (nil? (:expired-at object))
(ct/is-after? (:expired-at object) (ct/now)))
(dt/is-after? (:expired-at object) (dt/now)))
(-> (impl/resolve-backend storage (:backend object))
(impl/get-object-url object options)))))
@@ -263,7 +266,7 @@
(let [backend (impl/resolve-backend storage (:backend object))]
(when (and (= :fs (::type backend))
(or (nil? (:expired-at object))
(ct/is-after? (:expired-at object) (ct/now))))
(dt/is-after? (:expired-at object) (dt/now))))
(-> (impl/get-object-url backend object nil) file-url->path))))
(defn del-object!
@@ -271,7 +274,7 @@
(assert (valid-storage? storage))
(let [id (if (impl/object? object-or-id) (:id object-or-id) object-or-id)
res (db/update! connectable :storage-object
{:deleted-at (ct/now)}
{:deleted-at (dt/now)}
{:id id})]
(pos? (db/get-update-count res))))

View File

@@ -15,10 +15,10 @@
(:require
[app.common.data :as d]
[app.common.logging :as l]
[app.common.time :as ct]
[app.db :as db]
[app.storage :as sto]
[app.storage.impl :as impl]
[app.util.time :as dt]
[integrant.core :as ig]))
(def ^:private sql:lock-sobjects
@@ -106,18 +106,18 @@
(defmethod ig/expand-key ::handler
[k v]
{k (assoc v ::min-age (ct/duration {:hours 2}))})
{k (assoc v ::min-age (dt/duration {:hours 2}))})
(defmethod ig/init-key ::handler
[_ {:keys [::min-age] :as cfg}]
(fn [{:keys [props] :as task}]
(let [min-age (ct/duration (or (:min-age props) min-age))]
(let [min-age (dt/duration (or (:min-age props) min-age))]
(db/tx-run! cfg (fn [cfg]
(let [cfg (assoc cfg ::min-age min-age)
total (clean-deleted! cfg)]
(l/inf :hint "task finished"
:min-age (ct/format-duration min-age)
:min-age (dt/format-duration min-age)
:total total)
{:deleted total}))))))

View File

@@ -34,7 +34,7 @@
(SELECT EXISTS (SELECT 1 FROM team_font_variant WHERE ttf_file_id = ?))) AS has_refs")
(defn- has-team-font-variant-refs?
[conn {:keys [id]}]
[conn id]
(-> (db/exec-one! conn [sql:has-team-font-variant-refs id id id id])
(get :has-refs)))
@@ -44,7 +44,7 @@
(SELECT EXISTS (SELECT 1 FROM file_media_object WHERE thumbnail_id = ?))) AS has_refs")
(defn- has-file-media-object-refs?
[conn {:keys [id]}]
[conn id]
(-> (db/exec-one! conn [sql:has-file-media-object-refs id id])
(get :has-refs)))
@@ -53,7 +53,7 @@
(SELECT EXISTS (SELECT 1 FROM team WHERE photo_id = ?))) AS has_refs")
(defn- has-profile-refs?
[conn {:keys [id]}]
[conn id]
(-> (db/exec-one! conn [sql:has-profile-refs id id])
(get :has-refs)))
@@ -62,7 +62,7 @@
"SELECT EXISTS (SELECT 1 FROM file_tagged_object_thumbnail WHERE media_id = ?) AS has_refs")
(defn- has-file-object-thumbnails-refs?
[conn {:keys [id]}]
[conn id]
(-> (db/exec-one! conn [sql:has-file-object-thumbnail-refs id])
(get :has-refs)))
@@ -71,23 +71,36 @@
"SELECT EXISTS (SELECT 1 FROM file_thumbnail WHERE media_id = ?) AS has_refs")
(defn- has-file-thumbnails-refs?
[conn {:keys [id]}]
[conn id]
(-> (db/exec-one! conn [sql:has-file-thumbnail-refs id])
(get :has-refs)))
(def sql:exists-file-data-refs
"SELECT EXISTS (
SELECT 1 FROM file_data
WHERE file_id = ?
AND id = ?
AND metadata->>'storage-ref-id' = ?::text
) AS has_refs")
(def ^:private
sql:has-file-data-refs
"SELECT EXISTS (SELECT 1 FROM file WHERE data_ref_id = ?) AS has_refs")
(defn- has-file-data-refs?
[conn sobject]
(let [{:keys [file-id id]} (:metadata sobject)]
(-> (db/exec-one! conn [sql:exists-file-data-refs file-id id (:id sobject)])
(get :has-refs))))
[conn id]
(-> (db/exec-one! conn [sql:has-file-data-refs id])
(get :has-refs)))
(def ^:private
sql:has-file-data-fragment-refs
"SELECT EXISTS (SELECT 1 FROM file_data_fragment WHERE data_ref_id = ?) AS has_refs")
(defn- has-file-data-fragment-refs?
[conn id]
(-> (db/exec-one! conn [sql:has-file-data-fragment-refs id])
(get :has-refs)))
(def ^:private
sql:has-file-change-refs
"SELECT EXISTS (SELECT 1 FROM file_change WHERE data_ref_id = ?) AS has_refs")
(defn- has-file-change-refs?
[conn id]
(-> (db/exec-one! conn [sql:has-file-change-refs id])
(get :has-refs)))
(def ^:private sql:mark-freeze-in-bulk
"UPDATE storage_object
@@ -130,50 +143,52 @@
"file-media-object"))
(defn- process-objects!
[conn has-refs? bucket objects]
[conn has-refs? ids bucket]
(loop [to-freeze #{}
to-delete #{}
objects (seq objects)]
(if-let [{:keys [id] :as object} (first objects)]
(if (has-refs? conn object)
ids (seq ids)]
(if-let [id (first ids)]
(if (has-refs? conn id)
(do
(l/debug :hint "processing object"
:id (str id)
:status "freeze"
:bucket bucket)
(recur (conj to-freeze id) to-delete (rest objects)))
(recur (conj to-freeze id) to-delete (rest ids)))
(do
(l/debug :hint "processing object"
:id (str id)
:status "delete"
:bucket bucket)
(recur to-freeze (conj to-delete id) (rest objects))))
(recur to-freeze (conj to-delete id) (rest ids))))
(do
(some->> (seq to-freeze) (mark-freeze-in-bulk! conn))
(some->> (seq to-delete) (mark-delete-in-bulk! conn))
[(count to-freeze) (count to-delete)]))))
(defn- process-bucket!
[conn bucket objects]
[conn bucket ids]
(case bucket
"file-media-object" (process-objects! conn has-file-media-object-refs? bucket objects)
"team-font-variant" (process-objects! conn has-team-font-variant-refs? bucket objects)
"file-object-thumbnail" (process-objects! conn has-file-object-thumbnails-refs? bucket objects)
"file-thumbnail" (process-objects! conn has-file-thumbnails-refs? bucket objects)
"profile" (process-objects! conn has-profile-refs? bucket objects)
"file-data" (process-objects! conn has-file-data-refs? bucket objects)
"file-media-object" (process-objects! conn has-file-media-object-refs? ids bucket)
"team-font-variant" (process-objects! conn has-team-font-variant-refs? ids bucket)
"file-object-thumbnail" (process-objects! conn has-file-object-thumbnails-refs? ids bucket)
"file-thumbnail" (process-objects! conn has-file-thumbnails-refs? ids bucket)
"profile" (process-objects! conn has-profile-refs? ids bucket)
"file-data" (process-objects! conn has-file-data-refs? ids bucket)
"file-data-fragment" (process-objects! conn has-file-data-fragment-refs? ids bucket)
"file-change" (process-objects! conn has-file-change-refs? ids bucket)
(ex/raise :type :internal
:code :unexpected-unknown-reference
:hint (dm/fmt "unknown reference '%'" bucket))))
(defn process-chunk!
[{:keys [::db/conn]} chunk]
(reduce-kv (fn [[nfo ndo] bucket objects]
(let [[nfo' ndo'] (process-bucket! conn bucket objects)]
(reduce-kv (fn [[nfo ndo] bucket ids]
(let [[nfo' ndo'] (process-bucket! conn bucket ids)]
[(+ nfo nfo')
(+ ndo ndo')]))
[0 0]
(d/group-by lookup-bucket identity #{} chunk)))
(d/group-by lookup-bucket :id #{} chunk)))
(def ^:private
sql:get-touched-storage-objects

View File

@@ -12,11 +12,11 @@
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uri :as u]
[app.storage :as-alias sto]
[app.storage.impl :as impl]
[app.storage.tmp :as tmp]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[clojure.java.io :as io]
[datoteka.fs :as fs]
@@ -69,7 +69,7 @@
20000)
(def default-timeout
(ct/duration {:seconds 30}))
(dt/duration {:seconds 30}))
(declare put-object)
(declare get-object-bytes)
@@ -338,11 +338,11 @@
(p/fmap #(.asByteArray ^ResponseBytes %)))))
(def default-max-age
(ct/duration {:minutes 10}))
(dt/duration {:minutes 10}))
(defn- get-object-url
[{:keys [::presigner ::bucket ::prefix]} {:keys [id]} {:keys [max-age] :or {max-age default-max-age}}]
(assert (ct/duration? max-age) "expected valid duration instance")
(assert (dt/duration? max-age) "expected valid duration instance")
(let [gor (.. (GetObjectRequest/builder)
(bucket bucket)

View File

@@ -12,8 +12,8 @@
(:require
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.util.time :as dt]
[app.worker :as wrk]
[datoteka.fs :as fs]
[datoteka.io :as io]
@@ -38,7 +38,7 @@
(defmethod ig/expand-key ::cleaner
[k v]
{k (assoc v ::min-age (ct/duration "60m"))})
{k (assoc v ::min-age (dt/duration "60m"))})
(defmethod ig/init-key ::cleaner
[_ cfg]
@@ -52,13 +52,13 @@
(defn- io-loop
[{:keys [::min-age] :as cfg}]
(l/inf :hint "started tmp cleaner" :default-min-age (ct/format-duration min-age))
(l/inf :hint "started tmp cleaner" :default-min-age (dt/format-duration min-age))
(try
(loop []
(when-let [[path min-age'] (sp/take! queue)]
(let [min-age (or min-age' min-age)]
(l/dbg :hint "schedule tempfile deletion" :path path
:expires-at (ct/plus (ct/now) min-age))
:expires-at (dt/plus (dt/now) min-age))
(px/schedule! (inst-ms min-age) (partial remove-temp-file cfg path))
(recur))))
(catch InterruptedException _
@@ -87,7 +87,7 @@
path (fs/join default-tmp-dir (str prefix (uuid/next) suffix))
path (Files/createFile path attrs)]
(fs/delete-on-exit! path)
(sp/offer! queue [path (some-> min-age ct/duration)])
(sp/offer! queue [path (some-> min-age dt/duration)])
path))
(defn tempfile-from

View File

@@ -8,10 +8,10 @@
"A generic task for object deletion cascade handling"
(:require
[app.common.logging :as l]
[app.common.time :as ct]
[app.db :as db]
[app.rpc.commands.files :as files]
[app.rpc.commands.profile :as profile]
[app.util.time :as dt]
[integrant.core :as ig]))
(def ^:dynamic *team-deletion* false)
@@ -23,7 +23,7 @@
[{:keys [::db/conn] :as cfg} {:keys [id deleted-at]}]
(when-let [file (db/get* conn :file {:id id} {::db/remove-deleted false})]
(l/trc :hint "marking for deletion" :rel "file" :id (str id)
:deleted-at (ct/format-inst deleted-at))
:deleted-at (dt/format-instant deleted-at))
(db/update! conn :file
{:deleted-at deleted-at}
@@ -45,11 +45,6 @@
{:deleted-at deleted-at}
{:file-id id})
;; Mark file data fragment to be deleted
(db/update! conn :file-data-fragment
{:deleted-at deleted-at}
{:file-id id})
;; Mark file media objects to be deleted
(db/update! conn :file-media-object
{:deleted-at deleted-at}
@@ -67,7 +62,7 @@
(defmethod delete-object :project
[{:keys [::db/conn] :as cfg} {:keys [id deleted-at]}]
(l/trc :hint "marking for deletion" :rel "project" :id (str id)
:deleted-at (ct/format-inst deleted-at))
:deleted-at (dt/format-instant deleted-at))
(db/update! conn :project
{:deleted-at deleted-at}
@@ -84,7 +79,7 @@
(defmethod delete-object :team
[{:keys [::db/conn] :as cfg} {:keys [id deleted-at]}]
(l/trc :hint "marking for deletion" :rel "team" :id (str id)
:deleted-at (ct/format-inst deleted-at))
:deleted-at (dt/format-instant deleted-at))
(db/update! conn :team
{:deleted-at deleted-at}
{:id id}
@@ -106,7 +101,7 @@
(defmethod delete-object :profile
[{:keys [::db/conn] :as cfg} {:keys [id deleted-at]}]
(l/trc :hint "marking for deletion" :rel "profile" :id (str id)
:deleted-at (ct/format-inst deleted-at))
:deleted-at (dt/format-instant deleted-at))
(db/update! conn :profile
{:deleted-at deleted-at}

View File

@@ -16,20 +16,33 @@
[app.common.files.validate :as cfv]
[app.common.logging :as l]
[app.common.thumbnails :as thc]
[app.common.time :as ct]
[app.common.types.components-list :as ctkl]
[app.common.types.file :as ctf]
[app.common.types.shape-tree :as ctt]
[app.config :as cf]
[app.db :as db]
[app.features.fdata :as feat.fdata]
[app.features.file-snapshots :as fsnap]
[app.storage :as sto]
[app.util.time :as dt]
[app.worker :as wrk]
[integrant.core :as ig]))
(declare get-file)
(def sql:get-snapshots
"SELECT fc.file_id AS id,
fc.id AS snapshot_id,
fc.data,
fc.revn,
fc.version,
fc.features,
fc.data_backend,
fc.data_ref_id
FROM file_change AS fc
WHERE fc.file_id = ?
AND fc.data IS NOT NULL
ORDER BY fc.created_at ASC")
(def ^:private sql:mark-file-media-object-deleted
"UPDATE file_media_object
SET deleted_at = now()
@@ -44,22 +57,21 @@
(defn- clean-file-media!
"Performs the garbage collection of file media objects."
[{:keys [::db/conn] :as cfg} {:keys [id] :as file}]
(let [used-media
(fsnap/reduce-snapshots cfg id xf:collect-used-media conj #{})
(let [xform (comp
(map (partial bfc/decode-file cfg))
xf:collect-used-media)
used-media
(into used-media xf:collect-used-media [file])
used (->> (db/plan conn [sql:get-snapshots id] {:fetch-size 1})
(transduce xform conj #{}))
used (into used xf:collect-used-media [file])
used-media
(db/create-array conn "uuid" used-media)
ids (db/create-array conn "uuid" used)
unused (->> (db/exec! conn [sql:mark-file-media-object-deleted id ids])
(into #{} (map :id)))]
unused-media
(->> (db/exec! conn [sql:mark-file-media-object-deleted id used-media])
(into #{} (map :id)))]
(l/dbg :hint "clean" :rel "file-media-object" :file-id (str id) :total (count unused))
(l/dbg :hint "clean" :rel "file-media-object" :file-id (str id) :total (count unused-media))
(doseq [id unused-media]
(doseq [id unused]
(l/trc :hint "mark deleted"
:rel "file-media-object"
:id (str id)
@@ -86,7 +98,7 @@
(thc/fmt-object-id file-id page-id id "frame")
(thc/fmt-object-id file-id page-id id "component")))))))
ids (db/create-array conn "uuid" using)
ids (db/create-array conn "text" using)
unused (->> (db/exec! conn [sql:mark-file-object-thumbnails-deleted file-id ids])
(into #{} (map :object-id)))]
@@ -122,7 +134,13 @@
file))
(def ^:private sql:get-files-for-library
"SELECT f.id
"SELECT f.id,
f.data,
f.modified_at,
f.features,
f.version,
f.data_backend,
f.data_ref_id
FROM file AS f
LEFT JOIN file_library_rel AS fl ON (fl.file_id = f.id)
WHERE fl.library_file_id = ?
@@ -143,21 +161,15 @@
deleted-components
(ctkl/deleted-components-seq data)
file-xform
xform
(mapcat (partial get-used-components deleted-components file-id))
library-xform
(comp
(map :id)
(map #(bfc/get-file cfg % :realize? true :read-only? true))
file-xform)
used-remote
(->> (db/plan conn [sql:get-files-for-library file-id] {:fetch-size 1})
(transduce library-xform conj #{}))
(transduce (comp (map (partial bfc/decode-file cfg)) xform) conj #{}))
used-local
(into #{} file-xform [file])
(into #{} xform [file])
unused
(transduce bfc/xf-map-id disj
@@ -217,22 +229,34 @@
(cfv/validate-file-schema! file)
file))
(defn get-file
[cfg {:keys [file-id revn]}]
(let [file (bfc/get-file cfg file-id
:realize? true
:skip-locked? true
:lock-for-update? true)]
(def ^:private sql:get-file
"SELECT f.id,
f.data,
f.revn,
f.version,
f.features,
f.modified_at,
f.data_backend,
f.data_ref_id
FROM file AS f
WHERE f.has_media_trimmed IS false
AND f.modified_at < now() - ?::interval
AND f.deleted_at IS NULL
AND f.id = ?
FOR UPDATE
SKIP LOCKED")
;; We should ensure that the scheduled file and the procesing file
;; has not changed since schedule, for this reason we check the
;; revn from props with the revn from retrieved file from database
(when (= revn (:revn file))
file)))
(defn get-file
[{:keys [::db/conn ::min-age]} file-id]
(let [min-age (if min-age
(db/interval min-age)
(db/interval 0))]
(->> (db/exec! conn [sql:get-file min-age file-id])
(first))))
(defn- process-file!
[cfg {:keys [file-id] :as props}]
(if-let [file (get-file cfg props)]
[cfg file-id]
(if-let [file (get-file cfg file-id)]
(let [file (->> file
(bfc/decode-file cfg)
(bfl/clean-file)
@@ -243,7 +267,7 @@
true)
(do
(l/dbg :hint "skip cleaning, criteria does not match" :file-id (str file-id))
(l/dbg :hint "skip" :file-id (str file-id))
false)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -258,20 +282,26 @@
(defmethod ig/init-key ::handler
[_ cfg]
(fn [{:keys [props] :as task}]
(try
(-> cfg
(assoc ::db/rollback (:rollback? props))
(db/tx-run! (fn [{:keys [::db/conn] :as cfg}]
(let [cfg (update cfg ::sto/storage sto/configure conn)
processed? (process-file! cfg props)]
(when (and processed? (contains? cf/flags :tiered-file-data-storage))
(wrk/submit! (-> cfg
(assoc ::wrk/task :offload-file-data)
(assoc ::wrk/params props)
(assoc ::wrk/priority 10)
(assoc ::wrk/delay 1000))))
processed?))))
(catch Throwable cause
(l/err :hint "error on cleaning file"
:file-id (str (:file-id props))
:cause cause)))))
(let [min-age (dt/duration (or (:min-age props)
(cf/get-deletion-delay)))
file-id (get props :file-id)
cfg (-> cfg
(assoc ::db/rollback (:rollback? props))
(assoc ::min-age min-age))]
(try
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
(let [cfg (update cfg ::sto/storage sto/configure conn)
processed? (process-file! cfg file-id)]
(when (and processed? (contains? cf/flags :tiered-file-data-storage))
(wrk/submit! (-> cfg
(assoc ::wrk/task :offload-file-data)
(assoc ::wrk/params props)
(assoc ::wrk/priority 10)
(assoc ::wrk/delay 1000))))
processed?)))
(catch Throwable cause
(l/err :hint "error on cleaning file"
:file-id (str (:file-id props))
:cause cause))))))

View File

@@ -8,38 +8,38 @@
"A maintenance task that is responsible of properly scheduling the
file-gc task for all files that matches the eligibility threshold."
(:require
[app.common.time :as ct]
[app.config :as cf]
[app.db :as db]
[app.util.time :as dt]
[app.worker :as wrk]
[integrant.core :as ig]))
(def ^:private
sql:get-candidates
"SELECT f.id,
f.revn,
f.modified_at
FROM file AS f
WHERE f.has_media_trimmed IS false
AND f.modified_at < now() - ?::interval
AND f.deleted_at IS NULL
ORDER BY f.modified_at DESC
FOR UPDATE OF f
FOR UPDATE
SKIP LOCKED")
(defn- get-candidates
[{:keys [::db/conn ::min-age] :as cfg}]
(let [min-age (db/interval min-age)]
(db/plan conn [sql:get-candidates min-age] {:fetch-size 10})))
(db/cursor conn [sql:get-candidates min-age] {:chunk-size 10})))
(defn- schedule!
[cfg]
(let [total (reduce (fn [total {:keys [id modified-at revn]}]
(let [params {:file-id id :modified-at modified-at :revn revn}]
[{:keys [::min-age] :as cfg}]
(let [total (reduce (fn [total {:keys [id]}]
(let [params {:file-id id :min-age min-age}]
(wrk/submit! (assoc cfg ::wrk/params params))
(inc total)))
0
(get-candidates cfg))]
{:processed total}))
(defmethod ig/assert-key ::handler
@@ -48,12 +48,12 @@
(defmethod ig/expand-key ::handler
[k v]
{k (assoc v ::min-age (cf/get-file-clean-delay))})
{k (assoc v ::min-age (cf/get-deletion-delay))})
(defmethod ig/init-key ::handler
[_ cfg]
(fn [{:keys [props] :as task}]
(let [min-age (ct/duration (or (:min-age props) (::min-age cfg)))]
(let [min-age (dt/duration (or (:min-age props) (::min-age cfg)))]
(-> cfg
(assoc ::db/rollback (:rollback? props))
(assoc ::min-age min-age)

View File

@@ -9,10 +9,9 @@
of deleted or unreachable objects."
(:require
[app.common.logging :as l]
[app.common.time :as ct]
[app.db :as db]
[app.features.fdata :as fdata]
[app.storage :as sto]
[app.util.time :as dt]
[integrant.core :as ig]))
(def ^:private sql:get-profiles
@@ -54,7 +53,7 @@
(l/trc :hint "permanently delete"
:rel "team"
:id (str id)
:deleted-at (ct/format-inst deleted-at))
:deleted-at (dt/format-instant deleted-at))
;; Mark as deleted the storage object
(some->> photo-id (sto/touch-object! storage))
@@ -83,7 +82,7 @@
:rel "team-font-variant"
:id (str id)
:team-id (str team-id)
:deleted-at (ct/format-inst deleted-at))
:deleted-at (dt/format-instant deleted-at))
;; Mark as deleted the all related storage objects
(some->> (:woff1-file-id font) (sto/touch-object! storage))
@@ -115,7 +114,7 @@
:rel "project"
:id (str id)
:team-id (str team-id)
:deleted-at (ct/format-inst deleted-at))
:deleted-at (dt/format-instant deleted-at))
;; And finally, permanently delete the project.
(db/delete! conn :project {:id id})
@@ -124,29 +123,27 @@
0)))
(def ^:private sql:get-files
"SELECT f.id,
f.deleted_at,
f.project_id
FROM file AS f
WHERE f.deleted_at IS NOT NULL
AND f.deleted_at < now() + ?::interval
ORDER BY f.deleted_at ASC
"SELECT id, deleted_at, project_id, data_backend, data_ref_id
FROM file
WHERE deleted_at IS NOT NULL
AND deleted_at < now() + ?::interval
ORDER BY deleted_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn- delete-files!
[{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg}]
[{:keys [::db/conn ::sto/storage ::deletion-threshold ::chunk-size] :as cfg}]
(->> (db/plan conn [sql:get-files deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [id deleted-at project-id] :as file}]
(l/trc :hint "permanently delete"
:rel "file"
:id (str id)
:project-id (str project-id)
:deleted-at (ct/format-inst deleted-at))
:deleted-at (dt/format-instant deleted-at))
;; Delete associated file data
(fdata/delete! cfg {:file-id id :id id :type "main"})
(when (= "objects-storage" (:data-backend file))
(sto/touch-object! storage (:data-ref-id file)))
;; And finally, permanently delete the file.
(db/delete! conn :file {:id id})
@@ -172,7 +169,7 @@
:rel "file-thumbnail"
:file-id (str file-id)
:revn revn
:deleted-at (ct/format-inst deleted-at))
:deleted-at (dt/format-instant deleted-at))
;; Mark as deleted the storage object
(some->> media-id (sto/touch-object! storage))
@@ -201,7 +198,7 @@
:rel "file-tagged-object-thumbnail"
:file-id (str file-id)
:object-id object-id
:deleted-at (ct/format-inst deleted-at))
:deleted-at (dt/format-instant deleted-at))
;; Mark as deleted the storage object
(some->> media-id (sto/touch-object! storage))
@@ -212,6 +209,32 @@
(inc total))
0)))
(def ^:private sql:get-file-data-fragments
"SELECT file_id, id, deleted_at, data_ref_id
FROM file_data_fragment
WHERE deleted_at IS NOT NULL
AND deleted_at < now() + ?::interval
ORDER BY deleted_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn- delete-file-data-fragments!
[{:keys [::db/conn ::sto/storage ::deletion-threshold ::chunk-size] :as cfg}]
(->> (db/plan conn [sql:get-file-data-fragments deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [file-id id deleted-at data-ref-id]}]
(l/trc :hint "permanently delete"
:rel "file-data-fragment"
:id (str id)
:file-id (str file-id)
:deleted-at (dt/format-instant deleted-at))
(some->> data-ref-id (sto/touch-object! storage))
(db/delete! conn :file-data-fragment {:file-id file-id :id id})
(inc total))
0)))
(def ^:private sql:get-file-media-objects
"SELECT id, file_id, media_id, thumbnail_id, deleted_at
FROM file_media_object
@@ -230,7 +253,7 @@
:rel "file-media-object"
:id (str id)
:file-id (str file-id)
:deleted-at (ct/format-inst deleted-at))
:deleted-at (dt/format-instant deleted-at))
;; Mark as deleted the all related storage objects
(some->> (:media-id fmo) (sto/touch-object! storage))
@@ -241,35 +264,8 @@
(inc total))
0)))
(def ^:private sql:get-file-data-fragments
"SELECT file_id, id, deleted_at
FROM file_data_fragment
WHERE deleted_at IS NOT NULL
AND deleted_at < now() + ?::interval
ORDER BY deleted_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn- delete-file-data-fragments!
[{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg}]
(->> (db/plan conn [sql:get-file-data-fragments deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [file-id id deleted-at]}]
(l/trc :hint "permanently delete"
:rel "file-data-fragment"
:id (str id)
:file-id (str file-id)
:deleted-at (ct/format-inst deleted-at))
;; Delete associated file data
(fdata/delete! cfg {:file-id file-id :id id :type "fragment"})
(db/delete! conn :file-data-fragment {:file-id file-id :id id})
(inc total))
0)))
(def ^:private sql:get-file-change
"SELECT id, file_id, deleted_at
"SELECT id, file_id, deleted_at, data_backend, data_ref_id
FROM file_change
WHERE deleted_at IS NOT NULL
AND deleted_at < now() + ?::interval
@@ -279,17 +275,17 @@
SKIP LOCKED")
(defn- delete-file-changes!
[{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg}]
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
(->> (db/plan conn [sql:get-file-change deletion-threshold chunk-size] {:fetch-size 5})
(reduce (fn [total {:keys [id file-id deleted-at] :as xlog}]
(l/trc :hint "permanently delete"
:rel "file-change"
:id (str id)
:file-id (str file-id)
:deleted-at (ct/format-inst deleted-at))
:deleted-at (dt/format-instant deleted-at))
;; Delete associated file data, if it exists
(fdata/delete! cfg {:file-id file-id :id id :type "snapshot"})
(when (= "objects-storage" (:data-backend xlog))
(sto/touch-object! storage (:data-ref-id xlog)))
(db/delete! conn :file-change {:id id})
@@ -299,10 +295,10 @@
(def ^:private deletion-proc-vars
[#'delete-profiles!
#'delete-file-media-objects!
#'delete-file-data-fragments!
#'delete-file-object-thumbnails!
#'delete-file-thumbnails!
#'delete-file-changes!
#'delete-file-data-fragments!
#'delete-files!
#'delete-projects!
#'delete-fonts!
@@ -332,7 +328,7 @@
(defmethod ig/init-key ::handler
[_ cfg]
(fn [{:keys [props] :as task}]
(let [threshold (ct/duration (get props :deletion-threshold 0))
(let [threshold (dt/duration (get props :deletion-threshold 0))
cfg (assoc cfg ::deletion-threshold (db/interval threshold))]
(loop [procs (map deref deletion-proc-vars)
total 0]

View File

@@ -8,73 +8,101 @@
"A maintenance task responsible of moving file data from hot
storage (the database row) to a cold storage (fs or s3)."
(:require
[app.binfile.common :as bfc]
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.db :as db]
[app.features.fdata :as fdata]
[app.features.file-snapshots :as fsnap]
[app.db.sql :as-alias sql]
[app.storage :as sto]
[app.util.blob :as blob]
[integrant.core :as ig]))
(defn- offload-file-data
[{:keys [::db/conn ::file-id] :as cfg}]
(let [file (bfc/get-file cfg file-id :realize? true :lock-for-update? true)]
(cond
(not= "db" (:backend file))
(l/wrn :hint (str "skiping file offload (file offloaded or incompatible with offloading) for " file-id)
:file-id (str file-id))
(defn- offload-file-data!
[{:keys [::db/conn ::sto/storage ::file-id] :as cfg}]
(let [file (db/get conn :file {:id file-id}
{::sql/for-update true})]
(when (nil? (:data file))
(ex/raise :hint "file already offloaded"
:type :internal
:code :file-already-offloaded
:file-id file-id))
(nil? (:data file))
(l/err :hint (str "skiping file offload (missing data) for " file-id)
:file-id (str file-id))
(let [data (sto/content (:data file))
sobj (sto/put-object! storage
{::sto/content data
::sto/touch true
:bucket "file-data"
:content-type "application/octet-stream"
:file-id file-id})]
:else
(do
(fdata/update! cfg {:id file-id
:file-id file-id
:type "main"
:backend "storage"
:data (blob/encode (:data file))})
(l/trc :hint "offload file data"
:file-id (str file-id)
:storage-id (str (:id sobj)))
(db/update! conn :file
{:data nil}
{:id file-id}
{::db/return-keys false})
(db/update! conn :file
{:data-backend "objects-storage"
:data-ref-id (:id sobj)
:data nil}
{:id file-id}
{::db/return-keys false}))))
(l/trc :hint "offload file data"
:file-id (str file-id))))))
(defn- offload-file-data-fragments!
[{:keys [::db/conn ::sto/storage ::file-id] :as cfg}]
(doseq [fragment (db/query conn :file-data-fragment
{:file-id file-id
:deleted-at nil
:data-backend nil}
{::db/for-update true})]
(let [data (sto/content (:data fragment))
sobj (sto/put-object! storage
{::sto/content data
::sto/touch true
:bucket "file-data-fragment"
:content-type "application/octet-stream"
:file-id file-id
:file-fragment-id (:id fragment)})]
(l/trc :hint "offload file data fragment"
:file-id (str file-id)
:file-fragment-id (str (:id fragment))
:storage-id (str (:id sobj)))
(db/update! conn :file-data-fragment
{:data-backend "objects-storage"
:data-ref-id (:id sobj)
:data nil}
{:id (:id fragment)}
{::db/return-keys false}))))
(def sql:get-snapshots
(str "WITH snapshots AS (" fsnap/sql:snapshots ")"
"SELECT s.*
FROM snapshots AS s
WHERE s.backend = 'db'
AND s.file_id = ?
ORDER BY s.created_at"))
"SELECT fc.*
FROM file_change AS fc
WHERE fc.file_id = ?
AND fc.label IS NOT NULL
AND fc.data IS NOT NULL
AND fc.data_backend IS NULL")
(defn- offload-snapshot-data
[{:keys [::db/conn ::file-id] :as cfg} snapshot]
(let [{:keys [id data] :as snapshot} (fdata/resolve-file-data cfg snapshot)]
(if (nil? (:data snapshot))
(l/err :hint (str "skiping snapshot offload (missing data) for " file-id)
(defn- offload-file-snapshots!
[{:keys [::db/conn ::sto/storage ::file-id] :as cfg}]
(doseq [snapshot (db/exec! conn [sql:get-snapshots file-id])]
(let [data (sto/content (:data snapshot))
sobj (sto/put-object! storage
{::sto/content data
::sto/touch true
:bucket "file-change"
:content-type "application/octet-stream"
:file-id file-id
:file-change-id (:id snapshot)})]
(l/trc :hint "offload file change"
:file-id (str file-id)
:snapshot-id id)
(do
(fsnap/create! cfg {:id id
:file-id file-id
:type "snapshot"
:backend "storage"
:data data})
:file-change-id (str (:id snapshot))
:storage-id (str (:id sobj)))
(l/trc :hint "offload snapshot data"
:file-id (str file-id)
:snapshot-id (str id))
(db/update! conn :file-change
{:data nil}
{:id id :file-id file-id}
{::db/return-keys false})))))
(db/update! conn :file-change
{:data-backend "objects-storage"
:data-ref-id (:id sobj)
:data nil}
{:id (:id snapshot)}
{::db/return-keys false}))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; HANDLER
@@ -88,12 +116,10 @@
(defmethod ig/init-key ::handler
[_ cfg]
(fn [{:keys [props] :as task}]
(let [file-id (:file-id props)]
(-> cfg
(assoc ::db/rollback (:rollback? props))
(assoc ::file-id (:file-id props))
(db/tx-run! (fn [{:keys [::db/conn] :as cfg}]
(offload-file-data cfg)
(run! (partial offload-snapshot-data cfg)
(db/plan conn [sql:get-snapshots file-id]))))))))
(-> cfg
(assoc ::db/rollback (:rollback? props))
(assoc ::file-id (:file-id props))
(db/tx-run! (fn [cfg]
(offload-file-data! cfg)
(offload-file-data-fragments! cfg)
(offload-file-snapshots! cfg))))))

View File

@@ -10,8 +10,8 @@
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.time :as ct]
[app.common.transit :as t]
[app.util.time :as dt]
[buddy.sign.jwe :as jwe]))
(defn generate
@@ -22,7 +22,7 @@
(bytes? tokens-key))
(let [payload (-> claims
(assoc :iat (ct/now))
(assoc :iat (dt/now))
(d/without-nils)
(t/encode))]
(jwe/encrypt payload tokens-key {:alg :a256kw :enc :a256gcm})))
@@ -35,8 +35,8 @@
(defn verify
[sprops {:keys [token] :as params}]
(let [claims (decode sprops token)]
(when (and (ct/inst? (:exp claims))
(ct/is-before? (:exp claims) (ct/now)))
(when (and (dt/instant? (:exp claims))
(dt/is-before? (:exp claims) (dt/now)))
(ex/raise :type :validation
:code :invalid-token
:reason :token-expired

View File

@@ -9,7 +9,7 @@
(:refer-clojure :exclude [get])
(:require
[app.common.schema :as sm]
[app.common.time :as ct]
[app.util.time :as dt]
[promesa.exec :as px])
(:import
com.github.benmanes.caffeine.cache.AsyncCache
@@ -51,7 +51,7 @@
(let [cache (as-> (Caffeine/newBuilder) builder
(if (fn? on-remove) (.removalListener builder (create-listener on-remove)) builder)
(if executor (.executor builder ^Executor (px/resolve-executor executor)) builder)
(if keepalive (.expireAfterAccess builder ^Duration (ct/duration keepalive)) builder)
(if keepalive (.expireAfterAccess builder ^Duration (dt/duration keepalive)) builder)
(if (int? max-size) (.maximumSize builder (long max-size)) builder)
(.recordStats builder)
(.buildAsync builder))

View File

@@ -1,138 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.util.cron
(:require
[app.common.exceptions :as ex])
(:import
java.time.Instant
java.util.Date
org.apache.logging.log4j.core.util.CronExpression))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Cron Expression
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Cron expressions are comprised of 6 required fields and one
;; optional field separated by white space. The fields respectively
;; are described as follows:
;;
;; Field Name Allowed Values Allowed Special Characters
;; Seconds 0-59 , - * /
;; Minutes 0-59 , - * /
;; Hours 0-23 , - * /
;; Day-of-month 1-31 , - * ? / L W
;; Month 0-11 or JAN-DEC , - * /
;; Day-of-Week 1-7 or SUN-SAT , - * ? / L #
;; Year (Optional) empty, 1970-2199 , - * /
;;
;; The '*' character is used to specify all values. For example, "*"
;; in the minute field means "every minute".
;;
;; The '?' character is allowed for the day-of-month and day-of-week
;; fields. It is used to specify 'no specific value'. This is useful
;; when you need to specify something in one of the two fields, but
;; not the other.
;;
;; The '-' character is used to specify ranges For example "10-12" in
;; the hour field means "the hours 10, 11 and 12".
;;
;; The ',' character is used to specify additional values. For
;; example "MON,WED,FRI" in the day-of-week field means "the days
;; Monday, Wednesday, and Friday".
;;
;; The '/' character is used to specify increments. For example "0/15"
;; in the seconds field means "the seconds 0, 15, 30, and
;; 45". And "5/15" in the seconds field means "the seconds 5, 20, 35,
;; and 50". Specifying '*' before the '/' is equivalent to specifying
;; 0 is the value to start with. Essentially, for each field in the
;; expression, there is a set of numbers that can be turned on or
;; off. For seconds and minutes, the numbers range from 0 to 59. For
;; hours 0 to 23, for days of the month 0 to 31, and for months 0 to
;; 11 (JAN to DEC). The "/" character simply helps you turn on
;; every "nth" value in the given set. Thus "7/6" in the month field
;; only turns on month "7", it does NOT mean every 6th month, please
;; note that subtlety.
;;
;; The 'L' character is allowed for the day-of-month and day-of-week
;; fields. This character is short-hand for "last", but it has
;; different meaning in each of the two fields. For example, the
;; value "L" in the day-of-month field means "the last day of the
;; month" - day 31 for January, day 28 for February on non-leap
;; years. If used in the day-of-week field by itself, it simply
;; means "7" or "SAT". But if used in the day-of-week field after
;; another value, it means "the last xxx day of the month" - for
;; example "6L" means "the last friday of the month". You can also
;; specify an offset from the last day of the month, such as "L-3"
;; which would mean the third-to-last day of the calendar month. When
;; using the 'L' option, it is important not to specify lists, or
;; ranges of values, as you'll get confusing/unexpected results.
;;
;; The 'W' character is allowed for the day-of-month field. This
;; character is used to specify the weekday (Monday-Friday) nearest
;; the given day. As an example, if you were to specify "15W" as the
;; value for the day-of-month field, the meaning is: "the nearest
;; weekday to the 15th of the month". So if the 15th is a Saturday,
;; the trigger will fire on Friday the 14th. If the 15th is a Sunday,
;; the trigger will fire on Monday the 16th. If the 15th is a Tuesday,
;; then it will fire on Tuesday the 15th. However if you specify "1W"
;; as the value for day-of-month, and the 1st is a Saturday, the
;; trigger will fire on Monday the 3rd, as it will not 'jump' over the
;; boundary of a month's days. The 'W' character can only be specified
;; when the day-of-month is a single day, not a range or list of days.
;;
;; The 'L' and 'W' characters can also be combined for the
;; day-of-month expression to yield 'LW', which translates to "last
;; weekday of the month".
;;
;; The '#' character is allowed for the day-of-week field. This
;; character is used to specify "the nth" XXX day of the month. For
;; example, the value of "6#3" in the day-of-week field means the
;; third Friday of the month (day 6 = Friday and "#3" = the 3rd one in
;; the month). Other examples: "2#1" = the first Monday of the month
;; and "4#5" = the fifth Wednesday of the month. Note that if you
;; specify "#5" and there is not 5 of the given day-of-week in the
;; month, then no firing will occur that month. If the '#' character
;; is used, there can only be one expression in the day-of-week
;; field ("3#1,6#3" is not valid, since there are two expressions).
;;
;; The legal characters and the names of months and days of the week
;; are not case sensitive.
(defn cron
"Creates an instance of CronExpression from string."
[s]
(try
(CronExpression. s)
(catch java.text.ParseException e
(ex/raise :type :parse
:code :invalid-cron-expression
:cause e
:context {:expr s}))))
(defn cron-expr?
[v]
(instance? CronExpression v))
(defn next-valid-instant-from
[^CronExpression cron ^Instant now]
(assert (cron-expr? cron))
(.toInstant (.getNextValidTimeAfter cron (Date/from now))))
(defn get-next
[cron tnow]
(let [nt (next-valid-instant-from cron tnow)]
(cons nt (lazy-seq (get-next cron nt)))))
(defmethod print-method CronExpression
[o w]
(print-dup o w))
(defmethod print-dup CronExpression
[mv ^java.io.Writer writer]
;; Do not delete this comment
;; (print-ctor o (fn [o w] (print-dup (.toString ^CronExpression o) w)) w)
(.write writer (str "#penpot/cron \"" (.toString ^CronExpression mv) "\"")))

View File

@@ -37,9 +37,9 @@
(:require
[app.common.fressian :as fres]
[app.common.time :as ct]
[app.common.transit :as t]
[app.common.uuid :as uuid]
[app.util.time :as dt]
[clojure.core :as c]
[clojure.data.json :as json])
(:import
@@ -61,10 +61,8 @@
(declare create)
(defn create-tracked
[& {:keys [inherit]}]
(if inherit
(atom (if *tracked* @*tracked* {}))
(atom {})))
[]
(atom {}))
(defprotocol IPointerMap
(get-id [_])
@@ -104,7 +102,7 @@
(clone [this]
(when-not loaded? (load! this))
(let [mdata (assoc mdata :created-at (ct/now))
(let [mdata (assoc mdata :created-at (dt/now))
id (uuid/next)
pmap (PointerMap. id
mdata
@@ -179,7 +177,7 @@
(let [odata' (assoc odata key val)]
(if (identical? odata odata')
this
(let [mdata (assoc mdata :created-at (ct/now))
(let [mdata (assoc mdata :created-at (dt/now))
id (if modified? id (uuid/next))
pmap (PointerMap. id
mdata
@@ -197,7 +195,7 @@
(let [odata' (dissoc odata key)]
(if (identical? odata odata')
this
(let [mdata (assoc mdata :created-at (ct/now))
(let [mdata (assoc mdata :created-at (dt/now))
id (if modified? id (uuid/next))
pmap (PointerMap. id
mdata
@@ -220,7 +218,7 @@
(defn create
([]
(let [id (uuid/next)
mdata (assoc *metadata* :created-at (ct/now))
mdata (assoc *metadata* :created-at (dt/now))
pmap (PointerMap. id mdata {} true true)]
(some-> *tracked* (swap! assoc id pmap))
pmap))
@@ -239,7 +237,7 @@
(do
(some-> *tracked* (swap! assoc (get-id data) data))
data)
(let [mdata (assoc (meta data) :created-at (ct/now))
(let [mdata (assoc (meta data) :created-at (dt/now))
id (uuid/next)
pmap (PointerMap. id
mdata

View File

@@ -0,0 +1,399 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.util.time
(:require
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.schema :as sm]
[app.common.schema.openapi :as-alias oapi]
[app.common.time :as common-time]
[clojure.spec.alpha :as s]
[clojure.test.check.generators :as tgen]
[cuerdas.core :as str]
[fipp.ednize :as fez])
(:import
java.nio.file.attribute.FileTime
java.time.Duration
java.time.Instant
java.time.OffsetDateTime
java.time.ZoneId
java.time.ZonedDateTime
java.time.format.DateTimeFormatter
java.time.temporal.ChronoUnit
java.time.temporal.Temporal
java.time.temporal.TemporalAmount
java.time.temporal.TemporalUnit
java.util.Date
org.apache.logging.log4j.core.util.CronExpression))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Instant & Duration
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn temporal-unit
[o]
(if (instance? TemporalUnit o)
o
(case o
:nanos ChronoUnit/NANOS
:millis ChronoUnit/MILLIS
:micros ChronoUnit/MICROS
:seconds ChronoUnit/SECONDS
:minutes ChronoUnit/MINUTES
:hours ChronoUnit/HOURS
:days ChronoUnit/DAYS)))
;; --- DURATION
(defn- obj->duration
[params]
(reduce-kv (fn [o k v]
(.plus ^Duration o ^long v ^TemporalUnit (temporal-unit k)))
(Duration/ofMillis 0)
params))
(defn duration?
[v]
(instance? Duration v))
(defn duration
[ms-or-obj]
(cond
(string? ms-or-obj)
(Duration/parse (str "PT" ms-or-obj))
(duration? ms-or-obj)
ms-or-obj
(integer? ms-or-obj)
(Duration/ofMillis ms-or-obj)
:else
(obj->duration ms-or-obj)))
(defn ->seconds
[d]
(-> d inst-ms (/ 1000) int))
(defn diff
[t1 t2]
(Duration/between t1 t2))
(defn truncate
[o unit]
(let [unit (temporal-unit unit)]
(cond
(instance? Instant o)
(.truncatedTo ^Instant o ^TemporalUnit unit)
(instance? Duration o)
(.truncatedTo ^Duration o ^TemporalUnit unit)
:else
(throw (IllegalArgumentException. "only instant and duration allowed")))))
(s/def ::duration
(s/conformer
(fn [v]
(cond
(duration? v) v
(string? v)
(try
(duration v)
(catch java.time.format.DateTimeParseException _e
::s/invalid))
:else
::s/invalid))
(fn [v]
(subs (str v) 2))))
(extend-protocol clojure.core/Inst
java.time.Duration
(inst-ms* [v] (.toMillis ^Duration v))
OffsetDateTime
(inst-ms* [v] (.toEpochMilli (.toInstant ^OffsetDateTime v)))
FileTime
(inst-ms* [v] (.toMillis ^FileTime v)))
(defmethod print-method Duration
[mv ^java.io.Writer writer]
(.write writer (str "#app/duration \"" (str/lower (subs (str mv) 2)) "\"")))
(defmethod print-dup Duration [o w]
(print-method o w))
(extend-protocol fez/IEdn
Duration
(-edn [o]
(tagged-literal 'app/duration (str o))))
(defn format-duration
[o]
(str/lower (subs (str o) 2)))
;; --- INSTANT
(defn instant?
[v]
(instance? Instant v))
(defn instant
([s]
(cond
(instant? s) s
(int? s) (Instant/ofEpochMilli s)
:else (Instant/parse s)))
([s fmt]
(case fmt
:rfc1123 (Instant/from (.parse DateTimeFormatter/RFC_1123_DATE_TIME ^String s))
:iso (Instant/from (.parse DateTimeFormatter/ISO_INSTANT ^String s))
:iso8601 (Instant/from (.parse DateTimeFormatter/ISO_INSTANT ^String s)))))
(defn is-after?
"Analgous to: da > db"
[da db]
(.isAfter ^Instant da ^Instant db))
(defn is-before?
[da db]
(.isBefore ^Instant da ^Instant db))
(defn plus
[d ta]
(let [^TemporalAmount ta (duration ta)]
(cond
(instance? Duration d)
(.plus ^Duration d ta)
(instance? Temporal d)
(.plus ^Temporal d ta)
:else
(throw (UnsupportedOperationException. "unsupported type")))))
(defn minus
[d ta]
(let [^TemporalAmount ta (duration ta)]
(cond
(instance? Duration d)
(.minus ^Duration d ta)
(instance? Temporal d)
(.minus ^Temporal d ta)
:else
(throw (UnsupportedOperationException. "unsupported type")))))
(dm/export common-time/now)
(defn in-future
[v]
(plus (now) v))
(defn in-past
[v]
(minus (now) v))
(defn instant->zoned-date-time
[v]
(ZonedDateTime/ofInstant v (ZoneId/of "UTC")))
(defn format-instant
([v] (.format DateTimeFormatter/ISO_INSTANT ^Instant v))
([v fmt]
(case fmt
:iso
(.format DateTimeFormatter/ISO_INSTANT ^Instant v)
:iso-local-time
(.format DateTimeFormatter/ISO_LOCAL_TIME
^ZonedDateTime (instant->zoned-date-time v))
:rfc1123
(.format DateTimeFormatter/RFC_1123_DATE_TIME
^ZonedDateTime (instant->zoned-date-time v)))))
(defmethod print-method Instant
[mv ^java.io.Writer writer]
(.write writer (str "#app/instant \"" (format-instant mv) "\"")))
(defmethod print-dup Instant [o w]
(print-method o w))
(extend-protocol fez/IEdn
Instant
(-edn [o] (tagged-literal 'app/instant (format-instant o))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Cron Expression
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Cron expressions are comprised of 6 required fields and one
;; optional field separated by white space. The fields respectively
;; are described as follows:
;;
;; Field Name Allowed Values Allowed Special Characters
;; Seconds 0-59 , - * /
;; Minutes 0-59 , - * /
;; Hours 0-23 , - * /
;; Day-of-month 1-31 , - * ? / L W
;; Month 0-11 or JAN-DEC , - * /
;; Day-of-Week 1-7 or SUN-SAT , - * ? / L #
;; Year (Optional) empty, 1970-2199 , - * /
;;
;; The '*' character is used to specify all values. For example, "*"
;; in the minute field means "every minute".
;;
;; The '?' character is allowed for the day-of-month and day-of-week
;; fields. It is used to specify 'no specific value'. This is useful
;; when you need to specify something in one of the two fields, but
;; not the other.
;;
;; The '-' character is used to specify ranges For example "10-12" in
;; the hour field means "the hours 10, 11 and 12".
;;
;; The ',' character is used to specify additional values. For
;; example "MON,WED,FRI" in the day-of-week field means "the days
;; Monday, Wednesday, and Friday".
;;
;; The '/' character is used to specify increments. For example "0/15"
;; in the seconds field means "the seconds 0, 15, 30, and
;; 45". And "5/15" in the seconds field means "the seconds 5, 20, 35,
;; and 50". Specifying '*' before the '/' is equivalent to specifying
;; 0 is the value to start with. Essentially, for each field in the
;; expression, there is a set of numbers that can be turned on or
;; off. For seconds and minutes, the numbers range from 0 to 59. For
;; hours 0 to 23, for days of the month 0 to 31, and for months 0 to
;; 11 (JAN to DEC). The "/" character simply helps you turn on
;; every "nth" value in the given set. Thus "7/6" in the month field
;; only turns on month "7", it does NOT mean every 6th month, please
;; note that subtlety.
;;
;; The 'L' character is allowed for the day-of-month and day-of-week
;; fields. This character is short-hand for "last", but it has
;; different meaning in each of the two fields. For example, the
;; value "L" in the day-of-month field means "the last day of the
;; month" - day 31 for January, day 28 for February on non-leap
;; years. If used in the day-of-week field by itself, it simply
;; means "7" or "SAT". But if used in the day-of-week field after
;; another value, it means "the last xxx day of the month" - for
;; example "6L" means "the last friday of the month". You can also
;; specify an offset from the last day of the month, such as "L-3"
;; which would mean the third-to-last day of the calendar month. When
;; using the 'L' option, it is important not to specify lists, or
;; ranges of values, as you'll get confusing/unexpected results.
;;
;; The 'W' character is allowed for the day-of-month field. This
;; character is used to specify the weekday (Monday-Friday) nearest
;; the given day. As an example, if you were to specify "15W" as the
;; value for the day-of-month field, the meaning is: "the nearest
;; weekday to the 15th of the month". So if the 15th is a Saturday,
;; the trigger will fire on Friday the 14th. If the 15th is a Sunday,
;; the trigger will fire on Monday the 16th. If the 15th is a Tuesday,
;; then it will fire on Tuesday the 15th. However if you specify "1W"
;; as the value for day-of-month, and the 1st is a Saturday, the
;; trigger will fire on Monday the 3rd, as it will not 'jump' over the
;; boundary of a month's days. The 'W' character can only be specified
;; when the day-of-month is a single day, not a range or list of days.
;;
;; The 'L' and 'W' characters can also be combined for the
;; day-of-month expression to yield 'LW', which translates to "last
;; weekday of the month".
;;
;; The '#' character is allowed for the day-of-week field. This
;; character is used to specify "the nth" XXX day of the month. For
;; example, the value of "6#3" in the day-of-week field means the
;; third Friday of the month (day 6 = Friday and "#3" = the 3rd one in
;; the month). Other examples: "2#1" = the first Monday of the month
;; and "4#5" = the fifth Wednesday of the month. Note that if you
;; specify "#5" and there is not 5 of the given day-of-week in the
;; month, then no firing will occur that month. If the '#' character
;; is used, there can only be one expression in the day-of-week
;; field ("3#1,6#3" is not valid, since there are two expressions).
;;
;; The legal characters and the names of months and days of the week
;; are not case sensitive.
(defn cron
"Creates an instance of CronExpression from string."
[s]
(try
(CronExpression. s)
(catch java.text.ParseException e
(ex/raise :type :parse
:code :invalid-cron-expression
:cause e
:context {:expr s}))))
(defn cron?
[v]
(instance? CronExpression v))
(defn next-valid-instant-from
[^CronExpression cron ^Instant now]
(s/assert cron? cron)
(.toInstant (.getNextValidTimeAfter cron (Date/from now))))
(defn get-next
[cron tnow]
(let [nt (next-valid-instant-from cron tnow)]
(cons nt (lazy-seq (get-next cron nt)))))
(defmethod print-method CronExpression
[mv ^java.io.Writer writer]
(.write writer (str "#app/cron \"" (.toString ^CronExpression mv) "\"")))
(defmethod print-dup CronExpression
[o w]
(print-ctor o (fn [o w] (print-dup (.toString ^CronExpression o) w)) w))
(extend-protocol fez/IEdn
CronExpression
(-edn [o] (pr-str o)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Measurement Helpers
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn tpoint
"Create a measurement checkpoint for time measurement of potentially
asynchronous flow."
[]
(let [p1 (System/nanoTime)]
#(duration {:nanos (- (System/nanoTime) p1)})))
(sm/register!
{:type ::instant
:pred instant?
:type-properties
{:error/message "should be an instant"
:title "instant"
:decode/string instant
:encode/string format-instant
:decode/json instant
:encode/json format-instant
:gen/gen (tgen/fmap (fn [i] (in-past i)) tgen/pos-int)
::oapi/type "string"
::oapi/format "iso"}})
(sm/register!
{:type ::duration
:pred duration?
:type-properties
{:error/message "should be a duration"
:gen/gen (tgen/fmap duration tgen/pos-int)
:title "duration"
:decode/string duration
:encode/string format-duration
:decode/json duration
:encode/json format-duration
::oapi/type "string"
::oapi/format "duration"}})

View File

@@ -9,10 +9,10 @@
(:require
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.time :as ct]
[app.common.transit :as t]
[app.common.uuid :as uuid]
[app.util.inet :as inet]
[app.util.time :as dt]
[promesa.exec :as px]
[promesa.exec.csp :as sp]
[promesa.util :as pu]
@@ -93,7 +93,7 @@
(assoc ::id id)
(assoc ::state state)
(assoc ::beats beats)
(assoc ::created-at (ct/now))
(assoc ::created-at (dt/now))
(assoc ::input-ch input-ch)
(assoc ::heartbeat-ch hbeat-ch)
(assoc ::output-ch output-ch)
@@ -107,7 +107,7 @@
(let [options (-> options
(assoc ::channel channel)
(on-connect))
timeout (ct/duration idle-timeout)]
timeout (dt/duration idle-timeout)]
(yws/set-idle-timeout! channel timeout)
(px/submit! :vthread (partial start-io-loop! options))))
@@ -128,7 +128,7 @@
(fn on-message [_channel message]
(when (string? message)
(sp/offer! input-ch message)
(swap! state assoc ::last-activity-at (ct/now))))
(swap! state assoc ::last-activity-at (dt/now))))
:on-pong
(fn on-pong [_channel data]

View File

@@ -10,11 +10,11 @@
[app.common.data :as d]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.metrics :as mtx]
[app.util.time :as dt]
[cuerdas.core :as str]
[integrant.core :as ig]))
@@ -31,7 +31,7 @@
[f metrics tname]
(let [labels (into-array String [tname])]
(fn [params]
(let [tp (ct/tpoint)]
(let [tp (dt/tpoint)]
(try
(f params)
(finally
@@ -95,7 +95,7 @@
[::task [:or ::sm/text :keyword]]
[::label {:optional true} ::sm/text]
[::delay {:optional true}
[:or ::sm/int ::ct/duration]]
[:or ::sm/int ::dt/duration]]
[::queue {:optional true} [:or ::sm/text :keyword]]
[::priority {:optional true} ::sm/int]
[::max-retries {:optional true} ::sm/int]
@@ -111,7 +111,7 @@
(check-options! options)
(let [duration (ct/duration delay)
(let [duration (dt/duration delay)
interval (db/interval duration)
props (db/tjson params)
id (uuid/next)
@@ -129,7 +129,7 @@
:queue queue
:label label
:dedupe (boolean dedupe)
:delay (ct/format-duration duration)
:delay (dt/format-duration duration)
:replace (or deleted 0))
(db/exec-one! conn [sql:insert-new-task id task props queue

View File

@@ -10,9 +10,8 @@
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.db :as db]
[app.util.cron :as cron]
[app.util.time :as dt]
[app.worker :as wrk]
[app.worker.runner :refer [get-error-context]]
[cuerdas.core :as str]
@@ -50,7 +49,7 @@
[cfg {:keys [id cron] :as task}]
(px/thread
{:name (str "penpot/cron-task/" id)}
(let [tpoint (ct/tpoint)]
(let [tpoint (dt/tpoint)]
(try
(db/tx-run! cfg (fn [{:keys [::db/conn]}]
(db/exec-one! conn ["SET LOCAL statement_timeout=0;"])
@@ -58,20 +57,20 @@
(when (lock-scheduled-task! conn id)
(db/update! conn :scheduled-task
{:cron-expr (str cron)
:modified-at (ct/now)}
:modified-at (dt/now)}
{:id id}
{::db/return-keys false})
(l/dbg :hint "start" :id id)
((:fn task) task)
(let [elapsed (ct/format-duration (tpoint))]
(let [elapsed (dt/format-duration (tpoint))]
(l/dbg :hint "end" :id id :elapsed elapsed)))))
(catch InterruptedException _
(let [elapsed (ct/format-duration (tpoint))]
(let [elapsed (dt/format-duration (tpoint))]
(l/debug :hint "task interrupted" :id id :elapsed elapsed)))
(catch Throwable cause
(let [elapsed (ct/format-duration (tpoint))]
(let [elapsed (dt/format-duration (tpoint))]
(binding [l/*context* (get-error-context cause task)]
(l/err :hint "unhandled exception on running task"
:id id
@@ -83,10 +82,10 @@
(defn- ms-until-valid
[cron]
(assert (cron/cron-expr? cron) "expected cron instance")
(let [now (ct/now)
next (cron/next-valid-instant-from cron now)]
(ct/diff now next)))
(assert (dt/cron? cron) "expected cron instance")
(let [now (dt/now)
next (dt/next-valid-instant-from cron now)]
(dt/diff now next)))
(defn- schedule-cron-task
[{:keys [::running] :as cfg} {:keys [cron id] :as task}]
@@ -94,8 +93,8 @@
ft (px/schedule! ts (partial execute-cron-task cfg task))]
(l/dbg :hint "schedule" :id id
:ts (ct/format-duration ts)
:at (ct/format-inst (ct/in-future ts)))
:ts (dt/format-duration ts)
:at (dt/format-instant (dt/in-future ts)))
(swap! running #(into #{ft} (filter p/pending?) %))))
@@ -105,7 +104,7 @@
[:vector
[:maybe
[:map
[:cron [:fn cron/cron-expr?]]
[:cron [:fn dt/cron?]]
[:task :keyword]
[:props {:optional true} :map]
[:id {:optional true} :keyword]]]]]

View File

@@ -10,11 +10,11 @@
[app.common.data.macros :as dm]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.transit :as t]
[app.db :as db]
[app.metrics :as mtx]
[app.redis :as rds]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[cuerdas.core :as str]
[integrant.core :as ig]
@@ -32,9 +32,9 @@
(defmethod ig/expand-key ::wrk/dispatcher
[k v]
{k (-> (d/without-nils v)
(assoc ::timeout (ct/duration "10s"))
(assoc ::timeout (dt/duration "10s"))
(assoc ::batch-size 100)
(assoc ::wait-duration (ct/duration "5s")))})
(assoc ::wait-duration (dt/duration "5s")))})
(defmethod ig/assert-key ::wrk/dispatcher
[_ cfg]

View File

@@ -10,8 +10,8 @@
[app.common.data :as d]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.metrics :as mtx]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[integrant.core :as ig]
[promesa.exec :as px])
@@ -55,7 +55,7 @@
(defmethod ig/expand-key ::wrk/monitor
[k v]
{k (-> (d/without-nils v)
(assoc ::interval (ct/duration "2s")))})
(assoc ::interval (dt/duration "2s")))})
(defmethod ig/init-key ::wrk/monitor
[_ {:keys [::wrk/executor ::mtx/metrics ::interval ::wrk/name]}]

View File

@@ -12,11 +12,11 @@
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.transit :as t]
[app.db :as db]
[app.metrics :as mtx]
[app.redis :as rds]
[app.util.time :as dt]
[app.worker :as wrk]
[cuerdas.core :as str]
[integrant.core :as ig]
@@ -29,10 +29,10 @@
[:id ::sm/uuid]
[:queue :string]
[:name :string]
[:created-at ::ct/inst]
[:modified-at ::ct/inst]
[:scheduled-at {:optional true} ::ct/inst]
[:completed-at {:optional true} ::ct/inst]
[:created-at ::sm/inst]
[:modified-at ::sm/inst]
[:scheduled-at {:optional true} ::sm/inst]
[:completed-at {:optional true} ::sm/inst]
[:error {:optional true} :string]
[:max-retries :int]
[:retry-num :int]
@@ -76,10 +76,10 @@
:queue queue
:runner-id id
:retry (:retry-num task))
(let [tpoint (ct/tpoint)
(let [tpoint (dt/tpoint)
task-fn (wrk/get-task registry (:name task))
result (when task-fn (task-fn task))
elapsed (ct/format-duration (tpoint))
elapsed (dt/format-duration (tpoint))
result (if (valid-task-result? result)
result
{:status "completed"})]
@@ -105,7 +105,7 @@
(:max-retries task))
(= ::retry (:type edata)))
(cond-> {:status "retry" :error cause}
(ct/duration? (:delay edata))
(dt/duration? (:delay edata))
(assoc :delay (:delay edata))
(= ::noop (:strategy edata))
@@ -156,13 +156,13 @@
(str error))
task (-> result meta ::task)
nretry (+ (:retry-num task) inc-by)
now (ct/now)
now (dt/now)
delay (->> (iterate #(* % 2) delay) (take nretry) (last))]
(db/update! pool :task
{:error explain
:status "retry"
:modified-at now
:scheduled-at (ct/plus now delay)
:scheduled-at (dt/plus now delay)
:retry-num nretry}
{:id (:id task)})
nil))
@@ -172,14 +172,14 @@
explain (ex-message error)]
(db/update! pool :task
{:error explain
:modified-at (ct/now)
:modified-at (dt/now)
:status "failed"}
{:id (:id task)})
nil))
(handle-task-completion [result]
(let [task (-> result meta ::task)
now (ct/now)]
now (dt/now)]
(db/update! pool :task
{:completed-at now
:modified-at now
@@ -255,7 +255,7 @@
(let [cfg (-> cfg
(assoc ::rds/rconn rconn)
(assoc ::queue (str/ffmt "%:%" tenant queue))
(assoc ::timeout (ct/duration "5s")))]
(assoc ::timeout (dt/duration "5s")))]
(loop []
(when (px/interrupted?)
(throw (InterruptedException. "interrupted")))

View File

@@ -1,10 +1,3 @@
{penpot/inst app.common.time/inst
penpot/cron app.util.cron/cron
penpot/duration app.common.time/duration
penpot/path-data app.common.types.path/from-string
penpot/matrix app.common.geom.matrix/decode-matrix
penpot/point app.common.geom.point/decode-point
penpot/token-lib app.common.types.tokens-lib/parse-multi-set-dtcg-json
penpot/token-set app.common.types.tokens-lib/make-token-set
penpot/token-theme app.common.types.tokens-lib/make-token-theme
penpot/token app.common.types.tokens-lib/make-token}
{app/instant app.util.time/instant
app/cron app.util.time/cron
app/duration app.util.time/duration}

View File

@@ -20,6 +20,7 @@
[app.rpc :as-alias rpc]
[app.storage :as sto]
[app.storage.tmp :as tmp]
[app.util.time :as dt]
[backend-tests.helpers :as th]
[clojure.test :as t]
[cuerdas.core :as str]

View File

@@ -6,11 +6,11 @@
(ns backend-tests.bounce-handling-test
(:require
[app.common.time :as ct]
[app.db :as db]
[app.email :as email]
[app.http.awsns :as awsns]
[app.tokens :as tokens]
[app.util.time :as dt]
[backend-tests.helpers :as th]
[clojure.pprint :refer [pprint]]
[clojure.test :as t]
@@ -250,7 +250,7 @@
(let [profile (th/create-profile* 1)
pool (:app.db/pool th/*system*)]
(th/create-complaint-for pool {:type :bounce :id (:id profile) :created-at (ct/in-past {:days 8})})
(th/create-complaint-for pool {:type :bounce :id (:id profile) :created-at (dt/in-past {:days 8})})
(th/create-complaint-for pool {:type :bounce :id (:id profile)})
(th/create-complaint-for pool {:type :bounce :id (:id profile)})
@@ -268,8 +268,8 @@
:profile-complaint-threshold 2})}]
(let [profile (th/create-profile* 1)
pool (:app.db/pool th/*system*)]
(th/create-complaint-for pool {:type :bounce :id (:id profile) :created-at (ct/in-past {:days 8})})
(th/create-complaint-for pool {:type :bounce :id (:id profile) :created-at (ct/in-past {:days 8})})
(th/create-complaint-for pool {:type :bounce :id (:id profile) :created-at (dt/in-past {:days 8})})
(th/create-complaint-for pool {:type :bounce :id (:id profile) :created-at (dt/in-past {:days 8})})
(th/create-complaint-for pool {:type :bounce :id (:id profile)})
(th/create-complaint-for pool {:type :bounce :id (:id profile)})
(th/create-complaint-for pool {:type :complaint :id (:id profile)})

View File

@@ -15,7 +15,6 @@
[app.common.pprint :as pp]
[app.common.schema :as sm]
[app.common.spec :as us]
[app.common.time :as ct]
[app.common.transit :as tr]
[app.common.uuid :as uuid]
[app.config :as cf]
@@ -34,6 +33,7 @@
[app.rpc.helpers :as rph]
[app.util.blob :as blob]
[app.util.services :as sv]
[app.util.time :as dt]
[app.worker :as wrk]
[app.worker.runner]
[clojure.java.io :as io]
@@ -263,7 +263,7 @@
(dm/with-open [conn (db/open system)]
(db/insert! conn :profile-complaint-report
{:profile-id id
:created-at (or created-at (ct/now))
:created-at (or created-at (dt/now))
:type (name type)
:content (db/tjson {})})))
@@ -273,7 +273,7 @@
(db/insert! conn :global-complaint-report
{:email email
:type (name type)
:created-at (or created-at (ct/now))
:created-at (or created-at (dt/now))
:content (db/tjson {})})))
(defn create-team-role*
@@ -305,7 +305,7 @@
([system {:keys [file-id changes session-id profile-id revn]
:or {session-id (uuid/next) revn 0}}]
(-> system
(assoc ::files.update/timestamp (ct/now))
(assoc ::files.update/timestamp (dt/now))
(db/tx-run! (fn [{:keys [::db/conn] :as system}]
(let [file (files.update/get-file conn file-id)]
(#'files.update/update-file* system
@@ -379,7 +379,7 @@
;; (app.common.pprint/pprint (:app.rpc/methods *system*))
(try-on! (method-fn (-> data
(dissoc ::type)
(assoc :app.rpc/request-at (ct/now)))))))
(assoc :app.rpc/request-at (dt/now)))))))
(defn run-task!
([name]
@@ -525,7 +525,7 @@
(defn sleep
[ms-or-duration]
(Thread/sleep (inst-ms (ct/duration ms-or-duration))))
(Thread/sleep (inst-ms (dt/duration ms-or-duration))))
(defn config-get-mock
[data]

View File

@@ -7,10 +7,10 @@
(ns backend-tests.rpc-audit-test
(:require
[app.common.pprint :as pp]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.db :as db]
[app.rpc :as-alias rpc]
[app.util.time :as dt]
[backend-tests.helpers :as th]
[clojure.test :as t]
[yetti.request]))
@@ -46,7 +46,7 @@
:route "dashboard-files"}
:context {:engine "blink"}
:profile-id (:id prof)
:timestamp (ct/now)
:timestamp (dt/now)
:type "action"}]}
params (with-meta params
@@ -79,7 +79,7 @@
:route "dashboard-files"}
:context {:engine "blink"}
:profile-id uuid/zero
:timestamp (ct/now)
:timestamp (dt/now)
:type "action"}]}
params (with-meta params
{:app.http/request http-request})

View File

@@ -7,7 +7,6 @@
(ns backend-tests.rpc-comment-test
(:require
[app.common.geom.point :as gpt]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.db :as db]
[app.http :as http]
@@ -15,6 +14,7 @@
[app.rpc.commands.comments :as comments]
[app.rpc.cond :as cond]
[app.rpc.quotes :as-alias quotes]
[app.util.time :as dt]
[backend-tests.helpers :as th]
[clojure.test :as t]
[datoteka.fs :as fs]
@@ -78,7 +78,7 @@
(let [{:keys [result] :as out} (th/command! data)]
(t/is (th/success? out))
(t/is (ct/inst? (:modified-at result))))
(t/is (dt/instant? (:modified-at result))))
(let [status' (th/db-get :comment-thread-status
{:thread-id (:id thread)

View File

@@ -17,6 +17,7 @@
[app.http :as http]
[app.rpc :as-alias rpc]
[app.storage :as sto]
[app.util.time :as dt]
[backend-tests.helpers :as th]
[clojure.test :as t]
[cuerdas.core :as str]))
@@ -39,7 +40,7 @@
(t/is (nil? (:error out)))
(:result out)))
(t/deftest snapshots-crud
(t/deftest generic-ops
(let [profile (th/create-profile* 1 {:is-active true})
team-id (:default-team-id profile)
proj-id (:default-project-id profile)
@@ -132,85 +133,3 @@
(t/is (= (:type data) :validation))
(t/is (= (:code data) :system-snapshots-cant-be-deleted)))))))))
(t/deftest snapshots-locking
(let [profile-1 (th/create-profile* 1 {:is-active true})
profile-2 (th/create-profile* 2 {:is-active true})
team
(th/create-team* 1 {:profile-id (:id profile-1)})
project
(th/create-project* 1 {:profile-id (:id profile-1)
:team-id (:id team)})
file
(th/create-file* 1 {:profile-id (:id profile-1)
:project-id (:id project)
:is-shared false})
snapshot
(let [params {::th/type :create-file-snapshot
::rpc/profile-id (:id profile-1)
:file-id (:id file)
:label "label1"}
out (th/command! params)]
;; (th/print-result! out)
(t/is (nil? (:error out)))
(:result out))]
;; Add the secont profile to the team
(th/create-team-role* {:team-id (:id team)
:profile-id (:id profile-2)
:role :admin})
(t/testing "lock snapshot"
(let [params {::th/type :lock-file-snapshot
::rpc/profile-id (:id profile-1)
:file-id (:id file)
:id (:id snapshot)}
out (th/command! params)]
;; (th/print-result! out)
(t/is (nil? (:error out)))
(t/is (nil? (:result out)))
(let [snapshot (th/db-get :file-change {:id (:id snapshot)})]
(t/is (= (:id profile-1) (:locked-by snapshot))))))
(t/testing "delete locked snapshot"
(let [params {::th/type :delete-file-snapshot
::rpc/profile-id (:id profile-2)
:file-id (:id file)
:id (:id snapshot)}
out (th/command! params)]
;; (th/print-result! out)
(let [error (:error out)
data (ex-data error)]
(t/is (th/ex-info? error))
(t/is (= (:type data) :validation))
(t/is (= (:code data) :snapshot-is-locked)))))
(t/testing "unlock snapshot"
(let [params {::th/type :unlock-file-snapshot
::rpc/profile-id (:id profile-1)
:file-id (:id file)
:id (:id snapshot)}
out (th/command! params)]
;; (th/print-result! out)
(t/is (nil? (:error out)))
(t/is (nil? (:result out)))
(let [snapshot (th/db-get :file-change {:id (:id snapshot)})]
(t/is (= nil (:locked-by snapshot))))))
(t/testing "delete locked snapshot"
(let [params {::th/type :delete-file-snapshot
::rpc/profile-id (:id profile-2)
:file-id (:id file)
:id (:id snapshot)}
out (th/command! params)]
;; (th/print-result! out)
(t/is (nil? (:error out)))
(t/is (nil? (:result out)))))))

View File

@@ -9,7 +9,6 @@
[app.common.features :as cfeat]
[app.common.pprint :as pp]
[app.common.thumbnails :as thc]
[app.common.time :as ct]
[app.common.types.shape :as cts]
[app.common.uuid :as uuid]
[app.config :as cf]
@@ -19,6 +18,7 @@
[app.rpc :as-alias rpc]
[app.rpc.commands.files :as files]
[app.storage :as sto]
[app.util.time :as dt]
[backend-tests.helpers :as th]
[clojure.test :as t]
[cuerdas.core :as str]))
@@ -135,7 +135,7 @@
(t/is (nil? (:users result))))))
(th/db-update! :file
{:deleted-at (ct/now)}
{:deleted-at (dt/now)}
{:id file-id})
(t/testing "query single file after delete and wait"
@@ -1844,7 +1844,7 @@
(th/run-task! :delete-object
{:object :file
:deleted-at (ct/now)
:deleted-at (dt/now)
:id (:id file-1)})
;; Check that file media object references are marked all for deletion

View File

@@ -16,6 +16,7 @@
[app.rpc.commands.auth :as cauth]
[app.storage :as sto]
[app.tokens :as tokens]
[app.util.time :as dt]
[backend-tests.helpers :as th]
[clojure.java.io :as io]
[clojure.test :as t]

View File

@@ -6,11 +6,11 @@
(ns backend-tests.rpc-media-test
(:require
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.db :as db]
[app.rpc :as-alias rpc]
[app.storage :as sto]
[app.util.time :as dt]
[backend-tests.helpers :as th]
[clojure.test :as t]
[datoteka.fs :as fs]))
@@ -257,7 +257,7 @@
:is-shared false})
_ (th/db-update! :file
{:deleted-at (ct/now)}
{:deleted-at (dt/now)}
{:id (:id file)})
mfile {:filename "sample.jpg"

View File

@@ -6,7 +6,6 @@
(ns backend-tests.rpc-profile-test
(:require
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
@@ -15,6 +14,7 @@
[app.rpc :as-alias rpc]
[app.rpc.commands.profile :as profile]
[app.tokens :as tokens]
[app.util.time :as dt]
[backend-tests.helpers :as th]
[clojure.java.io :as io]
[clojure.test :as t]
@@ -158,7 +158,7 @@
(let [row (th/db-get :team
{:id (:default-team-id prof)}
{::db/remove-deleted false})]
(t/is (ct/inst? (:deleted-at row))))
(t/is (dt/instant? (:deleted-at row))))
;; execute permanent deletion task
(let [result (th/run-task! :objects-gc {:min-age 0})]
@@ -212,7 +212,7 @@
;; (th/print-result! out)
(let [team (th/db-get :team {:id (:id team1)} {::db/remove-deleted false})]
(t/is (ct/inst? (:deleted-at team)))))
(t/is (dt/instant? (:deleted-at team)))))
;; Request profile to be deleted
(let [params {::th/type :delete-profile
@@ -517,7 +517,7 @@
(let [sprops (:app.setup/props th/*system*)
itoken (tokens/generate sprops
{:iss :team-invitation
:exp (ct/in-future "48h")
:exp (dt/in-future "48h")
:role :editor
:team-id uuid/zero
:member-email "user@example.com"})
@@ -546,7 +546,7 @@
(let [sprops (:app.setup/props th/*system*)
itoken (tokens/generate sprops
{:iss :team-invitation
:exp (ct/in-future "48h")
:exp (dt/in-future "48h")
:role :editor
:team-id uuid/zero
:member-email "user2@example.com"})
@@ -568,7 +568,7 @@
(let [sprops (:app.setup/props th/*system*)
itoken (tokens/generate sprops
{:iss :team-invitation
:exp (ct/in-future "48h")
:exp (dt/in-future "48h")
:role :editor
:team-id uuid/zero
:member-email "user@example.com"})
@@ -589,7 +589,7 @@
(let [sprops (:app.setup/props th/*system*)
itoken (tokens/generate sprops
{:iss :team-invitation
:exp (ct/in-future "48h")
:exp (dt/in-future "48h")
:role :editor
:team-id uuid/zero
:member-email "user2@example.com"})
@@ -611,7 +611,7 @@
(let [sprops (:app.setup/props th/*system*)
itoken (tokens/generate sprops
{:iss :team-invitation
:exp (ct/in-future "48h")
:exp (dt/in-future "48h")
:role :editor
:team-id uuid/zero
:member-email "user2@example.com"})

View File

@@ -11,6 +11,7 @@
[app.db :as db]
[app.http :as http]
[app.rpc :as-alias rpc]
[app.util.time :as dt]
[backend-tests.helpers :as th]
[clojure.test :as t]))

View File

@@ -7,7 +7,6 @@
(ns backend-tests.rpc-team-test
(:require
[app.common.logging :as l]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
@@ -15,6 +14,7 @@
[app.rpc :as-alias rpc]
[app.storage :as sto]
[app.tokens :as tokens]
[app.util.time :as dt]
[backend-tests.helpers :as th]
[clojure.test :as t]
[datoteka.fs :as fs]
@@ -163,7 +163,7 @@
;; Proceed to delete the requester user
(th/db-update! :profile
{:deleted-at (ct/in-past "1h")}
{:deleted-at (dt/in-past "1h")}
{:id (:id requester)})
;; Create a new profile with the same email
@@ -271,7 +271,7 @@
(let [token (tokens/generate sprops
{:iss :team-invitation
:exp (ct/in-future "1h")
:exp (dt/in-future "1h")
:profile-id (:id profile1)
:role :editor
:team-id (:id team)
@@ -283,7 +283,7 @@
{:team-id (:id team)
:email-to (:email profile2)
:role "editor"
:valid-until (ct/in-future "48h")})
:valid-until (dt/in-future "48h")})
(let [data {::th/type :verify-token :token token}
out (th/command! data)]
@@ -328,7 +328,7 @@
{:team-id (:id team)
:email-to (:email profile3)
:role "editor"
:valid-until (ct/in-future "48h")})
:valid-until (dt/in-future "48h")})
(let [data {::th/type :verify-token
::rpc/profile-id (:id profile1)
@@ -381,14 +381,14 @@
{:team-id (:team-id data)
:email-to "test1@mail.com"
:role "editor"
:valid-until (ct/in-future "48h")})
:valid-until (dt/in-future "48h")})
;; insert an entry on the database with an expired invitation
(db/insert! th/*pool* :team-invitation
{:team-id (:team-id data)
:email-to "test2@mail.com"
:role "editor"
:valid-until (ct/in-past "48h")})
:valid-until (dt/in-past "48h")})
(let [out (th/command! data)]
(t/is (th/success? out))
@@ -415,7 +415,7 @@
{:team-id (:team-id data)
:email-to "test1@mail.com"
:role "editor"
:valid-until (ct/in-future "48h")})
:valid-until (dt/in-future "48h")})
(let [out (th/command! data)
;; retrieve the value from the database and check its content
@@ -438,7 +438,7 @@
{:team-id (:team-id data)
:email-to "test1@mail.com"
:role "editor"
:valid-until (ct/in-future "48h")})
:valid-until (dt/in-future "48h")})
(let [out (th/command! data)
;; retrieve the value from the database and check its content
@@ -582,7 +582,7 @@
(let [rows (th/db-exec! ["select * from team where id = ?" (:id team)])]
(t/is (= 1 (count rows)))
(t/is (ct/inst? (:deleted-at (first rows)))))
(t/is (dt/instant? (:deleted-at (first rows)))))
(let [result (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
(t/is (= 5 (:processed result))))))
@@ -626,7 +626,7 @@
(th/reset-mock! mock)
(th/db-update! :team-access-request
{:valid-until (ct/in-past "1h")}
{:valid-until (dt/in-past "1h")}
{:team-id (:id team)
:requester-id (:id requester)})

View File

@@ -7,11 +7,11 @@
(ns backend-tests.storage-test
(:require
[app.common.exceptions :as ex]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.db :as db]
[app.rpc :as-alias rpc]
[app.storage :as sto]
[app.util.time :as dt]
[backend-tests.helpers :as th]
[clojure.test :as t]
[cuerdas.core :as str]
@@ -53,12 +53,12 @@
(configure-storage-backend))
content (sto/content "content")
object (sto/put-object! storage {::sto/content content
::sto/expired-at (ct/in-future {:seconds 1})
::sto/expired-at (dt/in-future {:seconds 1})
:content-type "text/plain"})]
(t/is (sto/object? object))
(t/is (ct/inst? (:expired-at object)))
(t/is (ct/is-after? (:expired-at object) (ct/now)))
(t/is (dt/instant? (:expired-at object)))
(t/is (dt/is-after? (:expired-at object) (dt/now)))
(t/is (= object (sto/get-object storage (:id object))))
(th/sleep 1000)
@@ -73,7 +73,7 @@
content (sto/content "content")
object (sto/put-object! storage {::sto/content content
:content-type "text/plain"
:expired-at (ct/in-future {:seconds 1})})]
:expired-at (dt/in-future {:seconds 1})})]
(t/is (sto/object? object))
(t/is (true? (sto/del-object! storage object)))
@@ -95,13 +95,13 @@
content3 (sto/content "content3")
object1 (sto/put-object! storage {::sto/content content1
::sto/expired-at (ct/now)
::sto/expired-at (dt/now)
:content-type "text/plain"})
object2 (sto/put-object! storage {::sto/content content2
::sto/expired-at (ct/in-past {:hours 2})
::sto/expired-at (dt/in-past {:hours 2})
:content-type "text/plain"})
object3 (sto/put-object! storage {::sto/content content3
::sto/expired-at (ct/in-past {:hours 1})
::sto/expired-at (dt/in-past {:hours 1})
:content-type "text/plain"})]
@@ -154,7 +154,7 @@
(t/is (= (:media-id result-1) (:media-id result-2)))
(th/db-update! :file-media-object
{:deleted-at (ct/now)}
{:deleted-at (dt/now)}
{:id (:id result-1)})
;; run the objects gc task for permanent deletion
@@ -239,7 +239,7 @@
result-2 (:result out2)]
(th/db-update! :team-font-variant
{:deleted-at (ct/now)}
{:deleted-at (dt/now)}
{:id (:id result-2)})
;; run the objects gc task for permanent deletion

View File

@@ -7,6 +7,7 @@
(ns backend-tests.tasks-telemetry-test
(:require
[app.db :as db]
[app.util.time :as dt]
[backend-tests.helpers :as th]
[clojure.pprint :refer [pprint]]
[clojure.test :as t]

View File

@@ -10,15 +10,15 @@
"type": "git",
"url": "https://github.com/penpot/penpot"
},
"dependencies": {
"luxon": "^3.6.1"
},
"devDependencies": {
"concurrently": "^9.1.2",
"nodemon": "^3.1.10",
"source-map-support": "^0.5.21",
"ws": "^8.18.2"
},
"dependencies": {
"date-fns": "^4.1.0"
},
"scripts": {
"fmt:clj:check": "cljfmt check --parallel=false src/ test/",
"fmt:clj": "cljfmt fix --parallel=true src/ test/",

View File

@@ -8,7 +8,7 @@
(:require
[app.common.geom.shapes :as gsh]
[app.common.math :as mth]
[app.common.types.text :as txt]))
[app.common.text :as txt]))
(defn- get-attr
[obj attr]

View File

@@ -21,13 +21,6 @@
(let [target (with-meta target {:tag 'java.nio.ByteBuffer})]
`(long (.get ~target ~offset)))))
(defmacro read-unsigned-byte
[target offset]
(if (:ns &env)
`(.getUint8 ~target ~offset true)
(let [target (with-meta target {:tag 'java.nio.ByteBuffer})]
`(bit-and (long (.get ~target ~offset)) 0xff))))
(defmacro read-bool
[target offset]
(if (:ns &env)
@@ -81,13 +74,6 @@
(let [target (with-meta target {:tag 'java.nio.ByteBuffer})]
`(.put ~target ~offset (unchecked-byte ~value)))))
(defmacro write-bool
[target offset value]
(if (:ns &env)
`(.setInt8 ~target ~offset (if ~value 0x01 0x00) true)
(let [target (with-meta target {:tag 'java.nio.ByteBuffer})]
`(.put ~target ~offset (unchecked-byte (if ~value 0x01 0x00))))))
(defmacro write-short
[target offset value]
(if (:ns &env)
@@ -127,12 +113,6 @@
(finally
(.order ~target ByteOrder/LITTLE_ENDIAN))))))
(defn wrap
[data]
#?(:clj (let [buffer (ByteBuffer/wrap ^bytes data)]
(.order buffer ByteOrder/LITTLE_ENDIAN))
:cljs (new js/DataView (.-buffer ^js data))))
(defn allocate
[size]
#?(:clj (let [buffer (ByteBuffer/allocate (int size))]

View File

@@ -99,14 +99,13 @@
(into frontend-only-features)
(into backend-only-features)))
(def schema:features
(sm/register!
^{::sm/type ::features}
[:schema
{:title "FileFeatures"
::smdj/inline true
:gen/gen (smg/subseq supported-features)}
[::sm/set :string]]))
(sm/register!
^{::sm/type ::features}
[:schema
{:title "FileFeatures"
::smdj/inline true
:gen/gen (smg/subseq supported-features)}
[::sm/set :string]])
(defn- flag->feature
"Translate a flag to a feature name"

Some files were not shown because too many files have changed in this diff Show More