178 Commits
1.0.0 ... 1.1.0

Author SHA1 Message Date
MartinBraquet
a157f6ce27 Release 2025-09-09 14:50:36 +02:00
MartinBraquet
d27cc94dd0 Update readme 2025-09-09 14:48:47 +02:00
MartinBraquet
05b1416d39 Fix link 2025-09-09 13:58:41 +02:00
MartinBraquet
001d6ed968 Add constitution 2025-09-09 13:53:24 +02:00
MartinBraquet
3ed3eecb00 Ignore test 2025-09-09 13:30:24 +02:00
MartinBraquet
8638e6cdeb Clean about blocks 2025-09-09 13:28:26 +02:00
MartinBraquet
15c7a9c22e Add react-md 2025-09-09 03:31:58 +02:00
MartinBraquet
20566e42ec Fix sign in loading page 2025-09-09 03:25:14 +02:00
MartinBraquet
92d1cac254 Fix sign up loading page 2025-09-09 03:03:02 +02:00
MartinBraquet
72e879c424 Add terms and privacy notices 2025-09-09 02:08:12 +02:00
Martin Braquet
3b2516fea2 Create NOTICE 2025-09-09 01:44:06 +02:00
Martin Braquet
1151f98fd3 Update LICENSE-MIT 2025-09-09 01:42:54 +02:00
Martin Braquet
d608fdb6f0 Update LICENSE-MIT 2025-09-09 01:42:33 +02:00
Martin Braquet
5a2c172f96 Update LICENSE-MIT 2025-09-09 01:38:40 +02:00
Martin Braquet
6e1f7bdd7b Update LICENSE-MIT 2025-09-09 01:38:21 +02:00
Martin Braquet
bf96227b8b Update LICENSE-MIT 2025-09-09 01:35:38 +02:00
MartinBraquet
e7195dd68d Log 2025-09-09 01:16:14 +02:00
MartinBraquet
6a374b0c5a Fix compatibility questions 2025-09-08 23:46:50 +02:00
MartinBraquet
a0a876a282 Fix link going over max width 2025-09-08 23:12:41 +02:00
MartinBraquet
0a538375b2 Fix hanging button 2025-09-08 23:03:57 +02:00
MartinBraquet
ac5d60cd58 Add home line 2025-09-08 22:54:01 +02:00
MartinBraquet
8498480b9c Clean forms and prevent from changing username after creation 2025-09-08 22:35:57 +02:00
MartinBraquet
4c1c7fc514 Remove onlyfans (2)... 2025-09-08 22:35:33 +02:00
MartinBraquet
1c3a0f9c71 Remove onlyfans... 2025-09-08 22:35:28 +02:00
MartinBraquet
39b5068370 Fix tracker 2025-09-08 22:34:50 +02:00
MartinBraquet
4815cc7682 Make all complete-profile fields optional and Pre-Save lover 2025-09-07 23:54:37 +02:00
MartinBraquet
7886d32933 Refactor 2025-09-07 21:40:56 +02:00
MartinBraquet
2dab88c7a9 Rollback packages 2025-09-07 21:33:44 +02:00
MartinBraquet
35b83dcb9a Fix 2025-09-07 21:28:37 +02:00
MartinBraquet
5194b5f6bf Clean home and fix profiles not loading 2025-09-07 21:12:06 +02:00
MartinBraquet
d7c49fe19f Refactor into AboutBox 2025-09-07 20:39:09 +02:00
MartinBraquet
e3dadd2ce8 Fix dynamic "search" 2025-09-07 20:28:50 +02:00
MartinBraquet
d2d08bc77c Update packages 2025-09-07 20:04:56 +02:00
MartinBraquet
41da374d93 Fix github CI 2025-09-07 16:23:24 +02:00
MartinBraquet
c2f48fc90c Fix scaling 2025-09-07 16:16:57 +02:00
MartinBraquet
18f2b61545 Render bio and name only in profiles grid 2025-09-07 16:15:01 +02:00
MartinBraquet
a71c7adaf7 Move mutual likes down 2025-09-06 20:33:42 +02:00
MartinBraquet
3c050bee3b Show previous profile while loading another one 2025-09-06 20:30:46 +02:00
MartinBraquet
50be1ba510 Add logs 2025-09-06 20:13:44 +02:00
MartinBraquet
445f62ca53 Put profile pic down 2025-09-06 20:13:17 +02:00
MartinBraquet
4810904aa8 Add logs 2025-09-06 20:13:05 +02:00
MartinBraquet
3a6d459ebd Skip jwt supabase update 2025-09-06 20:12:37 +02:00
MartinBraquet
5c23380de9 Clean 2025-09-06 19:35:13 +02:00
MartinBraquet
a0285d970f Clean 2025-09-06 19:34:52 +02:00
MartinBraquet
0e7e0f52f1 Fix 2025-09-06 15:56:54 +02:00
MartinBraquet
5054a9552b Update env.example 2025-09-06 15:56:47 +02:00
MartinBraquet
a7c55530a4 Hide and regenerate Firebase API key 2025-09-06 14:58:30 +02:00
MartinBraquet
0f03746c6a Fix register redirect and add error message for email already in use 2025-09-06 12:02:06 +02:00
MartinBraquet
4a891d9c9a Fix 2025-09-06 11:31:27 +02:00
MartinBraquet
209e233ee2 Add error message 2025-09-06 11:29:37 +02:00
MartinBraquet
519ec081b5 Add 404 info 2025-09-06 11:07:49 +02:00
MartinBraquet
a73c7ff8b6 Fix 2025-09-06 10:53:42 +02:00
MartinBraquet
ea74e0514e Make sign in and sign up pages and allow for email/pwd registration 2025-09-06 10:50:47 +02:00
MartinBraquet
523bbd11cc Update readme 2025-09-05 20:29:01 +02:00
MartinBraquet
becf6ad7a4 Fix wrong firebase bucket name 2025-09-03 12:00:15 +02:00
MartinBraquet
364f58b186 Fix 2025-09-01 22:54:52 +02:00
MartinBraquet
ef36b78399 Comment 2025-09-01 22:51:34 +02:00
MartinBraquet
1c77e6dc2c Hide profiles if not logged in and put nice home page 2025-09-01 22:46:02 +02:00
MartinBraquet
d581ce054c Clean more files 2025-09-01 22:19:23 +02:00
MartinBraquet
2956ec073f Add about in mobile 2025-09-01 22:19:14 +02:00
MartinBraquet
3ace876b66 Clean text 2025-09-01 22:19:01 +02:00
MartinBraquet
d8722a8274 Clean NEXT_PUBLIC_FIREBASE_ENV 2025-09-01 22:18:48 +02:00
MartinBraquet
d5216a8e8c Remove unused component 2025-09-01 18:29:29 +02:00
MartinBraquet
152521e9e5 Fix color style 2025-09-01 18:27:58 +02:00
MartinBraquet
a2d8518f14 Even nicer /about 2025-09-01 18:21:16 +02:00
MartinBraquet
e88a384c5e Get nice about page 2025-09-01 18:12:41 +02:00
MartinBraquet
063663d5b0 Fix discord link 2025-09-01 17:54:15 +02:00
MartinBraquet
0a48263013 Remove supabase log 2025-09-01 17:54:05 +02:00
MartinBraquet
2877bc8239 Fix old about 2025-09-01 17:51:14 +02:00
MartinBraquet
d5886fe3f2 Remove pink border 2025-09-01 17:50:58 +02:00
MartinBraquet
c0bacb104f Clean 2025-09-01 17:45:53 +02:00
MartinBraquet
27b851dca1 Clean 2025-09-01 17:40:38 +02:00
MartinBraquet
bd358b38f7 Fix 2025-09-01 17:33:53 +02:00
MartinBraquet
f6c895fe78 Clean manifold names and update discord invite and improve email notifs 2025-09-01 17:30:45 +02:00
MartinBraquet
23c8f175bb Fix supabase IPV6 only host 2025-09-01 16:49:53 +02:00
MartinBraquet
2949871ba1 Fixes 2025-09-01 15:01:23 +02:00
MartinBraquet
34298fcfa1 Clean 2025-09-01 14:58:29 +02:00
MartinBraquet
83c6973d8e Add setup script 2025-09-01 14:56:23 +02:00
MartinBraquet
3b932fd52d Remove build 2025-09-01 14:39:49 +02:00
MartinBraquet
e0291b8e5a Fix tests 2025-09-01 14:36:47 +02:00
MartinBraquet
8fe3736411 Add base tests 2025-09-01 14:33:04 +02:00
MartinBraquet
70e46c2b69 Upgrade API deploy script 2025-09-01 14:17:21 +02:00
MartinBraquet
c5a7d823c8 Upgrade API readme 2025-09-01 14:17:09 +02:00
MartinBraquet
56115d34a4 Fix SSL 2025-09-01 14:16:57 +02:00
MartinBraquet
af3b91037e Copy tsconfig to docker 2025-09-01 14:16:30 +02:00
MartinBraquet
549161586e Fix tsc-alias 2025-09-01 14:15:31 +02:00
MartinBraquet
20ac60219f Add version 2025-09-01 14:15:14 +02:00
MartinBraquet
ef8b17f5c1 Source from .env 2025-09-01 14:15:06 +02:00
MartinBraquet
2c5339aa92 Use supabase ipv4 2025-09-01 14:14:41 +02:00
MartinBraquet
720fa70d60 Use prod by default 2025-09-01 14:14:17 +02:00
MartinBraquet
b2eef1279f use LOCAL_DEV 2025-09-01 14:14:07 +02:00
MartinBraquet
da5e5aedb2 rename secret 2025-09-01 14:13:35 +02:00
MartinBraquet
51c505f9a6 Update lock 2025-09-01 14:13:18 +02:00
MartinBraquet
4f97a61e94 Remove package lock as using yarn lock 2025-09-01 14:13:09 +02:00
MartinBraquet
7af0f28bd7 Acknowledge manifold.love 2025-08-31 12:17:46 +02:00
MartinBraquet
a9f4e95b77 Set up google cloud server 2025-08-28 22:16:42 +02:00
MartinBraquet
3d3420b1aa Add lock file 2025-08-28 17:57:03 +02:00
MartinBraquet
26915ea94f Fix vercel env key 2025-08-28 17:29:03 +02:00
MartinBraquet
dfa1d1c76e Remove pwd 2025-08-28 16:56:46 +02:00
MartinBraquet
87c1870770 Make PROD work 2025-08-28 16:42:51 +02:00
MartinBraquet
626c27b635 Fix color 2025-08-28 12:49:09 +02:00
MartinBraquet
7572d1a6ff Add email sending 2025-08-28 12:49:05 +02:00
MartinBraquet
b707439de7 Rename to Compass (2) 2025-08-27 22:17:07 +02:00
MartinBraquet
2e332707ff Rename to Compass 2025-08-27 22:12:04 +02:00
MartinBraquet
2f60efe273 Set grey color scales 2025-08-27 22:07:43 +02:00
MartinBraquet
632f8477fd Fix favicon 2025-08-27 22:07:35 +02:00
MartinBraquet
53432520cd Pull up features from manifold.love 2025-08-27 21:30:05 +02:00
MartinBraquet
078893f7d1 Add books feature 2025-08-11 17:38:26 +02:00
MartinBraquet
131cb0ff79 Disable interest cache 2025-08-11 17:34:00 +02:00
MartinBraquet
c49117ffd5 Rename about 2025-08-11 15:40:08 +02:00
MartinBraquet
cb52aa264d Update desc 2025-08-11 15:09:39 +02:00
MartinBraquet
1c1a02757f Add loading... info 2025-08-10 13:04:29 +02:00
MartinBraquet
4558c4a29c Make social style slider in /complete-profile 2025-08-10 12:59:09 +02:00
MartinBraquet
21832a1885 Update introversion bar 2025-08-09 22:19:05 +02:00
MartinBraquet
9b45cc087f Fix 2025-08-09 21:56:52 +02:00
MartinBraquet
b131e6ee8c Hide minor onboarding questions 2025-08-09 21:51:01 +02:00
MartinBraquet
306a297837 Change connections options 2025-08-09 21:50:38 +02:00
MartinBraquet
380eda64a0 Update schema.prisma 2025-08-08 01:51:29 +02:00
MartinBraquet
2dd8e3016f Undo schema 2025-08-08 01:43:55 +02:00
MartinBraquet
fd9c61a1c7 Redirect new users to onboarding page 2025-08-08 01:19:36 +02:00
MartinBraquet
4a2dba6e2e Hide register buttons if logged in 2025-08-07 23:28:10 +02:00
MartinBraquet
034d94ee22 Move sign in button from header to /profile 2025-08-07 23:16:07 +02:00
MartinBraquet
65139094cb Edit readme 2025-08-07 23:03:02 +02:00
emilyokeefe
b607cf22a7 More concise wording on the about page (#5)
* Changed wording

* remove share button because it doesn't function yet

* more concise wording
2025-08-07 19:16:10 +02:00
emilyokeefe
b79f8d05be Added onboarding questions and worked on style (#3) 2025-08-07 13:52:19 +02:00
Martin Braquet
520d157a0f Move LICENSE from MIT to AGPL 2025-08-07 13:39:33 +02:00
MartinBraquet
f73be7e38f Delete license 2025-08-07 13:32:22 +02:00
MartinBraquet
44834d1d27 Hide registration options until 3 chars typed 2025-08-07 01:00:25 +02:00
MartinBraquet
d07422060a Add local dev prisma db 2025-08-07 00:35:22 +02:00
emilyokeefe
0ee9437cf8 Changed wording (#4)
* Changed wording

* remove share button because it doesn't function yet

---------

Co-authored-by: Martin Braquet <martin.braquet@gmail.com>
2025-08-06 23:05:10 +02:00
MartinBraquet
6fe646a32b Hide stats for now 2025-08-06 14:16:21 +02:00
MartinBraquet
8e1f643612 Add basic multi-step form 2025-08-05 20:19:11 +02:00
MartinBraquet
7f2ba4d727 Fix test 2025-08-05 17:47:40 +02:00
MartinBraquet
b8706eae10 Clean 2025-08-05 17:42:21 +02:00
MartinBraquet
34600ab0cf Fix favicon 2025-08-05 17:33:35 +02:00
MartinBraquet
b3031b79d1 Restrict userbase access to logged-in users 2025-08-05 17:12:53 +02:00
MartinBraquet
cc679ddcfa Fix image 2025-08-05 17:11:55 +02:00
MartinBraquet
3633d469c1 Switch from home to logo on narrow screens 2025-08-05 16:46:49 +02:00
MartinBraquet
46fa721a6c Add prisma config 2025-08-05 16:26:57 +02:00
emilyokeefe
7e4116b1a2 UI redesign: Add compass branding, improve navigation, & simplify content (#1)
* UI redesign: Add compass branding, improve navigation, and simplify content

* Add homepage improvements
- Created "Why Compass" section with three value props
- Worked on spacing
- Made word "Search" styled with typing effect

* Delete pic

* Fix lint

* Compress favicon

---------

Co-authored-by: MartinBraquet <martin.braquet@gmail.com>
2025-08-05 16:14:07 +02:00
MartinBraquet
2a043dbd53 Add AND multi keyword search 2025-08-05 02:18:37 +02:00
MartinBraquet
4aa23cf755 Fix 2025-08-05 01:59:24 +02:00
MartinBraquet
4f261116de Add filter params to url query params 2025-08-05 01:55:31 +02:00
MartinBraquet
7ecd5481a0 Move button 2025-08-04 23:21:03 +02:00
MartinBraquet
1cea1b7bcf Hide cause areas 2025-08-04 23:09:26 +02:00
MartinBraquet
ccba688ec4 Rename 2025-08-04 22:55:29 +02:00
MartinBraquet
4eabe72078 Remove conflict style 2025-08-04 22:46:28 +02:00
MartinBraquet
fbbe97d297 Add sign in 2025-08-04 22:42:15 +02:00
MartinBraquet
26a80d1313 Change description 2025-08-04 22:19:55 +02:00
MartinBraquet
24b5edba9f Remove age slider 2025-08-04 22:14:16 +02:00
MartinBraquet
9113e0e372 Update name 2025-08-04 22:09:14 +02:00
MartinBraquet
9f944dd171 Clean texts 2025-08-04 19:34:50 +02:00
MartinBraquet
b91f76914c Rename manifesto 2025-08-04 19:31:20 +02:00
MartinBraquet
8593d90209 Remove core 2025-08-04 19:30:41 +02:00
MartinBraquet
df441b7236 Remove age slider 2025-08-04 19:29:04 +02:00
MartinBraquet
43e2798a9b Rename desired connection 2025-08-04 19:27:38 +02:00
MartinBraquet
7ee14a48a7 Change search bar placeholder 2025-08-04 19:24:42 +02:00
MartinBraquet
1e5cf0ca5b Move user count 2025-08-04 19:24:30 +02:00
MartinBraquet
12bac6d305 Fix spinner 2025-08-04 19:18:35 +02:00
MartinBraquet
1518cd50ec Clean 2025-08-04 18:46:33 +02:00
MartinBraquet
9dfc82c106 Clean 2025-08-04 14:43:23 +02:00
MartinBraquet
087f10f7bb Rename 2025-08-04 14:39:43 +02:00
MartinBraquet
6971eac21f Add badges 2025-08-04 14:38:19 +02:00
MartinBraquet
da7cde91b3 Fix 2025-08-04 14:28:24 +02:00
MartinBraquet
814b4fe0ae Add tests 2025-08-04 14:25:44 +02:00
MartinBraquet
a2abc4fda9 Fix bad useState practices 2025-08-04 14:25:33 +02:00
MartinBraquet
9bcba9895e Clean bar info 2025-08-04 14:24:54 +02:00
MartinBraquet
ff13ea71a6 Fix spinner 2025-08-04 14:24:03 +02:00
MartinBraquet
7993dcb0b1 Update gitig 2025-08-04 14:23:42 +02:00
MartinBraquet
9a527fef20 Update lint 2025-08-04 14:23:30 +02:00
MartinBraquet
e6de25c0a4 Fix some lints 2025-08-04 11:44:24 +02:00
MartinBraquet
c45adc1a8a Remove slack 2025-08-04 11:34:17 +02:00
MartinBraquet
c06f86edbb Fix loading spinner 2025-08-04 11:34:01 +02:00
MartinBraquet
ed515fa3fc Add base tests 2025-08-04 10:26:19 +02:00
MartinBraquet
c284983b4b Update dev docs 2025-08-04 10:16:24 +02:00
MartinBraquet
5fed099034 Update README.md 2025-08-04 10:16:19 +02:00
MartinBraquet
c8dd335d65 Update README.md 2025-08-04 09:58:51 +02:00
MartinBraquet
17d2c6aa57 Clean 2025-08-04 09:56:30 +02:00
597 changed files with 65367 additions and 13466 deletions

View File

@@ -1,20 +1,29 @@
# Use the Prisma Postgres integration from Vercel Marketplace to automatically connect a Prisma Postgres instance # Rename this file to `.env` and fill in the values.
# Or manually run `npx prisma init --db` to create a Prisma Postgres and manually set the `DATABASE_URL` below
# Create a random 32-character string or run `npx auth secret` to obtain one and set it as the `AUTH_SECRET` below # Required variables for basic local functionality
GOOGLE_CLIENT_ID= # For database connection. A 16-character password with digits and letters.
GOOGLE_CLIENT_SECRET= SUPABASE_DB_PASSWORD=
NEXTAUTH_SECRET=
NEXTAUTH_URL=http://localhost:3000
# Email configuration # For authentication.
EMAIL_SERVER_HOST=smtp.resend.dev # Ask the project admin. Should start with "AIza".
EMAIL_SERVER_PORT=587 NEXT_PUBLIC_FIREBASE_API_KEY=
EMAIL_SERVER_USER=BayesBond
EMAIL_SERVER_PASSWORD=
RESEND_API_KEY=
EMAIL_FROM=
# Development (SQLite) # The URL where your local backend server is running.
DATABASE_URL=file:./dev.db # You can change the port if needed.
NEXT_PUBLIC_API_URL=localhost:8088
# Optional variables for full local functionality
# For the location / distance filtering features.
# Create a free account at https://rapidapi.com/wirefreethought/api/geodb-cities and get an API key.
GEODB_API_KEY=
# For analytics like page views, user actions, feature usage, etc.
# Create a free account at https://posthog.com and get a project API key. Should start with "phc_".
POSTHOG_KEY=
# For sending emails (e.g. for user sign up, password reset, notifications, etc.).
# Create a free account at https://resend.com and get an API key. Should start with "re_".
RESEND_API_KEY=

View File

@@ -1,31 +0,0 @@
name: Check Next.js
on:
push:
branches:
- main
pull_request:
branches:
- main
jobs:
build:
name: Build
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '22'
- name: Install dependencies
run: npm ci
- name: Build the app
env:
DATABASE_URL: ${{ secrets.DATABASE_URL }}
run: npm run build

View File

@@ -9,6 +9,7 @@ on:
jobs: jobs:
release: release:
name: Release
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout repo - name: Checkout repo

58
.github/workflows/ci.yml vendored Normal file
View File

@@ -0,0 +1,58 @@
name: CI
on:
push:
branches:
- main
pull_request:
branches:
- main
jobs:
ci:
name: All
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '22'
- name: Install dependencies
run: yarn install
- name: Type check
run: echo skipping #npx tsc --noEmit
- name: Lint
run: npm run lint
- name: Run Jest tests
run: npm run test tests/jest
# - name: Build app
# env:
# DATABASE_URL: ${{ secrets.DATABASE_URL }}
# run: npm run build
# Optional: Playwright E2E tests
- name: Install Playwright deps
run: npx playwright install --with-deps
# npm install @playwright/test
# npx playwright install
- name: Run E2E tests
run: |
NEXT_PUBLIC_API_URL=localhost:8088 \
NEXT_PUBLIC_FIREBASE_ENV=PROD \
NEXT_PUBLIC_FIREBASE_API_KEY=${{ secrets.NEXT_PUBLIC_FIREBASE_API_KEY }} \
yarn --cwd=web serve &
npx wait-on http://localhost:3000
npx playwright test tests/playwright
SERVER_PID=$(fuser -k 3000/tcp)
echo $SERVER_PID
kill $SERVER_PID

16
.gitignore vendored
View File

@@ -33,6 +33,9 @@ yarn-error.log*
# env files (can opt-in for committing if needed) # env files (can opt-in for committing if needed)
.env .env
.env.local .env.local
.env.*
.envrc
supabase/*
# vercel # vercel
.vercel .vercel
@@ -41,9 +44,20 @@ yarn-error.log*
*.tsbuildinfo *.tsbuildinfo
next-env.d.ts next-env.d.ts
.idea/
node_modules
yarn-error.log
dev
firebase-debug.log
tsconfig.tsbuildinfo
*.db *.db
*prisma/migrations *prisma/migrations
martin martin
.obsidian .obsidian
.idea .idea
*.last-run.json
*lock.hcl
/web/pages/test.tsx

24
.prettierrc Normal file
View File

@@ -0,0 +1,24 @@
{
"tabWidth": 2,
"useTabs": false,
"semi": false,
"trailingComma": "es5",
"singleQuote": true,
"plugins": ["prettier-plugin-sql"],
"overrides": [
{
"files": "*.sql",
"options": {
"language": "postgresql",
"keywordCase": "lower",
"logicalOperatorNewline": "before"
}
},
{
"files": "*.svg",
"options": {
"parser": "html"
}
}
]
}

1
.yarnrc Normal file
View File

@@ -0,0 +1 @@
save-exact true

View File

@@ -13,13 +13,13 @@ We welcome pull requests, but only if they meet the project's quality and design
1. **Fork the repository** using the GitHub UI. 1. **Fork the repository** using the GitHub UI.
2. **Clone your fork** locally: 2. **Clone your fork** locally:
```bash ```bash
git clone https://github.com/your-username/BayesBond.git git clone https://github.com/your-username/Compass.git
cd your-fork cd your-fork
3. **Add the upstream remote**: 3. **Add the upstream remote**:
```bash ```bash
git remote add upstream https://github.com/BayesBond/BayesBond.git git remote add upstream https://github.com/CompassMeet/Compass.git
``` ```
## Create a New Branch ## Create a New Branch

674
LICENSE
View File

@@ -1,21 +1,661 @@
MIT License GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007
Copyright (c) 2025 BayesBond Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Permission is hereby granted, free of charge, to any person obtaining a copy Preamble
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all The GNU Affero General Public License is a free, copyleft license for
copies or substantial portions of the Software. software and other kinds of works, specifically designed to ensure
cooperation with the community in the case of network server software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR The licenses for most software and other practical works are designed
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, to take away your freedom to share and change the works. By contrast,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE our General Public Licenses are intended to guarantee your freedom to
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER share and change all versions of a program--to make sure it remains free
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, software for all its users.
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE. When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.
A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate. Many developers of free software are heartened and
encouraged by the resulting cooperation. However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.
The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community. It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server. Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.
An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals. This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing under
this license.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU Affero General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Remote Network Interaction; Use with the GNU General Public License.
Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your version
supports such interaction) an opportunity to receive the Corresponding
Source of your version by providing access to the Corresponding Source
from a network server at no charge, through some standard or customary
means of facilitating copying of software. This Corresponding Source
shall include the Corresponding Source for any work covered by version 3
of the GNU General Public License that is incorporated pursuant to the
following paragraph.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU Affero General Public License from time to time. Such new versions
will be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU Affero General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU Affero General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU Affero General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU AGPL, see
<https://www.gnu.org/licenses/>.

21
LICENSE-MIT Normal file
View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2025 polylove, LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

6
NOTICE Normal file
View File

@@ -0,0 +1,6 @@
Modifications License (AGPL-3.0)
Portions of this software have been modified by Compass (c) 2025.
These modifications are licensed under the GNU Affero General Public License v3.0 (AGPL-3.0).
The original software remains MIT-licensed (c) 2025 polylove, LLC.

132
README.md
View File

@@ -1,91 +1,149 @@
# BayesBond
This repository provides the source code for [BayesBond](https://bayesbond.vercel.app), a web application where rational thinkers can bond and form deep 1-1 [![CI](https://github.com/CompassMeet/Compass/actions/workflows/ci.yml/badge.svg)](https://github.com/CompassMeet/Compass/actions/workflows/ci.yml)
relationships in a fully transparent and efficient way. It just got released—please share it with anyone who would benefit from it! [![CD](https://github.com/CompassMeet/Compass/actions/workflows/cd.yml/badge.svg)](https://github.com/CompassMeet/Compass/actions/workflows/cd.yml)
![Vercel](https://deploy-badge.vercel.app/vercel/bayesbond)
To contribute, please submit a pull request or issue, or fill out this [form](https://forms.gle/tKnXUMAbEreMK6FC6) for suggestions and collaborations. # Compass
This repository provides the source code for [Compass](https://compassmeet.com), a web application for people to form deep 1-on-1 relationships in a fully transparent and efficient way. And it just got released!
**We can't do this alone.** Whatever your skills—coding, design, writing, moderation, marketing, or even small donations—you can make a real difference. [Contribute](https://www.compassmeet.com/about) in any way you can and help our community thrive!
## Features ## Features
- Extremely detailed profiles for deep connections - Extremely detailed profiles for deep connections
- Radically transparent: user base fully searchable - Radically transparent: user base fully searchable
- Free, ad-free, not for profit - Free, ad-free, not for profit (supported by donations)
- Supported by donation - Created, hosted, maintained, and moderated by volunteers
- Open source - Open source
- Democratically governed - Democratically governed
The full description is available [here](https://martinbraquet.com/meeting-rational). A detailed description of the vision is available [here](https://martinbraquet.com/meeting-rational).
## To Do ## To Do
- [x] Authentication (user/password and Google Sign In) - [x] Authentication (user/password and Google Sign In)
- [x] Set up PostgreSQL in Production with supabase or prisma console (can stick with SQLite in dev / local) - [x] Set up PostgreSQL in Production with supabase
- [x] Set up hosting (vercel) - [x] Set up web hosting (vercel)
- [x] Ask for detailed info per profile upon registration (intellectual interests, location, cause areas, personality type, conflict style, desired type of connection, prompt answers, gender, etc.) - [x] Set up backend hosting (google cloud)
- [x] Ask for detailed info upon registration (location, desired type of connection, prompt answers, gender, etc.)
- [x] Set up page listing all the profiles - [x] Set up page listing all the profiles
- [x] Search through all the profile variables - [x] Search through most profile variables
- [ ] (Set up chat / direct messaging) - [x] (Set up chat / direct messaging)
- [ ] Set up domain name (https://bayesbond.com) - [x] Set up domain name (https://compassmeet.com)
#### Secondary To Do #### Secondary To Do
Any action item is open to anyone for collaboration, but the following ones are particularly easy to do for first-time contributors. Any action item is open to anyone for collaboration, but the following ones are particularly easy to do for first-time contributors.
- [ ] Add profile features (intellectual interests, cause areas, personality type, conflict style, etc.)
- [ ] Add filters to search through remaining profile features (politics, religion, education level, etc.)
- [ ] Cover with tests (very important, just the test template and framework are ready)
- [ ] Clean up terms and conditions - [ ] Clean up terms and conditions
- [ ] Clean up privacy notice - [ ] Clean up privacy notice
- [ ] Clean up learn more page - [x] Clean up learn more page
- [x] Add dark theme - [x] Add dark theme
- [ ] Cover with tests
## Implementation ## Implementation
The web app is coded in Typescript using React as front-end and Prisma as back-end. It includes: The web app is coded in Typescript using React as front-end. It includes:
- [NextAuth.js v4](https://next-auth.js.org/) - [Supabase](https://supabase.com/) for the PostgreSQL database
- [Prisma Postgres](https://www.prisma.io/postgres) - [Google Cloud](https://console.cloud.google.com) for hosting the backend API
- [Prisma ORM](https://www.prisma.io/orm) - [Firebase](https://firebase.google.com/) for authentication and media storage
- Vercel - [Vercel](https://vercel.com/) for hosting the front-end
## Development ## Development
After cloning the repo and navigating into it, install dependencies: Below are all the steps to contribute. If you have any trouble or questions, please don't hesitate to open an issue or contact us on [Discord](https://discord.gg/8Vd7jzqjun)! We're responsive and happy to help.
``` ### Installation
npm install
Clone the repo and navigate into it:
```bash
git clone git@github.com:CompassMeet/Compass.git
cd Compass
``` ```
You now need to configure your database connection via an environment variable. Install `opentofu`, `docker`, and `yarn`. Try running this on Linux or macOS for a faster install:
```bash
./setup.sh
```
If it doesn't work, you can install them manually (Google how to install `opentofu`, `docker`, and `yarn` for your OS).
First, create an `.env` file: Then, install the dependencies for this project:
```bash
yarn install
```
### Environment Variables
We can't make the following information public, for security and privacy reasons:
- Database, otherwise anyone could access all the user data (including private messages)
- Firebase, otherwise anyone could remove users or modify the media files
- Email, analytics, and location services, otherwise anyone could use our paid plan
So, for your development, we will give you user-specific access when possible (e.g., Firebase) and for the rest you will need to set up cloned services (email, locations, etc.) and store your secrets as environment variables.
To do so, simply create an `.env` file as a copy of `.env.example`, open it, and fill in the variables according to the instructions in the file:
```bash ```bash
cp .env.example .env cp .env.example .env
``` ```
To ensure your authentication works properly, you'll also need to set the `AUTH_SECRET` [env var for NextAuth.js] ### Installing PostgreSQL
(https://next-auth.js.org/configuration/options). You can generate such a random 32-character string with:
Run the following commands to set up your local development database. Run only the section that corresponds to your operating system.
On macOS:
```bash ```bash
npx auth secret brew install postgresql
brew services start postgresql
``` ```
In the end, your entire `.env` file should look similar to this (but using _your own values_ for the env vars): On Linux:
```bash ```bash
DATABASE_URL="file:./dev.db" sudo apt update
AUTH_SECRET="gTwLSXFeNWFRpUTmxlRniOfegXYw445pd0k6JqXd7Ag=" sudo apt install postgresql postgresql-contrib
sudo systemctl start postgresql
```
On Windows, you can download PostgreSQL from the [official website](https://www.postgresql.org/download/windows/).
### Database Initialization
Create a database named `compass` and set the password for the `postgres` user:
```bash
sudo -u postgres psql
ALTER USER postgres WITH PASSWORD 'password';
\q
``` ```
Run the following commands to set up your local development database and Prisma schema: Create the database:
```bash ```bash
npx prisma migrate dev --name init ...
``` ```
Note that your local database will be made of synthetic data, not real users. This is fine for development and testing. Note that your local database will be made of synthetic data, not real users. This is fine for development and testing.
Start the development server: ### Tests
Make sure the tests pass:
```bash ```bash
npm run dev yarn test
```
TODO: fix tests
### Running the Development Server
Start the development server:
```bash
yarn dev
``` ```
Once the server is running, visit http://localhost:3000 to start using the app. Once the server is running, visit http://localhost:3000 to start using the app. You can sign up and visit the profiles; you should see 5 synthetic profiles.
See [development.md](docs/development.md) for additional instructions, such as adding new profile features. Now you can start contributing by making changes and submitting pull requests!
See [development.md](docs/development.md) for additional instructions, such as adding new profile features.
## Acknowledgements
This project is built on top of [manifold.love](https://github.com/sipec/polylove), an open-source dating platform licensed under the MIT License. We greatly appreciate their work and contributions to open-source, which have significantly aided in the development of some core features such as direct messaging, prompts, and email notifications. We invite the community to explore and contribute to other open-source projects like manifold.love as well, especially if you're interested in functionalities that deviate from Compass' ideals of deep, intentional connections.

View File

Binary file not shown.

Before

Width:  |  Height:  |  Size: 167 KiB

View File

@@ -1,53 +0,0 @@
'use client';
import Link from "next/link";
import {aColor} from "@/lib/client/constants";
export default function LearnMorePage() {
return (
<div className="text-gray-600 dark:text-white min-h-screen p-6">
{aColor}
<div className="max-w-3xl mx-auto">
<h1 className="text-3xl font-bold mb-4 text-center">About IntentionalBond</h1>
<div className="et_pb_text_inner">
{/*<h1 id="abstract">Abstract</h1>*/}
<p>Forming and maintaining close connections is fundamental for most peoples mental healthand hence overall
well-being. However, currently available meeting platforms, lacking transparency and searchability, are
deeply failing to bring together thoughtful people. This platform is designed to
foster close friendships and relationships for people who prioritize learning, curiosity, and critical
thinking. The directory of users is fully transparent and each profile contains extensive
information, allowing searches over all users through powerful filtering and sorting methods. To prevent any
value drift from this pro-social mission, the platform will always be free, ad-free, not for profit,
donation-supported, open source, and democratically governed.</p>
<div className="mt-8 flex space-x-4 justify-center">
<Link
href="/manifesto"
className="px-6 py-3 bg-gray-200 dark:bg-gray-600 text-gray-800 dark:text-white text-lg rounded-lg hover:bg-gray-300 dark:hover:bg-gray-500 transition"
>
Manifesto
</Link>
</div>
<h3 id="how-to-help">How to Help</h3>
<h5 id="give-suggestions-or-contribute">Give Suggestions or Contribute</h5>
<p>Give suggestions or show your inclination to contribute through this <a
href="https://forms.gle/tKnXUMAbEreMK6FC6">form</a>!</p>
<h5 id="join-chats">Join Chats</h5>
<p>You can join the community on <a
href="https://join.slack.com/t/bayesmeet/shared_invite/zt-3a2k2ybf4-~S8i0hCuUETojxqNHJbB_g">Slack</a> or <a
href="https://discord.gg/8Vd7jzqjun">Discord</a> to shape and test the productor just to chat with
like-minded people.</p>
<h5 id="share">Share</h5>
<p>Share the idea and article with people who identify with the community values and may benefit from the
product.</p>
<h5 id="donate">Donate</h5>
<p>You can already donate to support the initial infrastructure via <a
href="https://www.paypal.com/paypalme/MartinBraquet">PayPal</a> or <a
href="https://github.com/sponsors/MartinBraquet">GitHub</a> (GitHub has increased transparency, but requires
an account).</p>
<h5 id="github-repo">Source Code</h5>
<p>The source code and instructions for development are available on <a href="https://github.com/BayesBond/BayesBond">GitHub</a>.</p>
</div>
</div>
</div>
);
}

View File

@@ -1,148 +0,0 @@
"use client";
import {signIn} from "next-auth/react";
import {useRouter, useSearchParams} from "next/navigation";
import {Suspense, useEffect, useState} from "react";
import Link from "next/link";
import {FcGoogle} from "react-icons/fc";
/**
 * /login route entry point.
 *
 * The form reads search params, so it must be wrapped in <Suspense>
 * for the app router; the fallback is intentionally blank.
 */
export default function LoginPage() {
  const blankFallback = <div></div>;
  return (
    <Suspense fallback={blankFallback}>
      <RegisterComponent/>
    </Suspense>
  );
}
/**
 * Sign-in form supporting email/password credentials and Google OAuth.
 *
 * Errors come from two places: the NextAuth `?error=` query param
 * (e.g. after a failed OAuth callback) and failures of the credential
 * sign-in call itself. Fix: removed a stray `console.log('Form rendering')`
 * debug statement that fired on every render.
 */
function RegisterComponent() {
  const router = useRouter();
  const searchParams = useSearchParams();
  const [error, setError] = useState<string | null>(null);
  const [isLoading, setIsLoading] = useState(false);

  // Surface NextAuth callback errors passed back via the query string.
  useEffect(() => {
    const error = searchParams.get('error');
    if (error === 'OAuthAccountNotLinked') {
      setError('This email is already registered with a different provider');
    } else if (error) {
      setError('An error occurred during login');
    }
  }, [searchParams]);

  // Google uses the redirect flow: on success the page navigates away, so
  // isLoading is only reset on the failure path.
  const handleGoogleSignIn = async () => {
    try {
      setIsLoading(true);
      await signIn('google', {callbackUrl: '/'});
    } catch {
      setError('Failed to sign in with Google');
      setIsLoading(false);
    }
  };

  /** Credential sign-in; on success, navigate home and refresh server data. */
  async function handleSubmit(event: React.FormEvent<HTMLFormElement>) {
    try {
      event.preventDefault();
      setIsLoading(true);
      setError(null);
      const formData = new FormData(event.currentTarget);
      const response = await signIn("credentials", {
        ...Object.fromEntries(formData),
        redirect: false,
      });
      if (response?.error) {
        setError("Invalid email or password");
        setIsLoading(false);
        return;
      }
      router.push("/");
      router.refresh();
    } catch {
      setError("An error occurred during login");
      setIsLoading(false);
    }
  }

  return (
    <div className="min-h-screen flex items-center justify-center py-12 px-4 sm:px-6 lg:px-8">
      <div className="max-w-md w-full space-y-8">
        <div>
          <h2 className="mt-6 text-center text-3xl font-extrabold ">
            Sign in to your account
          </h2>
        </div>
        <form className="mt-8 space-y-6" onSubmit={handleSubmit}>
          <div className="rounded-md shadow-sm -space-y-px">
            <div>
              <label htmlFor="email" className="sr-only">
                Email address
              </label>
              <input
                id="email"
                name="email"
                type="email"
                required
                className="appearance-none rounded-none relative block w-full px-3 py-2 border border-gray-300 placeholder-gray-500 rounded-t-md focus:outline-none focus:ring-blue-500 focus:border-blue-500 focus:z-10 sm:text-sm"
                placeholder="Email address"
              />
            </div>
            <div>
              <label htmlFor="password" className="sr-only">
                Password
              </label>
              <input
                id="password"
                name="password"
                type="password"
                required
                className="appearance-none rounded-none relative block w-full px-3 py-2 border border-gray-300 placeholder-gray-500 rounded-b-md focus:outline-none focus:ring-blue-500 focus:border-blue-500 focus:z-10 sm:text-sm"
                placeholder="Password"
              />
            </div>
          </div>
          {error && (
            <div className="text-red-500 text-sm text-center">{error}</div>
          )}
          <div className="space-y-4">
            <button
              type="submit"
              disabled={isLoading}
              className={`group relative w-full flex justify-center py-2 px-4 border border-transparent text-sm font-medium rounded-md text-white bg-blue-600 hover:bg-blue-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-blue-500 ${isLoading ? 'opacity-70 cursor-not-allowed' : ''}`}
            >
              {isLoading ? 'Signing in...' : 'Sign in with Email'}
            </button>
            <div className="relative">
              <div className="absolute inset-0 flex items-center">
                <div className="w-full border-t border-gray-300"></div>
              </div>
              <div className="relative flex justify-center text-sm">
                <span className="px-2 bg-gray-50 dark:bg-gray-900 text-gray-500">Or continue with</span>
              </div>
            </div>
            <button
              type="button"
              onClick={handleGoogleSignIn}
              disabled={isLoading}
              className="w-full flex items-center justify-center gap-2 py-2 px-4 border border-gray-300 rounded-md shadow-sm text-sm font-medium text-gray-700 hover: focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-blue-500 disabled:opacity-70 disabled:cursor-not-allowed"
            >
              <FcGoogle className="w-5 h-5"/>
              Sign in with Google
            </button>
          </div>
        </form>
        <div className="text-center">
          <Link href="/register" className="text-blue-600 hover:underline">
            No account? Register.
          </Link>
        </div>
      </div>
    </div>
  );
}

View File

@@ -1,48 +0,0 @@
'use client';
import ProfilePage from "@/app/profiles/page";
export const dynamic = "force-dynamic"; // This disables SSG and ISR
/**
 * Landing page.
 *
 * Renders a hero headline and tagline, with the searchable profile
 * directory embedded directly below (wrapped in its own <main>, matching
 * the markup of the standalone /profiles page).
 */
export default function HomePage() {
  // Profile list wrapped exactly as on /profiles so styling matches.
  const embeddedProfiles = (
    <main className="min-h-screen flex flex-col">
      <ProfilePage/>
    </main>
  );
  return (
    <main className="min-h-screen flex flex-col">
      {/* A header with a GitHub link used to live here; it is kept out of the DOM. */}
      {/* Hero Section */}
      <section className="flex flex-col items-center justify-center flex-1 text-center px-4">
        <h1 className="text-5xl md:text-6xl xs:text-4xl font-extrabold max-w-3xl leading-tight">
          IntentionalBond
        </h1>
        <p className="mt-6 text-lg md:text-xl text-gray-400 max-w-2xl">
          Tired of swiping? Just search what you're looking for!
        </p>
        <div className=" w-full">
          {embeddedProfiles}
        </div>
      </section>
    </main>
  );
}

View File

@@ -1,259 +0,0 @@
"use client";
import {Suspense, useState} from "react";
import Link from "next/link";
import {signIn} from "next-auth/react";
import {FcGoogle} from "react-icons/fc";
import {useSearchParams} from "next/navigation";
/**
 * /register route entry point.
 *
 * The form reads search params, so it must be wrapped in <Suspense>
 * for the app router; the fallback is intentionally blank.
 */
export default function RegisterPage() {
  const blankFallback = <div></div>;
  return (
    <Suspense fallback={blankFallback}>
      <RegisterComponent/>
    </Suspense>
  );
}
function RegisterComponent() {
const searchParams = useSearchParams();
const [error, setError] = useState<string | null>(searchParams.get('error'));
const [isLoading, setIsLoading] = useState(false);
const [registrationSuccess, setRegistrationSuccess] = useState(false);
const [registeredEmail, setRegisteredEmail] = useState('');
function redirect() {
// Redirect to complete profile page
window.location.href = '/complete-profile';
}
const handleGoogleSignUp = async () => {
try {
setIsLoading(true);
await signIn('google', {callbackUrl: '/complete-profile'});
} catch (error) {
console.error('Error signing up with Google:', error);
setError('Failed to sign up with Google');
setIsLoading(false);
}
};
async function handleSubmit(event: React.FormEvent<HTMLFormElement>) {
function handleError(error: unknown) {
console.error("Registration error:", error);
setError(error instanceof Error ? error.message : "Registration failed");
}
try {
event.preventDefault();
setIsLoading(true);
setError(null);
const formData = new FormData(event.currentTarget);
const email = formData.get("email") as string;
const password = formData.get("password") as string;
const name = formData.get("name") as string;
// Basic validation
if (!email || !password || !name) {
handleError("All fields are required");
}
const res = await fetch("/api/auth/signup", {
method: "POST",
body: JSON.stringify({email, password, name}),
headers: {"Content-Type": "application/json"},
});
const data = await res.json();
if (!res.ok) {
handleError(data.error || "Registration failed");
}
// Show a success message with email verification notice
// setRegistrationSuccess(true);
setRegisteredEmail(email);
// Sign in after successful registration
const response = await signIn("credentials", {
email,
password,
redirect: false,
});
if (response?.error) {
handleError("Failed to sign in after registration");
}
redirect()
} catch (error) {
handleError(error);
} finally {
setIsLoading(false);
}
}
return (
<div className="min-h-screen flex items-center justify-center py-12 px-4 sm:px-6 lg:px-8">
<div className="max-w-md w-full space-y-8">
{registrationSuccess ? (
<div className="text-center">
<div className="mx-auto flex items-center justify-center h-12 w-12 rounded-full bg-green-100">
<svg
className="h-6 w-6 text-green-600"
fill="none"
viewBox="0 0 24 24"
stroke="currentColor"
>
<path
strokeLinecap="round"
strokeLinejoin="round"
strokeWidth={2}
d="M5 13l4 4L19 7"
/>
</svg>
</div>
<h2 className="mt-6 text-3xl font-extrabold ">
Check your email
</h2>
<p className="mt-2 text-sm text-gray-600">
We have sent a verification link to <span className="font-medium">{registeredEmail}</span>.
Please click the link in the email to verify your account.
</p>
<p className="mt-4 text-sm text-gray-500">
Did not receive the email? Check your spam folder or{' '}
<button
type="button"
className="font-medium text-blue-600 hover:text-blue-500"
onClick={() => setRegistrationSuccess(false)}
>
try again
</button>
.
</p>
<div className="mt-6">
<Link
href="/login"
className="w-full flex justify-center py-2 px-4 border border-transparent rounded-md shadow-sm text-sm font-medium bg-blue-600 hover:bg-blue-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-blue-500"
>
Back to Login
</Link>
</div>
</div>
) : (
<div>
<div>
{/*<h2 className="mt-6 text-center text-xl font-extrabold text-red-700">*/}
{/* The project is still in development. You can sign up if you want to test it, but your account*/}
{/* may be deleted at any time. To get release updates, fill in this <a*/}
{/* href='https://forms.gle/tKnXUMAbEreMK6FC6'>form</a>.*/}
{/*</h2>*/}
<h2 className="mt-6 text-center text-3xl font-extrabold ">
Create your account
</h2>
</div>
<form className="mt-8 space-y-6" onSubmit={handleSubmit}>
<div className="rounded-md shadow-sm -space-y-px">
<div>
<label htmlFor="name" className="sr-only">
Name
</label>
<input
id="name"
name="name"
type="text"
maxLength={100}
required
className="appearance-none rounded-none relative block w-full px-3 py-2 border border-gray-300 placeholder-gray-500 rounded-t-md focus:outline-none focus:ring-blue-500 focus:border-blue-500 focus:z-10 sm:text-sm"
placeholder="Full name"
/>
</div>
<div>
<label htmlFor="email" className="sr-only">
Email address
</label>
<input
id="email"
name="email"
type="email"
required
className="appearance-none rounded-none relative block w-full px-3 py-2 border border-gray-300 placeholder-gray-500 focus:outline-none focus:ring-blue-500 focus:border-blue-500 focus:z-10 sm:text-sm"
placeholder="Email address"
/>
</div>
<div>
<label htmlFor="password" className="sr-only">
Password
</label>
<input
id="password"
name="password"
type="password"
required
className="appearance-none rounded-none relative block w-full px-3 py-2 border border-gray-300 placeholder-gray-500 rounded-b-md focus:outline-none focus:ring-blue-500 focus:border-blue-500 focus:z-10 sm:text-sm"
placeholder="Password"
/>
</div>
</div>
<div>
<p className="text-xs text-gray-500 mt-2 text-center">
By signing up, I agree to the{" "}
<a href="/terms" className="underline hover:text-blue-600">
Terms and Conditions
</a>{" "}
and{" "}
<a href="/privacy" className="underline hover:text-blue-600">
Privacy Policy
</a>.
</p>
</div>
{error && (
<div className="text-red-500 text-sm text-center">{error}</div>
)}
<div className="space-y-2">
<button
type="submit"
disabled={isLoading}
className={`group relative w-full flex justify-center py-2 px-4 border border-transparent text-sm font-medium rounded-md text-white bg-blue-600 hover:bg-blue-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-blue-500 ${isLoading ? 'opacity-70 cursor-not-allowed' : ''}`}
>
{isLoading ? 'Creating account...' : 'Sign up with Email'}
</button>
<div className="relative">
<div className="absolute inset-0 flex items-center">
<div className="w-full border-t border-gray-300"></div>
</div>
<div className="relative flex justify-center text-sm">
<span className="px-2 bg-gray-50 dark:bg-gray-900 text-gray-500">Or sign up with</span>
</div>
</div>
<button
type="button"
onClick={handleGoogleSignUp}
disabled={isLoading}
className="w-full flex items-center justify-center gap-2 py-2 px-4 border border-gray-300 rounded-md shadow-sm text-sm font-medium text-gray-700 hover: focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-blue-500 disabled:opacity-70 disabled:cursor-not-allowed"
>
<FcGoogle className="w-5 h-5"/>
Continue with Google
</button>
</div>
</form>
<div className="text-center text-sm mt-2">
<p className="text-gray-600">
Already have an account?{' '}
<Link href="/login" className="font-medium text-blue-600 hover:text-blue-500">
Sign in
</Link>
</p>
</div>
</div>
)
}
</div>
</div>
);
}

7
backend/.gitignore vendored Normal file
View File

@@ -0,0 +1,7 @@
# Supabase
.branches
.temp
.env
# regen-schema
**/dump.sql

50
backend/api/.eslintrc.js Normal file
View File

@@ -0,0 +1,50 @@
// ESLint configuration for the backend API package.
// Base (JS) config plus a TypeScript override for *.ts files.
module.exports = {
  plugins: ['lodash', 'unused-imports'],
  extends: ['eslint:recommended'],
  // Build output directories are never linted.
  ignorePatterns: ['dist', 'lib'],
  env: {
    node: true,
  },
  overrides: [
    {
      // Type-aware linting for TypeScript sources only.
      files: ['**/*.ts'],
      plugins: ['@typescript-eslint'],
      extends: ['plugin:@typescript-eslint/recommended', 'prettier'],
      parser: '@typescript-eslint/parser',
      parserOptions: {
        tsconfigRootDir: __dirname,
        project: ['./tsconfig.json'],
      },
      rules: {
        '@typescript-eslint/ban-types': [
          'error',
          {
            extendDefaults: true,
            types: {
              // Allow the `{}` type (banned by default).
              '{}': false,
            },
          },
        ],
        '@typescript-eslint/no-explicit-any': 'off',
        '@typescript-eslint/no-extra-semi': 'off',
        // Underscore-prefixed names are intentionally unused.
        '@typescript-eslint/no-unused-vars': [
          'warn',
          {
            argsIgnorePattern: '^_',
            varsIgnorePattern: '^_',
            caughtErrorsIgnorePattern: '^_',
          },
        ],
        'unused-imports/no-unused-imports': 'warn',
        'no-constant-condition': 'off',
      },
    },
  ],
  rules: {
    // Match line endings to the developer's platform to avoid churn.
    'linebreak-style': [
      'error',
      process.platform === 'win32' ? 'windows' : 'unix',
    ],
    'lodash/import-scope': [2, 'member'],
  },
}

View File

@@ -0,0 +1,6 @@
.gitignore
.gcloudignore
/tsconfig.json
/deploy.sh
/src/
/lib/

16
backend/api/.gitignore vendored Normal file
View File

@@ -0,0 +1,16 @@
# Compiled JavaScript files
lib/
dist/
# Node.js dependency directory
node_modules/
# Terraform
.terraform/*
*.tfstate
*.tfstate.*
crash.log
*_override.tf
*_override.tf.json
.terraformrc
terraform.rc

29
backend/api/Dockerfile Normal file
View File

@@ -0,0 +1,29 @@
# Runtime image for the backend API, served by PM2.
# prereq: first do `yarn build` to compile typescript & etc.
FROM node:19-alpine
WORKDIR /usr/src/app
# Install PM2 globally
RUN yarn global add pm2
# NOTE(review): presumably needed by tsconfig-paths at runtime
# (see ecosystem.config.js) — confirm before removing.
COPY tsconfig.json ./
# first get dependencies in for efficient docker layering
COPY dist/package.json dist/yarn.lock ./
RUN yarn install --frozen-lockfile --production
# then copy over typescript payload
COPY dist ./
# Copy the PM2 ecosystem configuration
COPY ecosystem.config.js ./
ENV PORT=80
EXPOSE 80/tcp
# EXPOSE 8090/tcp
# EXPOSE 8091/tcp
# EXPOSE 8092/tcp
# Use PM2 to run the application with the ecosystem config
CMD ["pm2-runtime", "ecosystem.config.js"]

131
backend/api/README.md Normal file
View File

@@ -0,0 +1,131 @@
# Backend API
This is the code for the API running at `api.compassmeet.com`.
It runs in a docker inside a Google Cloud virtual machine.
### Requirements
You must have the `gcloud` CLI.
On MacOS:
```bash
brew install --cask google-cloud-sdk
```
On Linux:
```bash
sudo apt-get update && sudo apt-get install google-cloud-sdk
```
Then:
```bash
gcloud init
gcloud auth login
gcloud config set project YOUR_PROJECT_ID
```
### Setup
This section is only for the people who are creating a server from scratch, for instance for a forked project.
One-time commands you may need to run:
```bash
gcloud artifacts repositories create builds \
--repository-format=docker \
--location=us-west1 \
--description="Docker images for API"
gcloud auth configure-docker us-west1-docker.pkg.dev
gcloud config set project compass-130ba
gcloud projects add-iam-policy-binding compass-130ba \
--member="user:YOUR_EMAIL@gmail.com" \
--role="roles/artifactregistry.writer"
gcloud projects add-iam-policy-binding compass-130ba \
--member="user:YOUR_EMAIL@gmail.com" \
--role="roles/storage.objectAdmin"
gsutil mb -l us-west1 gs://compass-130ba-terraform-state
gsutil uniformbucketlevelaccess set on gs://compass-130ba-terraform-state
gsutil iam ch user:YOUR_EMAIL@gmail.com:roles/storage.admin gs://compass-130ba-terraform-state
tofu init
gcloud projects add-iam-policy-binding compass-130ba \
--member="serviceAccount:253367029065-compute@developer.gserviceaccount.com" \
--role="roles/secretmanager.secretAccessor"
gcloud run services list
```
##### DNS
* After deployment, Terraform assigns a static external IP to this resource.
* You can get it manually:
```bash
gcloud compute addresses describe api-lb-ip-2 --global --format="get(address)"
34.117.20.215
```
Since Vercel manages your domain (`compassmeet.com`):
1. Log in to [Vercel dashboard](https://vercel.com/dashboard).
2. Go to **Domains → compassmeet.com → Add Record**.
3. Add an **A record** for your API subdomain:
| Type | Name | Value | TTL |
| ---- | ---- | ------------ | ----- |
| A | api | 34.123.45.67 | 600 s |
* `Name` is just the subdomain: `api` → `api.compassmeet.com`.
* `Value` is the **external IP of the LB** from step 1.
Verify connectivity
From your local machine:
```bash
nslookup api.compassmeet.com
ping -c 3 api.compassmeet.com
curl -I https://api.compassmeet.com
```
* `nslookup` should return the LB IP obtained in step 1.
* `curl -I` should return `200 OK` from your service.
If SSL isn't ready (it may take around 15 minutes), check LB logs:
```bash
gcloud compute ssl-certificates describe api-lb-cert-2
```
##### Secrets management
Secrets are strings that shouldn't be checked into Git (eg API keys, passwords).
Add the secrets for your specific project in [Google Cloud Secrets manager](https://console.cloud.google.com/security/secret-manager), so that the virtual machine can access them.
For Compass, the name of the secrets are in [secrets.ts](../../common/src/secrets.ts).
### Run Locally
In root directory, run the local api with hot reload, along with all the other backend and web code.
```bash
./run_local.sh prod
```
### Deploy
Run in this directory to deploy your code to the server.
```bash
./deploy-api.sh prod
```
### Connect to the server
Run in this directory to connect to the API server running as virtual machine in Google Cloud. You can access logs, files, debug, etc.
```bash
./ssh-api.sh prod
```
Useful commands once inside the server:
```bash
sudo journalctl -u konlet-startup --no-pager -efb
sudo docker logs -f $(sudo docker ps -alq)
docker exec -it $(sudo docker ps -alq) sh
```

55
backend/api/deploy-api.sh Executable file
View File

@@ -0,0 +1,55 @@
#!/bin/bash
# Deploys the backend API to GCP: build + push a Docker image, then roll the
# managed instance group to the new template via OpenTofu.
# steps to deploy new version to GCP:
# 1. build new docker image & upload to Google
# 2. create a new GCP instance template with the new docker image
# 3. tell the GCP 'backend service' for the API to update to the new template
# 4. a. GCP creates a new instance with the new template
#    b. wait for the new instance to be healthy (serving TCP connections)
#    c. route new connections to the new instance
#    d. delete the old instance
set -e
source ../../.env
# Environment to deploy (defaults to prod).
ENV=${1:-prod}
# Config
REGION="us-west1"
# NOTE(review): ZONE is declared as us-west1-b but the instance lookups below
# hardcode us-west1-c — confirm which zone the group actually runs in and use
# one consistent value.
ZONE="us-west1-b"
PROJECT="compass-130ba"
SERVICE_NAME="api"
# Tag images with timestamp + short git SHA so each deploy is traceable.
GIT_REVISION=$(git rev-parse --short HEAD)
TIMESTAMP=$(date +"%s")
IMAGE_TAG="${TIMESTAMP}-${GIT_REVISION}"
IMAGE_URL="${REGION}-docker.pkg.dev/${PROJECT}/builds/${SERVICE_NAME}:${IMAGE_TAG}"
echo "🚀 Deploying ${SERVICE_NAME} to ${ENV} ($(date "+%Y-%m-%d %I:%M:%S %p"))"
# NOTE(review): `yarn add` mutates package.json/yarn.lock during deploy —
# consider making tsconfig-paths a committed dependency instead.
yarn add tsconfig-paths
yarn build
gcloud auth print-access-token | docker login -u oauth2accesstoken --password-stdin us-west1-docker.pkg.dev
docker build . --tag ${IMAGE_URL} --platform linux/amd64
echo "docker push ${IMAGE_URL}"
docker push ${IMAGE_URL}
# Hand the new image to OpenTofu, which rolls the instance group.
export TF_VAR_image_url=$IMAGE_URL
export TF_VAR_env=$ENV
tofu apply -auto-approve
# Grant the newest instance's service account read access to the registry
# so the VM can pull the image.
INSTANCE_NAME=$(gcloud compute instances list \
  --filter="zone:(us-west1-c)" \
  --sort-by="~creationTimestamp" \
  --format="value(name)" \
  --limit=1)
SERVICE_ACCOUNT_EMAIL=$(gcloud compute instances describe ${INSTANCE_NAME} \
  --zone us-west1-c \
  --format="value(serviceAccounts.email)")
gcloud projects add-iam-policy-binding ${PROJECT} \
  --member="serviceAccount:$SERVICE_ACCOUNT_EMAIL" \
  --role="roles/artifactregistry.reader"
echo "✅ Deployment complete! Image: ${IMAGE_URL}"

View File

@@ -0,0 +1,21 @@
// PM2 ecosystem config: runs the compiled API server inside the container
// (see Dockerfile's pm2-runtime entrypoint).
module.exports = {
  apps: [
    {
      name: "api",
      // Launch node directly with tsconfig-paths so compiled path aliases resolve.
      script: "node",
      args: "-r tsconfig-paths/register --dns-result-order=ipv4first backend/api/lib/serve.js",
      env: {
        NODE_ENV: "production",
        NODE_PATH: "/usr/src/app/node_modules", // <- ensures Node finds tsconfig-paths
        PORT: 80,
      },
      instances: 1,
      exec_mode: "fork",
      autorestart: true,
      watch: false,
      // 4 GB on the box, give 3 GB to the JS heap
      // NOTE(review): with script set to "node" itself, verify node_args is
      // actually applied to the child process rather than ignored.
      node_args: "--max-old-space-size=3072",
      max_memory_restart: "3500M"
    }
  ]
};

308
backend/api/main.tf Normal file
View File

@@ -0,0 +1,308 @@
variable "image_url" {
description = "Docker image URL"
type = string
default = "us-west1-docker.pkg.dev/compass-130ba/builds/api:latest"
}
# Deployment environment flag, forwarded into the container as
# NEXT_PUBLIC_FIREBASE_ENV (uppercased).
# NOTE(review): the description says "(env or prod)" — presumably
# "(dev or prod)"; confirm before changing the string.
variable "env" {
  description = "Environment (env or prod)"
  type        = string
  default     = "prod"
}
locals {
project = "compass-130ba"
region = "us-west1"
zone = "us-west1-b"
service_name = "api"
machine_type = "e2-small"
}
terraform {
backend "gcs" {
bucket = "compass-130ba-terraform-state"
prefix = "api"
}
}
provider "google" {
project = local.project
region = local.region
zone = local.zone
}
# Firebase Storage Buckets
# Note you still have to deploy the rules: `firebase deploy --only storage`
resource "google_storage_bucket" "public_storage" {
# /!\ That bucket is different from the one in firebase (compass-130ba.firebasestorage.app)
# as it errors when trying to do so:
# Error: googleapi: Error 403: Another user owns the domain compass-130ba.firebasestorage.app or a parent domain. You can either verify domain ownership at https://search.google.com/search-console/welcome?new_domain_name=compass-130ba.firebasestorage.app or find the current owner and ask that person to create the bucket for you, forbidden
# To be fixed later if they must be the same bucket (shared resources)
name = "compass-130ba"
location = "US"
force_destroy = false
uniform_bucket_level_access = true
cors {
origin = ["*"]
method = ["GET", "HEAD", "PUT", "POST", "DELETE"]
response_header = ["*"]
max_age_seconds = 3600
}
}
# static IPs
resource "google_compute_global_address" "api_lb_ip" {
name = "api-lb-ip-2"
address_type = "EXTERNAL"
}
resource "google_compute_managed_ssl_certificate" "api_cert" {
name = "api-lb-cert-1"
managed {
domains = ["api.compassmeet.com"]
}
}
# Instance template with your Docker container
resource "google_compute_instance_template" "api_template" {
name_prefix = "${local.service_name}-"
machine_type = local.machine_type
tags = ["lb-health-check"]
disk {
source_image = "cos-cloud/cos-stable" # Container-Optimized OS
auto_delete = true
boot = true
}
network_interface {
network = "default"
subnetwork = "default"
access_config {
network_tier = "PREMIUM"
}
}
service_account {
scopes = ["cloud-platform"]
}
metadata = {
gce-container-declaration = <<EOF
spec:
containers:
- image: '${var.image_url}'
env:
- name: NEXT_PUBLIC_FIREBASE_ENV
value: ${upper(var.env)}
- name: GOOGLE_CLOUD_PROJECT
value: ${local.project}
ports:
- containerPort: 80
EOF
}
lifecycle {
create_before_destroy = true
}
}
# Managed instance group (for 1 VM)
resource "google_compute_region_instance_group_manager" "api_group" {
name = "${local.service_name}-group"
base_instance_name = "${local.service_name}-group"
region = local.region
target_size = 1
version {
instance_template = google_compute_instance_template.api_template.id
name = "primary"
}
update_policy {
type = "PROACTIVE"
minimal_action = "REPLACE"
max_unavailable_fixed = 0
max_surge_fixed = 3
}
named_port {
name = "http"
port = 80
}
auto_healing_policies {
health_check = google_compute_health_check.api_health_check.id
initial_delay_sec = 300
}
}
resource "google_compute_health_check" "api_health_check" {
name = "${local.service_name}-health-check"
check_interval_sec = 5
timeout_sec = 5
healthy_threshold = 2
unhealthy_threshold = 10
tcp_health_check {
port = "80"
}
}
# Backend service
resource "google_compute_backend_service" "api_backend" {
name = "${local.service_name}-backend"
protocol = "HTTP"
port_name = "http"
timeout_sec = 30
health_checks = [google_compute_health_check.api_health_check.id]
backend {
group = google_compute_region_instance_group_manager.api_group.instance_group
}
log_config {
enable = true
}
}
# URL map
resource "google_compute_url_map" "api_url_map" {
name = "${local.service_name}-url-map"
default_service = google_compute_backend_service.api_backend.id
host_rule {
hosts = ["*"]
path_matcher = "allpaths"
}
path_matcher {
name = "allpaths"
default_service = google_compute_backend_service.api_backend.self_link
}
}
# HTTPS proxy
resource "google_compute_target_https_proxy" "api_https_proxy" {
name = "${local.service_name}-https-proxy"
url_map = google_compute_url_map.api_url_map.id
ssl_certificates = [google_compute_managed_ssl_certificate.api_cert.id]
}
# Global forwarding rule (load balancer frontend)
resource "google_compute_global_forwarding_rule" "api_https_forwarding_rule" {
name = "${local.service_name}-https-forwarding-rule-2"
target = google_compute_target_https_proxy.api_https_proxy.id
port_range = "443"
ip_address = google_compute_global_address.api_lb_ip.id
}
# HTTP-to-HTTPS redirect
resource "google_compute_url_map" "api_http_redirect" {
name = "${local.service_name}-http-redirect"
default_url_redirect {
https_redirect = true
redirect_response_code = "MOVED_PERMANENTLY_DEFAULT"
strip_query = false
}
}
resource "google_compute_target_http_proxy" "api_http_proxy" {
name = "${local.service_name}-http-proxy"
url_map = google_compute_url_map.api_http_redirect.id
}
resource "google_compute_global_forwarding_rule" "api_http_forwarding_rule" {
name = "${local.service_name}-http-forwarding-rule"
target = google_compute_target_http_proxy.api_http_proxy.id
port_range = "80"
ip_address = google_compute_global_address.api_lb_ip.id
}
# Firewalls
resource "google_compute_firewall" "allow_health_check" {
name = "allow-health-check-${local.service_name}"
network = "default"
allow {
protocol = "tcp"
ports = ["80"]
}
source_ranges = ["130.211.0.0/22", "35.191.0.0/16"]
target_tags = ["lb-health-check"]
}
# Opens HTTP/HTTPS ingress from anywhere to the default network.
# NOTE(review): the resource label says "https" but the GCP name is
# "default-allow-http" and it opens both 80 and 443 to 0.0.0.0/0 —
# confirm naming/scope are intentional (renaming `name` recreates the rule).
resource "google_compute_firewall" "default_allow_https" {
  name      = "default-allow-http"
  network   = "default"
  priority  = 1000
  direction = "INGRESS"
  allow {
    protocol = "tcp"
    ports    = ["80", "443"] # ["443", "8090-8099"]
  }
  source_ranges = ["0.0.0.0/0"]
}
# resource "google_compute_firewall" "default_allow_ssh" {
# name = "default-allow-ssh"
# network = "default"
# priority = 65534
# direction = "INGRESS"
#
# allow {
# protocol = "tcp"
# ports = ["22"]
# }
#
# source_ranges = ["0.0.0.0/0"]
# }
#
# resource "google_compute_firewall" "default_allow_internal" {
# name = "default-allow-internal"
# network = "default"
# priority = 65534
# direction = "INGRESS"
#
# allow {
# protocol = "tcp"
# ports = ["0-65535"]
# }
#
# allow {
# protocol = "udp"
# ports = ["0-65535"]
# }
#
# allow {
# protocol = "icmp"
# }
#
# source_ranges = ["10.128.0.0/9"]
# }
#
# # Allow ICMP (ping)
# resource "google_compute_firewall" "default_allow_icmp" {
# name = "default-allow-icmp"
# network = "default"
# priority = 65534
# direction = "INGRESS"
#
# allow {
# protocol = "icmp"
# }
#
# source_ranges = ["0.0.0.0/0"]
# }

68
backend/api/package.json Normal file
View File

@@ -0,0 +1,68 @@
{
"name": "@compass/api",
"description": "Backend API endpoints",
"version": "0.1.0",
"private": true,
"scripts": {
"watch:compile": "npx concurrently \"tsc -b --watch --preserveWatchOutput\" \"(cd ../../common && tsc-alias --watch)\" \"(cd ../shared && tsc-alias --watch)\" \"(cd ../email && tsc-alias --watch)\" \"tsc-alias --watch\"",
"watch:serve": "nodemon -r tsconfig-paths/register --watch lib --ignore 'lib/**/*.map' src/serve.ts",
"dev": "npx concurrently -n COMPILE,SERVER -c cyan,green \"yarn watch:compile\" \"yarn watch:serve\"",
"build": "yarn compile && yarn dist:clean && yarn dist:copy",
"build:fast": "yarn compile && yarn dist:copy",
"compile": "tsc -b && tsc-alias && (cd ../../common && tsc-alias) && (cd ../shared && tsc-alias) && (cd ../email && tsc-alias)",
"debug": "nodemon -r tsconfig-paths/register --watch src -e ts --watch ../../common/src --watch ../shared/src --exec \"yarn build && node --inspect-brk src/serve.ts\"",
"dist": "yarn dist:clean && yarn dist:copy",
"dist:clean": "rm -rf dist && mkdir -p dist/common/lib dist/backend/shared/lib dist/backend/api/lib dist/backend/email/lib",
"dist:copy": "rsync -a --delete ../../common/lib/ dist/common/lib && rsync -a --delete ../shared/lib/ dist/backend/shared/lib && rsync -a --delete ../email/lib/ dist/backend/email/lib && rsync -a --delete ./lib/* dist/backend/api/lib && cp ../../yarn.lock dist && cp package.json dist",
"watch": "tsc -w",
"verify": "yarn --cwd=../.. verify",
"verify:dir": "npx eslint . --max-warnings 0",
"regen-types": "cd ../supabase && make ENV=prod regen-types",
"regen-types-dev": "cd ../supabase && make ENV=dev regen-types"
},
"engines": {
"node": ">=16.0.0"
},
"main": "src/serve.ts",
"dependencies": {
"@google-cloud/monitoring": "4.0.0",
"@google-cloud/secret-manager": "4.2.1",
"@react-email/components": "0.0.33",
"@supabase/supabase-js": "2.38.5",
"@tiptap/core": "2.3.2",
"@tiptap/extension-blockquote": "2.3.2",
"@tiptap/extension-bold": "2.3.2",
"@tiptap/extension-bubble-menu": "2.3.2",
"@tiptap/extension-floating-menu": "2.3.2",
"@tiptap/extension-image": "2.3.2",
"@tiptap/extension-link": "2.3.2",
"@tiptap/extension-mention": "2.3.2",
"@tiptap/html": "2.3.2",
"@tiptap/pm": "2.3.2",
"@tiptap/starter-kit": "2.3.2",
"@tiptap/suggestion": "2.3.2",
"colors": "1.4.0",
"cors": "2.8.5",
"dayjs": "1.11.4",
"express": "4.18.1",
"firebase-admin": "11.11.1",
"gcp-metadata": "6.1.0",
"jsonwebtoken": "9.0.0",
"lodash": "4.17.21",
"pg-promise": "11.4.1",
"posthog-node": "4.11.0",
"react": "19.0.0",
"react-dom": "19.0.0",
"react-email": "3.0.7",
"resend": "4.1.2",
"string-similarity": "4.0.4",
"tsconfig-paths": "4.2.0",
"twitter-api-v2": "1.15.0",
"ws": "8.17.0",
"zod": "3.21.4"
},
"devDependencies": {
"@types/cors": "2.8.17",
"@types/ws": "8.5.10"
}
}

187
backend/api/src/app.ts Normal file
View File

@@ -0,0 +1,187 @@
import { API, type APIPath } from 'common/api/schema'
import { APIError, pathWithPrefix } from 'common/api/utils'
import cors from 'cors'
import * as crypto from 'crypto'
import express from 'express'
import { type ErrorRequestHandler, type RequestHandler } from 'express'
import { hrtime } from 'node:process'
import { withMonitoringContext } from 'shared/monitoring/context'
import { log } from 'shared/monitoring/log'
import { metrics } from 'shared/monitoring/metrics'
import { banUser } from './ban-user'
import { blockUser, unblockUser } from './block-user'
import { getCompatibleLoversHandler } from './compatible-lovers'
import { createComment } from './create-comment'
import { createCompatibilityQuestion } from './create-compatibility-question'
import { createLover } from './create-lover'
import { createUser } from './create-user'
import { getCompatibilityQuestions } from './get-compatibililty-questions'
import { getLikesAndShips } from './get-likes-and-ships'
import { getLoverAnswers } from './get-lover-answers'
import { getLovers } from './get-lovers'
import { getSupabaseToken } from './get-supabase-token'
import { getDisplayUser, getUser } from './get-user'
import { getMe } from './get-me'
import { hasFreeLike } from './has-free-like'
import { health } from './health'
import { typedEndpoint, type APIHandler } from './helpers/endpoint'
import { hideComment } from './hide-comment'
import { likeLover } from './like-lover'
import { markAllNotifsRead } from './mark-all-notifications-read'
import { removePinnedPhoto } from './remove-pinned-photo'
import { report } from './report'
import { searchLocation } from './search-location'
import { searchNearCity } from './search-near-city'
import { shipLovers } from './ship-lovers'
import { starLover } from './star-lover'
import { updateLover } from './update-lover'
import { updateMe } from './update-me'
import { deleteMe } from './delete-me'
import { getCurrentPrivateUser } from './get-current-private-user'
import { createPrivateUserMessage } from './create-private-user-message'
import {
getChannelMemberships,
getChannelMessages,
getLastSeenChannelTime,
setChannelLastSeenTime,
} from 'api/get-private-messages'
import { searchUsers } from './search-users'
import { createPrivateUserMessageChannel } from './create-private-user-message-channel'
import { leavePrivateUserMessageChannel } from './leave-private-user-message-channel'
import { updatePrivateUserMessageChannel } from './update-private-user-message-channel'
import { getNotifications } from './get-notifications'
import { updateNotifSettings } from './update-notif-setting'
// Permissive CORS middleware (no options object = allow any origin).
const allowCorsUnrestricted: RequestHandler = cors({})
// Build middleware that stamps responses with the given Cache-Control
// policy. A missing/empty policy yields a pass-through middleware.
function cacheController(policy?: string): RequestHandler {
  return (_req, res, next) => {
    if (!policy) {
      next()
      return
    }
    res.appendHeader('Cache-Control', policy)
    next()
  }
}
// Per-request monitoring middleware: establishes a trace context (reusing
// Cloud Trace's X-Cloud-Trace-Context header when present), logs the
// request, and records count + latency metrics per endpoint.
const requestMonitoring: RequestHandler = (req, _res, next) => {
  const traceContext = req.get('X-Cloud-Trace-Context')
  // Header format is "TRACE_ID/SPAN_ID;o=OPTIONS"; keep only the trace id.
  const traceId = traceContext
    ? traceContext.split('/')[0]
    : crypto.randomUUID()
  const context = { endpoint: req.path, traceId }
  withMonitoringContext(context, () => {
    const startTs = hrtime.bigint()
    log(`${req.method} ${req.url}`)
    metrics.inc('http/request_count', { endpoint: req.path })
    next()
    // NOTE(review): next() returns when the synchronous part of the chain
    // finishes, so this latency excludes async handler work — confirm that
    // is the intended measurement.
    const endTs = hrtime.bigint()
    const latencyMs = Number(endTs - startTs) / 1e6
    metrics.push('http/request_latency', latencyMs, { endpoint: req.path })
  })
}
// Final error-handling middleware. APIErrors map to their own status code
// with a structured { message, details? } body. Any other error is logged
// in full but returned as an opaque 500 — previously the raw stack trace
// and error object were sent to the client, which disclosed internals.
const apiErrorHandler: ErrorRequestHandler = (error, _req, res, _next) => {
  if (error instanceof APIError) {
    log.info(error)
    if (!res.headersSent) {
      const output: { [k: string]: unknown } = { message: error.message }
      if (error.details != null) {
        output.details = error.details
      }
      res.status(error.code).json(output)
    }
  } else {
    // Full error (with stack) stays in the server logs only.
    log.error(error)
    if (!res.headersSent) {
      res.status(500).json({ message: 'An unexpected error occurred.' })
    }
  }
}
// The Express application. Monitoring runs first so every request gets a
// trace context; OPTIONS preflights are answered for all routes.
export const app = express()
app.use(requestMonitoring)
app.options('*', allowCorsUnrestricted)
// Route table: one implementation per path declared in the shared API
// schema. The mapped type forces every APIPath to have exactly one handler
// with the matching typed signature.
const handlers: { [k in APIPath]: APIHandler<k> } = {
  health: health,
  'get-supabase-token': getSupabaseToken,
  'get-notifications': getNotifications,
  'mark-all-notifs-read': markAllNotifsRead,
  'user/:username': getUser,
  'user/:username/lite': getDisplayUser,
  'user/by-id/:id': getUser,
  'user/by-id/:id/lite': getDisplayUser,
  'user/by-id/:id/block': blockUser,
  'user/by-id/:id/unblock': unblockUser,
  'search-users': searchUsers,
  'ban-user': banUser,
  report: report,
  'create-user': createUser,
  'create-lover': createLover,
  me: getMe,
  'me/private': getCurrentPrivateUser,
  'me/update': updateMe,
  'update-notif-settings': updateNotifSettings,
  'me/delete': deleteMe,
  'update-lover': updateLover,
  'like-lover': likeLover,
  'ship-lovers': shipLovers,
  'get-likes-and-ships': getLikesAndShips,
  'has-free-like': hasFreeLike,
  'star-lover': starLover,
  'get-lovers': getLovers,
  'get-lover-answers': getLoverAnswers,
  'get-compatibility-questions': getCompatibilityQuestions,
  'remove-pinned-photo': removePinnedPhoto,
  'create-comment': createComment,
  'hide-comment': hideComment,
  'create-compatibility-question': createCompatibilityQuestion,
  'compatible-lovers': getCompatibleLoversHandler,
  'search-location': searchLocation,
  'search-near-city': searchNearCity,
  'create-private-user-message': createPrivateUserMessage,
  'create-private-user-message-channel': createPrivateUserMessageChannel,
  'update-private-user-message-channel': updatePrivateUserMessageChannel,
  'leave-private-user-message-channel': leavePrivateUserMessageChannel,
  'get-channel-memberships': getChannelMemberships,
  'get-channel-messages': getChannelMessages,
  'get-channel-seen-time': getLastSeenChannelTime,
  'set-channel-seen-time': setChannelLastSeenTime,
}
// Mount every handler: JSON body parsing, permissive CORS, the endpoint's
// cache policy (if any), the typed wrapper that validates props against the
// schema, and finally the API error handler.
Object.entries(handlers).forEach(([path, handler]) => {
  const api = API[path as APIPath]
  // NOTE(review): `as any` bypasses the schema type to read `cache` —
  // presumably only some endpoints declare a cache policy; confirm against
  // common/api/schema.
  const cache = cacheController((api as any).cache)
  const url = '/' + pathWithPrefix(path as APIPath)
  const apiRoute = [
    url,
    express.json(),
    allowCorsUnrestricted,
    cache,
    typedEndpoint(path as any, handler as any),
    apiErrorHandler,
  ] as const
  if (api.method === 'POST') {
    app.post(...apiRoute)
  } else if (api.method === 'GET') {
    app.get(...apiRoute)
    // } else if (api.method === 'PUT') {
    //   app.put(...apiRoute)
  } else {
    throw new Error('Unsupported API method')
  }
})
// Catch-all for unmatched routes: OPTIONS preflights get a bare 200,
// everything else a JSON 404 with a pointer back to this file.
app.use(allowCorsUnrestricted, (req, res) => {
  if (req.method === 'OPTIONS') {
    res.status(200).send()
    return
  }
  const message = `The requested route '${req.path}' does not exist. Please check your URL for any misspellings or refer to app.ts`
  res
    .status(404)
    .set('Content-Type', 'application/json')
    .json({ message })
})

View File

@@ -0,0 +1,21 @@
import { APIError, APIHandler } from 'api/helpers/endpoint'
import { trackPublicEvent } from 'shared/analytics'
import { throwErrorIfNotMod } from 'shared/helpers/auth'
import { isAdminId } from 'common/envs/constants'
import { log } from 'shared/utils'
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { updateUser } from 'shared/supabase/users'
// Ban (or, with `unban`, reinstate) a user's posting privileges.
// Mod-only; admin accounts can never be banned. The action is recorded as a
// public analytics event before the flag is written.
export const banUser: APIHandler<'ban-user'> = async (body, auth) => {
  const { userId, unban } = body
  const db = createSupabaseDirectClient()
  await throwErrorIfNotMod(auth.uid)
  if (isAdminId(userId)) throw new APIError(403, 'Cannot ban admin')
  await trackPublicEvent(auth.uid, 'ban user', { userId })
  await updateUser(db, userId, { isBannedFromPosting: !unban })
  log('updated user')
}

View File

@@ -0,0 +1,36 @@
import { APIError, APIHandler } from './helpers/endpoint'
import { FieldVal } from 'shared/supabase/utils'
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { updatePrivateUser } from 'shared/supabase/users'
// Block user `id` on behalf of the caller. Both sides of the edge — the
// caller's blocked list and the target's blocked-by list — are written in a
// single transaction so they cannot drift apart.
export const blockUser: APIHandler<'user/by-id/:id/block'> = async (
  { id },
  auth
) => {
  if (auth.uid === id) throw new APIError(400, 'You cannot block yourself')
  const pg = createSupabaseDirectClient()
  await pg.tx(async (tx) => {
    // Forward edge: caller blocks `id`.
    await updatePrivateUser(tx, auth.uid, {
      blockedUserIds: FieldVal.arrayConcat(id),
    })
    // Reverse edge: `id` records being blocked by the caller.
    await updatePrivateUser(tx, id, {
      blockedByUserIds: FieldVal.arrayConcat(auth.uid),
    })
  })
}
// Undo blockUser: remove both sides of the block edge in one transaction.
export const unblockUser: APIHandler<'user/by-id/:id/unblock'> = async (
  { id },
  auth
) => {
  const pg = createSupabaseDirectClient()
  await pg.tx(async (tx) => {
    // Drop `id` from the caller's blocked list…
    await updatePrivateUser(tx, auth.uid, {
      blockedUserIds: FieldVal.arrayRemove(id),
    })
    // …and the caller from `id`'s blocked-by list.
    await updatePrivateUser(tx, id, {
      blockedByUserIds: FieldVal.arrayRemove(auth.uid),
    })
  })
}

View File

@@ -0,0 +1,61 @@
import { groupBy, sortBy } from 'lodash'
import { APIError, type APIHandler } from 'api/helpers/endpoint'
import { getCompatibilityScore } from 'common/love/compatibility-score'
import {
getLover,
getCompatibilityAnswers,
getGenderCompatibleLovers,
} from 'shared/love/supabase'
import { log } from 'shared/utils'
// Thin API wrapper: delegate to getCompatibleLovers for the requested user.
export const getCompatibleLoversHandler: APIHandler<
  'compatible-lovers'
> = async (props) => await getCompatibleLovers(props.userId)
// Compute the gender-compatible lovers for `userId`, scored on their
// compatibility-question answers and sorted best match first.
export const getCompatibleLovers = async (userId: string) => {
  const lover = await getLover(userId)
  log('got lover', {
    id: lover?.id,
    userId: lover?.user_id,
    username: lover?.user?.username,
  })
  if (!lover) throw new APIError(404, 'Lover not found')
  // Candidates pre-filtered by mutual gender preference.
  const lovers = await getGenderCompatibleLovers(lover)
  // Answers for the requester plus every candidate, keyed by author below.
  const loverAnswers = await getCompatibilityAnswers([
    userId,
    ...lovers.map((l) => l.user_id),
  ])
  log('got lover answers ' + loverAnswers.length)
  const answersByUserId = groupBy(loverAnswers, 'creator_id')
  // Pairwise score of the requester vs. each candidate; users with no
  // answers contribute an empty answer set.
  const loverCompatibilityScores = Object.fromEntries(
    lovers.map(
      (l) =>
        [
          l.user_id,
          getCompatibilityScore(
            answersByUserId[lover.user_id] ?? [],
            answersByUserId[l.user_id] ?? []
          ),
        ] as const
    )
  )
  // Highest score first (sortBy is ascending, hence the reverse).
  const sortedCompatibleLovers = sortBy(
    lovers,
    (l) => loverCompatibilityScores[l.user_id].score
  ).reverse()
  return {
    status: 'success',
    lover,
    compatibleLovers: sortedCompatibleLovers,
    loverCompatibilityScores,
  }
}

View File

@@ -0,0 +1,129 @@
import { APIError, APIHandler } from 'api/helpers/endpoint'
import { type JSONContent } from '@tiptap/core'
import { getPrivateUser, getUser } from 'shared/utils'
import {
createSupabaseDirectClient,
SupabaseDirectClient,
} from 'shared/supabase/init'
import { getNotificationDestinationsForUser } from 'common/user-notification-preferences'
import { Notification } from 'common/notifications'
import { insertNotificationToSupabase } from 'shared/supabase/notifications'
import { User } from 'common/user'
import { richTextToString } from 'common/util/parse'
import * as crypto from 'crypto'
import { sendNewEndorsementEmail } from 'email/functions/helpers'
import { type Row } from 'common/supabase/utils'
import { broadcastUpdatedComment } from 'shared/websockets/helpers'
import { convertComment } from 'common/supabase/comment'
// Upper bound on a comment's JSON-serialized length, enforced at creation.
export const MAX_COMMENT_JSON_LENGTH = 20000
// Create a comment (endorsement) on another user's profile. Validates the
// author and content, inserts the row, notifies the profile owner (unless
// they commented on themselves), and broadcasts the comment over websockets.
export const createComment: APIHandler<'create-comment'> = async (
  { userId, content: submittedContent, replyToCommentId },
  auth
) => {
  const { creator, content } = await validateComment(
    userId,
    auth.uid,
    submittedContent
  )
  const onUser = await getUser(userId)
  if (!onUser) throw new APIError(404, 'User not found')
  const pg = createSupabaseDirectClient()
  // Author name/username/avatar are denormalized onto the comment row.
  const comment = await pg.one<Row<'lover_comments'>>(
    `insert into lover_comments (user_id, user_name, user_username, user_avatar_url, on_user_id, content, reply_to_comment_id)
    values ($1, $2, $3, $4, $5, $6, $7) returning *`,
    [
      creator.id,
      creator.name,
      creator.username,
      creator.avatarUrl,
      userId,
      content,
      replyToCommentId,
    ]
  )
  // Self-comments don't generate a notification.
  if (onUser.id !== creator.id)
    await createNewCommentOnLoverNotification(
      onUser,
      creator,
      richTextToString(content),
      comment.id,
      pg
    )
  broadcastUpdatedComment(convertComment(comment))
  return { status: 'success' }
}
// Validate a prospective comment: the author must exist and not be banned,
// the target must exist and must not have blocked the author, and the
// serialized content must fit within MAX_COMMENT_JSON_LENGTH.
// Returns the content together with the resolved author record.
const validateComment = async (
  userId: string,
  creatorId: string,
  content: JSONContent
) => {
  const creator = await getUser(creatorId)
  if (!creator) throw new APIError(401, 'Your account was not found')
  if (creator.isBannedFromPosting) throw new APIError(403, 'You are banned')

  const otherUser = await getPrivateUser(userId)
  if (!otherUser) throw new APIError(404, 'Other user not found')
  if (otherUser.blockedUserIds.includes(creatorId)) {
    throw new APIError(404, 'User has blocked you')
  }

  const serializedLength = JSON.stringify(content).length
  if (serializedLength > MAX_COMMENT_JSON_LENGTH) {
    throw new APIError(
      400,
      `Comment is too long; should be less than ${MAX_COMMENT_JSON_LENGTH} as a JSON string.`
    )
  }
  return { content, creator }
}
// Build a 'new_endorsement' notification for the profile owner and deliver
// it over whichever channels their notification preferences allow.
const createNewCommentOnLoverNotification = async (
  onUser: User,
  creator: User,
  sourceText: string,
  commentId: number,
  pg: SupabaseDirectClient
) => {
  const privateUser = await getPrivateUser(onUser.id)
  if (!privateUser) return
  const id = crypto.randomUUID()
  const reason = 'new_endorsement'
  const { sendToBrowser, sendToMobile, sendToEmail } =
    getNotificationDestinationsForUser(privateUser, reason)
  const notification: Notification = {
    id,
    userId: privateUser.id,
    reason,
    createdTime: Date.now(),
    isSeen: false,
    sourceId: commentId.toString(),
    sourceType: 'comment_on_lover',
    sourceUpdateType: 'created',
    sourceUserName: creator.name,
    sourceUserUsername: creator.username,
    sourceUserAvatarUrl: creator.avatarUrl,
    sourceText: sourceText,
    sourceSlug: onUser.username,
  }
  if (sendToBrowser) {
    // In-app notifications live in the notifications table.
    await insertNotificationToSupabase(notification, pg)
  }
  if (sendToMobile) {
    // Push notifications are intentionally disabled for now.
    // await createPushNotification(
    //   notification,
    //   privateUser,
    //   `${creator.name} commented on your profile`,
    //   sourceText
    // )
  }
  if (sendToEmail) {
    await sendNewEndorsementEmail(privateUser, creator, onUser, sourceText)
  }
}

View File

@@ -0,0 +1,27 @@
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { getUser } from 'shared/utils'
import { APIHandler, APIError } from './helpers/endpoint'
import { insert } from 'shared/supabase/utils'
import { tryCatch } from 'common/util/try-catch'
// Create a multiple-choice compatibility question authored by the caller.
export const createCompatibilityQuestion: APIHandler<
  'create-compatibility-question'
> = async ({ question, options }, auth) => {
  const creator = await getUser(auth.uid)
  if (!creator) throw new APIError(401, 'Your account was not found')
  const pg = createSupabaseDirectClient()
  const { data, error } = await tryCatch(
    insert(pg, 'love_questions', {
      creator_id: creator.id,
      question,
      answer_type: 'compatibility_multiple_choice',
      multiple_choice_options: options,
    })
  )
  // An insert failure is a server-side error, not an auth failure:
  // respond 500, not 401 as before.
  if (error) throw new APIError(500, 'Error creating question')
  return { question: data }
}

View File

@@ -0,0 +1,46 @@
import { APIError, APIHandler } from 'api/helpers/endpoint'
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { log, getUser } from 'shared/utils'
import { HOUR_MS } from 'common/util/time'
import { removePinnedUrlFromPhotoUrls } from 'shared/love/parse-photos'
import { track } from 'shared/analytics'
import { updateUser } from 'shared/supabase/users'
import { tryCatch } from 'common/util/try-catch'
import { insert } from 'shared/supabase/utils'
// Create the caller's `lovers` row (their dating profile). Fails if a
// profile already exists. Brand-new accounts (< 1h old) also get their
// avatar set to the pinned photo.
export const createLover: APIHandler<'create-lover'> = async (body, auth) => {
  const pg = createSupabaseDirectClient()
  const { data: existingUser } = await tryCatch(
    pg.oneOrNone<{ id: string }>('select id from lovers where user_id = $1', [
      auth.uid,
    ])
  )
  if (existingUser) {
    throw new APIError(400, 'User already exists')
  }
  await removePinnedUrlFromPhotoUrls(body)
  const user = await getUser(auth.uid)
  if (!user) throw new APIError(401, 'Your account was not found')
  if (user.createdTime > Date.now() - HOUR_MS) {
    // If they just signed up, set their avatar to be their pinned photo.
    // Previously fire-and-forget; awaited so failures surface here instead
    // of as an unhandled rejection.
    await updateUser(pg, auth.uid, { avatarUrl: body.pinned_url })
  }
  // Use the structured logger instead of a leftover console.log.
  log('create lover body', body)
  const { data, error } = await tryCatch(
    insert(pg, 'lovers', { user_id: auth.uid, ...body })
  )
  if (error) {
    log.error('Error creating user: ' + error.message)
    throw new APIError(500, 'Error creating user')
  }
  log('Created user', data)
  await track(user.id, 'create lover', { username: user.username })
  return data
}

View File

@@ -0,0 +1,71 @@
import { APIError, APIHandler } from 'api/helpers/endpoint'
import { filterDefined } from 'common/util/array'
import { uniq } from 'lodash'
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { addUsersToPrivateMessageChannel } from 'api/junk-drawer/private-messages'
import { getPrivateUser, getUser } from 'shared/utils'
// Create (or reuse) a private message channel for exactly the given users
// plus the caller. Rejects banned callers, unknown users, and any pairing
// where one participant has blocked another. If a channel with the exact
// same member set exists, it is returned instead of creating a duplicate.
export const createPrivateUserMessageChannel: APIHandler<
  'create-private-user-message-channel'
> = async (body, auth) => {
  const userIds = uniq(body.userIds.concat(auth.uid))
  const pg = createSupabaseDirectClient()
  const creatorId = auth.uid
  const creator = await getUser(creatorId)
  if (!creator) throw new APIError(401, 'Your account was not found')
  if (creator.isBannedFromPosting) throw new APIError(403, 'You are banned')
  const toPrivateUsers = filterDefined(
    await Promise.all(userIds.map((id) => getPrivateUser(id)))
  )
  // A count mismatch means at least one id had no private user record.
  if (toPrivateUsers.length !== userIds.length)
    throw new APIError(
      404,
      `Private user ${userIds.find(
        (uid) => !toPrivateUsers.map((p) => p.id).includes(uid)
      )} not found`
    )
  if (
    toPrivateUsers.some((user) =>
      user.blockedUserIds.some((blockedId) => userIds.includes(blockedId))
    )
  ) {
    throw new APIError(
      403,
      'One of the users has blocked another user in the list'
    )
  }
  // Exact-set match: mutual containment (@> and <@) of the aggregated
  // member ids against userIds finds a channel with precisely these members.
  // NOTE(review): assumes one membership row per (channel, user) — confirm
  // duplicates are impossible, otherwise array_agg could mis-compare.
  const currentChannel = await pg.oneOrNone(
    `
    select channel_id from private_user_message_channel_members
    group by channel_id
    having array_agg(user_id::text) @> array[$1]::text[]
    and array_agg(user_id::text) <@ array[$1]::text[]
    `,
    [userIds]
  )
  if (currentChannel)
    return {
      status: 'success',
      channelId: Number(currentChannel.channel_id),
    }
  const channel = await pg.one(
    `insert into private_user_message_channels default values returning id`
  )
  // The creator joins immediately; the remaining members are added through
  // the shared helper.
  await pg.none(
    `insert into private_user_message_channel_members (channel_id, user_id, role, status)
    values ($1, $2, 'creator', 'joined')
    `,
    [channel.id, creatorId]
  )
  const memberIds = userIds.filter((id) => id !== creatorId)
  await addUsersToPrivateMessageChannel(memberIds, channel.id, pg)
  return { status: 'success', channelId: Number(channel.id) }
}

View File

@@ -0,0 +1,28 @@
import { APIError, APIHandler } from 'api/helpers/endpoint'
import { getUser } from 'shared/utils'
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { MAX_COMMENT_JSON_LENGTH } from 'api/create-comment'
import { createPrivateUserMessageMain } from 'api/junk-drawer/private-messages'
// Post a message to a private channel: enforce the size cap, verify the
// author exists and is not banned, then delegate to the shared helper.
export const createPrivateUserMessage: APIHandler<
  'create-private-user-message'
> = async (body, auth) => {
  const { content, channelId } = body
  const serialized = JSON.stringify(content)
  if (serialized.length > MAX_COMMENT_JSON_LENGTH) {
    throw new APIError(
      400,
      `Message JSON should be less than ${MAX_COMMENT_JSON_LENGTH}`
    )
  }
  const pg = createSupabaseDirectClient()
  const creator = await getUser(auth.uid)
  if (!creator) throw new APIError(401, 'Your account was not found')
  if (creator.isBannedFromPosting) throw new APIError(403, 'You are banned')
  return await createPrivateUserMessageMain(
    creator,
    channelId,
    content,
    pg,
    'private'
  )
}

View File

@@ -0,0 +1,158 @@
import * as admin from 'firebase-admin'
import { PrivateUser } from 'common/user'
import { randomString } from 'common/util/random'
import { cleanDisplayName, cleanUsername } from 'common/util/clean-username'
import { getIp, track } from 'shared/analytics'
import { APIError, APIHandler } from './helpers/endpoint'
import { getDefaultNotificationPreferences } from 'common/user-notification-preferences'
import { removeUndefinedProps } from 'common/util/object'
import { generateAvatarUrl } from 'shared/helpers/generate-and-update-avatar-urls'
import { getStorage } from 'firebase-admin/storage'
import { DEV_CONFIG } from 'common/envs/dev'
import { PROD_CONFIG } from 'common/envs/prod'
import { RESERVED_PATHS } from 'common/envs/constants'
import { log, isProd, getUser, getUserByUsername } from 'shared/utils'
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { insert } from 'shared/supabase/utils'
import { convertPrivateUser, convertUser } from 'common/supabase/users'
// Create the `users` + `private_users` rows for a freshly-authenticated
// Firebase account: derive a display name and unique username, pick or
// generate an avatar, and auto-ban signups from known-bad devices/IPs.
// Both inserts happen in one transaction; a follow-up analytics event runs
// as the continuation after the response is sent.
export const createUser: APIHandler<'create-user'> = async (
  props,
  auth,
  req
) => {
  const { deviceToken: preDeviceToken, adminToken } = props
  const firebaseUser = await admin.auth().getUser(auth.uid)
  const testUserAKAEmailPasswordUser =
    firebaseUser.providerData[0].providerId === 'password'
  // if (
  //   testUserAKAEmailPasswordUser &&
  //   adminToken !== process.env.TEST_CREATE_USER_KEY
  // ) {
  //   throw new APIError(
  //     401,
  //     'Must use correct TEST_CREATE_USER_KEY to create user with email/password'
  //   )
  // }
  const host = req.get('referer')
  log(`Create user from: ${host}`)
  const ip = getIp(req)
  // Test (email/password) users get a random device token so they never
  // collide with real devices in the ban lists.
  const deviceToken = testUserAKAEmailPasswordUser
    ? randomString() + randomString()
    : preDeviceToken
  // Reuse the Firebase record fetched above — previously an identical
  // second admin.auth().getUser() round-trip was made here.
  const email = firebaseUser.email
  const emailName = email?.replace(/@.*$/, '')
  const rawName =
    firebaseUser.displayName || emailName || 'User' + randomString(4)
  const name = cleanDisplayName(rawName)
  const bucket = getStorage().bucket(getStorageBucketId())
  const avatarUrl = firebaseUser.photoURL
    ? firebaseUser.photoURL
    : await generateAvatarUrl(auth.uid, name, bucket)
  const pg = createSupabaseDirectClient()
  let username = cleanUsername(name)
  // Check username case-insensitive
  const dupes = await pg.one<number>(
    `select count(*) from users where username ilike $1`,
    [username],
    (r) => r.count
  )
  const usernameExists = dupes > 0
  const isReservedName = RESERVED_PATHS.includes(username)
  if (usernameExists || isReservedName) username += randomString(4)
  const { user, privateUser } = await pg.tx(async (tx) => {
    const preexistingUser = await getUser(auth.uid, tx)
    if (preexistingUser)
      throw new APIError(403, 'User already exists', {
        userId: auth.uid,
      })
    // Check exact username to avoid problems with duplicate requests
    const sameNameUser = await getUserByUsername(username, tx)
    if (sameNameUser)
      throw new APIError(403, 'Username already taken', { username })
    const user = removeUndefinedProps({
      avatarUrl,
      // Auto-ban signups from known-abusive devices or IP addresses.
      isBannedFromPosting: Boolean(
        (deviceToken && bannedDeviceTokens.includes(deviceToken)) ||
          (ip && bannedIpAddresses.includes(ip))
      ),
      link: {},
    })
    const privateUser: PrivateUser = {
      id: auth.uid,
      email,
      initialIpAddress: ip,
      initialDeviceToken: deviceToken,
      notificationPreferences: getDefaultNotificationPreferences(),
      blockedUserIds: [],
      blockedByUserIds: [],
    }
    const newUserRow = await insert(tx, 'users', {
      id: auth.uid,
      name,
      username,
      data: user,
    })
    const newPrivateUserRow = await insert(tx, 'private_users', {
      id: privateUser.id,
      data: privateUser,
    })
    return {
      user: convertUser(newUserRow),
      privateUser: convertPrivateUser(newPrivateUserRow),
    }
  })
  log('created user ', { username: user.username, firebaseId: auth.uid })
  // Runs after the response is sent.
  const continuation = async () => {
    await track(auth.uid, 'create lover', { username: user.username })
  }
  return {
    result: {
      user,
      privateUser,
    },
    continue: continuation,
  }
}
// Pick the Firebase storage bucket for the current environment.
function getStorageBucketId() {
  const config = isProd() ? PROD_CONFIG : DEV_CONFIG
  return config.firebaseConfig.storageBucket
}
// Automatically ban users with these device tokens or ip addresses.
// Matching signups are created with isBannedFromPosting = true (see
// createUser above).
const bannedDeviceTokens = [
  'fa807d664415',
  'dcf208a11839',
  'bbf18707c15d',
  '4c2d15a6cc0c',
  '0da6b4ea79d3',
]
// Both IPv4 and IPv6 literals; compared verbatim against the request IP.
const bannedIpAddresses: string[] = [
  '24.176.214.250',
  '2607:fb90:bd95:dbcd:ac39:6c97:4e35:3fed',
  '2607:fb91:389:ddd0:ac39:8397:4e57:f060',
  '2607:fb90:ed9a:4c8f:ac39:cf57:4edd:4027',
  '2607:fb90:bd36:517a:ac39:6c91:812c:6328',
]

View File

@@ -0,0 +1,28 @@
import { getUser } from 'shared/utils'
import { APIError, APIHandler } from './helpers/endpoint'
import { updatePrivateUser, updateUser } from 'shared/supabase/users'
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { FieldVal } from 'shared/supabase/utils'
// Soft-delete the authenticated account. The caller must re-type their own
// username as confirmation; the account is then marked deleted and banned
// from posting, and the stored email is removed. Both writes run in one
// transaction so a partial failure cannot leave a half-deleted account
// (previously they were two independent updates).
export const deleteMe: APIHandler<'me/delete'> = async (body, auth) => {
  const { username } = body
  const user = await getUser(auth.uid)
  if (!user) {
    throw new APIError(401, 'Your account was not found')
  }
  if (user.username !== username) {
    throw new APIError(
      400,
      `Incorrect username. You are logged in as ${user.username}. Are you sure you want to delete this account?`
    )
  }
  const pg = createSupabaseDirectClient()
  await pg.tx(async (tx) => {
    await updateUser(tx, auth.uid, {
      userDeleted: true,
      isBannedFromPosting: true,
    })
    await updatePrivateUser(tx, auth.uid, {
      email: FieldVal.delete(),
    })
  })
}

View File

@@ -0,0 +1,41 @@
import { type APIHandler } from 'api/helpers/endpoint'
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { Row } from 'common/supabase/utils'
// List all multiple-choice compatibility questions with per-question answer
// counts and a relevance score, highest score first.
// score = avg((importance + 1 + [1 if the answer has no explanation])^2)
// over that question's answers.
export const getCompatibilityQuestions: APIHandler<
  'get-compatibility-questions'
> = async (_props, _auth) => {
  const pg = createSupabaseDirectClient()
  const questions = await pg.manyOrNone<
    Row<'love_questions'> & { answer_count: number; score: number }
  >(
    `SELECT
      love_questions.*,
      COUNT(love_compatibility_answers.question_id) as answer_count,
      AVG(POWER(love_compatibility_answers.importance + 1 + CASE WHEN love_compatibility_answers.explanation IS NULL THEN 1 ELSE 0 END, 2)) as score
    FROM
      love_questions
    LEFT JOIN
      love_compatibility_answers ON love_questions.id = love_compatibility_answers.question_id
    WHERE
      love_questions.answer_type = 'compatibility_multiple_choice'
    GROUP BY
      love_questions.id
    ORDER BY
      score DESC
    `,
    []
  )
  // (Removed a dead `if (false) console.log(...)` debug block.)
  return {
    status: 'success',
    questions,
  }
}

View File

@@ -0,0 +1,32 @@
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { APIError, APIHandler } from './helpers/endpoint'
import { PrivateUser } from 'common/user'
import { Row } from 'common/supabase/utils'
import { tryCatch } from 'common/util/try-catch'
// Fetch the caller's private_users row. DB failures surface as 500s; a
// missing row means the account does not exist (401).
export const getCurrentPrivateUser: APIHandler<'me/private'> = async (
  _,
  auth
) => {
  const pg = createSupabaseDirectClient()
  const result = await tryCatch(
    pg.oneOrNone<Row<'private_users'>>(
      'select * from private_users where id = $1',
      [auth.uid]
    )
  )
  if (result.error) {
    throw new APIError(
      500,
      'Error fetching private user data: ' + result.error.message
    )
  }
  if (!result.data) {
    throw new APIError(401, 'Your account was not found')
  }
  return result.data.data as PrivateUser
}

View File

@@ -0,0 +1,106 @@
import { type APIHandler } from 'api/helpers/endpoint'
import { createSupabaseDirectClient } from 'shared/supabase/init'
// API wrapper: run the shared query helper and tag the payload as success.
export const getLikesAndShips: APIHandler<'get-likes-and-ships'> = async (
  props
) => {
  const payload = await getLikesAndShipsMain(props.userId)
  return { status: 'success', ...payload }
}
// Gather, for `userId`: likes they gave, likes they received, and ships
// (third-party matchmaking) that include them. Counterparts must be looking
// for matches, have a pinned photo, and not be banned from posting.
// created_time values are normalized to epoch milliseconds.
export const getLikesAndShipsMain = async (userId: string) => {
  const pg = createSupabaseDirectClient()
  // The three queries are independent, so run them concurrently instead of
  // sequentially as before.
  const [likesGiven, likesReceived, ships] = await Promise.all([
    pg.map<{
      user_id: string
      created_time: number
    }>(
      `
      select target_id, love_likes.created_time
      from love_likes
      join lovers on lovers.user_id = love_likes.target_id
      join users on users.id = love_likes.target_id
      where creator_id = $1
      and looking_for_matches
      and lovers.pinned_url is not null
      and (data->>'isBannedFromPosting' != 'true' or data->>'isBannedFromPosting' is null)
      order by created_time desc
      `,
      [userId],
      (r) => ({
        user_id: r.target_id,
        created_time: new Date(r.created_time).getTime(),
      })
    ),
    pg.map<{
      user_id: string
      created_time: number
    }>(
      `
      select creator_id, love_likes.created_time
      from love_likes
      join lovers on lovers.user_id = love_likes.creator_id
      join users on users.id = love_likes.creator_id
      where target_id = $1
      and looking_for_matches
      and lovers.pinned_url is not null
      and (data->>'isBannedFromPosting' != 'true' or data->>'isBannedFromPosting' is null)
      order by created_time desc
      `,
      [userId],
      (r) => ({
        user_id: r.creator_id,
        created_time: new Date(r.created_time).getTime(),
      })
    ),
    // Ships where the user is either target; target_id is set to the *other*
    // participant in each branch of the union.
    pg.map<{
      creator_id: string
      target_id: string
      target1_id: string
      target2_id: string
      created_time: number
    }>(
      `
      select
      target1_id, target2_id, creator_id, love_ships.created_time,
      target1_id as target_id
      from love_ships
      join lovers on lovers.user_id = love_ships.target1_id
      join users on users.id = love_ships.target1_id
      where target2_id = $1
      and lovers.looking_for_matches
      and lovers.pinned_url is not null
      and (users.data->>'isBannedFromPosting' != 'true' or users.data->>'isBannedFromPosting' is null)
      union all
      select
      target1_id, target2_id, creator_id, love_ships.created_time,
      target2_id as target_id
      from love_ships
      join lovers on lovers.user_id = love_ships.target2_id
      join users on users.id = love_ships.target2_id
      where target1_id = $1
      and lovers.looking_for_matches
      and lovers.pinned_url is not null
      and (users.data->>'isBannedFromPosting' != 'true' or users.data->>'isBannedFromPosting' is null)
      `,
      [userId],
      (r) => ({
        ...r,
        created_time: new Date(r.created_time).getTime(),
      })
    ),
  ])
  return {
    likesGiven,
    likesReceived,
    ships,
  }
}

View File

@@ -0,0 +1,25 @@
import { type APIHandler } from 'api/helpers/endpoint'
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { Row } from 'common/supabase/utils'
// Return every compatibility answer authored by `userId`, newest first.
export const getLoverAnswers: APIHandler<'get-lover-answers'> = async (
  props,
  _auth
) => {
  const pg = createSupabaseDirectClient()
  const answers = await pg.manyOrNone<Row<'love_compatibility_answers'>>(
    `select * from love_compatibility_answers
    where
    creator_id = $1
    order by created_time desc
    `,
    [props.userId]
  )
  return { status: 'success', answers }
}

View File

@@ -0,0 +1,134 @@
import { type APIHandler } from 'api/helpers/endpoint'
import { convertRow } from 'shared/love/supabase'
import { createSupabaseDirectClient } from 'shared/supabase/init'
import {
from,
join,
limit,
orderBy,
renderSql,
select,
where,
} from 'shared/supabase/sql-builder'
import { getCompatibleLovers } from 'api/compatible-lovers'
import { intersection } from 'lodash'
/**
 * Search lovers with optional profile filters and cursor paging.
 *
 * Two paths: ordering by compatibility_score filters an in-memory list from
 * getCompatibleLovers (TODO below); every other ordering is done in SQL.
 * Returns { status, lovers } — 'fail' only when compatibility ordering is
 * requested without a user to be compatible with.
 */
export const getLovers: APIHandler<'get-lovers'> = async (props, _auth) => {
  const pg = createSupabaseDirectClient()
  const {
    limit: limitParam,
    after,
    name,
    genders,
    pref_gender,
    pref_age_min,
    pref_age_max,
    pref_relation_styles,
    wants_kids_strength,
    has_kids,
    is_smoker,
    geodbCityIds,
    compatibleWithUserId,
    orderBy: orderByParam,
  } = props
  // compatibility. TODO: do this in sql
  if (orderByParam === 'compatibility_score') {
    if (!compatibleWithUserId) return { status: 'fail', lovers: [] }
    const { compatibleLovers } = await getCompatibleLovers(compatibleWithUserId)
    const lovers = compatibleLovers.filter(
      (l) =>
        (!name || l.user.name.toLowerCase().includes(name.toLowerCase())) &&
        (!genders || genders.includes(l.gender)) &&
        (!pref_gender || intersection(pref_gender, l.pref_gender).length) &&
        (!pref_age_min || l.age >= pref_age_min) &&
        (!pref_age_max || l.age <= pref_age_max) &&
        (!pref_relation_styles ||
          intersection(pref_relation_styles, l.pref_relation_styles).length) &&
        (!wants_kids_strength ||
          wants_kids_strength == -1 ||
          (wants_kids_strength >= 2
            ? l.wants_kids_strength >= wants_kids_strength
            : l.wants_kids_strength <= wants_kids_strength)) &&
        // Match the SQL path below: 0 means "no kids", >0 means "has kids".
        // (The previous OR-form passed every lover when has_kids == 0.)
        (has_kids == undefined ||
          has_kids == -1 ||
          (has_kids == 0
            ? !l.has_kids
            : l.has_kids != null && l.has_kids > 0)) &&
        (!is_smoker || l.is_smoker === is_smoker) &&
        (!geodbCityIds ||
          (l.geodb_city_id && geodbCityIds.includes(l.geodb_city_id)))
    )
    // Cursor paging: resume just past the lover whose id matches `after`.
    const cursor = after
      ? lovers.findIndex((l) => l.id.toString() === after) + 1
      : 0
    return {
      status: 'success',
      lovers: lovers.slice(cursor, cursor + limitParam),
    }
  }
  // NOTE(review): orderByParam is interpolated into the SQL below; it must be
  // restricted to a known set of column names by the endpoint schema — confirm.
  const query = renderSql(
    select('lovers.*, name, username, users.data as user'),
    from('lovers'),
    join('users on users.id = lovers.user_id'),
    where('looking_for_matches = true'),
    // where(`pinned_url is not null and pinned_url != ''`),
    where(
      `(data->>'isBannedFromPosting' != 'true' or data->>'isBannedFromPosting' is null)`
    ),
    // Parenthesized so the OR cannot bleed into neighboring AND clauses.
    where(`(data->>'userDeleted' != 'true' or data->>'userDeleted' is null)`),
    name &&
      where(`lower(users.name) ilike '%' || lower($(name)) || '%'`, { name }),
    genders?.length && where(`gender = ANY($(gender))`, { gender: genders }),
    pref_gender?.length &&
      where(`pref_gender && $(pref_gender)`, { pref_gender }),
    pref_age_min !== undefined &&
      where(`age >= $(pref_age_min)`, { pref_age_min }),
    pref_age_max !== undefined &&
      where(`age <= $(pref_age_max)`, { pref_age_max }),
    pref_relation_styles?.length &&
      where(`pref_relation_styles && $(pref_relation_styles)`, {
        pref_relation_styles,
      }),
    wants_kids_strength !== undefined &&
      wants_kids_strength !== -1 &&
      where(
        wants_kids_strength >= 2
          ? `wants_kids_strength >= $(wants_kids_strength)`
          : `wants_kids_strength <= $(wants_kids_strength)`,
        { wants_kids_strength }
      ),
    // Parenthesized for the same OR-precedence reason as above.
    has_kids === 0 && where(`(has_kids IS NULL OR has_kids = 0)`),
    has_kids && has_kids > 0 && where(`has_kids > 0`),
    is_smoker !== undefined && where(`is_smoker = $(is_smoker)`, { is_smoker }),
    geodbCityIds?.length &&
      where(`geodb_city_id = ANY($(geodbCityIds))`, { geodbCityIds }),
    orderBy(`${orderByParam} desc`),
    after &&
      where(
        `lovers.${orderByParam} < (select lovers.${orderByParam} from lovers where id = $(after))`,
        { after }
      ),
    limit(limitParam)
  )
  const lovers = await pg.map(query, [], convertRow)
  return { status: 'success', lovers }
}

View File

@@ -0,0 +1,6 @@
import { type APIHandler } from './helpers/endpoint'
import { getUser } from 'api/get-user'
// Return the full user record for the authenticated caller.
export const getMe: APIHandler<'me'> = async (_, auth) =>
  getUser({ id: auth.uid })

View File

@@ -0,0 +1,23 @@
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { APIHandler } from 'api/helpers/endpoint'
import { Notification } from 'common/notifications'
/**
 * Page of the caller's notifications, newest first; with `after`, only those
 * created after that timestamp.
 */
export const getNotifications: APIHandler<'get-notifications'> = async (
  { limit, after },
  auth
) => {
  const db = createSupabaseDirectClient()
  const rows = await db.map(
    `
    select data from user_notifications
    where user_id = $1
    and ($3 is null or (data->'createdTime')::bigint > $3)
    order by (data->'createdTime')::bigint desc
    limit $2
  `,
    [auth.uid, limit, after],
    (row) => row.data as Notification
  )
  return rows
}

View File

@@ -0,0 +1,147 @@
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { APIHandler } from './helpers/endpoint'
import {
convertPrivateChatMessage,
PrivateMessageChannel,
} from 'common/supabase/private-messages'
import { groupBy, mapValues } from 'lodash'
/**
 * List the caller's private-message channel memberships.
 *
 * With `channelId`: just that one membership. Otherwise: a page of channels,
 * ordered by most recent channel activity, optionally filtered by
 * `createdTime` / `lastUpdatedTime` lower bounds.
 * Also returns, per channel, the ids of the *other* members.
 */
export const getChannelMemberships: APIHandler<
  'get-channel-memberships'
> = async (props, auth) => {
  const pg = createSupabaseDirectClient()
  const { channelId, lastUpdatedTime, createdTime, limit } = props
  let channels: PrivateMessageChannel[]
  // Shape a raw row into a PrivateMessageChannel.
  const convertRow = (r: any) => ({
    channel_id: r.channel_id as number,
    notify_after_time: r.notify_after_time as string,
    created_time: r.created_time as string,
    last_updated_time: r.last_updated_time as string,
  })
  if (channelId) {
    // Single-channel lookup, scoped to the caller's membership row.
    channels = await pg.map(
      `select channel_id, notify_after_time, pumcm.created_time, last_updated_time
      from private_user_message_channel_members pumcm
      join private_user_message_channels pumc on pumc.id= pumcm.channel_id
      where user_id = $1
      and channel_id = $2
      limit $3
      `,
      [auth.uid, channelId, limit],
      convertRow
    )
  } else {
    // One row per channel (distinct on pumc.id). The inner join on
    // private_user_messages drops channels whose only content is the
    // caller's own 'introduction' message.
    channels = await pg.map(
      `with latest_channels as (
      select distinct on (pumc.id) pumc.id as channel_id, notify_after_time, pumc.created_time,
      (select created_time
      from private_user_messages
      where channel_id = pumc.id
      and visibility != 'system_status'
      and user_id != $1
      order by created_time desc
      limit 1) as last_updated_time, -- last_updated_time is the last possible unseen message time
      pumc.last_updated_time as last_updated_channel_time -- last_updated_channel_time is the last time the channel was updated
      from private_user_message_channels pumc
      join private_user_message_channel_members pumcm on pumcm.channel_id = pumc.id
      inner join private_user_messages pum on pumc.id = pum.channel_id
      and (pum.visibility != 'introduction' or pum.user_id != $1)
      where pumcm.user_id = $1
      and not status = 'left'
      and ($2 is null or pumcm.created_time > $2)
      and ($4 is null or pumc.last_updated_time > $4)
      order by pumc.id, pumc.last_updated_time desc
      )
      select * from latest_channels
      order by last_updated_channel_time desc
      limit $3
      `,
      [auth.uid, createdTime ?? null, limit, lastUpdatedTime ?? null],
      convertRow
    )
  }
  if (!channels || channels.length === 0)
    return { channels: [], memberIdsByChannelId: {} }
  const channelIds = channels.map((c) => c.channel_id)
  // All *other* active members of the returned channels (caller excluded).
  const members = await pg.map(
    `select channel_id, user_id
    from private_user_message_channel_members
    where not user_id = $1
    and channel_id in ($2:list)
    and not status = 'left'
    `,
    [auth.uid, channelIds],
    (r) => ({
      channel_id: r.channel_id as number,
      user_id: r.user_id as string,
    })
  )
  // channel_id -> [member user ids]
  const memberIdsByChannelId = mapValues(
    groupBy(members, 'channel_id'),
    (members) => members.map((m) => m.user_id)
  )
  return {
    channels,
    memberIdsByChannelId,
  }
}
/**
 * Page of a channel's messages, newest first. Membership is enforced inside
 * the query via the exists() subselect; system_status rows are excluded.
 * With `id`, returns only messages with a greater id.
 */
export const getChannelMessages: APIHandler<'get-channel-messages'> = async (
  { channelId, limit, id },
  auth
) => {
  const db = createSupabaseDirectClient()
  const query = `select *, created_time as created_time_ts
    from private_user_messages
    where channel_id = $1
    and exists (select 1 from private_user_message_channel_members pumcm
    where pumcm.user_id = $2
    and pumcm.channel_id = $1
    )
    and ($4 is null or id > $4)
    and not visibility = 'system_status'
    order by created_time desc
    limit $3
    `
  return await db.map(
    query,
    [channelId, auth.uid, limit, id],
    convertPrivateChatMessage
  )
}
/**
 * Latest "seen" timestamp per channel for the caller.
 * @returns [channelId, createdTime] pairs, one per channel that has a row.
 */
export const getLastSeenChannelTime: APIHandler<
  'get-channel-seen-time'
> = async (props, auth) => {
  const pg = createSupabaseDirectClient()
  const { channelIds } = props
  // Type the row mapper as a tuple so no unsafe cast of the whole result
  // array is needed (the old `as [number, string][]` assertion).
  return await pg.map(
    `select distinct on (channel_id) channel_id, created_time
    from private_user_seen_message_channels
    where channel_id = any($1)
    and user_id = $2
    order by channel_id, created_time desc
    `,
    [channelIds, auth.uid],
    (r): [number, string] => [r.channel_id as number, r.created_time as string]
  )
}
// Record "seen now" for this user+channel; reads take the latest row per
// channel, so repeated inserts are harmless.
export const setChannelLastSeenTime: APIHandler<
  'set-channel-seen-time'
> = async ({ channelId }, auth) => {
  const db = createSupabaseDirectClient()
  const query = `insert into private_user_seen_message_channels (user_id, channel_id)
    values ($1, $2)
    `
  await db.none(query, [auth.uid, channelId])
}

View File

@@ -0,0 +1,33 @@
import { sign } from 'jsonwebtoken'
import { APIError, APIHandler } from './helpers/endpoint'
import { DEV_CONFIG } from 'common/envs/dev'
import { PROD_CONFIG } from 'common/envs/prod'
import { isProd } from 'shared/utils'
/**
 * Mint a short-lived anon-role JWT so the client can talk to supabase
 * directly, tied to this user via the `sub` claim.
 * @throws APIError 500 when the signing secret or instance id is missing.
 */
export const getSupabaseToken: APIHandler<'get-supabase-token'> = async (
  _,
  auth
) => {
  const jwtSecret = process.env.SUPABASE_JWT_SECRET
  if (jwtSecret == null) {
    throw new APIError(500, "No SUPABASE_JWT_SECRET; couldn't sign token.")
  }
  const config = isProd() ? PROD_CONFIG : DEV_CONFIG
  const instanceId = config.supabaseInstanceId
  if (!instanceId) {
    throw new APIError(500, 'No Supabase instance ID in config.')
  }
  const jwt = sign({ role: 'anon' /* postgres role */ }, jwtSecret, {
    algorithm: 'HS256', // same as what supabase uses for its auth tokens
    expiresIn: '1d',
    audience: instanceId,
    issuer: config.firebaseConfig.projectId,
    subject: auth.uid,
  })
  return { jwt }
}

View File

@@ -0,0 +1,33 @@
import { toUserAPIResponse } from 'common/api/user-types'
import { convertUser, displayUserColumns } from 'common/supabase/users'
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { APIError } from 'common/api/utils'
import { removeNullOrUndefinedProps } from 'common/util/object'
/**
 * Look up a user by id or username and shape it for the public API.
 * @throws APIError 404 when no such user exists.
 */
export const getUser = async (props: { id: string } | { username: string }) => {
  const pg = createSupabaseDirectClient()
  const [column, value] =
    'id' in props ? ['id', props.id] : ['username', props.username]
  const user = await pg.oneOrNone(
    `select * from users where ${column} = $1`,
    [value],
    (r) => (r ? convertUser(r) : null)
  )
  if (!user) throw new APIError(404, 'User not found')
  return toUserAPIResponse(user)
}
/**
 * Lightweight user lookup: only the display columns, nulls stripped.
 * @throws APIError 404 when no such user exists.
 */
export const getDisplayUser = async (
  props: { id: string } | { username: string }
) => {
  const pg = createSupabaseDirectClient()
  const [column, value] =
    'id' in props ? ['id', props.id] : ['username', props.username]
  const liteUser = await pg.oneOrNone(
    `select ${displayUserColumns}
    from users
    where ${column} = $1`,
    [value]
  )
  if (!liteUser) throw new APIError(404, 'User not found')
  return removeNullOrUndefinedProps(liteUser)
}

View File

@@ -0,0 +1,29 @@
import { type APIHandler } from 'api/helpers/endpoint'
import { createSupabaseDirectClient } from 'shared/supabase/init'
// Report whether the caller still has today's free like available.
export const hasFreeLike: APIHandler<'has-free-like'> = async (
  _props,
  auth
) => {
  const hasFreeLike = await getHasFreeLike(auth.uid)
  return { status: 'success', hasFreeLike }
}
/**
 * True iff the user has not yet used today's free like, where "today" is the
 * current calendar day in America/Los_Angeles.
 */
export const getHasFreeLike = async (userId: string) => {
  const pg = createSupabaseDirectClient()
  const query = `
    select 1
    from love_likes
    where creator_id = $1
    and created_time at time zone 'UTC' at time zone 'America/Los_Angeles' >= (now() at time zone 'UTC' at time zone 'America/Los_Angeles')::date
    and created_time at time zone 'UTC' at time zone 'America/Los_Angeles' < ((now() at time zone 'UTC' at time zone 'America/Los_Angeles')::date + interval '1 day')
    limit 1
  `
  const likeGivenToday = await pg.oneOrNone<object>(query, [userId])
  return likeGivenToday === null
}

View File

@@ -0,0 +1,8 @@
import { APIHandler } from './helpers/endpoint'
// Trivial liveness probe; echoes the caller's uid when authenticated.
export const health: APIHandler<'health'> = async (_, auth) => ({
  message: 'Server is working.',
  uid: auth?.uid,
})

View File

@@ -0,0 +1,219 @@
import * as admin from 'firebase-admin'
import { z } from 'zod'
import { Request, Response, NextFunction } from 'express'
import { PrivateUser } from 'common/user'
import { APIError } from 'common/api/utils'
export { APIError } from 'common/api/utils'
import {
API,
APIPath,
APIResponseOptionalContinue,
APISchema,
ValidatedAPIParams,
} from 'common/api/schema'
import { log } from 'shared/utils'
import { getPrivateUserByKey } from 'shared/utils'
// Any JSON-serializable value — the only shape an endpoint may return.
export type Json = Record<string, unknown> | Json[]
// Handler for endpoints that require no authentication.
export type JsonHandler<T extends Json> = (
  req: Request,
  res: Response
) => Promise<T>
// Handler that requires a resolved, authenticated user.
export type AuthedHandler<T extends Json> = (
  req: Request,
  user: AuthedUser,
  res: Response
) => Promise<T>
// Handler where auth is optional; `user` is undefined for anonymous callers.
export type MaybeAuthedHandler<T extends Json> = (
  req: Request,
  user: AuthedUser | undefined,
  res: Response
) => Promise<T>
// Authenticated caller plus the credentials they presented; key credentials
// carry the private user they resolved to.
export type AuthedUser = {
  uid: string
  creds: JwtCredentials | (KeyCredentials & { privateUser: PrivateUser })
}
// Firebase-verified ID token.
type JwtCredentials = { kind: 'jwt'; data: admin.auth.DecodedIdToken }
// Raw API key string, later looked up against private users.
type KeyCredentials = { kind: 'key'; data: string }
type Credentials = JwtCredentials | KeyCredentials
/**
 * Extract credentials from the Authorization header.
 * Supports "Bearer <firebase-jwt>" (verified here) and "Key <api-key>"
 * (resolved later by lookupUser).
 * @throws APIError 401 on missing/malformed headers, 500 on verify failure.
 */
export const parseCredentials = async (req: Request): Promise<Credentials> => {
  const auth = admin.auth()
  const authHeader = req.get('Authorization')
  if (!authHeader) {
    throw new APIError(401, 'Missing Authorization header.')
  }
  const authParts = authHeader.split(' ')
  if (authParts.length !== 2) {
    throw new APIError(401, 'Invalid Authorization header.')
  }
  const [scheme, payload] = authParts
  switch (scheme) {
    case 'Bearer':
      if (payload === 'undefined') {
        throw new APIError(401, 'Firebase JWT payload undefined.')
      }
      try {
        return { kind: 'jwt', data: await auth.verifyIdToken(payload) }
      } catch (err) {
        // This is somewhat suspicious, so get it into the firebase console —
        // but do NOT log the raw token: it is a live bearer credential and
        // would leak into log storage.
        console.error('Error verifying Firebase JWT: ', err, scheme)
        throw new APIError(500, 'Error validating token.')
      }
    case 'Key':
      return { kind: 'key', data: payload }
    default:
      throw new APIError(401, 'Invalid auth scheme; must be "Key" or "Bearer".')
  }
}
/**
 * Resolve parsed credentials into an AuthedUser.
 * @throws APIError 401 for malformed JWTs, unknown API keys, or an
 * unrecognized credential kind.
 */
export const lookupUser = async (creds: Credentials): Promise<AuthedUser> => {
  if (creds.kind === 'jwt') {
    if (typeof creds.data.user_id !== 'string') {
      throw new APIError(401, 'JWT must contain user ID.')
    }
    return { uid: creds.data.user_id, creds }
  }
  if (creds.kind === 'key') {
    const key = creds.data
    const privateUser = await getPrivateUserByKey(key)
    if (!privateUser) {
      throw new APIError(401, `No private user exists with API key ${key}.`)
    }
    return { uid: privateUser.id, creds: { privateUser, ...creds } }
  }
  throw new APIError(401, 'Invalid credential type.')
}
/**
 * Validate a value against a zod schema, returning the parsed data.
 * @throws APIError 400 (with per-field issues) when validation fails.
 */
export const validate = <T extends z.ZodTypeAny>(schema: T, val: unknown) => {
  const parsed = schema.safeParse(val)
  if (parsed.success) {
    return parsed.data as z.infer<T>
  }
  const issues = parsed.error.issues.map((issue) => ({
    field: issue.path.join('.') || null,
    error: issue.message,
  }))
  if (issues.length > 0) {
    log.error(issues.map((i) => `${i.field}: ${i.error}`).join('\n'))
  }
  throw new APIError(400, 'Error validating request.', issues)
}
// Express wrapper for an unauthenticated handler: serialize the result as
// JSON, route any error to next().
export const jsonEndpoint = <T extends Json>(fn: JsonHandler<T>) => {
  return async (req: Request, res: Response, next: NextFunction) => {
    try {
      const body = await fn(req, res)
      res.status(200).json(body)
    } catch (e) {
      next(e)
    }
  }
}
// Express wrapper that requires auth: parse + resolve credentials first, then
// run the handler; any failure (auth or handler) goes to next().
export const authEndpoint = <T extends Json>(fn: AuthedHandler<T>) => {
  return async (req: Request, res: Response, next: NextFunction) => {
    try {
      const creds = await parseCredentials(req)
      const authedUser = await lookupUser(creds)
      const body = await fn(req, authedUser, res)
      res.status(200).json(body)
    } catch (e) {
      next(e)
    }
  }
}
// Express wrapper with optional auth: any credential failure downgrades the
// request to anonymous instead of rejecting it.
export const MaybeAuthedEndpoint = <T extends Json>(
  fn: MaybeAuthedHandler<T>
) => {
  return async (req: Request, res: Response, next: NextFunction) => {
    let authUser: AuthedUser | undefined
    try {
      authUser = await lookupUser(await parseCredentials(req))
    } catch {
      // it's treated as an anon request
    }
    try {
      const body = await fn(req, authUser, res)
      res.status(200).json(body)
    } catch (e) {
      next(e)
    }
  }
}
// Signature of a typed endpoint implementation: schema-validated props, auth
// info (guaranteed present only when the schema declares authed: true), and
// the raw express request.
export type APIHandler<N extends APIPath> = (
  props: ValidatedAPIParams<N>,
  auth: APISchema<N> extends { authed: true }
    ? AuthedUser
    : AuthedUser | undefined,
  req: Request
) => Promise<APIResponseOptionalContinue<N>>
/**
 * Wrap a typed API handler as express middleware: resolves credentials,
 * validates props against the endpoint's schema, runs the handler, and
 * serializes the (possibly two-part) result.
 */
export const typedEndpoint = <N extends APIPath>(
  name: N,
  handler: APIHandler<N>
) => {
  const { props: propSchema, authed: authRequired, method } = API[name]
  return async (req: Request, res: Response, next: NextFunction) => {
    let authUser: AuthedUser | undefined = undefined
    try {
      authUser = await lookupUser(await parseCredentials(req))
    } catch (e) {
      // Auth failure is fatal only when the endpoint requires auth;
      // otherwise the request proceeds anonymously.
      if (authRequired) return next(e)
    }
    // GET props come from the query string, other methods from the body;
    // path params override both.
    const props = {
      ...(method === 'GET' ? req.query : req.body),
      ...req.params,
    }
    try {
      const resultOptionalContinue = await handler(
        validate(propSchema, props),
        authUser as AuthedUser,
        req
      )
      // Handlers may return { result, continue } to defer work until after
      // the response has been sent.
      const hasContinue =
        resultOptionalContinue &&
        'continue' in resultOptionalContinue &&
        'result' in resultOptionalContinue
      const result = hasContinue
        ? resultOptionalContinue.result
        : resultOptionalContinue
      if (!res.headersSent) {
        // Convert bigint to number, b/c JSON doesn't support bigint.
        const convertedResult = deepConvertBigIntToNumber(result)
        res.status(200).json(convertedResult ?? { success: true })
      }
      if (hasContinue) {
        // Runs after the response is (normally) already sent; any error
        // still flows to the express error handler.
        await resultOptionalContinue.continue()
      }
    } catch (error) {
      next(error)
    }
  }
}
// Recursively replace every bigint in a value with a Number, mutating objects
// and arrays in place and returning the same reference. Non-bigint primitives
// pass through untouched.
const deepConvertBigIntToNumber = (value: any): any => {
  if (typeof value === 'bigint') {
    return Number(value)
  }
  if (value !== null && typeof value === 'object') {
    for (const key of Object.keys(value)) {
      value[key] = deepConvertBigIntToNumber(value[key])
    }
  }
  return value
}

View File

@@ -0,0 +1,35 @@
import { APIError, APIHandler } from 'api/helpers/endpoint'
import { isAdminId } from 'common/envs/constants'
import { convertComment } from 'common/supabase/comment'
import { Row } from 'common/supabase/utils'
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { broadcastUpdatedComment } from 'shared/websockets/helpers'
/**
 * Hide or un-hide a profile comment. Allowed for admins, the comment's
 * author, and the owner of the profile it was left on.
 * @throws APIError 404 if the comment doesn't exist, 403 if unauthorized.
 */
export const hideComment: APIHandler<'hide-comment'> = async (
  { commentId, hide },
  auth
) => {
  const pg = createSupabaseDirectClient()
  const comment = await pg.oneOrNone<Row<'lover_comments'>>(
    `select * from lover_comments where id = $1`,
    [commentId]
  )
  if (!comment) {
    throw new APIError(404, 'Comment not found')
  }
  if (
    !isAdminId(auth.uid) &&
    comment.user_id !== auth.uid &&
    comment.on_user_id !== auth.uid
  ) {
    throw new APIError(403, 'You are not allowed to hide this comment')
  }
  await pg.none(`update lover_comments set hidden = $2 where id = $1`, [
    commentId,
    hide,
  ])
  // Bug fix: `comment` was read before the update, so patch in the new
  // hidden state instead of broadcasting the stale row.
  broadcastUpdatedComment(convertComment({ ...comment, hidden: hide }))
}

View File

@@ -0,0 +1,181 @@
import { Json } from 'common/supabase/schema'
import { SupabaseDirectClient } from 'shared/supabase/init'
import { ChatVisibility } from 'common/chat-message'
import { User } from 'common/user'
import { first } from 'lodash'
import { log } from 'shared/monitoring/log'
import { getPrivateUser, getUser } from 'shared/utils'
import { type JSONContent } from '@tiptap/core'
import { APIError } from 'common/api/utils'
import { broadcast } from 'shared/websockets/server'
import { track } from 'shared/analytics'
import { sendNewMessageEmail } from 'email/functions/helpers'
import dayjs from 'dayjs'
import utc from 'dayjs/plugin/utc'
import timezone from 'dayjs/plugin/timezone'
// Enable timezone-aware dayjs parsing (used below for the
// America/Los_Angeles start-of-day window).
dayjs.extend(utc)
dayjs.extend(timezone)
// tiptap document announcing that a user left the chat.
export const leaveChatContent = (userName: string) => {
  const paragraph = {
    type: 'paragraph',
    content: [{ text: `${userName} left the chat`, type: 'text' }],
  }
  return { type: 'doc', content: [paragraph] }
}
// tiptap document announcing that a user joined the chat.
export const joinChatContent = (userName: string) => ({
  type: 'doc',
  content: [
    {
      type: 'paragraph',
      content: [{ text: `${userName} joined the chat!`, type: 'text' }],
    },
  ],
})
/**
 * Append a message to a channel and bump the channel's last_updated_time to
 * the new message's created_time.
 */
export const insertPrivateMessage = async (
  content: Json,
  channelId: number,
  userId: string,
  visibility: ChatVisibility,
  pg: SupabaseDirectClient
) => {
  const { created_time } = await pg.one(
    `insert into private_user_messages (content, channel_id, user_id, visibility)
    values ($1, $2, $3, $4) returning created_time`,
    [content, channelId, userId, visibility]
  )
  await pg.none(
    `update private_user_message_channels set last_updated_time = $1 where id = $2`,
    [created_time, channelId]
  )
}
/**
 * Propose channel membership for each user (existing rows are left untouched
 * via on conflict do nothing), then bump the channel's activity timestamp.
 */
export const addUsersToPrivateMessageChannel = async (
  userIds: string[],
  channelId: number,
  pg: SupabaseDirectClient
) => {
  const insertMember = (userId: string) =>
    pg.none(
      `insert into private_user_message_channel_members (channel_id, user_id, role, status)
      values
      ($1, $2, 'member', 'proposed')
      on conflict do nothing
      `,
      [channelId, userId]
    )
  await Promise.all(userIds.map(insertMember))
  await pg.none(
    `update private_user_message_channels set last_updated_time = now() where id = $1`,
    [channelId]
  )
}
/**
 * Post a message to a private channel on behalf of `creator`: verify
 * membership, maybe notify the other member, insert the message, and
 * broadcast to every member's websocket topic.
 * @throws APIError 403 when the creator is not a channel member.
 */
export const createPrivateUserMessageMain = async (
  creator: User,
  channelId: number,
  content: JSONContent,
  pg: SupabaseDirectClient,
  visibility: ChatVisibility
) => {
  log('createPrivateUserMessageMain', creator, channelId, content)
  // Normally, users can only submit messages to channels that they are members of
  const authorized = await pg.oneOrNone(
    `select 1
    from private_user_message_channel_members
    where channel_id = $1
    and user_id = $2`,
    [channelId, creator.id]
  )
  if (!authorized)
    throw new APIError(403, 'You are not authorized to post to this channel')
  // Notify BEFORE inserting: the inactivity check counts the creator's
  // earlier messages today, so it must not see the message being added now.
  await notifyOtherUserInChannelIfInactive(channelId, creator, pg)
  await insertPrivateMessage(content, channelId, creator.id, visibility, pg)
  const privateMessage = {
    content: content as Json,
    channel_id: channelId,
    user_id: creator.id,
  }
  // Active (non-left) members other than the creator.
  const otherUserIds = await pg.map<string>(
    `select user_id from private_user_message_channel_members
    where channel_id = $1 and user_id != $2
    and status != 'left'
    `,
    [channelId, creator.id],
    (r) => r.user_id
  )
  // Ping every member's websocket topic — including the creator's, so their
  // other open tabs refresh too.
  otherUserIds.concat(creator.id).forEach((otherUserId) => {
    broadcast(`private-user-messages/${otherUserId}`, {})
  })
  track(creator.id, 'send private message', {
    channelId,
    otherUserIds,
  })
  return privateMessage
}
/**
 * Email-notify the single other member of a 1:1 channel, but only for the
 * creator's first message of the current (America/Los_Angeles) day.
 * No-op for group channels or when the member can't be resolved.
 */
const notifyOtherUserInChannelIfInactive = async (
  channelId: number,
  creator: User,
  pg: SupabaseDirectClient
) => {
  const otherUserIds = await pg.manyOrNone<{ user_id: string }>(
    `select user_id from private_user_message_channel_members
    where channel_id = $1 and user_id != $2
    and status != 'left'
    `,
    [channelId, creator.id]
  )
  // We're only sending notifs for 1:1 channels
  if (!otherUserIds || otherUserIds.length > 1) return
  const otherUserId = first(otherUserIds)
  if (!otherUserId) return
  const startOfDay = dayjs()
    .tz('America/Los_Angeles')
    .startOf('day')
    .toISOString()
  const previousMessagesThisDayBetweenTheseUsers = await pg.one(
    `select count(*) from private_user_messages
    where channel_id = $1
    and user_id = $2
    and created_time > $3
    `,
    [channelId, creator.id, startOfDay]
  )
  log('previous messages this day', previousMessagesThisDayBetweenTheseUsers)
  // pg returns count as a string; coerce before comparing.
  if (Number(previousMessagesThisDayBetweenTheseUsers.count) > 0) return
  // TODO: notification only for active user
  const otherUser = await getUser(otherUserId.user_id)
  // Use the shared logger (console.log bypasses structured logging) and
  // avoid dumping the full user record.
  log('notifying other user', { otherUserId: otherUserId.user_id })
  if (!otherUser) return
  await createNewMessageNotification(creator, otherUser, channelId)
}
/**
 * Email the recipient about a new private message.
 * Silently no-ops when the recipient has no private-user record.
 */
const createNewMessageNotification = async (
  fromUser: User,
  toUser: User,
  channelId: number
) => {
  const privateUser = await getPrivateUser(toUser.id)
  // Do NOT log the private user record — it contains private contact data
  // that must not end up in log storage.
  if (!privateUser) return
  await sendNewMessageEmail(privateUser, fromUser, toUser, channelId)
}

View File

@@ -0,0 +1,43 @@
import { APIError, APIHandler } from 'api/helpers/endpoint'
import { log, getUser } from 'shared/utils'
import { createSupabaseDirectClient } from 'shared/supabase/init'
import {
insertPrivateMessage,
leaveChatContent,
} from 'api/junk-drawer/private-messages'
/**
 * Mark the caller as having left a message channel, and post a system
 * "X left the chat" message into it.
 * @throws APIError 401 if the caller's user record is missing, 403 if they
 * were never a member of the channel.
 */
export const leavePrivateUserMessageChannel: APIHandler<
  'leave-private-user-message-channel'
> = async ({ channelId }, auth) => {
  const pg = createSupabaseDirectClient()
  const user = await getUser(auth.uid)
  if (!user) throw new APIError(401, 'Your account was not found')
  const membership = await pg.oneOrNone<{ status: string }>(
    `select status from private_user_message_channel_members
    where channel_id = $1 and user_id = $2`,
    [channelId, auth.uid]
  )
  if (!membership)
    throw new APIError(403, 'You are not authorized to post to this channel')
  // Log the status string itself; concatenating the row object printed
  // "[object Object]".
  log('membershipStatus: ' + membership.status)
  await pg.none(
    `
    update private_user_message_channel_members
    set status = 'left'
    where channel_id=$1 and user_id=$2;
    `,
    [channelId, auth.uid]
  )
  // add message that the user left the channel
  await insertPrivateMessage(
    leaveChatContent(user.name),
    channelId,
    auth.uid,
    'system_status',
    pg
  )
  return { status: 'success', channelId: Number(channelId) }
}

View File

@@ -0,0 +1,69 @@
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { APIError, APIHandler } from './helpers/endpoint'
import { createLoveLikeNotification } from 'shared/create-love-notification'
import { getHasFreeLike } from './has-free-like'
import { log } from 'shared/utils'
import { tryCatch } from 'common/util/try-catch'
import { Row } from 'common/supabase/utils'
export const likeLover: APIHandler<'like-lover'> = async (props, auth) => {
const { targetUserId, remove } = props
const creatorId = auth.uid
const pg = createSupabaseDirectClient()
if (remove) {
const { error } = await tryCatch(
pg.none(
'delete from love_likes where creator_id = $1 and target_id = $2',
[creatorId, targetUserId]
)
)
if (error) {
throw new APIError(500, 'Failed to remove like: ' + error.message)
}
return { status: 'success' }
}
// Check if like already exists
const { data: existing } = await tryCatch(
pg.oneOrNone<Row<'love_likes'>>(
'select * from love_likes where creator_id = $1 and target_id = $2',
[creatorId, targetUserId]
)
)
if (existing) {
log('Like already exists, do nothing')
return { status: 'success' }
}
const hasFreeLike = await getHasFreeLike(creatorId)
if (!hasFreeLike) {
// Charge for like.
throw new APIError(403, 'You already liked someone today!')
}
// Insert the new like
const { data, error } = await tryCatch(
pg.one<Row<'love_likes'>>(
'insert into love_likes (creator_id, target_id) values ($1, $2) returning *',
[creatorId, targetUserId]
)
)
if (error) {
throw new APIError(500, 'Failed to add like: ' + error.message)
}
const continuation = async () => {
await createLoveLikeNotification(data)
}
return {
result: { status: 'success' },
continue: continuation,
}
}

View File

@@ -0,0 +1,16 @@
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { APIHandler } from './helpers/endpoint'
// Flip isSeen on every unseen notification for the caller in one statement.
export const markAllNotifsRead: APIHandler<'mark-all-notifs-read'> = async (
  _,
  auth
) => {
  const db = createSupabaseDirectClient()
  const query = `update user_notifications
    SET data = jsonb_set(data, '{isSeen}', 'true'::jsonb)
    where user_id = $1
    and data->>'isSeen' = 'false'`
  await db.none(query, [auth.uid])
}

View File

@@ -0,0 +1,30 @@
import { APIError } from 'api/helpers/endpoint'
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { type APIHandler } from 'api/helpers/endpoint'
import { isAdminId } from 'common/envs/constants'
import { log } from 'shared/utils'
import { tryCatch } from 'common/util/try-catch'
/**
 * Admin-only moderation action: clear a lover's pinned profile photo.
 * @throws APIError 403 for non-admins, 500 when the update fails.
 */
export const removePinnedPhoto: APIHandler<'remove-pinned-photo'> = async (
  body: { userId: string },
  auth
) => {
  const { userId } = body
  log('remove pinned url', { userId })
  if (!isAdminId(auth.uid))
    throw new APIError(403, 'Only admins can remove pinned photo')
  const pg = createSupabaseDirectClient()
  const { error } = await tryCatch(
    pg.none('update lovers set pinned_url = null where user_id = $1', [userId])
  )
  if (error) {
    // Include the cause, matching the error style of the other endpoints.
    throw new APIError(500, 'Failed to remove pinned photo: ' + error.message)
  }
  return {
    success: true,
  }
}

37
backend/api/src/report.ts Normal file
View File

@@ -0,0 +1,37 @@
import { APIError, APIHandler } from './helpers/endpoint'
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { tryCatch } from 'common/util/try-catch'
import { insert } from 'shared/supabase/utils'
// abusable: people can report the wrong person, that didn't write the comment
// but in practice we check it manually and nothing bad happens to them automatically
/**
 * File a content report from the caller against another user's content.
 * @throws APIError 500 when the insert fails.
 */
export const report: APIHandler<'report'> = async (body, auth) => {
  const pg = createSupabaseDirectClient()
  const row = {
    user_id: auth.uid,
    content_owner_id: body.contentOwnerId,
    content_type: body.contentType,
    content_id: body.contentId,
    description: body.description,
    parent_id: body.parentId,
    parent_type: body.parentType,
  }
  const { error } = await tryCatch(insert(pg, 'reports', row))
  if (error) {
    throw new APIError(500, 'Failed to create report: ' + error.message)
  }
  return { success: true }
}

View File

@@ -0,0 +1,36 @@
import { APIHandler } from './helpers/endpoint'
/**
 * City autocomplete via the GeoDB Cities API.
 * @returns { status, data } — raw GeoDB payload on success, an error-message
 * string on failure.
 */
export const searchLocation: APIHandler<'search-location'> = async (body) => {
  const { term, limit } = body
  const apiKey = process.env.GEODB_API_KEY
  // Never log the key itself — it is a secret.
  if (!apiKey) {
    return { status: 'failure', data: 'Missing GEODB API key' }
  }
  const host = 'wft-geo-db.p.rapidapi.com'
  const baseUrl = `https://${host}/v1/geo`
  // Encode the user-supplied term so spaces/&/# can't corrupt the query string.
  const url = `${baseUrl}/cities?namePrefix=${encodeURIComponent(term)}&limit=${
    limit ?? 10
  }&offset=0&sort=-population`
  try {
    const res = await fetch(url, {
      method: 'GET',
      headers: {
        'X-RapidAPI-Key': apiKey,
        'X-RapidAPI-Host': host,
      },
    })
    if (!res.ok) {
      throw new Error(`HTTP error! Status: ${res.status} ${await res.text()}`)
    }
    const data = await res.json()
    return { status: 'success', data: data }
  } catch (error: any) {
    console.log('failure', error)
    return { status: 'failure', data: error.message }
  }
}

View File

@@ -0,0 +1,45 @@
import { APIHandler } from './helpers/endpoint'
// Thin endpoint wrapper: delegate to searchNearCityMain with the request params.
export const searchNearCity: APIHandler<'search-near-city'> = async ({
  cityId,
  radius,
}) => searchNearCityMain(cityId, radius)
/**
 * Query GeoDB for up to 100 cities within `radius` of the given city id,
 * sorted by population.
 * @returns { status, data } — raw GeoDB payload on success; message string or
 * error value on failure.
 */
const searchNearCityMain = async (cityId: string, radius: number) => {
  const apiKey = process.env.GEODB_API_KEY
  if (!apiKey) {
    return { status: 'failure', data: 'Missing GEODB API key' }
  }
  const host = 'wft-geo-db.p.rapidapi.com'
  const baseUrl = `https://${host}/v1/geo`
  // Encode the id so a malformed value can't alter the request path.
  const url = `${baseUrl}/cities/${encodeURIComponent(
    cityId
  )}/nearbyCities?radius=${radius}&offset=0&sort=-population&limit=100`
  try {
    const res = await fetch(url, {
      method: 'GET',
      headers: {
        'X-RapidAPI-Key': apiKey,
        'X-RapidAPI-Host': host,
      },
    })
    if (!res.ok) {
      throw new Error(`HTTP error! Status: ${res.status}`)
    }
    const data = await res.json()
    return { status: 'success', data: data }
  } catch (error) {
    return { status: 'failure', data: error }
  }
}
/**
 * Ids of cities near the given city (as strings).
 * @throws Error with a descriptive message when the GeoDB lookup fails —
 * previously a failure produced an opaque TypeError on `.map` of undefined.
 */
export const getNearbyCities = async (cityId: string, radius: number) => {
  const result = await searchNearCityMain(cityId, radius)
  if (result.status !== 'success') {
    throw new Error(
      `Failed to fetch nearby cities for ${cityId}: ${result.data}`
    )
  }
  const cityIds = (result.data.data as any[]).map(
    (city) => city.id.toString() as string
  )
  return cityIds
}

View File

@@ -0,0 +1,70 @@
import { constructPrefixTsQuery } from 'shared/helpers/search'
import {
from,
join,
limit,
orderBy,
renderSql,
select,
where,
} from 'shared/supabase/sql-builder'
import { type APIHandler } from './helpers/endpoint'
import { convertUser } from 'common/supabase/users'
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { toUserAPIResponse } from 'common/api/user-types'
import { uniqBy } from 'lodash'
/**
 * Search users by term. Runs two queries in parallel — one restricted to the
 * caller's follows, one across all users — and ranks follows first in the
 * deduplicated, limited result.
 */
export const searchUsers: APIHandler<'search-users'> = async (props, auth) => {
  const { term, page, limit } = props
  const pg = createSupabaseDirectClient()
  const offset = page * limit
  const userId = auth?.uid
  const [followers, everyone] = await Promise.all([
    pg.map(getSearchUserSQL({ term, offset, limit, userId }), null, convertUser),
    pg.map(getSearchUserSQL({ term, offset, limit }), null, convertUser),
  ])
  const merged = uniqBy([...followers, ...everyone], 'id')
  return merged.map(toUserAPIResponse).slice(0, limit)
}
/**
 * Build the user-search query. With `userId`, search only that user's
 * follows; otherwise search all users. With a term, rank by full-text match
 * (websearch plus a prefix query); otherwise order by all-time creator
 * traders. Note: `offset` is accepted but only `limit` is applied here.
 */
function getSearchUserSQL(props: {
  term: string
  offset: number
  limit: number
  userId?: string // search only this user's followers
}) {
  const { term, userId } = props
  return renderSql(
    userId
      ? [
          select('users.*'),
          from('users'),
          join('user_follows on user_follows.follow_id = users.id'),
          where('user_follows.user_id = $1', [userId]),
        ]
      : [select('*'), from('users')],
    term
      ? [
          // Match either a websearch-style query or a prefix tsquery so that
          // partial names ("mar") still hit ("martin").
          where(
            `name_username_vector @@ websearch_to_tsquery('english', $1)
            or name_username_vector @@ to_tsquery('english', $2)`,
            [term, constructPrefixTsQuery(term)]
          ),
          orderBy(
            `ts_rank(name_username_vector, websearch_to_tsquery($1)) desc,
            data->>'lastBetTime' desc nulls last`,
            [term]
          ),
        ]
      : orderBy(`data->'creatorTraders'->'allTime' desc nulls last`),
    limit(props.limit, props.offset)
  )
}

41
backend/api/src/serve.ts Normal file
View File

@@ -0,0 +1,41 @@
import * as admin from 'firebase-admin'
import { getLocalEnv, initAdmin } from 'shared/init-admin'
import { loadSecretsToEnv, getServiceAccountCredentials } from 'common/secrets'
import { LOCAL_DEV, log } from 'shared/utils'
import { METRIC_WRITER } from 'shared/monitoring/metric-writer'
import { listen as webSocketListen } from 'shared/websockets/server'

log('Api server starting up....')

// Firebase admin must be initialized before './app' is imported below, since
// the endpoint modules it pulls in may use the admin SDK at import time.
if (LOCAL_DEV) {
  initAdmin()
} else {
  const projectId = process.env.GOOGLE_CLOUD_PROJECT
  admin.initializeApp({
    projectId,
    storageBucket: `${projectId}.appspot.com`,
  })
}

METRIC_WRITER.start()

import { app } from './app'

// Explicit service-account credentials are only needed for local dev.
const credentials = LOCAL_DEV
  ? getServiceAccountCredentials(getLocalEnv())
  : // No explicit credentials needed for deployed service.
    undefined

const startupProcess = async () => {
  await loadSecretsToEnv(credentials)
  log('Secrets loaded.')

  const PORT = process.env.PORT ?? 8088
  const httpServer = app.listen(PORT, () => {
    log.info(`Serving API on port ${PORT}.`)
  })
  webSocketListen(httpServer, '/ws')
  log('Server started successfully')
}

// Fail fast and loudly if startup fails instead of leaving an unhandled
// promise rejection (the promise was previously fire-and-forget).
startupProcess().catch((err) => {
  console.error('API server failed to start', err)
  process.exit(1)
})

View File

@@ -0,0 +1,73 @@
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { APIError, APIHandler } from './helpers/endpoint'
import { createLoveShipNotification } from 'shared/create-love-notification'
import { log } from 'shared/utils'
import { tryCatch } from 'common/util/try-catch'
import { insert } from 'shared/supabase/utils'
/**
 * Create (or, with `remove`, delete) a "ship": the caller pairing two other
 * users together. Idempotent — re-shipping an existing pair is a no-op.
 * On create, notifies both shipped users after the response is sent.
 */
export const shipLovers: APIHandler<'ship-lovers'> = async (props, auth) => {
  const { targetUserId1, targetUserId2, remove } = props
  const creatorId = auth.uid
  const pg = createSupabaseDirectClient()

  // Check if ship already exists or with swapped target IDs
  // (a ship of A+B is the same as a ship of B+A).
  const existing = await tryCatch(
    pg.oneOrNone<{ ship_id: string }>(
      `select ship_id from love_ships
      where creator_id = $1
      and (
        target1_id = $2 and target2_id = $3
        or target1_id = $3 and target2_id = $2
      )`,
      [creatorId, targetUserId1, targetUserId2]
    )
  )
  if (existing.error)
    throw new APIError(
      500,
      'Error when checking ship: ' + existing.error.message
    )

  if (existing.data) {
    if (remove) {
      const { error } = await tryCatch(
        pg.none('delete from love_ships where ship_id = $1', [
          existing.data.ship_id,
        ])
      )
      if (error) {
        throw new APIError(500, 'Failed to remove ship: ' + error.message)
      }
    } else {
      log('Ship already exists, do nothing')
    }
    return { status: 'success' }
  }

  // Insert the new ship
  const { data, error } = await tryCatch(
    insert(pg, 'love_ships', {
      creator_id: creatorId,
      target1_id: targetUserId1,
      target2_id: targetUserId2,
    })
  )
  if (error) {
    throw new APIError(500, 'Failed to create ship: ' + error.message)
  }

  // Runs after the success response: notify both shipped users.
  const continuation = async () => {
    await Promise.all([
      createLoveShipNotification(data, data.target1_id),
      createLoveShipNotification(data, data.target2_id),
    ])
  }
  return {
    result: { status: 'success' },
    continue: continuation,
  }
}

View File

@@ -0,0 +1,51 @@
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { APIError, APIHandler } from './helpers/endpoint'
import { log } from 'shared/utils'
import { tryCatch } from 'common/util/try-catch'
import { Row } from 'common/supabase/utils'
import { insert } from 'shared/supabase/utils'
export const starLover: APIHandler<'star-lover'> = async (props, auth) => {
const { targetUserId, remove } = props
const creatorId = auth.uid
const pg = createSupabaseDirectClient()
if (remove) {
const { error } = await tryCatch(
pg.none(
'delete from love_stars where creator_id = $1 and target_id = $2',
[creatorId, targetUserId]
)
)
if (error) {
throw new APIError(500, 'Failed to remove star: ' + error.message)
}
return { status: 'success' }
}
// Check if star already exists
const { data: existing } = await tryCatch(
pg.oneOrNone<Row<'love_stars'>>(
'select * from love_stars where creator_id = $1 and target_id = $2',
[creatorId, targetUserId]
)
)
if (existing) {
log('star already exists, do nothing')
return { status: 'success' }
}
// Insert the new star
const { error } = await tryCatch(
insert(pg, 'love_stars', { creator_id: creatorId, target_id: targetUserId })
)
if (error) {
throw new APIError(500, 'Failed to add star: ' + error.message)
}
return { status: 'success' }
}

View File

@@ -0,0 +1,45 @@
import { APIError, APIHandler } from 'api/helpers/endpoint'
import { removePinnedUrlFromPhotoUrls } from 'shared/love/parse-photos'
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { updateUser } from 'shared/supabase/users'
import { log } from 'shared/utils'
import { tryCatch } from 'common/util/try-catch'
import { update } from 'shared/supabase/utils'
import { type Row } from 'common/supabase/utils'
export const updateLover: APIHandler<'update-lover'> = async (
parsedBody,
auth
) => {
log('parsedBody', parsedBody)
const pg = createSupabaseDirectClient()
const { data: existingLover } = await tryCatch(
pg.oneOrNone<Row<'lovers'>>('select * from lovers where user_id = $1', [
auth.uid,
])
)
if (!existingLover) {
throw new APIError(404, 'Lover not found')
}
!parsedBody.last_online_time &&
log('Updating lover', { userId: auth.uid, parsedBody })
await removePinnedUrlFromPhotoUrls(parsedBody)
if (parsedBody.avatar_url) {
await updateUser(pg, auth.uid, { avatarUrl: parsedBody.avatar_url })
}
const { data, error } = await tryCatch(
update(pg, 'lovers', 'user_id', { user_id: auth.uid, ...parsedBody })
)
if (error) {
log('Error updating lover', error)
throw new APIError(500, 'Error updating lover')
}
return data
}

View File

@@ -0,0 +1,101 @@
import { toUserAPIResponse } from 'common/api/user-types'
import { RESERVED_PATHS } from 'common/envs/constants'
import { cleanDisplayName, cleanUsername } from 'common/util/clean-username'
import { removeUndefinedProps } from 'common/util/object'
import { cloneDeep, mapValues } from 'lodash'
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { getUser, getUserByUsername } from 'shared/utils'
import { APIError, APIHandler } from './helpers/endpoint'
import { updateUser } from 'shared/supabase/users'
import { broadcastUpdatedUser } from 'shared/websockets/helpers'
import { strip } from 'common/socials'
/**
 * Update the caller's own user record: general fields, social links, and
 * name/username/avatar. Identity changes are broadcast to connected clients.
 */
export const updateMe: APIHandler<'me/update'> = async (props, auth) => {
  const update = cloneDeep(props)
  const user = await getUser(auth.uid)
  if (!user) throw new APIError(401, 'Your account was not found')

  if (update.name) {
    update.name = cleanDisplayName(update.name)
  }

  if (update.username) {
    // Usernames must normalize cleanly, not collide with a reserved route
    // path, and be unique across users.
    const cleanedUsername = cleanUsername(update.username)
    if (!cleanedUsername) throw new APIError(400, 'Invalid username')
    const reservedName = RESERVED_PATHS.includes(cleanedUsername)
    if (reservedName) throw new APIError(403, 'This username is reserved')
    const otherUserExists = await getUserByUsername(cleanedUsername)
    if (otherUserExists) throw new APIError(403, 'Username already taken')
    update.username = cleanedUsername
  }

  const pg = createSupabaseDirectClient()

  // name/username/avatarUrl/link get special handling below; everything
  // else is written straight through.
  const { name, username, avatarUrl, link = {}, ...rest } = update
  await updateUser(pg, auth.uid, removeUndefinedProps(rest))

  // Legacy flat fields map onto the structured `link` object.
  if (update.website != undefined) link.site = update.website
  if (update.twitterHandle != undefined) link.x = update.twitterHandle
  if (update.discordHandle != undefined) link.discord = update.discordHandle

  // Normalize each submitted link per site (strip URLs down to handles).
  const stripped = mapValues(
    link,
    (value, site) => value && strip(site as any, value)
  )

  // null/empty means "remove this link"; any other value is an upsert.
  const adds = {} as { [key: string]: string }
  const removes = []
  for (const [key, value] of Object.entries(stripped)) {
    if (value === null || value === '') {
      removes.push(key)
    } else if (value) {
      adds[key] = value
    }
  }

  let newLinks: any = null
  if (Object.keys(adds).length > 0 || removes.length > 0) {
    // Merge added links into data.link and drop removed keys in one
    // statement, returning the resulting link object.
    const data = await pg.oneOrNone(
      `update users
      set data = jsonb_set(
        data, '{link}',
        (data->'link' || $(adds)) - $(removes)
      )
      where id = $(id)
      returning data->'link' as link`,
      { adds, removes, id: auth.uid }
    )
    newLinks = data?.link
  }

  if (name || username || avatarUrl) {
    if (name) {
      await pg.none(`update users set name = $1 where id = $2`, [
        name,
        auth.uid,
      ])
    }
    if (username) {
      await pg.none(`update users set username = $1 where id = $2`, [
        username,
        auth.uid,
      ])
    }
    if (avatarUrl) {
      await updateUser(pg, auth.uid, { avatarUrl })
    }
    // Push the identity change to connected clients over websockets.
    broadcastUpdatedUser(
      removeUndefinedProps({
        id: auth.uid,
        name,
        username,
        avatarUrl,
        link: newLinks ?? undefined,
      })
    )
  }
  return toUserAPIResponse({ ...user, ...update, link: newLinks })
}

View File

@@ -0,0 +1,28 @@
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { updatePrivateUser } from 'shared/supabase/users'
import { type APIHandler } from './helpers/endpoint'
import { broadcastUpdatedPrivateUser } from 'shared/websockets/helpers'
/**
 * Toggle one notification preference (by type and delivery medium) for the
 * calling user.
 */
export const updateNotifSettings: APIHandler<'update-notif-settings'> = async (
  { type, medium, enabled },
  auth
) => {
  const pg = createSupabaseDirectClient()
  if (type === 'opt_out_all' && medium === 'mobile') {
    // Mobile opt-out-all lives on its own flag rather than inside the
    // notificationPreferences map. enabled=true means opting OUT, hence
    // the negation.
    await updatePrivateUser(pg, auth.uid, {
      interestedInPushNotifications: !enabled,
    })
  } else {
    // deep update array at data.notificationPreferences[type]:
    // append the medium when enabling, remove it when disabling.
    await pg.none(
      `update private_users
      set data = jsonb_set(data, '{notificationPreferences, $1:raw}',
        coalesce(data->'notificationPreferences'->$1, '[]'::jsonb)
        ${enabled ? `|| '[$2:name]'::jsonb` : `- $2`}
      )
      where id = $3`,
      [type, medium, auth.uid]
    )
    // NOTE(review): the broadcast only fires on this branch — confirm
    // whether updatePrivateUser above broadcasts on its own, or whether
    // the mobile opt-out path is missing a broadcast.
    broadcastUpdatedPrivateUser(auth.uid)
  }
}

View File

@@ -0,0 +1,33 @@
import { APIError, APIHandler } from 'api/helpers/endpoint'
import { log, getUser } from 'shared/utils'
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { millisToTs } from 'common/supabase/utils'
/**
 * Snooze/mute a private message channel for the caller by setting
 * notify_after_time on their membership row. Caller must be a member.
 */
export const updatePrivateUserMessageChannel: APIHandler<
  'update-private-user-message-channel'
> = async (body, auth) => {
  const { channelId, notifyAfterTime } = body
  const pg = createSupabaseDirectClient()
  const user = await getUser(auth.uid)
  if (!user) throw new APIError(401, 'Your account was not found')

  // Only channel members may change their notification window.
  const membershipStatus = await pg.oneOrNone(
    `select status from private_user_message_channel_members
    where channel_id = $1 and user_id = $2`,
    [channelId, auth.uid]
  )
  if (!membershipStatus)
    throw new APIError(403, 'You are not authorized to this channel')
  // Pass the row as a separate argument: string concatenation printed
  // '[object Object]' instead of the row contents.
  log('membershipStatus', membershipStatus)

  // Suppress notifications for this member until notifyAfterTime.
  await pg.none(
    `
    update private_user_message_channel_members
    set notify_after_time = $3
    where channel_id=$1 and user_id=$2;
    `,
    [channelId, auth.uid, millisToTs(notifyAfterTime)]
  )
  return { status: 'success', channelId: Number(channelId) }
}

37
backend/api/ssh-api.sh Executable file
View File

@@ -0,0 +1,37 @@
#!/bin/bash
# Script to make it easy to tunnel into the currently running API instance on GCP
# so that you can debug the Node process, e.g. to set breakpoints (in dev!!), use the REPL,
# or do performance or memory profiling.
set -e

SERVICE_NAME="api"
SERVICE_GROUP="${SERVICE_NAME}-group"
ZONE="us-west1-c"
ENV=${1:-dev}

case $ENV in
  dev)
    GCLOUD_PROJECT=compass-130ba ;;
  prod)
    GCLOUD_PROJECT=compass-130ba ;;
  *)
    echo "Invalid environment; must be dev or prod."
    exit 1
esac

echo "Looking for API instance on ${GCLOUD_PROJECT} to talk to..."

# Pick the newest instance belonging to the API instance group in the
# configured zone. (Previously the zone was hard-coded in the filter and the
# name was unfiltered, so an unrelated instance could be selected.)
INSTANCE_ID=$(gcloud compute instances list \
  --project="${GCLOUD_PROJECT}" \
  --filter="zone:(${ZONE}) AND name~^${SERVICE_GROUP}" \
  --sort-by="~creationTimestamp" \
  --format="value(name)" \
  --limit=1)

if [ -z "${INSTANCE_ID}" ]; then
  echo "No API instance found in ${ZONE} on ${GCLOUD_PROJECT}."
  exit 1
fi

echo "Forwarding debugging port 9229 to ${INSTANCE_ID}. Open chrome://inspect in Chrome to connect."
echo gcloud compute ssh "${INSTANCE_ID}" --project="${GCLOUD_PROJECT}" --zone="${ZONE}"
# The -NL flags actually forward port 9229; they were previously commented
# out (leaving a dangling line continuation), so no tunnel was established.
gcloud compute ssh "${INSTANCE_ID}" \
  --project="${GCLOUD_PROJECT}" \
  --zone="${ZONE}" \
  -- \
  -NL 9229:localhost:9229

32
backend/api/tsconfig.json Normal file
View File

@@ -0,0 +1,32 @@
{
"compilerOptions": {
"rootDir": "src",
"composite": true,
"module": "commonjs",
"noImplicitReturns": true,
"outDir": "./lib",
"tsBuildInfoFile": "lib/tsconfig.tsbuildinfo",
"sourceMap": true,
"strict": true,
"esModuleInterop": true,
"target": "esnext",
"skipLibCheck": true,
"jsx": "react-jsx",
"paths": {
"common/*": ["../../common/src/*", "../../../common/lib/*"],
"shared/*": ["../shared/src/*", "../../shared/lib/*"],
"email/*": ["../email/emails/*", "../../email/lib/*"],
"api/*": ["./src/*"]
}
},
"ts-node": {
"require": ["tsconfig-paths/register"]
},
"references": [
{ "path": "../../common" },
{ "path": "../shared" },
{ "path": "../email" }
],
"compileOnSave": true,
"include": ["src/**/*.ts"]
}

View File

@@ -0,0 +1,23 @@
name: aou-kb
defaultService: global/backendServices/api-lb-service-0
hostRules:
- hosts:
- '*'
pathMatcher: matcher-1
pathMatchers:
- name: matcher-1
defaultService: global/backendServices/api-lb-service-0
routeRules:
- priority: 1
routeAction:
weightedBackendServices:
- backendService: global/backendServices/api-lb-service-0
weight: 25
- backendService: global/backendServices/api-lb-service-1
weight: 25
- backendService: global/backendServices/api-lb-service-2
weight: 25
- backendService: global/backendServices/api-lb-service-3
weight: 25
# redeploy this by running:
# gcloud compute url-maps import aou-kb --source=url-map-config.yaml --project polylove --global

10
backend/email/.gitignore vendored Normal file
View File

@@ -0,0 +1,10 @@
# Compiled JavaScript files
lib/
# TypeScript v1 declaration files
typings/
# Node.js dependency directory
node_modules/
package-lock.json

23
backend/email/README.md Normal file
View File

@@ -0,0 +1,23 @@
# React Email Starter
This starter gives you a live preview of your emails right in the browser, so you don't need to keep sending real emails during development.
## Getting Started
First, install the dependencies:
```sh
npm install
# or
yarn
```
Then, run the development server:
```sh
npm run dev
# or
yarn dev
```
Open [localhost:3000](http://localhost:3000) with your browser to see the result.

View File

@@ -0,0 +1,124 @@
import { PrivateUser, User } from 'common/user'
import { getNotificationDestinationsForUser } from 'common/user-notification-preferences'
import { sendEmail } from './send-email'
import { NewMatchEmail } from '../new-match'
import { NewMessageEmail } from '../new-message'
import { NewEndorsementEmail } from '../new-endorsement'
import { Test } from '../test'
import { getLover } from 'shared/love/supabase'
import {renderToStaticMarkup} from "react-dom/server";
const from = 'Compass <no-reply@compassmeet.com>'
/**
 * Email a user that someone matched with them, honoring their 'new_match'
 * notification preferences. No-ops if the user has no email, opted out of
 * email delivery, or has no lover profile.
 */
export const sendNewMatchEmail = async (
  privateUser: PrivateUser,
  matchedWithUser: User
) => {
  const { sendToEmail, unsubscribeUrl } = getNotificationDestinationsForUser(
    privateUser,
    'new_match'
  )
  if (!privateUser.email || !sendToEmail) return
  // The recipient's lover profile supplies the `onUser` shown in the email.
  const lover = await getLover(privateUser.id)
  if (!lover) return
  return await sendEmail({
    from,
    subject: `You have a new match!`,
    to: privateUser.email,
    react: (
      <NewMatchEmail
        onUser={lover.user}
        matchedWithUser={matchedWithUser}
        matchedLover={lover}
        unsubscribeUrl={unsubscribeUrl}
      />
    ),
  })
}
/**
 * Email a user that someone messaged them, honoring their 'new_message'
 * notification preferences. No-ops if the user has no email or opted out;
 * logs an error and bails if the sender has no lover profile.
 */
export const sendNewMessageEmail = async (
  privateUser: PrivateUser,
  fromUser: User,
  toUser: User,
  channelId: number
) => {
  const { sendToEmail, unsubscribeUrl } = getNotificationDestinationsForUser(
    privateUser,
    'new_message'
  )
  if (!privateUser.email || !sendToEmail) return

  const lover = await getLover(fromUser.id)
  if (!lover) {
    console.error('Could not send email notification: User not found')
    return
  }

  // Render the email once. Previously a leftover debug console.log rendered
  // the entire body a second time and dumped it (with the recipient's
  // address) to the logs.
  const html = renderToStaticMarkup(
    <NewMessageEmail
      fromUser={fromUser}
      fromUserLover={lover}
      toUser={toUser}
      channelId={channelId}
      unsubscribeUrl={unsubscribeUrl}
    />
  )
  return await sendEmail({
    from,
    subject: `${fromUser.name} sent you a message!`,
    to: privateUser.email,
    html,
  })
}
/**
 * Email a user that someone endorsed them, honoring their 'new_endorsement'
 * notification preferences. No-ops if the user has no email or opted out.
 */
export const sendNewEndorsementEmail = async (
  privateUser: PrivateUser,
  fromUser: User,
  onUser: User,
  text: string
) => {
  const { sendToEmail, unsubscribeUrl } = getNotificationDestinationsForUser(
    privateUser,
    'new_endorsement'
  )
  if (!privateUser.email || !sendToEmail) return
  return await sendEmail({
    from,
    subject: `${fromUser.name} just endorsed you!`,
    to: privateUser.email,
    react: (
      <NewEndorsementEmail
        fromUser={fromUser}
        onUser={onUser}
        endorsementText={text}
        unsubscribeUrl={unsubscribeUrl}
      />
    ),
  })
}
/** Send a smoke-test email to verify the delivery pipeline end to end. */
export const sendTestEmail = async (toEmail: string) => {
  const html = renderToStaticMarkup(<Test name="Test User" />)
  return await sendEmail({
    from,
    subject: 'Test email from Compass',
    to: toEmail,
    html,
  })
}

View File

@@ -0,0 +1,204 @@
import { LoverRow } from 'common/love/lover'
import type { User } from 'common/user'
// for email template testing
export const sinclairUser: User = {
createdTime: 0,
bio: 'the futa in futarchy',
website: 'sincl.ai',
avatarUrl:
'https://firebasestorage.googleapis.com/v0/b/mantic-markets.appspot.com/o/user-images%2FSinclair%2FbqSXdzkn1z.JPG?alt=media&token=7779230a-9f5d-42b5-839f-fbdfef31a3ac',
idVerified: true,
discordHandle: 'sinclaether#5570',
twitterHandle: 'singularitttt',
verifiedPhone: true,
// sweepstakesVerified: true,
id: '0k1suGSJKVUnHbCPEhHNpgZPkUP2',
username: 'Sinclair',
name: 'Sinclair',
// url: 'https://manifold.love/Sinclair',
// isAdmin: true,
// isTrustworthy: false,
link: {
site: 'sincl.ai',
x: 'singularitttt',
discord: 'sinclaether#5570',
},
}
export const sinclairLover: LoverRow = {
id: 55,
user_id: '0k1suGSJKVUnHbCPEhHNpgZPkUP2',
created_time: '2023-10-27T00:41:59.851776+00:00',
last_online_time: '2024-05-17T02:11:48.83+00:00',
city: 'San Francisco',
gender: 'trans-female',
pref_gender: ['female', 'trans-female'],
pref_age_min: 18,
pref_age_max: 21,
pref_relation_styles: ['poly', 'open', 'mono'],
wants_kids_strength: 3,
looking_for_matches: true,
visibility: 'public',
messaging_status: 'open',
comments_enabled: true,
has_kids: 0,
is_smoker: false,
drinks_per_month: 0,
is_vegetarian_or_vegan: null,
political_beliefs: ['e/acc', 'libertarian'],
religious_belief_strength: null,
religious_beliefs: null,
photo_urls: [
'https://firebasestorage.googleapis.com/v0/b/mantic-markets.appspot.com/o/user-images%2FSinclair%2Flove-images%2FnJz22lr3Bl.jpg?alt=media&token=f1e99ba3-39cc-4637-8702-16a3a8dd49db',
'https://firebasestorage.googleapis.com/v0/b/mantic-markets.appspot.com/o/user-images%2FSinclair%2Flove-images%2FygM0mGgP_j.HEIC?alt=media&token=573b23d9-693c-4d6e-919b-097309f370e1',
'https://firebasestorage.googleapis.com/v0/b/mantic-markets.appspot.com/o/user-images%2FSinclair%2Flove-images%2FWPZNKxjHGV.HEIC?alt=media&token=190625e1-2cf0-49a6-824b-09b6f4002f2a',
'https://firebasestorage.googleapis.com/v0/b/polylove.firebasestorage.app/o/user-images%2FSinclair%2Flove-images%2FlVFKKoLHyV.jpg?alt=media&token=ecb3a003-3672-4382-9ba0-ca894247bb3f',
'https://firebasestorage.googleapis.com/v0/b/polylove.firebasestorage.app/o/user-images%2FSinclair%2Flove-images%2Fh659K0bmd4.jpg?alt=media&token=6561ed05-0e2d-4f31-95ee-c7c1c0b33ea6',
'https://firebasestorage.googleapis.com/v0/b/polylove.firebasestorage.app/o/user-images%2FSinclair%2Flove-images%2F5OMTo5rhB-.jpg?alt=media&token=4aba4e5a-5115-4d2e-9d57-1e6162e15708',
'https://firebasestorage.googleapis.com/v0/b/polylove.firebasestorage.app/o/user-images%2FSinclair%2Flove-images%2FwCT-Y-bgpc.jpg?alt=media&token=91994528-e436-4055-af69-421fa9e29e5c',
],
pinned_url:
'https://firebasestorage.googleapis.com/v0/b/mantic-markets.appspot.com/o/user-images%2FSinclair%2Flove-images%2FYXD19m12D7.jpg?alt=media&token=6cb095b4-dfc8-4bc9-ae67-6f12f29be0a5',
ethnicity: ['asian'],
born_in_location: null,
height_in_inches: 70,
education_level: 'bachelors',
university: 'Santa Clara University',
occupation: null,
occupation_title: 'Founding Engineer',
company: 'Manifold Markets',
website: 'sincl.ai',
twitter: 'x.com/singularitttt',
region_code: 'CA',
country: 'United States of America',
city_latitude: 37.7775,
city_longitude: -122.416389,
geodb_city_id: '126964',
referred_by_username: null,
bio: {
type: 'doc',
content: [
{
type: 'paragraph',
content: [
{
type: 'text',
marks: [
{
type: 'link',
attrs: {
href: 'https://sinclaaair.notion.site/Date-Me-487ef432c1f54938bf5e7a45ef05d57b',
target: '_blank',
},
},
],
text: 'https://sinclaaair.notion.site/Date-Me-487ef432c1f54938bf5e7a45ef05d57b',
},
],
},
],
},
age: 25,
}
export const jamesUser: User = {
createdTime: 0,
bio: 'Manifold cofounder! We got the AMM (What!?). We got the order book (What!?). We got the combination AMM and order book!',
website: 'https://calendly.com/jamesgrugett/manifold',
avatarUrl:
'https://firebasestorage.googleapis.com/v0/b/mantic-markets.appspot.com/o/user-images%2FJamesGrugett%2FefVzXKc9iz.png?alt=media&token=5c205402-04d5-4e64-be65-9d8b4836eb03',
idVerified: true,
// fromManifold: true,
discordHandle: '',
twitterHandle: 'jahooma',
verifiedPhone: true,
// sweepstakesVerified: true,
id: '5LZ4LgYuySdL1huCWe7bti02ghx2',
username: 'JamesGrugett',
name: 'James',
link: {
x: 'jahooma',
discord: '',
},
}
export const jamesLover: LoverRow = {
id: 2,
user_id: '5LZ4LgYuySdL1huCWe7bti02ghx2',
created_time: '2023-10-21T21:18:26.691211+00:00',
last_online_time: '2024-07-06T17:29:16.833+00:00',
city: 'San Francisco',
gender: 'male',
pref_gender: ['female'],
pref_age_min: 22,
pref_age_max: 32,
pref_relation_styles: ['mono'],
wants_kids_strength: 4,
looking_for_matches: true,
visibility: 'public',
messaging_status: 'open',
comments_enabled: true,
has_kids: 0,
is_smoker: false,
drinks_per_month: 5,
is_vegetarian_or_vegan: null,
political_beliefs: ['libertarian'],
religious_belief_strength: null,
religious_beliefs: '',
photo_urls: [
'https://firebasestorage.googleapis.com/v0/b/mantic-markets.appspot.com/o/user-images%2FJamesGrugett%2Flove-images%2FKl0WtbZsZW.jpg?alt=media&token=c928604f-e5ff-4406-a229-152864a4aa48',
'https://firebasestorage.googleapis.com/v0/b/mantic-markets.appspot.com/o/user-images%2FJamesGrugett%2Flove-images%2Fsii17zOItz.jpg?alt=media&token=474034b9-0d23-4005-97ad-5864abfd85fe',
'https://firebasestorage.googleapis.com/v0/b/mantic-markets.appspot.com/o/user-images%2FJamesGrugett%2Flove-images%2F3ICeb-0mwB.jpg?alt=media&token=975dbdb9-5547-4553-b504-e6545eb82ec0',
'https://firebasestorage.googleapis.com/v0/b/mantic-markets.appspot.com/o/user-images%2FJamesGrugett%2Flove-images%2FdtuSGk_13Q.jpg?alt=media&token=98191d86-9d10-4571-879c-d00ab9cab09e',
],
pinned_url:
'https://firebasestorage.googleapis.com/v0/b/mantic-markets.appspot.com/o/user-images%2FJamesGrugett%2Flove-images%2FXkLhuxZoOX.jpg?alt=media&token=7f2304dd-bace-4806-8e3c-78c35e57287c',
ethnicity: ['caucasian'],
born_in_location: 'Melbourne, FL',
height_in_inches: 70,
education_level: 'bachelors',
university: 'Carnegie Mellon',
occupation: 'Entrepreneur',
occupation_title: 'CEO',
company: 'Codebuff',
website: 'https://jamesgrugett.com/',
twitter: 'https://twitter.com/jahooma',
region_code: 'CA',
country: 'United States of America',
city_latitude: 37.7775,
city_longitude: -122.416389,
geodb_city_id: '126964',
referred_by_username: null,
bio: {
type: 'doc',
content: [
{
type: 'paragraph',
content: [
{
type: 'text',
text: "Optimist that's working to improve the world!",
},
],
},
{
type: 'paragraph',
},
{
type: 'paragraph',
content: [
{
type: 'text',
text: 'I like outdoor activities, hanging out with my housemates, strategy board games, libertarian and utilitarian ideas, getting boba, and riding my electric unicycle. I also enjoy working hard on bold new initiatives with huge potential for value creation!',
},
],
},
{
type: 'paragraph',
},
],
},
age: 32,
}

View File

@@ -0,0 +1,43 @@
import {
CreateEmailRequestOptions,
Resend,
type CreateEmailOptions,
} from 'resend'
import { log } from 'shared/utils'
/*
* typically: { subject: string, to: string | string[] } & ({ text: string } | { react: ReactNode })
*/
/**
 * Send an email via Resend with a default reply-to address.
 * Returns the Resend response data, or null on failure (after logging).
 */
export const sendEmail = async (
  payload: CreateEmailOptions,
  options?: CreateEmailRequestOptions
) => {
  const resend = getResend()
  // (Debug console.logs removed: they dumped the client object and the full
  // payload — including recipient addresses and rendered bodies — to stdout.)
  const { data, error } = await resend.emails.send(
    { replyTo: 'Compass <no-reply@compassmeet.com>', ...payload },
    options
  )
  if (error) {
    log.error(
      `Failed to send email to ${payload.to} with subject ${payload.subject}`
    )
    log.error(error)
    return null
  }
  log(`Sent email to ${payload.to} with subject ${payload.subject}`)
  return data
}
let resend: Resend | null = null
const getResend = () => {
if (resend) return resend
const apiKey = process.env.RESEND_KEY as string
console.log(`RESEND_KEY: ${apiKey}`)
resend = new Resend(apiKey)
return resend
}

View File

@@ -0,0 +1,14 @@
import { sendTestEmail } from './helpers'

// CLI entry point: ts-node send-test-email.ts your@email.com
if (require.main === module) {
  const email = process.argv[2]
  if (!email) {
    console.error('Please provide an email address')
    console.log('Usage: ts-node send-test-email.ts your@email.com')
    process.exit(1)
  }
  sendTestEmail(email)
    .then(() => console.log('Email sent successfully!'))
    .catch((error) => {
      console.error('Failed to send email:', error)
      // Signal failure to the shell instead of exiting 0.
      process.exitCode = 1
    })
}

View File

@@ -0,0 +1,180 @@
import {
Body,
Button,
Container,
Column,
Head,
Html,
Img,
Link,
Preview,
Row,
Section,
Text,
} from '@react-email/components'
import { type User } from 'common/user'
import { DOMAIN } from 'common/envs/constants'
import { jamesUser, sinclairUser } from './functions/mock'
interface NewEndorsementEmailProps {
fromUser: User
onUser: User
endorsementText: string
unsubscribeUrl: string
}
/**
 * Transactional email shown when `fromUser` endorses `onUser`: quotes the
 * endorsement text next to the endorser's avatar, with a link to the
 * endorsed user's profile. The unsubscribe link is currently commented out.
 */
export const NewEndorsementEmail = ({
  fromUser,
  onUser,
  endorsementText,
  unsubscribeUrl,
}: NewEndorsementEmailProps) => {
  // Greet by first name only.
  const name = onUser.name.split(' ')[0]
  const endorsementUrl = `https://${DOMAIN}/${onUser.username}`
  return (
    <Html>
      <Head />
      <Preview>New endorsement from {fromUser.name}</Preview>
      <Body style={main}>
        <Container style={container}>
          {/*<Section style={logoContainer}>*/}
          {/*  <Img*/}
          {/*    src="..."*/}
          {/*    width="550"*/}
          {/*    height="auto"*/}
          {/*    alt="compassmeet.com"*/}
          {/*  />*/}
          {/*</Section>*/}
          <Section style={content}>
            <Text style={paragraph}>Hi {name},</Text>
            <Text style={paragraph}>{fromUser.name} just endorsed you!</Text>
            <Section style={endorsementContainer}>
              <Row>
                <Column>
                  <Img
                    src={fromUser.avatarUrl}
                    width="50"
                    height="50"
                    alt=""
                    style={avatarImage}
                  />
                </Column>
                <Column>
                  <Text style={endorsementTextStyle}>"{endorsementText}"</Text>
                </Column>
              </Row>
              <Button href={endorsementUrl} style={button}>
                View endorsement
              </Button>
            </Section>
          </Section>
          <Section style={footer}>
            <Text style={footerText}>
              This e-mail has been sent to {name},{' '}
              {/* <Link href={unsubscribeUrl} style={footerLink}>
                click here to unsubscribe from this type of notification
              </Link>
              . */}
            </Text>
          </Section>
        </Container>
      </Body>
    </Html>
  )
}
NewEndorsementEmail.PreviewProps = {
fromUser: jamesUser,
onUser: sinclairUser,
endorsementText:
"Sinclair is someone you want to have around because she injects creativity and humor into every conversation, and her laugh is infectious! Not to mention that she's a great employee, treats everyone with respect, and is even-tempered.",
unsubscribeUrl: 'https://compassmeet.com/unsubscribe',
} as NewEndorsementEmailProps
const main = {
backgroundColor: '#f4f4f4',
fontFamily: 'Arial, sans-serif',
wordSpacing: 'normal',
}
const container = {
margin: '0 auto',
maxWidth: '600px',
}
const logoContainer = {
padding: '20px 0px 5px 0px',
textAlign: 'center' as const,
backgroundColor: '#ffffff',
}
const content = {
backgroundColor: '#ffffff',
padding: '20px 25px',
}
const paragraph = {
fontSize: '18px',
lineHeight: '24px',
margin: '10px 0',
color: '#000000',
fontFamily: 'Arial, Helvetica, sans-serif',
}
const endorsementContainer = {
margin: '20px 0',
padding: '15px',
backgroundColor: '#f9f9f9',
borderRadius: '8px',
}
const avatarImage = {
borderRadius: '50%',
}
const endorsementTextStyle = {
fontSize: '16px',
lineHeight: '22px',
fontStyle: 'italic',
color: '#333333',
}
const button = {
backgroundColor: '#4887ec',
borderRadius: '12px',
color: '#ffffff',
fontFamily: 'Helvetica, Arial, sans-serif',
fontSize: '16px',
fontWeight: 'semibold',
textDecoration: 'none',
textAlign: 'center' as const,
display: 'inline-block',
padding: '6px 10px',
margin: '10px 0',
}
const footer = {
margin: '20px 0',
textAlign: 'center' as const,
}
const footerText = {
fontSize: '11px',
lineHeight: '22px',
color: '#000000',
fontFamily: 'Ubuntu, Helvetica, Arial, sans-serif',
}
const footerLink = {
color: 'inherit',
textDecoration: 'none',
}
export default NewEndorsementEmail

View File

@@ -0,0 +1,167 @@
import {
Body,
Button,
Container,
Head,
Html,
Img,
Link,
Preview,
Section,
Text,
} from '@react-email/components'
import { DOMAIN } from 'common/envs/constants'
import { type LoverRow } from 'common/love/lover'
import { getLoveOgImageUrl } from 'common/love/og-image'
import { type User } from 'common/user'
import { jamesLover, jamesUser, sinclairUser } from './functions/mock'
interface NewMatchEmailProps {
onUser: User
matchedWithUser: User
matchedLover: LoverRow
unsubscribeUrl: string
}
/**
 * Transactional email shown when `matchedWithUser` matches with `onUser`:
 * shows the match's OG image linking to their profile. The unsubscribe link
 * is currently commented out.
 */
export const NewMatchEmail = ({
  onUser,
  matchedWithUser,
  matchedLover,
  unsubscribeUrl,
}: NewMatchEmailProps) => {
  // Greet by first name only.
  const name = onUser.name.split(' ')[0]
  const userImgSrc = getLoveOgImageUrl(matchedWithUser, matchedLover)
  const userUrl = `https://${DOMAIN}/${matchedWithUser.username}`
  return (
    <Html>
      <Head />
      <Preview>You have a new match!</Preview>
      <Body style={main}>
        <Container style={container}>
          {/*<Section style={logoContainer}>*/}
          {/*  <Img*/}
          {/*    src="..."*/}
          {/*    width="550"*/}
          {/*    height="auto"*/}
          {/*    alt="compassmeet.com"*/}
          {/*  />*/}
          {/*</Section>*/}
          <Section style={content}>
            <Text style={paragraph}>Hi {name},</Text>
            <Text style={paragraph}>
              {matchedWithUser.name} just matched with you!
            </Text>
            <Section style={imageContainer}>
              <Link href={userUrl}>
                <Img
                  src={userImgSrc}
                  width="375"
                  height="200"
                  alt=""
                  style={profileImage}
                />
              </Link>
              <Button href={userUrl} style={button}>
                View profile
              </Button>
            </Section>
          </Section>
          <Section style={footer}>
            <Text style={footerText}>
              This e-mail has been sent to {name},{' '}
              {/* <Link href={unsubscribeUrl} style={footerLink}>
                click here to unsubscribe from this type of notification
              </Link>
              . */}
            </Text>
          </Section>
        </Container>
      </Body>
    </Html>
  )
}
NewMatchEmail.PreviewProps = {
onUser: sinclairUser,
matchedWithUser: jamesUser,
matchedLover: jamesLover,
unsubscribeUrl: 'https://compassmeet.com/unsubscribe',
} as NewMatchEmailProps
const main = {
backgroundColor: '#f4f4f4',
fontFamily: 'Arial, sans-serif',
wordSpacing: 'normal',
}
const container = {
margin: '0 auto',
maxWidth: '600px',
}
const logoContainer = {
padding: '20px 0px 5px 0px',
textAlign: 'center' as const,
backgroundColor: '#ffffff',
}
const content = {
backgroundColor: '#ffffff',
padding: '20px 25px',
}
const paragraph = {
fontSize: '18px',
lineHeight: '24px',
margin: '10px 0',
color: '#000000',
fontFamily: 'Arial, Helvetica, sans-serif',
}
const imageContainer = {
textAlign: 'center' as const,
margin: '20px 0',
}
const profileImage = {
// border: '1px solid #ec489a',
}
const button = {
backgroundColor: '#4887ec',
borderRadius: '12px',
color: '#ffffff',
fontFamily: 'Helvetica, Arial, sans-serif',
fontSize: '16px',
fontWeight: 'semibold',
textDecoration: 'none',
textAlign: 'center' as const,
display: 'inline-block',
padding: '6px 10px',
margin: '10px 0',
}
const footer = {
margin: '20px 0',
textAlign: 'center' as const,
}
const footerText = {
fontSize: '11px',
lineHeight: '22px',
color: '#000000',
fontFamily: 'Ubuntu, Helvetica, Arial, sans-serif',
}
const footerLink = {
color: 'inherit',
textDecoration: 'none',
}
export default NewMatchEmail

View File

@@ -0,0 +1,169 @@
import {
Body,
Button,
Container,
Head,
Html,
Img,
Link,
Preview,
Section,
Text,
} from '@react-email/components'
import { type User } from 'common/user'
import { type LoverRow } from 'common/love/lover'
import {
jamesLover,
jamesUser,
sinclairLover,
sinclairUser,
} from './functions/mock'
import { DOMAIN } from 'common/envs/constants'
import { getLoveOgImageUrl } from 'common/love/og-image'
interface NewMessageEmailProps {
fromUser: User
fromUserLover: LoverRow
toUser: User
channelId: number
unsubscribeUrl: string
}
/**
 * Transactional email shown when `fromUser` messages `toUser`: shows the
 * sender's OG image and links to the message channel. The unsubscribe link
 * is currently commented out.
 */
export const NewMessageEmail = ({
  fromUser,
  fromUserLover,
  toUser,
  channelId,
  unsubscribeUrl,
}: NewMessageEmailProps) => {
  // Greet by first name only.
  const name = toUser.name.split(' ')[0]
  const creatorName = fromUser.name
  const messagesUrl = `https://${DOMAIN}/messages/${channelId}`
  const userImgSrc = getLoveOgImageUrl(fromUser, fromUserLover)
  return (
    <Html>
      <Head />
      <Preview>New message from {creatorName}</Preview>
      <Body style={main}>
        <Container style={container}>
          {/*<Section style={logoContainer}>*/}
          {/*  <Img*/}
          {/*    src="..."*/}
          {/*    width="550"*/}
          {/*    height="auto"*/}
          {/*    alt="compassmeet.com"*/}
          {/*  />*/}
          {/*</Section>*/}
          <Section style={content}>
            <Text style={paragraph}>Hi {name},</Text>
            <Text style={paragraph}>{creatorName} just messaged you!</Text>
            <Section style={imageContainer}>
              <Link href={messagesUrl}>
                <Img
                  src={userImgSrc}
                  width="375"
                  height="200"
                  alt={`${creatorName}'s profile`}
                  style={profileImage}
                />
              </Link>
              <Button href={messagesUrl} style={button}>
                View message
              </Button>
            </Section>
          </Section>
          <Section style={footer}>
            <Text style={footerText}>
              This e-mail has been sent to {name},{' '}
              {/* <Link href={unsubscribeUrl} style={{ color: 'inherit', textDecoration: 'none' }}>
                click here to unsubscribe from this type of notification
              </Link>
              . */}
            </Text>
          </Section>
        </Container>
      </Body>
    </Html>
  )
}
// Sample props rendered by the react-email dev preview server.
// `satisfies` (rather than `as`) keeps missing/excess-property checking
// instead of silencing it with an assertion.
NewMessageEmail.PreviewProps = {
  fromUser: jamesUser,
  fromUserLover: jamesLover,
  toUser: sinclairUser,
  channelId: 1,
  unsubscribeUrl: 'https://compassmeet.com/unsubscribe',
} satisfies NewMessageEmailProps
// Shared inline styles for the email (email clients require inline CSS).

// Outer body background and base font.
const main = {
  backgroundColor: '#f4f4f4',
  fontFamily: 'Arial, sans-serif',
  wordSpacing: 'normal',
}
// Centered column capped at a typical email content width.
const container = {
  margin: '0 auto',
  maxWidth: '600px',
}
// Header area for the (currently commented-out) logo image.
const logoContainer = {
  padding: '20px 0px 5px 0px',
  textAlign: 'center' as const,
  backgroundColor: '#ffffff',
}
// Main white content card.
const content = {
  backgroundColor: '#ffffff',
  padding: '20px 25px',
}
// Default body text.
const paragraph = {
  fontSize: '18px',
  lineHeight: '24px',
  margin: '10px 0',
  color: '#000000',
  fontFamily: 'Arial, Helvetica, sans-serif',
}
// Wrapper centering the profile image and CTA button.
const imageContainer = {
  textAlign: 'center' as const,
  margin: '20px 0',
}
// Profile image styling (border currently disabled).
const profileImage = {
  // border: '1px solid #ec489a',
}
// Call-to-action button style.
// FIX: CSS `font-weight` has no 'semibold' keyword — clients that validate
// the value fall back to `normal`. Use the numeric equivalent 600.
const button = {
  backgroundColor: '#4887ec',
  borderRadius: '12px',
  color: '#ffffff',
  fontFamily: 'Helvetica, Arial, sans-serif',
  fontSize: '16px',
  fontWeight: 600,
  textDecoration: 'none',
  textAlign: 'center' as const,
  display: 'inline-block',
  padding: '6px 10px',
  margin: '10px 0',
}
// Footer wrapper.
const footer = {
  margin: '20px 0',
  textAlign: 'center' as const,
}
// Small-print footer text.
const footerText = {
  fontSize: '11px',
  lineHeight: '22px',
  color: '#000000',
  fontFamily: 'Ubuntu, Helvetica, Arial, sans-serif',
}
export default NewMessageEmail

View File

@@ -0,0 +1,22 @@
'use server'
import { Head, Html, Preview, Tailwind, Text } from '@react-email/components'
import React from 'react'
/**
 * Minimal template for verifying the email pipeline end to end.
 * FIX: the inbox preview text said "Helloo" while the body said "Hello";
 * use the same greeting in both.
 */
export const Test = (props: { name: string }) => {
  return (
    <Html>
      <Head />
      <Preview>Hello {props.name}</Preview>
      <Tailwind>
        <Text className="text-xl text-blue-800">Hello {props.name}</Text>
      </Tailwind>
    </Html>
  )
}
// Sample props rendered by the react-email dev preview server.
Test.PreviewProps = {
  name: 'Clarity',
}
export default Test

View File

@@ -0,0 +1,38 @@
# Email Knowledge
## Overview
The email module provides React Email components for sending beautiful, responsive emails from the application. We use React Email for templates and Resend for delivery.
## Structure
- `emails/` - Contains all email templates and helper functions
- `functions/` - Helper functions for sending emails
- `helpers.tsx` - Core email sending functions
- `send-email.ts` - Low-level email sending utilities
- `static/` - Image assets used only by the local dev preview server; not needed in production.
## Usage
### Sending Emails
Import the helper functions from the email module to send emails:
```typescript
import { sendNewEndorsementEmail } from 'email/functions/helpers'
// Example usage
await sendNewEndorsementEmail(privateUser, creator, onUser, text)
```
### Creating New Email Templates
1. Create a new React component in the `emails/` directory
2. Use components from `@react-email/components` for email-safe HTML
3. Add preview props
4. Export the component as default
5. Add a helper function in `functions/helpers.tsx` to send the email
### Development
You may run typechecks but you don't need to start the email dev server. Assume the human developer is responsible for that.

View File

@@ -0,0 +1,22 @@
{
"name": "react-email-starter",
"version": "0.1.9",
"private": true,
"scripts": {
"dev": "email dev",
"build": "tsc -b"
},
"dependencies": {
"@react-email/components": "0.0.33",
"react": "19.0.0",
"react-dom": "19.0.0",
"react-email": "3.0.7",
"resend": "4.1.2"
},
"devDependencies": {
"@types/html-to-text": "9.0.4",
"@types/prismjs": "1.26.5",
"@types/react": "19.0.10",
"@types/react-dom": "19.0.4"
}
}

View File

@@ -0,0 +1,34 @@
{
"compilerOptions": {
"rootDir": "emails",
"composite": true,
"module": "commonjs",
"moduleResolution": "node",
"noImplicitReturns": true,
"outDir": "lib",
"tsBuildInfoFile": "lib/tsconfig.tsbuildinfo",
"sourceMap": true,
"strict": true,
"esModuleInterop": true,
"target": "esnext",
"lib": ["dom", "dom.iterable", "esnext"],
"allowJs": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"incremental": true,
"resolveJsonModule": true,
"isolatedModules": false,
"declaration": true,
"jsx": "react-jsx",
"paths": {
"common/*": ["../../common/src/*", "../../../common/lib/*"],
"shared/*": ["../shared/src/*", "../../shared/lib/*"],
"email/*": ["./emails/*"]
}
},
"ts-node": {
"require": ["tsconfig-paths/register"]
},
"references": [{ "path": "../../common" }, { "path": "../shared" }],
"include": ["emails/**/*.ts", "emails/**/*.tsx"]
}

1776
backend/email/yarn.lock Normal file
View File

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,7 @@
{
"projects": {
"default": "compass-130ba",
"prod": "compass-130ba",
"dev": "compass-130ba"
}
}

View File

@@ -0,0 +1,12 @@
{
"storage": [
{
"bucket": "compass-130ba.firebasestorage.app",
"rules": "storage.rules"
},
{
"bucket": "compass-130ba-private.firebasestorage.app",
"rules": "private-storage.rules"
}
]
}

View File

@@ -0,0 +1,10 @@
rules_version = '2';
service firebase.storage {
  match /b/{bucket}/o {
    // Private per-user images: only the owning user may read or write,
    // with uploads capped at 20MB.
    match /private-images/{userId}/{allPaths=**} {
      allow read: if request.auth.uid == userId;
      allow write: if request.auth.uid == userId && request.resource.size <= 20 * 1024 * 1024; // 20MB
    }
  }
}

View File

@@ -0,0 +1,11 @@
rules_version = '2';
service firebase.storage {
  match /b/{bucket}/o {
    // Public bucket: world-readable; anonymous writes allowed up to 10MB.
    match /{allPaths=**} {
      allow read;
      // Don't require auth, as dream uploads can be done by anyone
      allow write: if request.resource.size <= 10 * 1024 * 1024; // 10MB
    }
  }
}

View File

@@ -0,0 +1,18 @@
-- Backfill null created_time values so the NOT NULL constraint below succeeds.
update private_user_messages
set created_time = now() where created_time is null;
-- Tighten column constraints and add defaults.
alter table private_user_messages
alter column created_time set not null,
alter column created_time set default now(),
alter column visibility set not null,
alter column visibility set default 'private',
alter column user_id set not null,
alter column content set not null,
alter column channel_id set not null;
-- Replace the old id column with an auto-generated bigint identity primary key.
alter table private_user_messages
rename column id to old_id;
alter table private_user_messages
add column id bigint generated always as identity primary key;

View File

@@ -0,0 +1,59 @@
import { removeUndefinedProps } from 'common/util/object'
import { runScript } from './run-script'
import { log } from 'shared/monitoring/log'
import { createSupabaseDirectClient } from 'shared/supabase/init'
import { bulkUpdateData } from 'shared/supabase/utils'
import { chunk } from 'lodash'
// One-off migration: consolidate social handles stored on users.data
// (discordHandle, manifoldHandle, twitterHandle, website) and lovers.twitter
// into a single `link` object on users.data.
// FIX: uses the `pg` client supplied by runScript instead of creating a
// redundant second direct client.
runScript(async ({ pg }) => {
  // Get all users and their corresponding lovers
  const users = await pg.manyOrNone(`
    select u.id, u.data, l.twitter
    from users u
    left join lovers l on l.user_id = u.id
  `)
  log('Found', users.length, 'users to migrate')
  const updates = [] as { id: string; link: Record<string, unknown> }[]
  for (const { id, data, twitter } of users) {
    const add = removeUndefinedProps({
      discord: data.discordHandle,
      manifold: data.manifoldHandle,
      // Normalize a twitter/x URL or @handle down to the bare handle.
      x: (twitter || data.twitterHandle)
        ?.trim()
        .replace(/^(https?:\/\/)?(www\.)?(twitter|x)(\.com\/)/, '')
        .replace(/^@/, '')
        .replace(/\/$/, ''),
      // Strip the scheme from the website URL.
      site: data.website?.trim().replace(/^(https?:\/\/)/, ''),
    })
    if (Object.keys(add).length) {
      // Existing link values win over the migrated ones.
      updates.push({ id, link: { ...add, ...(data.link || {}) } })
    }
  }
  // console.log('updates', updates.slice(0, 10))
  // return
  let count = 0
  // Batch the writes to keep statements a reasonable size.
  for (const u of chunk(updates, 100)) {
    log('updating users ', (count += u.length))
    await bulkUpdateData(pg, 'users', u)
  }
  log('initializing the other users')
  // Ensure every remaining user at least has an empty `link` object.
  await pg.none(
    `update users
    set data = jsonb_set(
    data,
    '{link}',
    COALESCE((data -> 'link'), '{}'::jsonb),
    true
    )
    where data -> 'link' is null`
  )
})

View File

@@ -0,0 +1,11 @@
-- historical hotfix. you shouldn't need to run this
-- Gives every user without a `link` object an empty one in users.data.
update users
set
data = jsonb_set(
data,
'{link}',
coalesce((data -> 'link'), '{}'::jsonb),
true
)
where
data -> 'link' is null

View File

@@ -0,0 +1,72 @@
import { runScript } from './run-script'
import {
renderSql,
select,
from,
where,
} from '../shared/src/supabase/sql-builder'
import { SupabaseDirectClient } from 'shared/supabase/init'
// Audit stored rich-text content for node types we may want to strip
// or migrate, one report per type.
runScript(async ({ pg }) => {
  const nodeTypes = [
    'mention',
    'contract-mention',
    'tiptapTweet',
    'spoiler',
    'iframe',
    'linkPreview',
    'gridCardsComponent',
  ]
  for (const nodeType of nodeTypes) {
    await getNodes(pg, nodeType)
  }
})
// Print every comment, private message, and profile bio that contains a
// rich-text node of the given type.
const getNodes = async (pg: SupabaseDirectClient, nodeName: string) => {
  // Comments on user profiles.
  console.log(`\nSearching comments for ${nodeName}...`)
  const commentSql = renderSql(
    select('id, user_id, on_user_id, content'),
    from('lover_comments'),
    where(`jsonb_path_exists(content, '$.**.type ? (@ == "${nodeName}")')`)
  )
  const comments = await pg.manyOrNone(commentSql)
  console.log(`Found ${comments.length} comments:`)
  for (const comment of comments) {
    console.log('\nComment ID:', comment.id)
    console.log('From user:', comment.user_id)
    console.log('On user:', comment.on_user_id)
    console.log('Content:', JSON.stringify(comment.content))
  }

  // Direct messages between users.
  console.log(`\nSearching private messages for ${nodeName}...`)
  const messageSql = renderSql(
    select('id, user_id, channel_id, content'),
    from('private_user_messages'),
    where(`jsonb_path_exists(content, '$.**.type ? (@ == "${nodeName}")')`)
  )
  const messages = await pg.manyOrNone(messageSql)
  console.log(`Found ${messages.length} private messages:`)
  for (const msg of messages) {
    console.log('\nMessage ID:', msg.id)
    console.log('From user:', msg.user_id)
    console.log('Channel:', msg.channel_id)
    console.log('Content:', JSON.stringify(msg.content))
  }

  // Profile bios (stored as jsonb-in-text, hence the cast).
  console.log(`\nSearching profiles for ${nodeName}...`)
  const bioSql = renderSql(
    select('user_id, bio'),
    from('lovers'),
    where(`jsonb_path_exists(bio::jsonb, '$.**.type ? (@ == "${nodeName}")')`)
  )
  const profiles = await pg.manyOrNone(bioSql)
  console.log(`Found ${profiles.length} users:`)
  for (const user of profiles) {
    console.log('\nUser ID:', user.user_id)
    console.log('Bio:', JSON.stringify(user.bio))
  }
}

View File

@@ -0,0 +1,35 @@
-- Copy data into tables
-- Import public user rows from the temp dump table.
insert into
users (data, id, name, username, created_time)
select
user_data, id, name, username, created_time
from
temp_users;
-- Import private user data from the same dump.
insert into
private_users (data, id)
select
private_user_data, id
from
temp_users;
-- Rename temp_love_messages
-- alter table temp_love_messages
-- rename to private_user_messages;
-- alter table private_user_messages
-- alter column channel_id set not null,
-- alter column content set not null,
-- alter column created_time set not null,
-- alter column created_time set default now(),
-- alter column id set not null,
-- alter column user_id set not null,
-- alter column visibility set not null,
-- alter column visibility set default 'private';
-- alter table private_user_messages
-- alter column id add generated always as identity;
-- alter table private_user_messages
-- add constraint private_user_messages_pkey primary key (id);

View File

@@ -0,0 +1,32 @@
#!/bin/bash
# Load a dump into the target Supabase database.
# FIX: removed the stray trailing backslash after the -f argument; a dangling
# line continuation would splice the commented-out commands below into the
# psql invocation.
(
  # Set PGPASSWORD
  source ../../.env

  # Target database connection info - replace with your target DB
  # DB_NAME="db.gxbejryrwhsmuailcdur.supabase.co" # dev
  DB_NAME="db.lltoaluoavlzrgjplire.supabase.co" # prod
  DB_USER="postgres"
  PORT="5432"

  psql -U $DB_USER -d postgres -h $DB_NAME -p $PORT -w \
    -f ./love-stars-dump.sql

  # psql -U $DB_USER -d postgres -h $DB_NAME -p $PORT -w \
  # -c 'drop table temp_users cascade;'
  # psql -U $DB_USER -d postgres -h $DB_NAME -p $PORT -w \
  # -f ./temp-users-dump.sql \
  # psql -U $DB_USER -d postgres -h $DB_NAME -p $PORT -w \
  # -f ../supabase/private_users.sql \
  # -f ../supabase/users.sql
  # psql -U $DB_USER -d postgres -h $DB_NAME -p $PORT -w \
  # -f './import-love-finalize.sql'
  echo "Done"
)

View File

@@ -0,0 +1,357 @@
import * as fs from 'fs/promises'
import { execSync } from 'child_process'
import { type SupabaseDirectClient } from 'shared/supabase/init'
import { runScript } from 'run-script'
// All generated schema files are written here.
const outputDir = `../supabase/`
// Regenerate the schema files from the live database.
runScript(async ({ pg }) => {
  // make the output directory if it doesn't exist
  execSync(`mkdir -p ${outputDir}`)
  // delete all sql files except seed.sql
  execSync(
    `cd ${outputDir} && find *.sql -type f ! -name seed.sql -delete || true`
  )
  await generateSQLFiles(pg)
})
/**
 * Collect everything needed to regenerate one table's schema file:
 * columns, check constraints, primary/foreign keys, triggers, the RLS
 * flag, policies, and indexes.
 */
async function getTableInfo(pg: SupabaseDirectClient, tableName: string) {
  // Column metadata. Joins pg_attribute/pg_attrdef because
  // information_schema alone doesn't expose generated-column details.
  const columns = await pg.manyOrNone<{
    name: string
    type: string
    not_null: boolean
    default: string | null
    identity: boolean
    always: 'BY DEFAULT' | 'ALWAYS'
    gen: string | null
    stored: 'STORED' | 'VIRTUAL'
  }>(
    `SELECT
    column_name as name,
    format_type(a.atttypid, a.atttypmod) as type,
    is_nullable = 'NO' as not_null,
    column_default as default,
    is_identity = 'YES' as identity,
    identity_generation as always,
    pg_get_expr(d.adbin, d.adrelid, true) AS gen,
    CASE
    WHEN a.attgenerated = 's' THEN 'STORED'
    WHEN a.attgenerated = 'v' THEN 'VIRTUAL'
    ELSE NULL
    END AS stored
    FROM information_schema.columns c
    LEFT JOIN pg_catalog.pg_attribute a
    ON a.attrelid = c.table_name::regclass
    AND a.attname = c.column_name
    AND NOT a.attisdropped
    JOIN pg_catalog.pg_type t ON t.oid = a.atttypid
    LEFT JOIN pg_catalog.pg_attrdef d
    ON d.adrelid = a.attrelid
    AND d.adnum = a.attnum
    WHERE table_schema = 'public' AND table_name = $1
    ORDER BY column_name`,
    [tableName]
  )
  // CHECK constraints, excluding the implicit "x IS NOT NULL" ones.
  const checks = await pg.manyOrNone<{
    name: string
    definition: string
  }>(
    `SELECT
    cc.constraint_name as name,
    cc.check_clause as definition
    FROM information_schema.table_constraints tc
    join information_schema.check_constraints cc
    ON tc.constraint_schema = cc.constraint_schema
    AND tc.constraint_name = cc.constraint_name
    WHERE tc.constraint_type = 'CHECK'
    AND NOT cc.check_clause ilike '% IS NOT NULL'
    AND tc.table_schema = 'public'
    AND tc.table_name = $1`,
    [tableName]
  )
  // Names of the primary key column(s).
  const primaryKeys = await pg.map(
    `SELECT c.column_name
    FROM
    information_schema.table_constraints tc
    JOIN
    information_schema.constraint_column_usage AS ccu
    USING (constraint_schema, constraint_name)
    JOIN information_schema.columns AS c
    ON c.table_schema = tc.constraint_schema
    AND tc.table_name = c.table_name
    AND ccu.column_name = c.column_name
    WHERE constraint_type = 'PRIMARY KEY' AND tc.table_schema = 'public' AND tc.table_name = $1`,
    [tableName],
    (row) => row.column_name as string
  )
  // Foreign keys with their full pg_get_constraintdef definitions.
  const foreignKeys = await pg.manyOrNone<{
    constraint_name: string
    definition: string
  }>(
    `SELECT
    conname AS constraint_name,
    pg_get_constraintdef(c.oid) AS definition
    FROM
    pg_constraint c
    JOIN
    pg_namespace n ON n.oid = c.connamespace
    WHERE
    contype = 'f'
    AND conrelid = $1::regclass`,
    [tableName]
  )
  // User-defined (non-internal) triggers.
  const triggers = await pg.manyOrNone<{
    trigger_name: string
    definition: string
  }>(
    `SELECT
    tgname AS trigger_name,
    pg_get_triggerdef(t.oid) AS definition
    FROM
    pg_trigger t
    WHERE
    tgrelid = $1::regclass
    AND NOT tgisinternal`,
    [tableName]
  )
  // Whether row level security is enabled on the table.
  const rlsEnabled = await pg.one(
    `SELECT relrowsecurity
    FROM pg_class
    WHERE oid = $1::regclass`,
    [tableName]
  )
  const rls = !!rlsEnabled.relrowsecurity
  // RLS policies, with the command code decoded to a keyword.
  const policies = await pg.any(
    `SELECT
    polname AS policy_name,
    pg_get_expr(polqual, polrelid) AS expression,
    pg_get_expr(polwithcheck, polrelid) AS with_check,
    (select r.rolname from unnest(polroles) u join pg_roles r on r.oid = u.u) AS role,
    CASE
    WHEN polcmd = '*' THEN 'ALL'
    WHEN polcmd = 'r' THEN 'SELECT'
    WHEN polcmd = 'a' THEN 'INSERT'
    WHEN polcmd = 'w' THEN 'UPDATE'
    WHEN polcmd = 'd' THEN 'DELETE'
    ELSE polcmd::text
    END AS command
    FROM
    pg_policy
    WHERE
    polrelid = $1::regclass`,
    [tableName]
  )
  // All indexes on the table (includes primary-key indexes).
  const indexes = await pg.manyOrNone<{
    index_name: string
    definition: string
  }>(
    `SELECT
    indexname AS index_name,
    indexdef AS definition
    FROM
    pg_indexes
    WHERE
    schemaname = 'public'
    AND tablename = $1
    ORDER BY
    indexname`,
    [tableName]
  )
  return {
    tableName,
    columns,
    checks,
    primaryKeys,
    foreignKeys,
    triggers,
    rls,
    policies,
    indexes,
  }
}
// Fetch all plain (prokind = 'f') public functions, excluding ones whose
// definitions point at C extensions under $libdir.
async function getFunctions(pg: SupabaseDirectClient) {
  console.log('Getting functions')
  const allFunctions = await pg.manyOrNone<{
    function_name: string
    definition: string
  }>(
    `SELECT
    proname AS function_name,
    pg_get_functiondef(oid) AS definition
    FROM pg_proc
    WHERE
    pronamespace = 'public'::regnamespace
    and prokind = 'f'
    ORDER BY proname asc, pronargs asc, oid desc`
  )
  const isExtensionBacked = (f: { definition: string }) =>
    f.definition.includes(`'$libdir/`)
  return allFunctions.filter((f) => !isExtensionBacked(f))
}
// Fetch the name and definition of every public view.
async function getViews(pg: SupabaseDirectClient) {
  console.log('Getting views')
  const views = await pg.manyOrNone<{ view_name: string; definition: string }>(
    `SELECT
    table_name AS view_name,
    view_definition AS definition
    FROM information_schema.views
    where table_schema = 'public'
    ORDER BY table_name asc`
  )
  return views
}
/**
 * Write one .sql file per table (columns, constraints, foreign keys,
 * triggers plus their functions, RLS, policies, indexes), then
 * functions.sql and views.sql, and prettify everything.
 *
 * FIXES vs the previous version:
 * - inline primary keys emitted `PRIMARY KEY <table>_pkey`, which is
 *   invalid SQL (a constraint name must follow CONSTRAINT); now emits
 *   `CONSTRAINT <table>_pkey PRIMARY KEY`
 * - composite keys emitted `CONSTRAINT PRIMARY KEY (...)` with no name
 * - serial primary-key columns never got a PRIMARY KEY clause at all
 */
async function generateSQLFiles(pg: SupabaseDirectClient) {
  const tables = await pg.map(
    "SELECT tablename FROM pg_tables WHERE schemaname = 'public'",
    [],
    (row) => row.tablename as string
  )
  console.log(`Getting info for ${tables.length} tables`)
  const tableInfos = await Promise.all(
    tables.map((table) => getTableInfo(pg, table))
  )
  const functions = await getFunctions(pg)
  const views = await getViews(pg)
  for (const tableInfo of tableInfos) {
    let content = `-- This file is autogenerated from regen-schema.ts\n\n`
    content += `CREATE TABLE IF NOT EXISTS ${tableInfo.tableName} (\n`
    // Attach single-column check constraints to their column; the rest
    // become table-level constraints.
    const checksByColumn: {
      [col: string]: { name: string; definition: string }
    } = {}
    const remainingChecks = []
    for (const check of tableInfo.checks) {
      const matches = tableInfo.columns.filter((c) =>
        check.definition.includes(c.name)
      )
      if (matches.length === 1) {
        checksByColumn[matches[0].name] = check
      } else {
        remainingChecks.push(check)
      }
    }
    const pkeys = tableInfo.primaryKeys
    for (const c of tableInfo.columns) {
      // A nextval(...) default means the column was declared serial.
      const isSerial = c.default?.startsWith('nextval(')
      if (isSerial) {
        content += `  ${c.name} ${c.type === 'bigint' ? 'bigserial' : 'serial'}`
      } else {
        content += `  ${c.name} ${c.type}`
        if (c.default) content += ` DEFAULT ${c.default}`
        else if (c.identity) content += ` GENERATED ${c.always} AS IDENTITY`
        else if (c.gen) content += ` GENERATED ALWAYS AS (${c.gen}) ${c.stored}`
      }
      // Single-column primary key is declared inline (for serial columns too).
      if (pkeys.length === 1 && pkeys[0] === c.name)
        content += ` CONSTRAINT ${tableInfo.tableName}_pkey PRIMARY KEY`
      if (c.not_null) content += ' NOT NULL'
      const check = checksByColumn[c.name]
      if (check)
        content += ` CONSTRAINT ${check.name} CHECK ${check.definition}`
      content += ',\n'
    }
    if (pkeys.length > 1) {
      // Composite primary key as a named table-level constraint.
      content += `  CONSTRAINT ${tableInfo.tableName}_pkey PRIMARY KEY (${pkeys.join(
        ', '
      )}),\n`
    }
    for (const check of remainingChecks) {
      content += `  CONSTRAINT ${check.name} CHECK ${check.definition},\n`
    }
    // remove the trailing comma
    content = content.replace(/,(?=[^,]+$)/, '')
    content += ');\n\n'
    if (tableInfo.foreignKeys.length > 0) content += `-- Foreign Keys\n`
    for (const fk of tableInfo.foreignKeys) {
      content += `ALTER TABLE ${tableInfo.tableName} ADD CONSTRAINT ${fk.constraint_name} ${fk.definition};\n`
    }
    content += '\n'
    const tableFunctions = []
    if (tableInfo.triggers.length > 0) content += `-- Triggers\n`
    for (const trigger of tableInfo.triggers) {
      content += `${trigger.definition};\n`
      // Keep each trigger's function in the same file as its table.
      const funcName = trigger.definition.match(/execute function (\w+)/i)?.[1]
      if (funcName) tableFunctions.push(funcName)
    }
    content += '\n'
    if (tableFunctions.length > 0) content += `-- Functions\n`
    for (const func of tableFunctions) {
      const i = functions.findIndex((f) => f.function_name === func)
      if (i >= 0) {
        content += `${functions[i].definition};\n\n`
        functions.splice(i, 1) // remove from list so we don't duplicate
      }
    }
    if (tableInfo.rls) {
      content += `-- Row Level Security\n`
      content += `ALTER TABLE ${tableInfo.tableName} ENABLE ROW LEVEL SECURITY;\n`
    }
    if (tableInfo.policies.length > 0) {
      content += `-- Policies\n`
    }
    for (const policy of tableInfo.policies) {
      content += `DROP POLICY IF EXISTS "${policy.policy_name}" ON ${tableInfo.tableName};\n`
      content += `CREATE POLICY "${policy.policy_name}" ON ${tableInfo.tableName} `
      if (policy.command) content += `FOR ${policy.command} `
      if (policy.role) content += `TO ${policy.role} `
      if (policy.expression) content += `USING (${policy.expression}) `
      if (policy.with_check) content += `WITH CHECK (${policy.with_check})`
      content += ';\n\n'
    }
    if (tableInfo.indexes.length > 0) content += `-- Indexes\n`
    for (const index of tableInfo.indexes) {
      // Drop-and-recreate keeps index definitions in sync with the DB.
      content += `DROP INDEX IF EXISTS ${index.index_name};\n`
      content += `${index.definition};\n`
    }
    content += '\n'
    await fs.writeFile(`${outputDir}/${tableInfo.tableName}.sql`, content)
  }
  console.log('Writing remaining functions to functions.sql')
  let functionsContent = `-- This file is autogenerated from regen-schema.ts\n\n`
  for (const func of functions) {
    functionsContent += `${func.definition};\n\n`
  }
  await fs.writeFile(`${outputDir}/functions.sql`, functionsContent)
  console.log('Writing views to views.sql')
  let viewsContent = `-- This file is autogenerated from regen-schema.ts\n\n`
  for (const view of views) {
    viewsContent += `CREATE OR REPLACE VIEW ${view.view_name} AS\n`
    viewsContent += `${view.definition}\n\n`
  }
  await fs.writeFile(`${outputDir}/views.sql`, viewsContent)
  console.log('Prettifying SQL files...')
  execSync(
    `prettier --write ${outputDir}/*.sql --ignore-path ../supabase/.gitignore`
  )
}

View File

@@ -0,0 +1,76 @@
import { runScript } from './run-script'
import {
renderSql,
select,
from,
where,
} from '../shared/src/supabase/sql-builder'
import { type JSONContent } from '@tiptap/core'
// Recursively strip every node of the given type from a Tiptap document.
// Returns null when the node itself matches; otherwise returns the node
// with matching descendants filtered out of its children.
const removeNodesOfType = (
  content: JSONContent,
  typeToRemove: string
): JSONContent | null => {
  if (content.type === typeToRemove) {
    return null
  }
  const children = content.content
  if (!children) {
    // Leaf node: nothing below to filter.
    return content
  }
  const kept: JSONContent[] = []
  for (const child of children) {
    const result = removeNodesOfType(child, typeToRemove)
    if (result != null) kept.push(result)
  }
  return { ...content, content: kept }
}
// One-off cleanup: remove every linkPreview node from lover_comments and
// private_user_messages content, logging before/after for each row.
runScript(async ({ pg }) => {
  const nodeType = 'linkPreview'
  console.log('\nSearching comments for linkPreviews...')
  const commentQuery = renderSql(
    select('id, content'),
    from('lover_comments'),
    where(`jsonb_path_exists(content, '$.**.type ? (@ == "${nodeType}")')`)
  )
  const comments = await pg.manyOrNone(commentQuery)
  console.log(`Found ${comments.length} comments with linkPreviews`)
  for (const comment of comments) {
    // Rewrite the comment with the offending nodes stripped out.
    const newContent = removeNodesOfType(comment.content, nodeType)
    console.log('before', comment.content)
    console.log('after', newContent)
    await pg.none('update lover_comments set content = $1 where id = $2', [
      newContent,
      comment.id,
    ])
    console.log('Updated comment:', comment.id)
  }
  console.log('\nSearching private messages for linkPreviews...')
  const messageQuery = renderSql(
    select('id, content'),
    from('private_user_messages'),
    where(`jsonb_path_exists(content, '$.**.type ? (@ == "${nodeType}")')`)
  )
  const messages = await pg.manyOrNone(messageQuery)
  console.log(`Found ${messages.length} messages with linkPreviews`)
  for (const msg of messages) {
    // Same cleanup for direct messages.
    const newContent = removeNodesOfType(msg.content, nodeType)
    console.log('before', JSON.stringify(msg.content, null, 2))
    console.log('after', JSON.stringify(newContent, null, 2))
    await pg.none(
      'update private_user_messages set content = $1 where id = $2',
      [newContent, msg.id]
    )
    console.log('Updated message:', msg.id)
  }
})

View File

@@ -0,0 +1,22 @@
import { getLocalEnv, initAdmin } from 'shared/init-admin'
import { getServiceAccountCredentials, loadSecretsToEnv } from 'common/secrets'
import {
createSupabaseDirectClient,
type SupabaseDirectClient,
} from 'shared/supabase/init'
// Initialize the Firebase admin SDK as soon as the module loads.
initAdmin()

/**
 * Boilerplate for one-off scripts: loads service-account secrets into the
 * environment, connects to the database, runs `main`, then exits.
 *
 * FIX: previously an exception thrown by `main` bypassed `process.exit()`
 * entirely, leaving the process hanging on open connections (or exiting
 * without a failure code). Errors are now logged and exit with code 1.
 */
export const runScript = async (
  main: (services: { pg: SupabaseDirectClient }) => Promise<any> | any
) => {
  const env = getLocalEnv()
  const credentials = getServiceAccountCredentials(env)
  await loadSecretsToEnv(credentials)
  const pg = createSupabaseDirectClient()
  try {
    await main({ pg })
    process.exit()
  } catch (e) {
    console.error(e)
    process.exit(1)
  }
}

View File

@@ -0,0 +1,29 @@
{
"compilerOptions": {
"baseUrl": "./",
"composite": true,
"module": "commonjs",
"noImplicitReturns": true,
"outDir": "./lib",
"strict": true,
"target": "esnext",
"esModuleInterop": true,
"paths": {
"common/*": ["../../common/src/*", "../../../common/lib/*"],
"api/*": ["../api/src/*", "../../api/lib/*"],
"shared/*": ["../shared/src/*", "../../shared/lib/*"],
"email/*": ["../email/emails/*", "../../email/lib/*"],
"scripts/*": ["./src/*"]
}
},
"ts-node": {
"require": ["tsconfig-paths/register"]
},
"references": [
{ "path": "../../common" },
{ "path": "../shared" },
{ "path": "../api" },
{ "path": "../email" }
],
"compileOnSave": true
}

View File

@@ -0,0 +1,50 @@
// ESLint configuration for this backend package.
module.exports = {
  plugins: ['lodash', 'unused-imports'],
  extends: ['eslint:recommended'],
  // Don't lint build output.
  ignorePatterns: ['dist', 'lib'],
  env: {
    node: true,
  },
  overrides: [
    {
      // TypeScript files get the typescript-eslint rules, type-aware via
      // the local tsconfig.
      files: ['**/*.ts'],
      plugins: ['@typescript-eslint'],
      extends: ['plugin:@typescript-eslint/recommended'],
      parser: '@typescript-eslint/parser',
      parserOptions: {
        tsconfigRootDir: __dirname,
        project: ['./tsconfig.json'],
      },
      rules: {
        // Allow `{}` as a type while keeping the other banned-type defaults.
        '@typescript-eslint/ban-types': [
          'error',
          {
            extendDefaults: true,
            types: {
              '{}': false,
            },
          },
        ],
        '@typescript-eslint/no-explicit-any': 'off',
        '@typescript-eslint/no-extra-semi': 'off',
        // Underscore-prefixed args/vars/caught errors are intentionally unused.
        '@typescript-eslint/no-unused-vars': [
          'warn',
          {
            argsIgnorePattern: '^_',
            varsIgnorePattern: '^_',
            caughtErrorsIgnorePattern: '^_',
          },
        ],
        'unused-imports/no-unused-imports': 'warn',
        'no-constant-condition': 'off',
      },
    },
  ],
  rules: {
    // Accept the host platform's native line endings.
    'linebreak-style': [
      'error',
      process.platform === 'win32' ? 'windows' : 'unix',
    ],
    // Require member imports from lodash, e.g. import { chunk } from 'lodash'.
    'lodash/import-scope': [2, 'member'],
  },
}

Some files were not shown because too many files have changed in this diff Show More