Merge pull request #3 from spacedriveapp/jamie-unsorted-changes

This commit is contained in:
Jamie Pine
2022-05-12 15:39:03 -07:00
committed by GitHub
130 changed files with 2440 additions and 1125 deletions

View File

@@ -1,2 +1,8 @@
[alias]
prisma = "run -p prisma-cli --"
prisma = "run -p prisma-cli --"
[target.x86_64-apple-darwin]
rustflags = [
"-C", "link-arg=-undefined",
"-C", "link-arg=dynamic_lookup",
]

View File

@@ -1,36 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: '[BUG] Give a suitable title'
labels: 'bug'
assignees: ''
---
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Desktop (please complete the following information):**
- OS: [e.g. macOS, Windows, Linux, iOS, watchOS, Android]
- Version [e.g. 22]
**Smartphone (please complete the following information):**
- Device: [e.g. iPhone6]
- OS: [e.g. iOS8.1]
- Version [e.g. 22]
**Additional context**
Add any other context about the problem here.

61
.github/ISSUE_TEMPLATE/bug_report.yml vendored Normal file
View File

@@ -0,0 +1,61 @@
name: 🐞 Bug Report
description: Report a bug
labels:
- kind/bug
- status/needs-triage
body:
- type: markdown
attributes:
value: |
## First of all
1. Please search for [existing issues](https://github.com/spacedriveapp/spacedrive/issues?q=is%3Aissue) about this problem first.
2. Make sure you have the latest version of Rust (`rustup update`) and PNPM (`pnpm add -g pnpm`) along with all relevant Spacedrive dependencies.
3. Make sure it's an issue with Spacedrive and not something else you are using.
4. Remember to follow our community guidelines and be friendly.
- type: textarea
id: description
attributes:
label: Describe the bug
description: A clear description of what the bug is. Include screenshots if applicable.
placeholder: Bug description
validations:
required: true
- type: textarea
id: reproduction
attributes:
label: Reproduction
description: Steps to reproduce the behavior.
placeholder: |
1. Go to ...
2. Click on ...
3. See error
- type: textarea
id: expected-behavior
attributes:
label: Expected behavior
description: A clear description of what you expected to happen.
- type: textarea
id: info
attributes:
label: Platform and versions
description: "Please include the output of `pnpm --version && cargo --version && rustc --version` along with information about your Operating System such as version and/or specific distribution if relevant."
render: shell
validations:
required: true
- type: textarea
id: logs
attributes:
label: Stack trace
render: shell
- type: textarea
id: context
attributes:
label: Additional context
description: Add any other context about the problem here.

11
.github/ISSUE_TEMPLATE/config.yml vendored Normal file
View File

@@ -0,0 +1,11 @@
blank_issues_enabled: false
contact_links:
- name: 🙏 Get Help
url: https://github.com/spacedriveapp/spacedrive/discussions/new?category=help
about: If you can't get something to work the way you expect, open a question in our discussion forums.
- name: 💡 Feature Request
url: https://github.com/spacedriveapp/spacedrive/discussions/new?category=ideas
about: Suggest any ideas you have using our discussion forums.
- name: 💬 Discord Chat
url: https://discord.gg/gTaF2Z44f5
about: Ask questions and talk to other Spacedrive users and the maintainers

View File

@@ -1,20 +0,0 @@
---
name: Feature request
about: Suggest an idea for this project
title: '[FEATURE] Give a suitable title'
labels: 'enhancement'
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

View File

@@ -1,4 +1,4 @@
name: Build and Publish Server
name: Build Server Image
description: Builds and publishes the docker image for the Spacedrive server
inputs:
gh_token:
@@ -34,10 +34,15 @@ runs:
echo "IMAGE_TAG=$IMAGE_TAG" >> $GITHUB_ENV
echo "Building $IMAGE_NAME:$IMAGE_TAG"
- name: Build & push Docker image
- name: Build Docker image
shell: bash
run: |
docker build ./apps/server --tag $IMAGE_NAME:$IMAGE_TAG
- name: Push Docker image
shell: bash
if: github.event_name != 'pull_request'
run: |
docker push $IMAGE_NAME:$IMAGE_TAG
- name: Tag & push image as latest staging image

View File

@@ -0,0 +1,44 @@
name: Install dependencies
description: Installs OS-specific dependencies for builds
runs:
using: "composite"
steps:
- name: Install ffmpeg & tauri deps (Ubuntu)
if: matrix.platform == 'ubuntu-latest'
shell: bash
run: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends \
libssl-dev \
libavcodec-dev \
libavdevice-dev \
libavfilter-dev \
libavformat-dev \
libavresample-dev \
libavutil-dev \
libswscale-dev \
libswresample-dev \
ffmpeg \
libgtk-3-dev \
webkit2gtk-4.0 \
libappindicator3-dev \
librsvg2-dev \
patchelf
- name: Install ffmpeg (macOS)
if: matrix.platform == 'macos-latest'
shell: bash
run: brew install ffmpeg
- name: Install vcpkg & ffmpeg (Windows)
if: matrix.platform == 'windows-latest'
shell: powershell
run: |
$VCINSTALLDIR = $(& "${env:ProgramFiles(x86)}\Microsoft Visual Studio\Installer\vswhere.exe" -latest -property installationPath)
Add-Content $env:GITHUB_ENV "LIBCLANG_PATH=${VCINSTALLDIR}\VC\Tools\LLVM\x64\bin`n"
Invoke-WebRequest "https://www.gyan.dev/ffmpeg/builds/ffmpeg-release-full-shared.7z" -OutFile ffmpeg-release-full-shared.7z
7z x ffmpeg-release-full-shared.7z
mkdir ffmpeg
mv ffmpeg-*/* ffmpeg/
Add-Content $env:GITHUB_ENV "FFMPEG_DIR=${pwd}\ffmpeg`n"
Add-Content $env:GITHUB_PATH "${pwd}\ffmpeg\bin`n"

View File

@@ -0,0 +1,5 @@
name: 'Install ffmpeg macOS'
description: 'Installs ffmpeg with caching for macOS'
runs:
using: 'node16'
main: 'index.js'

View File

@@ -0,0 +1,8 @@
// @ts-check
const core = require('@actions/core');
const exec = require('@actions/exec');
const github = require('@actions/github');
// const folders =
exec.exec('brew', ['install', 'ffmpeg']);

View File

@@ -0,0 +1,17 @@
{
"name": "install-ffmpeg-macos",
"version": "0.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "Brendan Allan",
"license": "ISC",
"dependencies": {
"@actions/core": "^1.6.0",
"@actions/exec": "^1.1.1",
"@actions/github": "^5.0.1"
}
}

View File

@@ -0,0 +1,36 @@
name: Publish desktop artifacts
description: Publishes desktop artifacts after Tauri build
runs:
using: 'composite'
steps:
- name: Make AppImage executable
if: matrix.platform == 'ubuntu-latest'
shell: bash
run: chmod +x ./target/release/bundle/appimage/spacedrive*.AppImage
- name: Determine short GitHub SHA
shell: bash
run: |
export GITHUB_SHA_SHORT=$(git rev-parse --short "$GITHUB_SHA")
echo "GITHUB_SHA_SHORT=$GITHUB_SHA_SHORT" >> $GITHUB_ENV
- name: Publish artifacts (AppImage)
if: matrix.platform == 'ubuntu-latest'
uses: actions/upload-artifact@v3
with:
name: Spacedrive-AppImage-${{ env.GITHUB_SHA_SHORT }}
path: ./target/release/bundle/appimage/spacedrive*.AppImage
- name: Publish artifacts (Windows)
if: matrix.platform == 'windows-latest'
uses: actions/upload-artifact@v3
with:
name: Spacedrive-Windows-${{ env.GITHUB_SHA_SHORT }}
path: .\target\release\bundle\msi\*.msi
- name: Publish artifacts (macOS)
if: matrix.platform == 'macos-latest'
uses: actions/upload-artifact@v3
with:
name: Spacedrive-macOS-${{ env.GITHUB_SHA_SHORT }}
path: ./target/release/bundle/macos/*.app

10
.github/pull_request_template.md vendored Normal file
View File

@@ -0,0 +1,10 @@
<!-- Put any information about this PR up here -->
<!-- Which issue does this PR close? -->
<!-- If this PR does not have a corresponding issue,
make sure one gets created before you create this PR.
You can create a bug report or feature request at
https://github.com/spacedriveapp/spacedrive/issues/new/choose -->
Closes #(issue)

62
.github/scripts/setup-system.sh vendored Executable file
View File

@@ -0,0 +1,62 @@
#!/bin/bash
echo "Setting up your system for Spacedrive development!"
which cargo &> /dev/null
if [ $? -eq 1 ]; then
echo "Rust was not detected on your system. Ensure the 'rustc' and 'cargo' binaries are in your \$PATH."
exit 1
fi
which pnpm &> /dev/null
if [ $? -eq 1 ]; then
echo "PNPM was not detected on your system. Ensure the 'pnpm' command is in your \$PATH. You are **not** able to use Yarn or NPM."
exit 1
fi
if [[ "$OSTYPE" == "linux-gnu"* ]]; then
if which apt-get &> /dev/null; then
echo "Detected 'apt' based distro!"
DEBIAN_TAURI_DEPS="libwebkit2gtk-4.0-dev build-essential curl wget libssl-dev libgtk-3-dev libappindicator3-dev librsvg2-dev" # Tauri dependencies
DEBIAN_FFMPEG_DEPS="libavcodec-dev libavdevice-dev libavfilter-dev libavformat-dev libavresample-dev libavutil-dev libswscale-dev libswresample-dev ffmpeg" # FFMPEG dependencies
DEBIAN_BINDGEN_DEPS="pkg-config clang" # Bindgen dependencies - it's used by a dependency of Spacedrive
sudo apt-get -y update
sudo apt-get -y install $DEBIAN_TAURI_DEPS $DEBIAN_FFMPEG_DEPS $DEBIAN_BINDGEN_DEPS
elif which pacman &> /dev/null; then
echo "Detected 'pacman' based distro!"
sudo pacman -S --needed webkit2gtk base-devel curl wget openssl appmenu-gtk-module gtk3 libappindicator-gtk3 librsvg libvips
ARCH_TAURI_DEPS="libwebkit2gtk-4.0-dev build-essential curl wget libssl-dev libgtk-3-dev libappindicator3-dev librsvg2-dev" # Tauri dependencies
ARCH_FFMPEG_DEPS="" # FFMPEG dependencies # TODO
ARCH_BINDGEN_DEPS="clang" # Bindgen dependencies - it's used by a dependency of Spacedrive
sudo pacman -Syu
sudo pacman -S --needed $ARCH_TAURI_DEPS $ARCH_FFMPEG_DEPS $ARCH_BINDGEN_DEPS
# TODO: Remove warning
echo "The FFMPEG dependencies are not yet included in this script for your Linux Distro. Please install them manually."
echo "It would also be greatly appreciated if you could ping @oscartbeaumont in the Discord or on GitHub so we can work these out together and update the script for everyone."
exit 1
elif which dnf &> /dev/null; then
echo "Detected 'dnf' based distro!"
FEDORA_TAURI_DEPS="webkit2gtk3-devel.x86_64 openssl-devel curl wget libappindicator-gtk3 librsvg2-devel" # Tauri dependencies
FEDORA_FFMPEG_DEPS="ffmpeg ffmpeg-devel" # FFMPEG dependencies
FEDORA_BINDGEN_DEPS="clang" # Bindgen dependencies - it's used by a dependency of Spacedrive
sudo dnf check-update
sudo dnf install $FEDORA_TAURI_DEPS $FEDORA_FFMPEG_DEPS $FEDORA_BINDGEN_DEPS
sudo dnf group install "C Development Tools and Libraries"
else
echo "Your Linux distro '$(lsb_release -s -d)' is not supported by this script. We would welcome a PR or some help adding your OS to this script. https://github.com/spacedriveapp/spacedrive/issues"
exit 1
fi
echo "Your machine has been setup for Spacedrive development!"
elif [[ "$OSTYPE" == "darwin"* ]]; then
brew install ffmpeg
else
echo "Your OS '$OSTYPE' is not supported by this script. We would welcome a PR or some help adding your OS to this script. https://github.com/spacedriveapp/spacedrive/issues"
exit 1
fi

View File

@@ -1,51 +1,86 @@
name: CI
on:
pull_request:
push:
branches:
- main
- ci
- new-ci
paths-ignore:
- '**/.md'
workflow_dispatch:
env:
CARGO_INCREMENTAL: 1
jobs:
build:
strategy:
fail-fast: false
matrix:
platform: [macos-latest, ubuntu-latest, windows-latest]
env:
FFMPEG_DOWNLOAD_URL: https://www.gyan.dev/ffmpeg/builds/ffmpeg-release-full-shared.7z
RUST_CACHE_VERSION: 0
runs-on: ${{ matrix.platform }}
typescript:
name: TypeScript
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
# from https://github.com/zmwangx/rust-ffmpeg/blob/master/.github/workflows/build.yml
- name: Install ffmpeg (Windows)
if: matrix.platform == 'windows-latest'
run: |
$VCINSTALLDIR = $(& "${env:ProgramFiles(x86)}\Microsoft Visual Studio\Installer\vswhere.exe" -latest -property installationPath)
Add-Content $env:GITHUB_ENV "LIBCLANG_PATH=${VCINSTALLDIR}\VC\Tools\LLVM\x64\bin`n"
Invoke-WebRequest "${env:FFMPEG_DOWNLOAD_URL}" -OutFile ffmpeg-release-full-shared.7z
7z x ffmpeg-release-full-shared.7z
mkdir ffmpeg
mv ffmpeg-*/* ffmpeg/
Add-Content $env:GITHUB_ENV "FFMPEG_DIR=${pwd}\ffmpeg`n"
Add-Content $env:GITHUB_PATH "${pwd}\ffmpeg\bin`n"
- name: Install CMake (Windows)
uses: lukka/get-cmake@latest
if: matrix.platform == 'windows-latest'
# Optimisation for windows
- name: Rename existing rust toolchain (Windows)
if: matrix.platform == 'windows-latest'
run: Rename-Item C:\Users\runneradmin\.rustup\toolchains\stable-x86_64-pc-windows-msvc C:\Users\runneradmin\.rustup\toolchains\stable-x86_64-pc-windows-msvc.old
- name: Checkout repository
uses: actions/checkout@v2
- name: Setup Node
uses: actions/setup-node@v1
with:
node-version: 16
node-version: 17
- name: Install pnpm
uses: pnpm/action-setup@v2.2.1
with:
version: 7.x.x
- name: Install dependencies
run: pnpm i --frozen-lockfile
- name: Perform typechecks
run: pnpm -r exec tsc
build-js:
name: Build JS
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Setup Node
uses: actions/setup-node@v1
with:
node-version: 17
- name: Install pnpm
uses: pnpm/action-setup@v2.2.1
with:
version: 7.x.x
- name: Install dependencies
run: pnpm i --frozen-lockfile
- name: Build Desktop
run: pnpm desktop build
- name: Build Web
run: pnpm web build
build-core:
name: Build Core (${{ matrix.platform }})
runs-on: ${{ matrix.platform }}
strategy:
fail-fast: true
matrix:
platform: [ubuntu-latest, macos-latest, windows-latest]
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Install dependencies
uses: ./.github/actions/install-deps
- name: Setup Node
uses: actions/setup-node@v1
with:
node-version: 17
- name: Install Rust stable
uses: actions-rs/toolchain@v1
@@ -56,74 +91,110 @@ jobs:
components: rustfmt, rust-src
- name: Cache Rust Dependencies
uses: Swatinem/rust-cache@cb2cf0cc7c5198d3364b9630e2c3d457f160790c
uses: Swatinem/rust-cache@v1
with:
sharedKey: ${{ env.RUST_CACHE_VERSION }}
sharedKey: core-v1-${{ hashFiles('**/Cargo.lock') }}
- name: Cache pnpm dependencies
uses: actions/cache@v2
- name: Generate Prisma client
working-directory: core
run: cargo run -p prisma-cli --release -- generate
- name: Build Core
run: cargo build -p sdcore --release
package-desktop:
name: Package desktop (${{ matrix.platform }})
runs-on: ${{ matrix.platform }}
needs: [typescript, build-js, build-core]
strategy:
matrix:
platform: [ubuntu-latest, macos-latest, windows-latest]
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Install dependencies
uses: ./.github/actions/install-deps
- name: Setup Node
uses: actions/setup-node@v1
with:
path: ~/.pnpm-store
key: ${{ runner.os }}-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-
node-version: 17
- name: Install Rust stable
uses: actions-rs/toolchain@v1
with:
toolchain: stable
profile: minimal
override: true
components: rustfmt, rust-src
- name: Cache Rust Dependencies
uses: Swatinem/rust-cache@v1
with:
sharedKey: core-v1-${{ hashFiles('**/Cargo.lock') }}
- name: Install pnpm
uses: pnpm/action-setup@v2.2.1
with:
version: 6.32.6
- name: Install dependencies (Ubuntu)
if: matrix.platform == 'ubuntu-latest'
run: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends \
libgtk-3-dev \
webkit2gtk-4.0 \
libappindicator3-dev \
librsvg2-dev \
patchelf \
libssl-dev \
libavcodec-dev \
libavdevice-dev \
libavfilter-dev \
libavformat-dev \
libavresample-dev \
libavutil-dev \
libswscale-dev \
libswresample-dev \
pkg-config \
ffmpeg
- name: Install dependencies (macOS)
if: matrix.platform == 'macos-latest'
run: |
brew install ffmpeg
version: 7.x.x
- name: Install pnpm dependencies
run: pnpm i
- name: Build codegen
run: pnpm prep:ci
run: pnpm i --frozen-lockfile
- name: Build frontend
run: pnpm desktop build:vite
run: pnpm desktop build
- name: Build Tauri app
uses: tauri-apps/tauri-action@v0
- name: Generate Prisma client
working-directory: core
run: cargo run -p prisma-cli --release -- generate
- name: Bundle
run: pnpm desktop tauri build
- name: Publish artifacts
uses: ./.github/actions/publish-desktop-artifacts
build-server:
name: Build server
runs-on: ubuntu-latest
needs: build-core
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Install dependencies
uses: ./.github/actions/install-deps
- name: Install Rust stable
uses: actions-rs/toolchain@v1
with:
projectPath: apps/desktop
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
toolchain: stable
profile: minimal
override: true
components: rustfmt, rust-src
- name: Build and publish server
if: matrix.platform == 'ubuntu-latest'
uses: ./.github/actions/build-and-publish-server
- name: Cache Rust Dependencies
uses: Swatinem/rust-cache@v1
with:
sharedKey: core-v1-${{ hashFiles('**/Cargo.lock') }}
- name: Generate Prisma client
working-directory: core
run: cargo run -p prisma-cli --release -- generate
- name: Build server image
uses: ./.github/actions/build-server-image
with:
gh_token: ${{ secrets.GITHUB_TOKEN }}
deploy-server:
name: Deploy Server
runs-on: ubuntu-latest
needs: build-server
if: github.event_name != 'pull_request'
steps:
- name: Deploy Spacedrive Server to Kubernetes
if: matrix.platform == 'ubuntu-latest'
env:
K8S_KUBECONFIG: ${{ secrets.K8S_KUBECONFIG }}
run: |

View File

@@ -23,6 +23,6 @@ jobs:
source_file: 'README.md'
destination_repo: 'spacedriveapp/.github'
destination_folder: 'profile'
user_email: 'actions@spacedrive.app'
user_email: 'actions@spacedrive.com'
user_name: 'GH Actions'
commit_message: 'Update README'

View File

@@ -2,6 +2,7 @@
"cSpell.words": [
"actix",
"bpfrpt",
"consts",
"creationdate",
"ipfs",
"Keepsafe",
@@ -17,6 +18,9 @@
"tsparticles",
"upsert"
],
"[rust]": {
"editor.defaultFormatter": "matklad.rust-analyzer"
},
"rust-analyzer.procMacro.enable": true,
"rust-analyzer.diagnostics.enableExperimental": false,
"rust-analyzer.inlayHints.parameterHints": false,

133
CODE_OF_CONDUCT.md Normal file
View File

@@ -0,0 +1,133 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
[hello@jamiepine.com](mailto:hello@jamiepine.com).
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
[https://www.contributor-covenant.org/version/2/0/code_of_conduct.html][v2.0].
Community Impact Guidelines were inspired by
[Mozilla's code of conduct enforcement ladder][Mozilla CoC].
For answers to common questions about this code of conduct, see the FAQ at
[https://www.contributor-covenant.org/faq][FAQ]. Translations are available
at [https://www.contributor-covenant.org/translations][translations].
[homepage]: https://www.contributor-covenant.org
[v2.0]: https://www.contributor-covenant.org/version/2/0/code_of_conduct.html
[Mozilla CoC]: https://github.com/mozilla/diversity
[FAQ]: https://www.contributor-covenant.org/faq
[translations]: https://www.contributor-covenant.org/translations

80
CONTRIBUTING.md Normal file
View File

@@ -0,0 +1,80 @@
# Welcome to the Spacedrive contributing guide
Thank you for investing your time in contributing to our project!
Read our [Code of Conduct](./CODE_OF_CONDUCT.md) to keep our community approachable and respectable.
In this guide you will get an overview of the contribution workflow from opening an issue, creating a PR, reviewing, and merging the PR.
## New contributor guide
To get an overview of the project, read the [README](README.md). Here are some resources to help you get started with open source contributions:
- [Finding ways to contribute to open source on GitHub](https://docs.github.com/en/get-started/exploring-projects-on-github/finding-ways-to-contribute-to-open-source-on-github)
- [Set up Git](https://docs.github.com/en/get-started/quickstart/set-up-git)
- [GitHub flow](https://docs.github.com/en/get-started/quickstart/github-flow)
- [Collaborating with pull requests](https://docs.github.com/en/github/collaborating-with-pull-requests)
- [Your First Tauri App](https://tauri.studio/guides/getting-started/beginning-tutorial)
- [pnpm CLI](https://pnpm.io/pnpm-cli)
## Getting started
### Issues
#### Create a new issue
If you find an issue with the repository or have a feature request with Spacedrive, [search if an issue already exists](https://docs.github.com/en/github/searching-for-information-on-github/searching-on-github/searching-issues-and-pull-requests#search-by-the-title-body-or-comments). If a related issue doesn't exist, you can open a new issue using a relevant [issue form](https://github.com/spacedriveapp/spacedrive/issues/new/choose).
#### Solve an issue
Scan through our [existing issues](https://github.com/spacedriveapp/spacedrive/issues) to find one that interests you. You can narrow down the search using `labels` as filters. See [Labels](https://github.com/spacedriveapp/spacedrive/labels) for more information. As a general rule, if you find an issue to work on, you are welcome to open a PR with a fix.
### Make Changes
#### Make changes locally
This project uses [Cargo](https://doc.rust-lang.org/cargo/getting-started/installation.html) and [pnpm](https://pnpm.io/installation). Ensure you have them installed before continuing.
> Note: macOS M1 users should choose the customize option in the rustup init script and enter `x86_64-apple-darwin` as the default host triple instead of the default `aarch64-apple-darwin`
- `$ git clone https://github.com/spacedriveapp/spacedrive`
- For Linux or MacOS users run: `chmod +x ./.github/scripts/setup-system.sh && ./.github/scripts/setup-system.sh`
- This will install FFMPEG and any other required dependencies for Spacedrive to build.
- `$ cd spacedrive`
- `$ pnpm i`
- `$ pnpm prep` - Runs all necessary codegen & builds required dependencies.
To quickly run only the desktop app after `prep` you can use:
- `$ pnpm desktop dev`
To run the landing page
- `$ pnpm web dev` - runs the web app for the embed
- `$ pnpm landing dev`
If you are having issues ensure you are using the following versions of Rust and Node:
- Rust version: **1.60.0**
- Node version: **17**
### Pull Request
When you're finished with the changes, create a pull request, also known as a PR.
- Fill the "Ready for review" template so that we can review your PR. This template helps reviewers understand your changes as well as the purpose of your pull request.
- Don't forget to [link PR to issue](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue) if you are solving one.
- Enable the checkbox to [allow maintainer edits](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/allowing-changes-to-a-pull-request-branch-created-from-a-fork) so the branch can be updated for a merge.
Once you submit your PR, a team member will review your proposal. We may ask questions or request additional information.
- We may ask for changes to be made before a PR can be merged, either using [suggested changes](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/incorporating-feedback-in-your-pull-request) or pull request comments. You can apply suggested changes directly through the UI. You can make any other changes in your fork, then commit them to your branch.
- As you update your PR and apply changes, mark each conversation as [resolved](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/commenting-on-a-pull-request#resolving-conversations).
- If you run into any merge issues, checkout this [git tutorial](https://lab.github.com/githubtraining/managing-merge-conflicts) to help you resolve merge conflicts and other issues.
### Your PR is merged!
Congratulations :tada::tada: The Spacedrive team thanks you :sparkles:.
Once your PR is merged, your contributions will be included in the next release of the application.
### Credits
This CONTRIBUTING.md file was modelled after the [github/docs CONTRIBUTING.md](https://github.com/github/docs/blob/main/CONTRIBUTING.md) file, and we thank the original author.

BIN
Cargo.lock generated
View File

Binary file not shown.

View File

@@ -7,9 +7,9 @@
</p>
<h1 align="center"><b>Spacedrive</b></h1>
<p align="center">
The universal file manager.
A file explorer from the future.
<br />
<a href="https://spacedrive.app"><strong>spacedrive.app »</strong></a>
<a href="https://spacedrive.com"><strong>spacedrive.com »</strong></a>
<br />
<br />
<b>Download for </b>
@@ -29,17 +29,18 @@
</p>
</p>
Spacedrive is an open source cross-platform file manager, powered by a virtual distributed filesystem (<a href="#what-is-a-vdfs">VDFS</a>) written in Rust.
<!-- <a href="https://spacedrive.app"><strong>Learn more »</strong></a> -->
<br/>
<br/>
Organize files across many devices in one place. From cloud services to offline hard drives, Spacedrive combines the storage capacity and processing power of your devices into one personal distributed cloud, that is both secure and intuitive to use.
<br />
<br />
For independent creatives, hoarders and those that want to own their digital footprint. Spacedrive provides a file management experience like no other, and its completely free.
<br />
<br />
> NOTE: Spacedrive is under active development, most of the listed features are still experimental and subject to change.
Organize files across many devices in one place. From cloud services to offline hard drives, Spacedrive combines the storage capacity and processing power of your devices into one personal distributed cloud, that is both secure and intuitive to use.
For independent creatives, hoarders and those that want to own their digital footprint. Spacedrive provides a file management experience like no other, and it's completely free.
<p align="center">
<img src="https://raw.githubusercontent.com/spacedriveapp/.github/main/profile/spacedrive_screenshot.jpg" alt="Logo">
<img src="https://raw.githubusercontent.com/spacedriveapp/.github/main/profile/app.png" alt="Logo">
<br />
<br />
<a href="https://discord.gg/gTaF2Z44f5">
@@ -57,7 +58,7 @@ For independent creatives, hoarders and those that want to own their digital foo
<br />
</p>
> NOTE: Spacedrive is under active development, most of the listed features are still experimental and subject to change.
> NOTE: Spacedrive is under active development, most of the listed features are still experimental and subject to change. Additionally, most of the links on this page are broken but will be working once the repository is made public.
# What is a VDFS?
@@ -70,31 +71,42 @@ The first implementation of a VDFS can be found in this UC Berkeley [paper](http
Many of us have multiple cloud accounts, drives that aren't backed up and data at risk of loss. We depend on cloud services like Google Photos and iCloud, but are locked in with limited capacity and almost zero interoperability between services and operating systems. Photo albums shouldn't be stuck in a device ecosystem, or harvested for advertising data. They should be OS agnostic, permanent and personally owned. Data we create is our legacy, that will long outlive us—open source technology is the only way to ensure we retain absolute control over the data that defines our lives, at unlimited scale.
# Features
Feature list moved to the [roadmap](docs/product/roadmap.md).
# Developer Installation Instructions
_Note: Links are for highlight purposes only until feature specific documentation is complete._
This environment uses [Cargo](https://doc.rust-lang.org/cargo/getting-started/installation.html) and [pnpm](https://pnpm.io/installation). Ensure you have them installed before continuing.
**Complete:** _(in testing)_
- `$ git clone https://github.com/spacedriveapp/spacedrive`
- IMPORTANT: _Install [FFMPEG](https://www.ffmpeg.org/download.html) if you don't have it already_
- `$ cd spacedrive`
- `$ pnpm i`
- `$ pnpm prep` - Runs all necessary codegen & builds required dependencies.
- **[File discovery](#features)** - Scan devices, drives and cloud accounts to build a directory of all files with metadata.
- **[Preview generation](#features)** - Auto generate lower resolution stand-ins for image and video.
- **[Statistics](#features)** - Total capacity, index size, preview media size, free space etc.
To quickly run only the desktop app after `prep` you can use:
**In progress:**
- `$ pnpm desktop dev`
- **[File Explorer](#features)** - Browse online/offline storage locations, view files with metadata, perform basic CRUD.
- **[Realtime synchronization](#features)** - Data index synchronized in realtime between devices, prioritizing peer-to-peer LAN connections (WiFi sync).
To run the landing page
**To be developed (MVP):**
- `$ pnpm web dev` - runs the web app for the embed
- `$ pnpm landing dev`
- **[Photos](#features)** - Photo and video albums similar to Apple/Google photos.
- **[Search](#features)** - Deep search into your filesystem with a keybind, including offline locations.
- **[Tags](#features)** - Define routines on custom tags to automate workflows, easily tag files individually, in bulk and automatically via rules.
- **[Extensions](#features)** - Build tools on top of Spacedrive, extend functionality and integrate third party services. Extension directory on [spacedrive.com/extensions](#features).
If you are having issues ensure you are using the following versions of Rust and Node:
**To be developed (Post-MVP):**
- Rust version: **1.58.1**
- Node version: **17**
- **[Cloud integration](#features)** - Index & backup to Apple Photos, Google Drive, Dropbox, OneDrive & Mega + easy API for the community to add more.
- **[Encrypted vault(s)](#features)** - Effortlessly manage & encrypt sensitive files, built on top of VeraCrypt. Encrypt individual files or create flexible-size vaults.
- **[Key manager](#features)** - View, mount, dismount and hide keys. Mounted keys automatically unlock respective areas of your filesystem.
- **[Redundancy Goal](#features)** - Ensure a specific amount of copies exist for your important data, discover at-risk files and monitor device/drive health.
- **[Timeline](#features)** - View a linear timeline of content, travel to any time and see media represented visually.
- **[Media encoder](#features)** - Encode video and audio into various formats, use Tags to automate. Built with FFMPEG.
- **[Workers](#features)** - Utilize the compute power of your devices in unison to encode and perform tasks at increased speeds.
- **[Spacedrive Cloud](#features)** - We'll host an always-on cloud device for you, with pay-as-you-go plans for storage.
- **[Self hosted](#features)** - Spacedrive can be deployed as a service, behaving as just another device powering your personal cloud.
# Developer Guide
Please refer to the [contributing guide](CONTRIBUTING.md) for how to install Spacedrive from sources.
# Architecture
@@ -103,16 +115,15 @@ This project is using what I'm calling the **"PRRTT"** stack (Prisma, Rust, Reac
- Prisma on the front-end? 🤯 Made possible thanks to [prisma-client-rust](https://github.com/brendonovich/prisma-client-rust), developed by [Brendonovich](https://github.com/brendonovich). Gives us access to the powerful migration CLI in development, along with the Prisma syntax for our schema. The application bundles with the Prisma query engine and codegen for a beautiful Rust API. Our lightweight migration runner is custom built for a desktop app context.
- Tauri allows us to create a pure Rust native OS webview, without the overhead of your average Electron app. This brings the bundle size and average memory usage down dramatically. It also contributes to a more native feel, especially on macOS due to Safari's close integration with the OS.
- The core (`sdcore`) is written in pure Rust.
- Typesafe communication for an RPC-like message passing system between Rust and React Query.
## Monorepo structure:
### Apps:
- `desktop`: A [Tauri](https://tauri.studio) app with embedded `sdcore` Rust binary.
- `mobile`: A [React Native](https://reactnative.dev/) app with embedded `sdcore` Rust binary.
- `web`: A [React](https://reactjs.org) webapp as a light wrapper around the `interface` with a websocket Transport.
- `landing`: A [React](https://reactjs.org) app using Vite pages, Tailwind Typography
- `desktop`: A [Tauri](https://tauri.studio) app.
- `mobile`: A [React Native](https://reactnative.dev/) app.
- `web`: A [React](https://reactjs.org) webapp.
- `landing`: A [React](https://reactjs.org) app using Vite SSR & Vite pages.
### Core:

0
apps/desktop/dist/.placeholder vendored Normal file
View File

View File

@@ -8,11 +8,7 @@
"vite": "vite",
"dev": "concurrently \"pnpm tauri dev\" \"vite\"",
"tauri": "tauri",
"build:vite": "vite build",
"build": "vite build && tauri build"
},
"resolutions": {
"react-virtualized": "patch:react-virtualized@9.22.3#./path/to/react-virtualized-9.22.3.patch"
"build": "vite build"
},
"dependencies": {
"@sd/client": "workspace:*",
@@ -25,12 +21,12 @@
},
"devDependencies": {
"@tauri-apps/cli": "^1.0.0-rc.8",
"@tauri-apps/tauricon": "github:tauri-apps/tauricon",
"@types/babel-core": "^6.25.7",
"@types/byte-size": "^8.1.0",
"@types/react": "^18.0.0",
"@types/react": "^18.0.8",
"@types/react-dom": "^18.0.0",
"@types/react-router-dom": "^5.3.3",
"@types/react-virtualized-auto-sizer": "^1.0.1",
"@types/react-window": "^1.8.5",
"@types/tailwindcss": "^3.0.10",
"@vitejs/plugin-react": "^1.3.1",
@@ -40,8 +36,6 @@
"typescript": "^4.6.3",
"vite": "^2.9.5",
"vite-plugin-filter-replace": "^0.1.9",
"vite-plugin-react-svg": "^0.2.0",
"vite-plugin-svgr": "^1.1.0",
"vite-tsconfig-paths": "^3.4.1"
"vite-plugin-svgr": "^1.1.0"
}
}

View File

@@ -4,7 +4,7 @@ version = "0.1.0"
description = "The next gen private virtual filesystem."
authors = ["Jamie Pine"]
license = ""
repository = "https://github.com/jamiepine/spacedrive"
repository = "https://github.com/spacedriveapp/spacedrive"
default-run = "spacedrive"
edition = "2021"
build = "src/build.rs"
@@ -23,6 +23,10 @@ sdcore = { path = "../../../core" }
tokio = { version = "1.17.0", features = ["sync"] }
window-shadows = "0.1.2"
# macOS system libs
[target.'cfg(target_os = "macos")'.dependencies]
cocoa = "0.24.0"
[features]
default = [ "custom-protocol" ]
custom-protocol = [ "tauri/custom-protocol" ]

View File

@@ -1,9 +1,13 @@
use std::time::{Duration, Instant};
use sdcore::{ClientCommand, ClientQuery, Core, CoreController, CoreEvent, CoreResponse};
use sdcore::{ClientCommand, ClientQuery, CoreController, CoreEvent, CoreResponse, Node};
use tauri::api::path;
use tauri::Manager;
mod menu;
mod window;
use window::WindowExt;
#[tauri::command(async)]
async fn client_query_transport(
@@ -33,18 +37,32 @@ async fn client_command_transport(
}
}
/// Tauri command invoked by the frontend once the UI has mounted:
/// reveals the (initially hidden) main window.
#[tauri::command(async)]
async fn app_ready(app_handle: tauri::AppHandle) {
let window = app_handle.get_window("main").unwrap();
window.show().unwrap();
#[cfg(target_os = "macos")]
{
// Fixed 1s delay before recomputing the shadow — presumably gives AppKit
// time to finish laying out the window. NOTE(review): confirm the delay
// is actually required; it blocks this command's thread.
std::thread::sleep(std::time::Duration::from_millis(1000));
println!("fixing shadow for, {:?}", window.ns_window().unwrap());
window.fix_shadow();
}
}
#[tokio::main]
async fn main() {
let data_dir = path::data_dir().unwrap_or(std::path::PathBuf::from("./"));
// create an instance of the core
let (mut core, mut event_receiver) = Core::new(data_dir).await;
let (mut node, mut event_receiver) = Node::new(data_dir).await;
// run startup tasks
core.initializer().await;
// extract the core controller
let controller = core.get_controller();
// throw the core into a dedicated thread
node.initializer().await;
// extract the node controller
let controller = node.get_controller();
// throw the node into a dedicated thread
tokio::spawn(async move {
core.start().await;
node.start().await;
});
// create tauri app
tauri::Builder::default()
@@ -53,14 +71,15 @@ async fn main() {
.setup(|app| {
let app = app.handle();
#[cfg(not(target_os = "linux"))]
{
app.windows().iter().for_each(|(_, window)| {
window_shadows::set_shadow(&window, true).unwrap_or(());
app.windows().iter().for_each(|(_, window)| {
window.hide().unwrap();
window.start_dragging().unwrap_or(());
});
}
#[cfg(target_os = "windows")]
window.set_decorations(true).unwrap();
#[cfg(target_os = "macos")]
window.set_transparent_titlebar(true, true);
});
// core event transport
tokio::spawn(async move {
@@ -85,9 +104,11 @@ async fn main() {
Ok(())
})
.on_menu_event(|event| menu::handle_menu_event(event))
.on_window_event(|event| window::handle_window_event(event))
.invoke_handler(tauri::generate_handler![
client_query_transport,
client_command_transport,
app_ready,
])
.menu(menu::get_menu())
.run(tauri::generate_context!())

View File

@@ -56,12 +56,17 @@ fn custom_menu_bar() -> Menu {
))
.add_item(CustomMenuItem::new("jeffd".to_string(), "Layout")),
);
let window = Submenu::new(
"Window",
Menu::new().add_native_item(MenuItem::EnterFullScreen),
);
let menu = Menu::new()
.add_submenu(spacedrive)
.add_submenu(file)
.add_submenu(edit)
.add_submenu(view);
.add_submenu(view)
.add_submenu(window);
menu
}

View File

@@ -0,0 +1,94 @@
// Window customization helpers for the Tauri desktop shell. The macOS-only
// methods reach into Cocoa through the raw NSWindow handle.
use tauri::{GlobalWindowEvent, Runtime, Window, Wry};
/// Global window-event hook registered with the Tauri builder.
/// Currently a no-op; kept as an extension point for future event handling.
pub(crate) fn handle_window_event(event: GlobalWindowEvent<Wry>) {
match event.event() {
// No window events are handled yet.
_ => {}
}
}
/// macOS-specific window tweaks layered on top of `tauri::Window`.
pub trait WindowExt {
/// Attach (`shown == true`) or remove (`shown == false`) an NSToolbar.
/// Used by `set_transparent_titlebar` when a larger titlebar is requested.
#[cfg(target_os = "macos")]
fn set_toolbar(&self, shown: bool);
/// Toggle a transparent, full-size-content titlebar; `large` additionally
/// installs a toolbar to increase the titlebar height.
#[cfg(target_os = "macos")]
fn set_transparent_titlebar(&self, transparent: bool, large: bool);
/// Ask AppKit to recompute the window shadow (`invalidateShadow`);
/// works around a missing/stale shadow after the window is first shown.
#[cfg(target_os = "macos")]
fn fix_shadow(&self);
}
impl<R: Runtime> WindowExt for Window<R> {
#[cfg(target_os = "macos")]
fn set_toolbar(&self, shown: bool) {
use cocoa::{
appkit::{NSToolbar, NSWindow},
base::nil,
foundation::NSString,
};
// SAFETY: `ns_window()` returns the live NSWindow pointer for this window;
// all Cocoa calls below operate on that object on a best-effort basis.
unsafe {
let id = self.ns_window().unwrap() as cocoa::base::id;
if shown {
// A placeholder toolbar (identifier "wat") whose only purpose is
// to enlarge the titlebar; the baseline separator is hidden.
let toolbar =
NSToolbar::alloc(nil).initWithIdentifier_(NSString::alloc(nil).init_str("wat"));
toolbar.setShowsBaselineSeparator_(false);
id.setToolbar_(toolbar);
} else {
id.setToolbar_(nil);
}
}
}
#[cfg(target_os = "macos")]
fn set_transparent_titlebar(&self, transparent: bool, large: bool) {
use cocoa::appkit::{NSWindow, NSWindowStyleMask, NSWindowTitleVisibility};
// SAFETY: see `set_toolbar` — operates on the raw NSWindow handle.
unsafe {
let id = self.ns_window().unwrap() as cocoa::base::id;
let mut style_mask = id.styleMask();
// println!("existing style mask, {:#?}", style_mask);
// Let content extend under the titlebar when transparent.
style_mask.set(
NSWindowStyleMask::NSFullSizeContentViewWindowMask,
transparent,
);
style_mask.set(
NSWindowStyleMask::NSTexturedBackgroundWindowMask,
transparent,
);
// Only unify titlebar + toolbar when the large variant is requested.
style_mask.set(
NSWindowStyleMask::NSUnifiedTitleAndToolbarWindowMask,
transparent && large,
);
id.setStyleMask_(style_mask);
if large {
self.set_toolbar(true);
}
// Hide the title text and make the titlebar background transparent
// when transparency is on; restore both otherwise.
id.setTitleVisibility_(if transparent {
NSWindowTitleVisibility::NSWindowTitleHidden
} else {
NSWindowTitleVisibility::NSWindowTitleVisible
});
id.setTitlebarAppearsTransparent_(if transparent {
cocoa::base::YES
} else {
cocoa::base::NO
});
}
}
#[cfg(target_os = "macos")]
fn fix_shadow(&self) {
use cocoa::appkit::NSWindow;
// SAFETY: see `set_toolbar` — operates on the raw NSWindow handle.
unsafe {
let id = self.ns_window().unwrap() as cocoa::base::id;
println!("recomputing shadow for window {:?}", id.title());
id.invalidateShadow();
}
}
}

View File

@@ -64,12 +64,14 @@
"title": "Spacedrive",
"width": 1200,
"height": 725,
"minWidth": 700,
"minHeight": 500,
"resizable": true,
"fullscreen": false,
"alwaysOnTop": false,
"focus": true,
"focus": false,
"fileDropEnabled": false,
"decorations": false,
"decorations": true,
"transparent": true,
"center": true
}

View File

@@ -1,17 +1,15 @@
import React, { useEffect, useState } from 'react';
import { createRoot } from 'react-dom/client';
// import Spacedrive interface
import SpacedriveInterface, { Platform } from '@sd/interface';
import { emit, listen, Event } from '@tauri-apps/api/event';
import { listen, Event } from '@tauri-apps/api/event';
// import types from Spacedrive core (TODO: re-export from client would be cleaner)
import { ClientCommand, ClientQuery, CoreEvent } from '@sd/core';
// import Spacedrive JS client
import { BaseTransport } from '@sd/client';
// import tauri apis
import { dialog, invoke, os } from '@tauri-apps/api';
import { dialog, invoke, os, shell } from '@tauri-apps/api';
import { convertFileSrc } from '@tauri-apps/api/tauri';
import '@sd/ui/style';
import { appWindow } from '@tauri-apps/api/window';
@@ -47,9 +45,21 @@ function App() {
}
const [platform, setPlatform] = useState<Platform>('macOS');
const [focused, setFocused] = useState(true);
useEffect(() => {
os.platform().then((platform) => setPlatform(getPlatform(platform)));
invoke('app_ready');
}, []);
useEffect(() => {
const unlistenFocus = listen('tauri://focus', () => setFocused(true));
const unlistenBlur = listen('tauri://blur', () => setFocused(false));
return () => {
unlistenFocus.then((unlisten) => unlisten());
unlistenBlur.then((unlisten) => unlisten());
};
}, []);
return (
@@ -65,9 +75,11 @@ function App() {
}): Promise<string | string[]> {
return dialog.open(options);
}}
isFocused={focused}
onClose={() => appWindow.close()}
onFullscreen={() => appWindow.setFullscreen(true)}
onMinimize={() => appWindow.minimize()}
onOpen={(path: string) => shell.open(path)}
/>
);
}

View File

@@ -1,26 +1,5 @@
{
"compilerOptions": {
"target": "ESNext",
"lib": ["DOM", "DOM.Iterable", "ESNext"],
"allowJs": false,
"skipLibCheck": false,
"esModuleInterop": false,
"allowSyntheticDefaultImports": true,
"strict": true,
"forceConsistentCasingInFileNames": true,
"module": "ESNext",
"moduleResolution": "Node",
"resolveJsonModule": true,
"isolatedModules": true,
"noEmit": true,
"jsx": "react",
"outDir": "dist",
"baseUrl": "./",
"paths": {
"@sd/interface": ["../../packages/interface/src/index.ts"],
"@sd/ui": ["../../packages/ui/src/index.ts"],
"@sd/client": ["../../packages/client/src/index.ts"]
}
},
"extends": "../../packages/config/interface.tsconfig.json",
"compilerOptions": {},
"include": ["src"]
}

View File

@@ -1,8 +1,7 @@
import { defineConfig } from 'vite';
import react from '@vitejs/plugin-react';
import { name, version } from './package.json';
import * as path from 'path';
import svg from 'vite-plugin-svgr';
import svg from "vite-plugin-svgr"
// https://vitejs.dev/config/
export default defineConfig({

24
apps/landing/env.d.ts vendored
View File

@@ -1,24 +0,0 @@
/// <reference types="vite/client" />
interface ImportMetaEnv {
readonly VITE_SDWEB_BASE_URL: string;
}
interface ImportMeta {
readonly env: ImportMetaEnv;
}
declare module '*.md' {
// "unknown" would be more detailed depends on how you structure frontmatter
const attributes: Record<string, unknown>;
// When "Mode.TOC" is requested
const toc: { level: string; content: string }[];
// When "Mode.HTML" is requested
const html: string;
// When "Mode.React" is requested. VFC could take a generic like React.VFC<{ MyComponent: TypeOfMyComponent }>
import React from 'react';
const ReactComponent: React.VFC;
}

View File

@@ -31,14 +31,13 @@
"react-router-dom": "6.3.0",
"react-tsparticles": "^2.0.6",
"simple-icons": "^6.19.0",
"tsparticles": "^2.0.6",
"vite-plugin-markdown": "^2.0.2"
"tsparticles": "^2.0.6"
},
"devDependencies": {
"@babel/preset-react": "^7.16.7",
"@types/lodash": "^4.14.182",
"@types/prismjs": "^1.26.0",
"@types/react": "^18.0.0",
"@types/react": "^18.0.8",
"@types/react-dom": "^18.0.0",
"@types/react-helmet": "^6.1.5",
"@vitejs/plugin-react": "^1.3.1",
@@ -50,6 +49,7 @@
"ts-node": "^10.7.0",
"typescript": "^4.6.3",
"vite": "^2.9.5",
"vite-plugin-markdown": "^2.0.2",
"vite-plugin-md": "^0.13.0",
"vite-plugin-pages": "^0.23.0",
"vite-plugin-pages-sitemap": "^1.2.2",

View File

@@ -1,16 +1,32 @@
import clsx from 'clsx';
import React, { useState } from 'react';
import React, { useRef, useState } from 'react';
import { useEffect } from 'react';
import { isMobile } from 'react-device-detect';
export default function AppEmbed() {
const [showApp, setShowApp] = useState(false);
const [iFrameAppReady, setIframeAppReady] = useState(false);
const [forceImg, setForceImg] = useState(false);
const [imgFallback, setImageFallback] = useState(false);
const iFrame = useRef<HTMLIFrameElement>(null);
function handleResize() {
if (window.innerWidth < 1000) {
setForceImg(true);
} else if (forceImg) {
setForceImg(false);
}
}
useEffect(() => {
window.addEventListener('resize', handleResize);
handleResize();
return () => window.removeEventListener('resize', handleResize);
}, []);
function handleEvent(e: any) {
if (e.data === 'spacedrive-hello') {
if (!iFrameAppReady && !isMobile) setIframeAppReady(true);
if (!iFrameAppReady) setIframeAppReady(true);
}
}
@@ -35,24 +51,33 @@ export default function AppEmbed() {
}, 1000);
}, []);
const renderImage = (imgFallback && !iFrameAppReady) || forceImg;
return (
<div className="w-screen">
<div className="relative z-30 h-[200px] p-2 sm:p-0 sm:h-[328px] lg:h-[628px] mt-8 sm:mt-16 overflow-hidden ">
{showApp && (
<iframe
referrerPolicy="origin-when-cross-origin"
className={clsx(
'absolute w-[1200px] h-[300px] lg:h-[628px] z-30 border rounded-lg shadow-2xl inset-center bg-gray-850 border-gray-550',
iFrameAppReady ? 'fade-in-image opacity-100' : 'opacity-0 -ml-[10000px]'
)}
src={`${
import.meta.env.VITE_SDWEB_BASE_URL || 'http://localhost:8002'
}?library_id=9068c6ec-cf90-451b-bb30-4174781e7bc6`}
/>
)}
{imgFallback && !iFrameAppReady && (
<div className="z-40 h-full fade-in-image landing-img" />
)}
<div className="relative z-30 h-[228px] px-5 sm:h-[428px] md:h-[428px] lg:h-[628px] mt-8 sm:mt-16">
<div
className={clsx(
'relative h-full m-auto border rounded-lg max-w-7xl bg-gray-850 border-gray-550',
renderImage && 'bg-transparent border-none'
)}
>
{showApp && !forceImg && (
<iframe
ref={iFrame}
referrerPolicy="origin-when-cross-origin"
className={clsx(
'w-full h-full z-30 rounded-lg shadow-iframe inset-center bg-gray-850',
iFrameAppReady ? 'fade-in-app-embed opacity-100' : 'opacity-0 -ml-[10000px]'
)}
src={`${
import.meta.env.VITE_SDWEB_BASE_URL || 'http://localhost:8002'
}?library_id=9068c6ec-cf90-451b-bb30-4174781e7bc6`}
/>
)}
{renderImage && <div className="z-40 h-full fade-in-app-embed landing-img " />}
</div>
</div>
</div>
);

View File

@@ -39,24 +39,24 @@ export const Bubbles = () => {
enable: true
},
move: {
direction: 'none',
direction: 'top',
enable: true,
outModes: {
default: 'bounce'
default: 'destroy'
},
random: false,
speed: 0.2,
straight: false
straight: true
},
number: {
density: {
enable: true,
area: 800
area: 900
},
value: 80
value: 100
},
opacity: {
value: 0.2
value: 0.1
},
shape: {
type: 'circle'

View File

@@ -9,7 +9,7 @@ import {
Twitch
} from '@icons-pack/react-simple-icons';
function FooterLink(props: { children: string; link: string }) {
function FooterLink(props: { children: string | JSX.Element; link: string }) {
return (
<a href={props.link} target="_blank" className="text-gray-300 hover:text-white">
{props.children}
@@ -27,24 +27,24 @@ export function Footer() {
<h3 className="mb-1 text-xl font-bold">Spacedrive</h3>
<p className="text-sm text-gray-350">&copy; Copyright 2022 Jamie Pine</p>
<div className="flex flex-row mt-6 mb-10 space-x-3">
<a href="https://twitter.com/spacedriveapp" target="_blank">
<FooterLink link="https://twitter.com/spacedriveapp">
<Twitter />
</a>
<a href="https://discord.gg/gTaF2Z44f5" target="_blank">
</FooterLink>
<FooterLink link="https://discord.gg/gTaF2Z44f5">
<Discord />
</a>
<a href="https://instagram.com/spacedriveapp" target="_blank">
</FooterLink>
<FooterLink link="https://instagram.com/spacedriveapp">
<Instagram />
</a>
<a href="https://github.com/spacedriveapp" target="_blank">
</FooterLink>
<FooterLink link="https://github.com/spacedriveapp">
<Github />
</a>
<a href="https://opencollective.com/spacedrive" target="_blank">
</FooterLink>
<FooterLink link="https://opencollective.com/spacedrive">
<Opencollective />
</a>
<a href="https://twitch.tv/jamiepinelive" target="_blank">
</FooterLink>
<FooterLink link="https://twitch.tv/jamiepinelive">
<Twitch />
</a>
</FooterLink>
</div>
</div>
@@ -71,10 +71,10 @@ export function Footer() {
</div>
<div className="flex flex-col col-span-1 space-y-2">
<h3 className="mb-1 text-xs font-bold uppercase ">Developers</h3>
<FooterLink link="https://github.com/jamiepine/spacedrive/tree/main/docs">
<FooterLink link="https://github.com/spacedriveapp/spacedrive/tree/main/docs">
Documentation
</FooterLink>
<FooterLink link="https://github.com/jamiepine/spacedrive/tree/main/docs/developer/contributing.md">
<FooterLink link="https://github.com/spacedriveapp/spacedrive/tree/main/docs/developer/contributing.md">
Contribute
</FooterLink>
<div className="opacity-50 pointer-events-none">
@@ -87,7 +87,7 @@ export function Footer() {
<div className="flex flex-col col-span-1 space-y-2">
<h3 className="mb-1 text-xs font-bold uppercase ">Org</h3>
<FooterLink link="https://opencollective.com/spacedrive">Open Collective</FooterLink>
<FooterLink link="https://github.com/jamiepine/spacedrive/blob/main/LICENSE">
<FooterLink link="https://github.com/spacedriveapp/spacedrive/blob/main/LICENSE">
License
</FooterLink>
<div className="opacity-50 pointer-events-none">

View File

@@ -14,7 +14,9 @@ function MarkdownPage(props: MarkdownPageProps) {
}, []);
return (
<div className="container max-w-4xl p-4 mt-32 mb-20">
<article className="m-auto prose lg:prose-xs dark:prose-invert">{props.children}</article>
<article id="content" className="m-auto prose lg:prose-xs dark:prose-invert">
{props.children}
</article>
</div>
);
}

View File

@@ -41,7 +41,7 @@ export default function NavBar() {
return (
<div
className={clsx(
'fixed transition z-50 w-full h-16 border-b ',
'fixed transition z-40 w-full h-16 border-b ',
isAtTop
? 'bg-transparent border-transparent'
: 'border-gray-550 bg-gray-750 bg-opacity-80 backdrop-blur'

View File

@@ -1,5 +1,5 @@
import React, { Suspense } from 'react';
import ReactDOM from 'react-dom';
import { createRoot } from 'react-dom/client';
import { BrowserRouter as Router, useRoutes } from 'react-router-dom';
import routes from '~react-pages';
@@ -8,11 +8,20 @@ import { Footer } from './components/Footer';
import '@sd/ui/style';
import './style.scss';
import { Button } from '@sd/ui';
function App() {
return (
<Suspense fallback={<p>Loading...</p>}>
<div className="dark:bg-black dark:text-white ">
<Button
href="#content"
className="fixed left-0 z-50 mt-3 ml-8 duration-200 -translate-y-16 cursor-pointer focus:translate-y-0"
variant="gray"
>
Skip to content
</Button>
<NavBar />
<div className="container z-10 flex flex-col items-center px-4 mx-auto overflow-x-hidden sm:overflow-x-visible ">
{useRoutes(routes)}
@@ -23,11 +32,12 @@ function App() {
);
}
ReactDOM.render(
const root = createRoot(document.getElementById('root')!);
root.render(
<React.StrictMode>
<Router>
<App />
</Router>
</React.StrictMode>,
document.getElementById('root')
</React.StrictMode>
);

View File

@@ -15,16 +15,16 @@ function Page() {
<h1 className="mb-2 text-center">In the quantum realm this page potentially exists.</h1>
<p>In other words, thats a 404.</p>
<div className="flex flex-wrap justify-center">
<a onClick={() => window.history.back()}>
<Button className="mt-2 mr-3 cursor-pointer " variant="gray">
Back
</Button>
</a>
<a href="/">
<Button className="mt-2 cursor-pointer" variant="primary">
Discover Spacedrive
</Button>
</a>
<Button
href={document.referrer || 'javascript:history.back()'}
className="mt-2 mr-3 cursor-pointer "
variant="gray"
>
Back
</Button>
<Button href="/" className="mt-2 cursor-pointer" variant="primary">
Discover Spacedrive
</Button>
</div>
</div>
<div className="h-96" />

View File

@@ -1,6 +1,6 @@
import Markdown from '../components/Markdown';
import React from 'react';
import { ReactComponent as Content } from '../../../../docs/changelog/index.md';
import { ReactComponent as Content } from '~/docs/changelog/index.md';
import { Helmet } from 'react-helmet';
function Page() {

View File

@@ -1,6 +1,6 @@
import Markdown from '../../../components/Markdown';
import React from 'react';
import { ReactComponent as Content } from '../../../../../../docs/architecture/distributed-data-sync.md';
import { ReactComponent as Content } from '~/docs/architecture/distributed-data-sync.md';
import { Helmet } from 'react-helmet';
function Page() {

View File

@@ -1,6 +1,6 @@
import Markdown from '../components/Markdown';
import React from 'react';
import { ReactComponent as Content } from '../../../../docs/product/faq.md';
import { ReactComponent as Content } from '~/docs/product/faq.md';
import { Helmet } from 'react-helmet';
function Page() {

View File

@@ -14,6 +14,7 @@ interface SectionProps {
heading?: string;
description?: string | React.ReactNode;
children?: React.ReactNode;
className?: string;
}
function Section(props: SectionProps = { orientation: 'left' }) {
@@ -24,7 +25,7 @@ function Section(props: SectionProps = { orientation: 'left' }) {
</div>
);
return (
<div className="grid grid-cols-1 my-10 lg:grid-cols-2 lg:my-44">
<div className={clsx('grid grid-cols-1 my-10 lg:grid-cols-2 lg:my-44', props.className)}>
{props.orientation === 'right' ? (
<>
{info}
@@ -41,25 +42,21 @@ function Section(props: SectionProps = { orientation: 'left' }) {
}
function Page() {
// const [appLoaded, setAppLoaded] = useState(false);
// function handleResize(event: Event) {
// if (window.innerWidth > 1000) setShowApp(true);
// else if (showApp) setShowApp(false);
// }
// useEffect(() => {
// window.addEventListener('resize', handleResize);
// return () => window.removeEventListener('resize', handleResize);
// }, []);
return (
<>
<div className="mt-28 lg:mt-36" />
<h1 className="px-2 mb-3 text-4xl font-black leading-tight text-center md:text-6xl ">
<div className="absolute w-full max-w-[1200px] overflow-visible top-[500px] h-32">
<div className="left-0 mt-22 bloom bloom-one " />
<div className="left-[34%] -mt-32 bloom bloom-three " />
<div className="right-0 invisible sm:visible bloom bloom-two" />
</div>
<h1
id="content"
className="z-30 px-2 mb-3 text-4xl font-black leading-tight text-center md:text-6xl"
>
A file explorer from the future.
</h1>
<p className="max-w-4xl mt-1 mb-8 text-center text-md lg:text-lg leading-2 lg:leading-8 text-gray-450">
<p className="z-30 max-w-4xl mt-1 mb-8 text-center text-md lg:text-lg leading-2 lg:leading-8 text-gray-450">
Combine your drives and clouds into one database that you can organize and explore from any
device.
<br />
@@ -67,31 +64,18 @@ function Page() {
Designed for creators, hoarders and the painfully disorganized.
</span>
</p>
<div className="flex flex-row space-x-4">
{/* <Button className="px-2">
<WindowsLogo className="" fill="white" />
</Button> */}
{/* <Button
onClick={() =>
alert(
"You're here early! This is the only button on this page that does not work, I promise. Release build coming very soon—follow @spacedriveapp for updates."
)
}
className="opacity-50 cursor-not-allowed select-none"
variant="primary"
<div className="flex flex-row space-x-4 delay-3 ">
<Button
href="https://github.com/spacedriveapp/spacedrive"
target="_blank"
className="z-30 cursor-pointer"
variant="gray"
>
Download
</Button> */}
<a href="https://github.com/spacedriveapp/spacedrive" target="_blank">
<Button className="cursor-pointer" variant="gray">
<Github className="inline w-5 h-5 -mt-[4px] -ml-1 mr-2" fill="white" />
Star on GitHub
</Button>
</a>
<Github className="inline w-5 h-5 -mt-[4px] -ml-1 mr-2" fill="white" />
Star on GitHub
</Button>
</div>
<p className="px-6 mt-3 text-sm text-center opacity-75 text-gray-450">
<p className="z-30 px-6 mt-3 text-sm text-center text-gray-450 ">
Coming soon on macOS, Windows and Linux.
<br />
Shortly after to iOS & Android.
@@ -101,6 +85,7 @@ function Page() {
<Section
orientation="right"
heading="Never leave a file behind."
className="z-30"
description={
<>
Spacedrive accounts for every file you own, uniquely fingerprinting and extracting

View File

@@ -1,6 +1,6 @@
import Markdown from '../components/Markdown';
import React from 'react';
import { ReactComponent as Content } from '../../../../docs/product/roadmap.md';
import { ReactComponent as Content } from '~/docs/product/roadmap.md';
import { Helmet } from 'react-helmet';
import { ReactComponent as Folder } from '../../../../packages/interface/src/assets/svg/folder.svg';

View File

@@ -1,6 +1,6 @@
import Markdown from '../components/Markdown';
import React from 'react';
import { ReactComponent as Content } from '../../../../docs/product/credits.md';
import { ReactComponent as Content } from '~/docs/product/credits.md';
import { Helmet } from 'react-helmet';
function Page() {

View File

@@ -6,3 +6,88 @@ html {
display: none;
}
}
.landing-img {
background-image: url('/app.png');
background-size: contain;
background-repeat: no-repeat;
background-position: center top;
}
.fade-in-app-embed {
animation: fadeInUp 3s;
-webkit-animation: fadeInUp 3s;
-moz-animation: fadeInUp 3s;
-o-animation: fadeInUp 3s;
-ms-animation: fadeInUp 3s;
}
.fade-in-heading {
animation: fadeInUp 1s;
}
@keyframes fadeInUp {
0% {
opacity:0;
// transform: translateY(10px);
}
100% {
opacity:1;
// transform: translateY(0px);
}
}
.bloom {
@apply absolute w-96 h-96;
will-change: opacity;
opacity: 0;
filter: blur(160px);
border-radius: 50%;
transform: scale(1.5);
animation-name: bloomBurst;
animation-duration: 1s;
animation-timing-function: ease-in-out;
animation-fill-mode: forwards;
animation-iteration-count: 1;
animation-direction: forwards;
&.bloom-one {
background: conic-gradient(from 90deg at 50% 50%, #255bef, #aa1cca);
animation-delay: 500ms;
}
&.bloom-two {
background: conic-gradient(from 90deg at 50% 50%, #c62dbb, #1D054B);
animation-delay: 300ms;
}
&.bloom-three {
background: conic-gradient(from 90deg at 50% 50%, #2d53c6, #1D054B);
animation-delay: 1100ms;
}
}
@keyframes bloomBurst {
from {
opacity: 0;
}
40% {
opacity: 1;
}
to {
opacity: 0.6;
}
}
.shadow-iframe {
box-shadow: 0px 0px 100px 0px rgba(0,0,0,0.5);
}
// Gradient colors
// #3916BA
// #7A1D77
// #8E4CAB
// #1D054B
// #9A3F8C

View File

@@ -1,2 +1,25 @@
/// <reference types="vite/client" />
/// <reference types="vite-plugin-pages/client-react" />
// Ambient typings for the landing app's Vite environment variables.
interface ImportMetaEnv {
// Base URL of the embedded web app iframe (falls back to localhost in code).
readonly VITE_SDWEB_BASE_URL: string;
}
interface ImportMeta {
readonly env: ImportMetaEnv;
}
// Shape of `*.md` imports as produced by vite-plugin-markdown, one export
// per requested Mode.
declare module '*.md' {
// "unknown" could be narrowed further, depending on how frontmatter is structured.
const attributes: Record<string, unknown>;
// Present when "Mode.TOC" is requested.
const toc: { level: string; content: string }[];
// Present when "Mode.HTML" is requested.
const html: string;
// Present when "Mode.React" is requested. VFC could take a generic like React.VFC<{ MyComponent: TypeOfMyComponent }>
import React from 'react';
const ReactComponent: React.VFC;
}

View File

@@ -1,40 +1,5 @@
{
"compilerOptions": {
"target": "ESNext",
"lib": ["DOM", "DOM.Iterable", "ESNext"],
"allowJs": false,
"skipLibCheck": false,
"esModuleInterop": false,
"allowSyntheticDefaultImports": true,
"strict": true,
"forceConsistentCasingInFileNames": true,
"module": "ESNext",
"moduleResolution": "Node",
"resolveJsonModule": true,
"isolatedModules": true,
"noEmit": true,
"jsx": "react",
"types": ["vite-plugin-svgr/client", "vite/client"],
"paths": {
"@sd/interface": ["../../packages/interface/src/index.ts"],
"@sd/ui": ["../../packages/ui/src/index.ts"],
"@sd/client": ["../../packages/client/src/index.ts"]
}
},
"ts-node": {
"transpileOnly": true,
"compilerOptions": {
"module": "CommonJS"
}
},
"include": [
"src",
"env.d.ts",
"src/vite-env.d.ts",
"src/components",
"src/pages",
"renderer",
"server"
],
"references": [{ "path": "./tsconfig.node.json" }]
"extends": "../../packages/config/interface.tsconfig.json",
"compilerOptions": {},
"include": ["src"]
}

View File

@@ -1,8 +0,0 @@
{
"compilerOptions": {
"composite": true,
"module": "esnext",
"moduleResolution": "node"
},
"include": ["vite.config.ts"]
}

View File

@@ -1,23 +1,26 @@
import { defineConfig } from 'vite';
import react from '@vitejs/plugin-react';
//@ts-expect-error
import svg from 'vite-plugin-svgr';
import pages from 'vite-plugin-pages';
import md, { Mode } from 'vite-plugin-markdown';
import generateSitemap from 'vite-plugin-pages-sitemap';
import svg from 'vite-plugin-svgr';
// https://vitejs.dev/config/
export default defineConfig({
// @ts-ignore
plugins: [
react(),
svg({ svgrOptions: { icon: true } }),
pages({
dirs: 'src/pages'
// onRoutesGenerated: (routes) => generateSitemap({ routes })
}),
svg(),
md({ mode: [Mode.REACT] })
],
resolve: {
alias: {
'~/docs': __dirname + '../../../docs'
}
},
server: {
port: 8003
},

View File

@@ -1,4 +1,4 @@
use sdcore::{ClientCommand, ClientQuery, Core, CoreController, CoreEvent, CoreResponse};
use sdcore::{ClientCommand, ClientQuery, CoreController, CoreEvent, CoreResponse, Node};
use std::{env, path::Path};
use actix::{
@@ -196,14 +196,14 @@ async fn setup() -> (
},
};
let (mut core, event_receiver) = Core::new(data_dir_path).await;
let (mut node, event_receiver) = Node::new(data_dir_path).await;
core.initializer().await;
node.initializer().await;
let controller = core.get_controller();
let controller = node.get_controller();
tokio::spawn(async move {
core.start().await;
node.start().await;
});
(web::Data::new(event_receiver), web::Data::new(controller))

View File

@@ -4,7 +4,7 @@
"version": "0.0.0",
"scripts": {
"dev": "vite",
"build": "tsc && vite build",
"build": "vite build",
"preview": "vite preview"
},
"dependencies": {
@@ -14,12 +14,10 @@
"@sd/interface": "*",
"@sd/ui": "*",
"react": "^18.0.0",
"react-dom": "^18.0.0",
"react-tsparticles": "^2.0.6",
"tsparticles": "^2.0.6"
"react-dom": "^18.0.0"
},
"devDependencies": {
"@types/react": "^18.0.0",
"@types/react": "^18.0.8",
"@types/react-dom": "^18.0.0",
"@vitejs/plugin-react": "^1.3.1",
"autoprefixer": "^10.4.4",
@@ -27,6 +25,7 @@
"tailwind": "^4.0.0",
"typescript": "^4.6.3",
"vite": "^2.9.5",
"vite-plugin-svgr": "^1.1.0"
"vite-plugin-svgr": "^1.1.0",
"vite-plugin-tsconfig-paths": "^1.0.5"
}
}

View File

@@ -72,6 +72,7 @@ function App() {
<div className="App">
{/* <header className="App-header"></header> */}
<SpacedriveInterface
demoMode
useMemoryRouter={true}
transport={new Transport()}
platform={'browser'}

View File

@@ -1,33 +1,5 @@
{
"compilerOptions": {
"target": "ESNext",
"lib": ["DOM", "DOM.Iterable", "ESNext"],
"allowJs": false,
"skipLibCheck": true,
"esModuleInterop": false,
"allowSyntheticDefaultImports": true,
"strict": true,
"forceConsistentCasingInFileNames": true,
"module": "ESNext",
"moduleResolution": "Node",
"resolveJsonModule": true,
"isolatedModules": true,
"noEmit": true,
"jsx": "react",
"baseUrl": "./",
"types": ["vite-plugin-svgr/client", "vite/client"],
"paths": {
"@sd/interface": ["../../packages/interface/src/index.ts"],
"@sd/ui": ["../../packages/ui/src/index.ts"],
"@sd/client": ["../../packages/client/src/index.ts"],
"@sd/core": ["../../core/index.ts"],
},
},
"ts-node": {
"transpileOnly": true,
"compilerOptions": {
"module": "CommonJS"
}
},
"include": ["src"],
"extends": "../../packages/config/interface.tsconfig.json",
"compilerOptions": {},
"include": ["src"]
}

View File

@@ -1,7 +1,9 @@
import { defineConfig } from 'vite';
import react from '@vitejs/plugin-react';
import { name, version } from './package.json';
import svg from 'vite-plugin-svgr';
import tsconfigPaths from 'vite-plugin-tsconfig-paths';
import { name, version } from './package.json';
// https://vitejs.dev/config/
export default defineConfig({
@@ -13,7 +15,8 @@ export default defineConfig({
react({
jsxRuntime: 'classic'
}),
svg({ svgrOptions: { icon: true } })
svg({ svgrOptions: { icon: true } }),
tsconfigPaths()
],
root: 'src',
publicDir: '../../packages/interface/src/assets',

View File

@@ -4,7 +4,7 @@ version = "0.1.0"
description = "A virtual distributed filesystem."
authors = ["Jamie Pine"]
license = "GNU GENERAL PUBLIC LICENSE"
repository = "https://github.com/jamiepine/spacedrive"
repository = "https://github.com/spacedriveapp/spacedrive"
edition = "2021"
[features]

View File

@@ -0,0 +1,19 @@
-- RedefineTables
-- SQLite cannot ALTER existing columns, so the table is rebuilt:
-- create the new shape, copy the rows, drop the old table, rename.
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_volumes" (
    "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
    "client_id" INTEGER NOT NULL,
    "name" TEXT NOT NULL,
    "mount_point" TEXT NOT NULL,
    "total_bytes_capacity" TEXT NOT NULL DEFAULT '0',
    "total_bytes_available" TEXT NOT NULL DEFAULT '0',
    "disk_type" TEXT,
    "filesystem" TEXT,
    "is_system" BOOLEAN NOT NULL DEFAULT false,
    "date_modified" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);
-- "is_system" is new in this shape, so it is omitted from the column list
-- below and every copied row takes its DEFAULT false.
INSERT INTO "new_volumes" ("client_id", "date_modified", "disk_type", "filesystem", "id", "mount_point", "name", "total_bytes_available", "total_bytes_capacity") SELECT "client_id", "date_modified", "disk_type", "filesystem", "id", "mount_point", "name", "total_bytes_available", "total_bytes_capacity" FROM "volumes";
DROP TABLE "volumes";
ALTER TABLE "new_volumes" RENAME TO "volumes";
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,8 @@
/*
  Warnings:
  - A unique constraint covering the columns `[client_id,mount_point,name]` on the table `volumes` will be added. If there are existing duplicate values, this will fail.
*/
-- CreateIndex
-- A volume is identified per client by its mount point + name.
CREATE UNIQUE INDEX "volumes_client_id_mount_point_name_key" ON "volumes"("client_id", "mount_point", "name");

View File

@@ -0,0 +1,177 @@
/*
  Warnings:
  - You are about to drop the `clients` table. If the table is not empty, all the data it contains will be lost.
  - You are about to drop the column `client_id` on the `sync_events` table. All the data in the column will be lost.
  - You are about to drop the column `client_id` on the `locations` table. All the data in the column will be lost.
  - You are about to drop the column `client_id` on the `jobs` table. All the data in the column will be lost.
  - You are about to drop the column `encryption` on the `tags` table. All the data in the column will be lost.
  - You are about to drop the column `client_id` on the `volumes` table. All the data in the column will be lost.
  - Added the required column `node_id` to the `sync_events` table without a default value. This is not possible if the table is not empty.
  - Added the required column `node_id` to the `jobs` table without a default value. This is not possible if the table is not empty.
  - Added the required column `node_id` to the `volumes` table without a default value. This is not possible if the table is not empty.
*/
-- This migration renames the "clients" concept to "nodes" and introduces the
-- "keys" table. Every table carrying a client_id FK is rebuilt (SQLite cannot
-- alter columns in place) so the FK becomes node_id instead.
-- DropIndex
DROP INDEX "clients_pub_id_key";
-- DropTable
PRAGMA foreign_keys=off;
DROP TABLE "clients";
PRAGMA foreign_keys=on;
-- CreateTable
-- Replacement for "clients"; same shape, new name.
CREATE TABLE "nodes" (
    "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
    "pub_id" TEXT NOT NULL,
    "name" TEXT NOT NULL,
    "platform" INTEGER NOT NULL DEFAULT 0,
    "version" TEXT,
    "online" BOOLEAN DEFAULT true,
    "last_seen" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "timezone" TEXT,
    "date_created" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);
-- CreateTable
-- Encryption keys; "files" and "file_paths" reference these via key_id below.
CREATE TABLE "keys" (
    "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
    "checksum" TEXT NOT NULL,
    "name" TEXT,
    "date_created" DATETIME DEFAULT CURRENT_TIMESTAMP,
    "algorithm" INTEGER DEFAULT 0
);
-- RedefineTables
PRAGMA foreign_keys=OFF;
-- files: gains a nullable key_id FK to "keys"; key_id is omitted from the
-- row copy and defaults to NULL.
CREATE TABLE "new_files" (
    "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
    "cas_id" TEXT NOT NULL,
    "integrity_checksum" TEXT,
    "kind" INTEGER NOT NULL DEFAULT 0,
    "size_in_bytes" TEXT NOT NULL,
    "encryption" INTEGER NOT NULL DEFAULT 0,
    "key_id" INTEGER,
    "hidden" BOOLEAN NOT NULL DEFAULT false,
    "favorite" BOOLEAN NOT NULL DEFAULT false,
    "important" BOOLEAN NOT NULL DEFAULT false,
    "has_thumbnail" BOOLEAN NOT NULL DEFAULT false,
    "has_thumbstrip" BOOLEAN NOT NULL DEFAULT false,
    "has_video_preview" BOOLEAN NOT NULL DEFAULT false,
    "ipfs_id" TEXT,
    "comment" TEXT,
    "date_created" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "date_modified" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "date_indexed" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT "files_key_id_fkey" FOREIGN KEY ("key_id") REFERENCES "keys" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_files" ("cas_id", "comment", "date_created", "date_indexed", "date_modified", "encryption", "favorite", "has_thumbnail", "has_thumbstrip", "has_video_preview", "hidden", "id", "important", "integrity_checksum", "ipfs_id", "kind", "size_in_bytes") SELECT "cas_id", "comment", "date_created", "date_indexed", "date_modified", "encryption", "favorite", "has_thumbnail", "has_thumbstrip", "has_video_preview", "hidden", "id", "important", "integrity_checksum", "ipfs_id", "kind", "size_in_bytes" FROM "files";
DROP TABLE "files";
ALTER TABLE "new_files" RENAME TO "files";
CREATE UNIQUE INDEX "files_cas_id_key" ON "files"("cas_id");
CREATE UNIQUE INDEX "files_integrity_checksum_key" ON "files"("integrity_checksum");
-- sync_events: client_id -> node_id (NOT NULL and not copied, so this
-- rebuild only succeeds on an empty table — see warning above).
CREATE TABLE "new_sync_events" (
    "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
    "node_id" INTEGER NOT NULL,
    "timestamp" TEXT NOT NULL,
    "data" TEXT NOT NULL,
    CONSTRAINT "sync_events_node_id_fkey" FOREIGN KEY ("node_id") REFERENCES "nodes" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_sync_events" ("data", "id", "timestamp") SELECT "data", "id", "timestamp" FROM "sync_events";
DROP TABLE "sync_events";
ALTER TABLE "new_sync_events" RENAME TO "sync_events";
-- locations: client_id replaced by a nullable node_id (no FK, NULL after copy).
CREATE TABLE "new_locations" (
    "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
    "pub_id" TEXT NOT NULL,
    "node_id" INTEGER,
    "name" TEXT,
    "local_path" TEXT,
    "total_capacity" INTEGER,
    "available_capacity" INTEGER,
    "filesystem" TEXT,
    "disk_type" INTEGER,
    "is_removable" BOOLEAN,
    "is_online" BOOLEAN NOT NULL DEFAULT true,
    "date_created" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);
INSERT INTO "new_locations" ("available_capacity", "date_created", "disk_type", "filesystem", "id", "is_online", "is_removable", "local_path", "name", "pub_id", "total_capacity") SELECT "available_capacity", "date_created", "disk_type", "filesystem", "id", "is_online", "is_removable", "local_path", "name", "pub_id", "total_capacity" FROM "locations";
DROP TABLE "locations";
ALTER TABLE "new_locations" RENAME TO "locations";
CREATE UNIQUE INDEX "locations_pub_id_key" ON "locations"("pub_id");
-- jobs: client_id -> node_id (NOT NULL, not copied; empty-table only).
CREATE TABLE "new_jobs" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "node_id" INTEGER NOT NULL,
    "action" INTEGER NOT NULL,
    "status" INTEGER NOT NULL DEFAULT 0,
    "task_count" INTEGER NOT NULL DEFAULT 1,
    "completed_task_count" INTEGER NOT NULL DEFAULT 0,
    "date_created" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "date_modified" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "seconds_elapsed" INTEGER NOT NULL DEFAULT 0,
    CONSTRAINT "jobs_node_id_fkey" FOREIGN KEY ("node_id") REFERENCES "nodes" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);
INSERT INTO "new_jobs" ("action", "completed_task_count", "date_created", "date_modified", "id", "seconds_elapsed", "status", "task_count") SELECT "action", "completed_task_count", "date_created", "date_modified", "id", "seconds_elapsed", "status", "task_count" FROM "jobs";
DROP TABLE "jobs";
ALTER TABLE "new_jobs" RENAME TO "jobs";
-- tags: drops the "encryption" column.
CREATE TABLE "new_tags" (
    "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
    "pub_id" TEXT NOT NULL,
    "name" TEXT,
    "total_files" INTEGER DEFAULT 0,
    "redundancy_goal" INTEGER DEFAULT 1,
    "date_created" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "date_modified" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);
INSERT INTO "new_tags" ("date_created", "date_modified", "id", "name", "pub_id", "redundancy_goal", "total_files") SELECT "date_created", "date_modified", "id", "name", "pub_id", "redundancy_goal", "total_files" FROM "tags";
DROP TABLE "tags";
ALTER TABLE "new_tags" RENAME TO "tags";
CREATE UNIQUE INDEX "tags_pub_id_key" ON "tags"("pub_id");
-- file_paths: gains a nullable key_id FK to "keys".
CREATE TABLE "new_file_paths" (
    "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
    "is_dir" BOOLEAN NOT NULL DEFAULT false,
    "location_id" INTEGER NOT NULL,
    "materialized_path" TEXT NOT NULL,
    "name" TEXT NOT NULL,
    "extension" TEXT,
    "file_id" INTEGER,
    "parent_id" INTEGER,
    "encryption" INTEGER NOT NULL DEFAULT 0,
    "key_id" INTEGER,
    "permissions" TEXT,
    "temp_cas_id" TEXT,
    "date_created" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "date_modified" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "date_indexed" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT "file_paths_location_id_fkey" FOREIGN KEY ("location_id") REFERENCES "locations" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
    CONSTRAINT "file_paths_file_id_fkey" FOREIGN KEY ("file_id") REFERENCES "files" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
    CONSTRAINT "file_paths_parent_id_fkey" FOREIGN KEY ("parent_id") REFERENCES "file_paths" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
    CONSTRAINT "file_paths_key_id_fkey" FOREIGN KEY ("key_id") REFERENCES "keys" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_file_paths" ("date_created", "date_indexed", "date_modified", "encryption", "extension", "file_id", "id", "is_dir", "location_id", "materialized_path", "name", "parent_id", "permissions", "temp_cas_id") SELECT "date_created", "date_indexed", "date_modified", "encryption", "extension", "file_id", "id", "is_dir", "location_id", "materialized_path", "name", "parent_id", "permissions", "temp_cas_id" FROM "file_paths";
DROP TABLE "file_paths";
ALTER TABLE "new_file_paths" RENAME TO "file_paths";
CREATE UNIQUE INDEX "file_paths_location_id_materialized_path_name_extension_key" ON "file_paths"("location_id", "materialized_path", "name", "extension");
-- volumes: client_id -> node_id (NOT NULL, not copied; empty-table only).
CREATE TABLE "new_volumes" (
    "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
    "node_id" INTEGER NOT NULL,
    "name" TEXT NOT NULL,
    "mount_point" TEXT NOT NULL,
    "total_bytes_capacity" TEXT NOT NULL DEFAULT '0',
    "total_bytes_available" TEXT NOT NULL DEFAULT '0',
    "disk_type" TEXT,
    "filesystem" TEXT,
    "is_system" BOOLEAN NOT NULL DEFAULT false,
    "date_modified" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);
INSERT INTO "new_volumes" ("date_modified", "disk_type", "filesystem", "id", "is_system", "mount_point", "name", "total_bytes_available", "total_bytes_capacity") SELECT "date_modified", "disk_type", "filesystem", "id", "is_system", "mount_point", "name", "total_bytes_available", "total_bytes_capacity" FROM "volumes";
DROP TABLE "volumes";
ALTER TABLE "new_volumes" RENAME TO "volumes";
CREATE UNIQUE INDEX "volumes_node_id_mount_point_name_key" ON "volumes"("node_id", "mount_point", "name");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
-- CreateIndex
CREATE UNIQUE INDEX "nodes_pub_id_key" ON "nodes"("pub_id");
-- CreateIndex
CREATE UNIQUE INDEX "keys_checksum_key" ON "keys"("checksum");

View File

@@ -0,0 +1,83 @@
/*
  Warnings:
  - You are about to drop the `spaces` table. If the table is not empty, all the data it contains will be lost.
  - You are about to drop the column `encryption` on the `libraries` table. All the data in the column will be lost.
  - You are about to drop the column `encryption` on the `files` table. All the data in the column will be lost.
  - You are about to drop the column `encryption` on the `file_paths` table. All the data in the column will be lost.
  - You are about to drop the column `permissions` on the `file_paths` table. All the data in the column will be lost.
  - You are about to drop the column `temp_cas_id` on the `file_paths` table. All the data in the column will be lost.
*/
-- This migration removes the "spaces" table and the legacy per-row
-- "encryption" integers (superseded by the key_id FK), rebuilding each
-- affected table since SQLite cannot drop columns in place.
-- DropIndex
DROP INDEX "spaces_pub_id_key";
-- DropTable
PRAGMA foreign_keys=off;
DROP TABLE "spaces";
PRAGMA foreign_keys=on;
-- RedefineTables
PRAGMA foreign_keys=OFF;
-- libraries: drops "encryption"; all other columns are copied verbatim.
CREATE TABLE "new_libraries" (
    "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
    "pub_id" TEXT NOT NULL,
    "name" TEXT NOT NULL,
    "remote_id" TEXT,
    "is_primary" BOOLEAN NOT NULL DEFAULT true,
    "date_created" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "timezone" TEXT
);
INSERT INTO "new_libraries" ("date_created", "id", "is_primary", "name", "pub_id", "remote_id", "timezone") SELECT "date_created", "id", "is_primary", "name", "pub_id", "remote_id", "timezone" FROM "libraries";
DROP TABLE "libraries";
ALTER TABLE "new_libraries" RENAME TO "libraries";
CREATE UNIQUE INDEX "libraries_pub_id_key" ON "libraries"("pub_id");
-- files: drops "encryption"; keeps the key_id FK to "keys".
CREATE TABLE "new_files" (
    "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
    "cas_id" TEXT NOT NULL,
    "integrity_checksum" TEXT,
    "kind" INTEGER NOT NULL DEFAULT 0,
    "size_in_bytes" TEXT NOT NULL,
    "key_id" INTEGER,
    "hidden" BOOLEAN NOT NULL DEFAULT false,
    "favorite" BOOLEAN NOT NULL DEFAULT false,
    "important" BOOLEAN NOT NULL DEFAULT false,
    "has_thumbnail" BOOLEAN NOT NULL DEFAULT false,
    "has_thumbstrip" BOOLEAN NOT NULL DEFAULT false,
    "has_video_preview" BOOLEAN NOT NULL DEFAULT false,
    "ipfs_id" TEXT,
    "comment" TEXT,
    "date_created" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "date_modified" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "date_indexed" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT "files_key_id_fkey" FOREIGN KEY ("key_id") REFERENCES "keys" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_files" ("cas_id", "comment", "date_created", "date_indexed", "date_modified", "favorite", "has_thumbnail", "has_thumbstrip", "has_video_preview", "hidden", "id", "important", "integrity_checksum", "ipfs_id", "key_id", "kind", "size_in_bytes") SELECT "cas_id", "comment", "date_created", "date_indexed", "date_modified", "favorite", "has_thumbnail", "has_thumbstrip", "has_video_preview", "hidden", "id", "important", "integrity_checksum", "ipfs_id", "key_id", "kind", "size_in_bytes" FROM "files";
DROP TABLE "files";
ALTER TABLE "new_files" RENAME TO "files";
CREATE UNIQUE INDEX "files_cas_id_key" ON "files"("cas_id");
CREATE UNIQUE INDEX "files_integrity_checksum_key" ON "files"("integrity_checksum");
-- file_paths: drops "encryption", "permissions" and "temp_cas_id".
CREATE TABLE "new_file_paths" (
    "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
    "is_dir" BOOLEAN NOT NULL DEFAULT false,
    "location_id" INTEGER NOT NULL,
    "materialized_path" TEXT NOT NULL,
    "name" TEXT NOT NULL,
    "extension" TEXT,
    "file_id" INTEGER,
    "parent_id" INTEGER,
    "key_id" INTEGER,
    "date_created" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "date_modified" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "date_indexed" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT "file_paths_location_id_fkey" FOREIGN KEY ("location_id") REFERENCES "locations" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
    CONSTRAINT "file_paths_file_id_fkey" FOREIGN KEY ("file_id") REFERENCES "files" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
    CONSTRAINT "file_paths_parent_id_fkey" FOREIGN KEY ("parent_id") REFERENCES "file_paths" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
    CONSTRAINT "file_paths_key_id_fkey" FOREIGN KEY ("key_id") REFERENCES "keys" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_file_paths" ("date_created", "date_indexed", "date_modified", "extension", "file_id", "id", "is_dir", "key_id", "location_id", "materialized_path", "name", "parent_id") SELECT "date_created", "date_indexed", "date_modified", "extension", "file_id", "id", "is_dir", "key_id", "location_id", "materialized_path", "name", "parent_id" FROM "file_paths";
DROP TABLE "file_paths";
ALTER TABLE "new_file_paths" RENAME TO "file_paths";
CREATE UNIQUE INDEX "file_paths_location_id_materialized_path_name_extension_key" ON "file_paths"("location_id", "materialized_path", "name", "extension");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -20,10 +20,10 @@ model Migration {
model SyncEvent {
id Int @id @default(autoincrement())
client_id Int
node_id Int
timestamp String
data String
client Client @relation(fields: [client_id], references: [id])
node Node @relation(fields: [node_id], references: [id])
@@map("sync_events")
}
@@ -34,10 +34,8 @@ model Library {
name String
remote_id String?
is_primary Boolean @default(true)
encryption Int @default(0)
date_created DateTime @default(now())
timezone String?
spaces Space[]
@@map("libraries")
}
@@ -57,7 +55,7 @@ model LibraryStatistics {
@@map("library_statistics")
}
model Client {
model Node {
id Int @id @default(autoincrement())
pub_id String @unique
name String
@@ -71,27 +69,29 @@ model Client {
sync_events SyncEvent[]
jobs Job[]
@@map("clients")
@@map("nodes")
}
model Volume {
id Int @id() @default(autoincrement())
client_id Int
node_id Int
name String
mount_point String
total_bytes_capacity String @default("0")
total_bytes_available String @default("0")
disk_type String?
filesystem String?
is_system Boolean @default(false)
date_modified DateTime @default(now())
@@unique([node_id, mount_point, name])
@@map("volumes")
}
model Location {
id Int @id @default(autoincrement())
pub_id String @unique
client_id Int?
node_id Int?
name String?
local_path String?
total_capacity Int?
@@ -116,8 +116,7 @@ model File {
// basic metadata
kind Int @default(0)
size_in_bytes String
// mark uniqely as encrypted, will lead to all file paths being encrypted
encryption Int @default(0)
key_id Int?
// handy ways to mark a file
hidden Boolean @default(false)
favorite Boolean @default(false)
@@ -144,6 +143,8 @@ model File {
comments Comment[]
media_data MediaData?
key Key? @relation(fields: [key_id], references: [id])
@@map("files")
}
@@ -159,11 +160,11 @@ model FilePath {
extension String?
// the unique File for this file path
file_id Int?
//
// the parent in the file tree
parent_id Int?
encryption Int @default(0)
permissions String?
temp_cas_id String? // so a filepath can be created without its File, as they're created lazily
key_id Int? // replacement for encryption
// permissions String?
// temp_cas_id String? // so a filepath can be created without its File, as they're created lazily
date_created DateTime @default(now())
date_modified DateTime @default(now())
@@ -174,6 +175,8 @@ model FilePath {
parent FilePath? @relation("directory_file_paths", fields: [parent_id], references: [id])
children FilePath[] @relation("directory_file_paths")
key Key? @relation(fields: [key_id], references: [id])
@@unique([location_id, materialized_path, name, extension])
@@map("file_paths")
}
@@ -186,6 +189,23 @@ model FileConflict {
@@map("file_conflicts")
}
// keys allow us to know exactly which files can be decrypted with a given key
// they can be "mounted" to a client, and then used to decrypt files automatically
model Key {
id Int @id @default(autoincrement())
// used to identify the key when it is entered by user
checksum String @unique
name String?
// nullable if concealed for security
date_created DateTime? @default(now())
// so we know which algorithm to use, can be null if user must select
algorithm Int? @default(0)
files File[]
file_paths FilePath[]
@@map("keys")
}
model MediaData {
id Int @id
pixel_width Int?
@@ -210,7 +230,6 @@ model Tag {
id Int @id @default(autoincrement())
pub_id String @unique
name String?
encryption Int? @default(0)
total_files Int? @default(0)
redundancy_goal Int? @default(1)
date_created DateTime @default(now())
@@ -258,10 +277,10 @@ model LabelOnFile {
}
model Job {
id String @id
client_id Int
action Int
status Int @default(0)
id String @id
node_id Int
action Int
status Int @default(0)
task_count Int @default(1)
completed_task_count Int @default(0)
@@ -269,23 +288,10 @@ model Job {
date_modified DateTime @default(now())
seconds_elapsed Int @default(0)
clients Client @relation(fields: [client_id], references: [id], onDelete: Cascade, onUpdate: Cascade)
nodes Node @relation(fields: [node_id], references: [id], onDelete: Cascade, onUpdate: Cascade)
@@map("jobs")
}
model Space {
id Int @id @default(autoincrement())
pub_id String @unique
name String
encryption Int? @default(0) // remove
date_created DateTime @default(now())
date_modified DateTime @default(now())
Library Library? @relation(fields: [libraryId], references: [id])
libraryId Int?
@@map("spaces")
}
model Album {
id Int @id @default(autoincrement())
pub_id String @unique

View File

View File

@@ -1,90 +0,0 @@
use crate::{
prisma::{self, client},
state, Core,
};
use chrono::{DateTime, Utc};
use int_enum::IntEnum;
use serde::{Deserialize, Serialize};
use std::env;
use thiserror::Error;
use ts_rs::TS;
/// Snapshot of a client (device) registered with the core, serialized for
/// the frontend; `#[ts(export)]` emits a matching TypeScript type via ts-rs.
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[ts(export)]
pub struct Client {
	pub uuid: String,
	pub name: String,
	pub platform: Platform,
	pub tcp_address: String,
	// DateTime<Utc> has no native TS mapping, so it is exported as `string`.
	#[ts(type = "string")]
	pub last_seen: DateTime<Utc>,
	#[ts(type = "string")]
	pub last_synchronized: DateTime<Utc>,
}
/// Operating system a client runs on. `#[repr(i32)]` + `IntEnum` let the
/// discriminant round-trip through the integer `platform` database column,
/// so these numeric values must never be reordered.
#[repr(i32)]
#[derive(Debug, Clone, Copy, Serialize, Deserialize, TS, Eq, PartialEq, IntEnum)]
#[ts(export)]
pub enum Platform {
	Unknown = 0,
	Windows = 1,
	MacOS = 2,
	Linux = 3,
	IOS = 4,
	Android = 5,
}
/// Ensure a database row exists for this client, creating one on first run,
/// then persist the resolved name and row id back into local client state.
///
/// Returns `ClientError::DatabaseError` if any Prisma query fails.
pub async fn create(core: &Core) -> Result<(), ClientError> {
	println!("Creating client...");
	let mut config = state::client::get();
	let db = &core.database;
	// The machine hostname doubles as the client's display name; fall back
	// to "unknown" if it cannot be read.
	let hostname = match hostname::get() {
		Ok(hostname) => hostname.to_str().unwrap_or_default().to_owned(),
		Err(_) => "unknown".to_owned(),
	};
	// Map the compile-time target OS onto the persisted Platform discriminant.
	let platform = match env::consts::OS {
		"windows" => Platform::Windows,
		"macos" => Platform::MacOS,
		"linux" => Platform::Linux,
		_ => Platform::Unknown,
	};
	// Hand-rolled upsert: look the client up by its stable pub_id (the uuid
	// stored in local state) and insert a fresh row when missing.
	let client = match db
		.client()
		.find_unique(client::pub_id::equals(config.client_uuid.clone()))
		.exec()
		.await?
	{
		Some(client) => client,
		None => {
			db.client()
				.create(
					client::pub_id::set(config.client_uuid.clone()),
					client::name::set(hostname.clone()),
					vec![
						client::platform::set(platform as i32),
						client::online::set(Some(true)),
					],
				)
				.exec()
				.await?
		}
	};
	// Cache the database id and name in on-disk state for later lookups.
	config.client_name = hostname;
	config.client_id = client.id;
	config.save();
	println!("Client: {:?}", &client);
	Ok(())
}
/// Errors that can occur while registering a client; `#[from]` lets `?`
/// convert Prisma query failures automatically.
#[derive(Error, Debug)]
pub enum ClientError {
	#[error("Database error")]
	DatabaseError(#[from] prisma::QueryError),
}

View File

@@ -1,4 +1,3 @@
use crate::state;
use crate::{prisma, prisma::PrismaClient};
use thiserror::Error;
pub mod migrate;

View File

@@ -1,5 +1,5 @@
use crate::job::jobs::JobReportUpdate;
use crate::state::client;
use crate::node::state;
use crate::{
job::{jobs::Job, worker::WorkerContext},
prisma::file_path,
@@ -27,7 +27,7 @@ pub static THUMBNAIL_CACHE_DIR_NAME: &str = "thumbnails";
#[async_trait::async_trait]
impl Job for ThumbnailJob {
async fn run(&self, ctx: WorkerContext) -> Result<()> {
let config = client::get();
let config = state::get();
let core_ctx = ctx.core_ctx.clone();
let location = sys::locations::get_location(&core_ctx, self.location_id).await?;
@@ -60,13 +60,13 @@ impl Job for ThumbnailJob {
image_file.materialized_path.clone()
))]);
let path = format!("{}{}", root_path, image_file.materialized_path);
let checksum = image_file.temp_cas_id.as_ref().unwrap();
let cas_id = image_file.file().unwrap().unwrap().cas_id.clone();
// Define and write the WebP-encoded file to a given path
let output_path = Path::new(&config.data_path)
.join(THUMBNAIL_CACHE_DIR_NAME)
.join(format!("{}", location_id))
.join(checksum)
.join(&cas_id)
.with_extension("webp");
// check if file exists at output path
@@ -81,9 +81,7 @@ impl Job for ThumbnailJob {
ctx.progress(vec![JobReportUpdate::CompletedTaskCount(i + 1)]);
if !is_background {
block_on(ctx.core_ctx.emit(CoreEvent::NewThumbnail {
cas_id: checksum.to_string(),
}));
block_on(ctx.core_ctx.emit(CoreEvent::NewThumbnail { cas_id }));
};
} else {
println!("Thumb exists, skipping... {}", output_path.display());
@@ -140,7 +138,13 @@ pub async fn get_images(
params.push(file_path::materialized_path::starts_with(path.to_string()))
}
let image_files = ctx.database.file_path().find_many(params).exec().await?;
let image_files = ctx
.database
.file_path()
.find_many(params)
.with(file_path::file::fetch())
.exec()
.await?;
Ok(image_files)
}

View File

@@ -1,3 +1,5 @@
use std::fs;
use crate::job::jobs::JobReportUpdate;
use crate::{
file::FileError,
@@ -10,6 +12,8 @@ use futures::executor::block_on;
use serde::{Deserialize, Serialize};
use prisma_client_rust::Direction;
use super::checksum::generate_cas_id;
#[derive(Deserialize, Serialize, Debug)]
pub struct FileCreated {
pub id: i32,
@@ -46,8 +50,9 @@ impl Job for FileIdentifierJob {
let mut rows: Vec<String> = Vec::new();
// only rows that have a valid cas_id to be inserted
for file_path in file_paths.iter() {
if file_path.temp_cas_id.is_some() {
rows.push(prepare_file_values(file_path));
let data = prepare_file_values(file_path);
if let Ok(d) = data {
rows.push(d);
}
}
if rows.len() == 0 {
@@ -133,10 +138,22 @@ pub async fn get_orphan_file_paths(
Ok(files)
}
pub fn prepare_file_values(file_path: &file_path::Data) -> String {
format!(
pub fn prepare_file_values(file_path: &file_path::Data) -> Result<String> {
let metadata = fs::metadata(&file_path.materialized_path)?;
let cas_id = {
if !file_path.is_dir {
// TODO: remove unwrap
let mut x = generate_cas_id(&file_path.materialized_path, metadata.len()).unwrap();
x.truncate(16);
x
} else {
"".to_string()
}
};
// TODO: add all metadata
Ok(format!(
"(\"{}\",\"{}\")",
file_path.temp_cas_id.as_ref().unwrap(),
cas_id,
"0"
)
))
}

View File

@@ -1,13 +1,12 @@
use std::path::Path;
use crate::{
encode::thumb::THUMBNAIL_CACHE_DIR_NAME,
file::{DirectoryWithContents, FileError, FilePath},
prisma::file_path,
state::client,
file::{DirectoryWithContents, File, FileError},
node::state,
prisma::{file, file_path},
sys::locations::get_location,
CoreContext,
};
use std::path::Path;
pub async fn open_dir(
ctx: &CoreContext,
@@ -15,7 +14,7 @@ pub async fn open_dir(
path: &str,
) -> Result<DirectoryWithContents, FileError> {
let db = &ctx.database;
let config = client::get();
let config = state::get();
// get location
let location = get_location(ctx, location_id.clone()).await?;
@@ -31,27 +30,27 @@ pub async fn open_dir(
.await?
.ok_or(FileError::DirectoryNotFound(path.to_string()))?;
let files = db
.file_path()
.find_many(vec![file_path::parent_id::equals(Some(directory.id))])
let files: Vec<File> = db
.file()
.find_many(vec![file::paths::some(vec![file_path::parent_id::equals(
Some(directory.id),
)])])
.exec()
.await?;
.await?
.into_iter()
.map(Into::into)
.collect();
let files: Vec<FilePath> = files.into_iter().map(|l| l.into()).collect();
let mut contents: Vec<FilePath> = vec![];
let mut contents: Vec<File> = vec![];
for mut file in files {
if file.temp_cas_id.is_some() {
let path = Path::new(&config.data_path)
.join(THUMBNAIL_CACHE_DIR_NAME)
.join(format!("{}", location.id))
.join(file.temp_cas_id.as_ref().unwrap())
.with_extension("webp");
let thumb_path = Path::new(&config.data_path)
.join(THUMBNAIL_CACHE_DIR_NAME)
.join(format!("{}", location.id))
.join(file.cas_id.clone())
.with_extension("webp");
let exists = path.exists();
file.has_local_thumbnail = exists;
}
file.has_thumbnail = thumb_path.exists();
contents.push(file);
}

View File

@@ -15,6 +15,8 @@ pub enum ScanProgress {
Message(String),
}
static BATCH_SIZE: usize = 100;
// creates a vector of valid path buffers from a directory
pub async fn scan_path(
ctx: &CoreContext,
@@ -49,7 +51,7 @@ pub async fn scan_path(
// spawn a dedicated thread to scan the directory for performance
let (paths, scan_start, on_progress) = tokio::task::spawn_blocking(move || {
// store every valid path discovered
let mut paths: Vec<(PathBuf, i32, Option<i32>)> = Vec::new();
let mut paths: Vec<(PathBuf, i32, Option<i32>, bool)> = Vec::new();
// store a hashmap of directories to their file ids for fast lookup
let mut dirs: HashMap<String, i32> = HashMap::new();
// begin timer for logging purposes
@@ -76,6 +78,8 @@ pub async fn scan_path(
};
let path = entry.path();
println!("found: {:?}", path);
let parent_path = path
.parent()
.unwrap_or(Path::new(""))
@@ -83,15 +87,28 @@ pub async fn scan_path(
.unwrap_or("");
let parent_dir_id = dirs.get(&*parent_path);
let str = match path.as_os_str().to_str() {
Some(str) => str,
None => {
println!("Error reading file {}", &path.display());
continue;
}
};
on_progress(vec![
ScanProgress::Message(format!("Found: {:?}", &path)),
ScanProgress::ChunkCount(paths.len() / 100),
ScanProgress::Message(format!("{}", str)),
ScanProgress::ChunkCount(paths.len() / BATCH_SIZE),
]);
let file_id = get_id();
paths.push((path.to_owned(), file_id, parent_dir_id.cloned()));
let file_type = entry.file_type();
let is_dir = file_type.is_dir();
if entry.file_type().is_dir() {
if is_dir || file_type.is_file() {
paths.push((path.to_owned(), file_id, parent_dir_id.cloned(), is_dir));
}
if is_dir {
let _path = match path.to_str() {
Some(path) => path.to_owned(),
None => continue,
@@ -107,11 +124,11 @@ pub async fn scan_path(
let db_write_start = Instant::now();
let scan_read_time = scan_start.elapsed();
for (i, chunk) in paths.chunks(100).enumerate() {
for (i, chunk) in paths.chunks(BATCH_SIZE).enumerate() {
on_progress(vec![
ScanProgress::SavedChunks(i as usize),
ScanProgress::Message(format!(
"Writing {} of {} to db",
"Writing {} of {} to library",
i * chunk.len(),
paths.len(),
)),
@@ -119,9 +136,9 @@ pub async fn scan_path(
// vector to store active models
let mut files: Vec<String> = Vec::new();
for (file_path, file_id, parent_dir_id) in chunk {
for (file_path, file_id, parent_dir_id, is_dir) in chunk {
files.push(
match prepare_values(&file_path, *file_id, &location, parent_dir_id) {
match prepare_values(&file_path, *file_id, &location, parent_dir_id, *is_dir) {
Ok(file) => file,
Err(e) => {
println!("Error creating file model from path {:?}: {}", file_path, e);
@@ -132,7 +149,7 @@ pub async fn scan_path(
}
let raw_sql = format!(
r#"
INSERT INTO file_paths (id, is_dir, location_id, materialized_path, name, extension, parent_id, date_created, temp_cas_id)
INSERT INTO file_paths (id, is_dir, location_id, materialized_path, name, extension, parent_id)
VALUES {}
"#,
files.join(", ")
@@ -157,12 +174,25 @@ fn prepare_values(
id: i32,
location: &LocationResource,
parent_id: &Option<i32>,
is_dir: bool,
) -> Result<String> {
let metadata = fs::metadata(&file_path)?;
// let metadata = fs::metadata(&file_path)?;
let location_path = location.path.as_ref().unwrap().as_str();
// let size = metadata.len();
let name = extract_name(file_path.file_stem());
let extension = extract_name(file_path.extension());
let name;
let extension;
// if the 'file_path' is not a directory, then get the extension and name.
// if 'file_path' is a directory, set extension to an empty string to avoid periods in folder names
// - being interpreted as file extensions
if is_dir {
extension = "".to_string();
name = extract_name(file_path.file_name());
} else {
extension = extract_name(file_path.extension());
name = extract_name(file_path.file_stem());
}
let materialized_path = match file_path.to_str() {
Some(p) => p
@@ -173,23 +203,24 @@ fn prepare_values(
None => return Err(anyhow!("{}", file_path.to_str().unwrap_or_default())),
};
let cas_id = {
if !metadata.is_dir() {
let mut x = generate_cas_id(&file_path.to_str().unwrap(), metadata.len()).unwrap();
x.truncate(16);
x
} else {
"".to_string()
}
};
// let cas_id = {
// if !metadata.is_dir() {
// // TODO: remove unwrap, skip and make sure to continue loop
// let mut x = generate_cas_id(&file_path.to_str().unwrap(), metadata.len()).unwrap();
// x.truncate(16);
// x
// } else {
// "".to_string()
// }
// };
let date_created: DateTime<Utc> = metadata.created().unwrap().into();
let parsed_date_created = date_created.to_rfc3339_opts(SecondsFormat::Millis, true);
// let date_created: DateTime<Utc> = metadata.created().unwrap().into();
// let parsed_date_created = date_created.to_rfc3339_opts(SecondsFormat::Millis, true);
let values = format!(
"({}, {}, {}, \"{}\", \"{}\", \"{}\", {},\"{}\", \"{}\")",
"({}, {}, {}, \"{}\", \"{}\", \"{}\", {})",
id,
metadata.is_dir(),
is_dir,
location.id,
materialized_path,
name,
@@ -198,8 +229,8 @@ fn prepare_values(
.clone()
.map(|id| format!("\"{}\"", &id))
.unwrap_or("NULL".to_string()),
parsed_date_created,
cas_id
// parsed_date_created,
// cas_id
);
println!("{}", values);

View File

@@ -29,7 +29,7 @@ pub struct File {
pub has_thumbnail: bool,
pub has_thumbstrip: bool,
pub has_video_preview: bool,
pub encryption: EncryptionAlgorithm,
// pub encryption: EncryptionAlgorithm,
pub ipfs_id: Option<String>,
pub comment: Option<String>,
@@ -58,7 +58,7 @@ pub struct FilePath {
pub extension: Option<String>,
pub file_id: Option<i32>,
pub parent_id: Option<i32>,
pub temp_cas_id: Option<String>,
// pub temp_cas_id: Option<String>,
pub has_local_thumbnail: bool,
#[ts(type = "string")]
pub date_created: chrono::DateTime<chrono::Utc>,
@@ -66,7 +66,6 @@ pub struct FilePath {
pub date_modified: chrono::DateTime<chrono::Utc>,
#[ts(type = "string")]
pub date_indexed: chrono::DateTime<chrono::Utc>,
pub permissions: Option<String>,
}
#[repr(i32)]
@@ -92,7 +91,7 @@ impl Into<File> for file::Data {
integrity_checksum: self.integrity_checksum,
kind: IntEnum::from_int(self.kind).unwrap(),
size_in_bytes: self.size_in_bytes.to_string(),
encryption: EncryptionAlgorithm::from_int(self.encryption).unwrap(),
// encryption: EncryptionAlgorithm::from_int(self.encryption).unwrap(),
ipfs_id: self.ipfs_id,
hidden: self.hidden,
favorite: self.favorite,
@@ -119,11 +118,11 @@ impl Into<FilePath> for file_path::Data {
parent_id: self.parent_id,
location_id: self.location_id,
date_indexed: self.date_indexed,
permissions: self.permissions,
// permissions: self.permissions,
has_local_thumbnail: false,
name: self.name,
extension: self.extension,
temp_cas_id: self.temp_cas_id,
// temp_cas_id: self.temp_cas_id,
date_created: self.date_created,
date_modified: self.date_modified,
}
@@ -134,7 +133,7 @@ impl Into<FilePath> for file_path::Data {
#[ts(export)]
pub struct DirectoryWithContents {
pub directory: FilePath,
pub contents: Vec<FilePath>,
pub contents: Vec<File>,
}
#[derive(Error, Debug)]

View File

@@ -3,8 +3,8 @@ use super::{
JobError,
};
use crate::{
prisma::{client, job},
state,
node::state,
prisma::{job, node},
sync::{crdt::Replicate, engine::SyncContext},
CoreContext,
};
@@ -134,14 +134,14 @@ impl JobReport {
}
}
pub async fn create(&self, ctx: &CoreContext) -> Result<(), JobError> {
let config = state::client::get();
let config = state::get();
ctx
.database
.job()
.create(
job::id::set(self.id.clone()),
job::action::set(1),
job::clients::link(client::id::equals(config.client_id)),
job::nodes::link(node::id::equals(config.node_id)),
vec![],
)
.exec()

View File

@@ -1,9 +1,11 @@
use crate::{file::cas::identifier::FileIdentifierJob, library::loader::get_library_path};
use crate::{
file::cas::identifier::FileIdentifierJob, library::loader::get_library_path,
node::state::NodeState,
};
use job::jobs::{Job, JobReport, Jobs};
use prisma::PrismaClient;
use serde::{Deserialize, Serialize};
use state::client::ClientState;
use std::{fs, path::Path, sync::Arc};
use std::{fs, sync::Arc};
use thiserror::Error;
use tokio::sync::{
mpsc::{self, unbounded_channel, UnboundedReceiver, UnboundedSender},
@@ -14,17 +16,16 @@ use ts_rs::TS;
use crate::encode::thumb::ThumbnailJob;
// init modules
pub mod client;
pub mod crypto;
pub mod db;
pub mod encode;
pub mod file;
pub mod job;
pub mod library;
pub mod node;
#[cfg(target_os = "p2p")]
pub mod p2p;
pub mod prisma;
pub mod state;
pub mod sync;
pub mod sys;
pub mod util;
@@ -101,8 +102,8 @@ impl CoreContext {
}
}
pub struct Core {
state: ClientState,
pub struct Node {
state: NodeState,
jobs: job::jobs::Jobs,
database: Arc<PrismaClient>,
// filetype_registry: library::TypeRegistry,
@@ -126,9 +127,9 @@ pub struct Core {
),
}
impl Core {
// create new instance of core, run startup tasks
pub async fn new(mut data_dir: std::path::PathBuf) -> (Core, mpsc::Receiver<CoreEvent>) {
impl Node {
// create new instance of node, run startup tasks
pub async fn new(mut data_dir: std::path::PathBuf) -> (Node, mpsc::Receiver<CoreEvent>) {
let (event_sender, event_recv) = mpsc::channel(100);
data_dir = data_dir.join("spacedrive");
@@ -136,15 +137,15 @@ impl Core {
// create data directory if it doesn't exist
fs::create_dir_all(&data_dir).unwrap();
// prepare basic client state
let mut state = ClientState::new(data_dir, "diamond-mastering-space-dragon").unwrap();
let mut state = NodeState::new(data_dir, "diamond-mastering-space-dragon").unwrap();
// load from disk
state.read_disk().unwrap_or(println!(
"Error: No client state found, creating new one..."
));
state
.read_disk()
.unwrap_or(println!("Error: No node state found, creating new one..."));
state.save();
println!("Client State: {:?}", state);
println!("Node State: {:?}", state);
// connect to default library
let database = Arc::new(
@@ -155,7 +156,7 @@ impl Core {
let internal_channel = unbounded_channel::<InternalEvent>();
let core = Core {
let node = Node {
state,
query_channel: unbounded_channel(),
command_channel: unbounded_channel(),
@@ -170,7 +171,7 @@ impl Core {
p2p::listener::listen(None).await.unwrap_or(());
});
(core, event_recv)
(node, event_recv)
}
pub fn get_context(&self) -> CoreContext {
@@ -233,10 +234,10 @@ impl Core {
}
}
}
// init client
match client::create(&self).await {
// init node data within library
match node::LibraryNode::create(&self).await {
Ok(_) => println!("Spacedrive online"),
Err(e) => println!("Error initializing client: {:?}", e),
Err(e) => println!("Error initializing node: {:?}", e),
};
}
@@ -246,7 +247,9 @@ impl Core {
Ok(match cmd {
// CRUD for locations
ClientCommand::LocCreate { path } => {
CoreResponse::LocCreate(sys::locations::new_location_and_scan(&ctx, &path).await?)
let loc = sys::locations::new_location_and_scan(&ctx, &path).await?;
ctx.spawn_job(Box::new(FileIdentifierJob));
CoreResponse::LocCreate(loc)
}
ClientCommand::LocUpdate { id: _, name: _ } => todo!(),
ClientCommand::LocDelete { id: _ } => todo!(),
@@ -314,11 +317,11 @@ impl Core {
),
ClientQuery::LibGetTags => todo!(),
ClientQuery::JobGetRunning => CoreResponse::JobGetRunning(self.jobs.get_running().await),
// TODO: FIX THIS
ClientQuery::JobGetHistory => CoreResponse::JobGetHistory(Jobs::get_history(&ctx).await?),
ClientQuery::GetLibraryStatistics => {
CoreResponse::GetLibraryStatistics(library::statistics::Statistics::calculate(&ctx).await?)
}
ClientQuery::GetNodes => todo!(),
})
}
}
@@ -370,6 +373,7 @@ pub enum ClientQuery {
limit: i32,
},
GetLibraryStatistics,
GetNodes,
}
// represents an event this library can emit
@@ -395,7 +399,7 @@ pub enum CoreResponse {
SysGetLocation(sys::locations::LocationResource),
SysGetLocations(Vec<sys::locations::LocationResource>),
LibGetExplorerDir(file::DirectoryWithContents),
ClientGetState(ClientState),
ClientGetState(NodeState),
LocCreate(sys::locations::LocationResource),
JobGetRunning(Vec<JobReport>),
JobGetHistory(Vec<JobReport>),

View File

@@ -1,9 +1,9 @@
use anyhow::Result;
use uuid::Uuid;
use crate::state::client::LibraryState;
use crate::{db::migrate, prisma::library, state};
use crate::{Core, CoreContext};
use crate::node::state::LibraryState;
use crate::{db::migrate, node::state, prisma::library};
use crate::{CoreContext, Node};
use super::LibraryError;
@@ -15,8 +15,8 @@ pub fn get_library_path(data_path: &str) -> String {
format!("{}/{}", path, LIBRARY_DB_NAME)
}
pub async fn get(core: &Core) -> Result<library::Data, LibraryError> {
let config = state::client::get();
pub async fn get(core: &Node) -> Result<library::Data, LibraryError> {
let config = state::get();
let db = &core.database;
let library_state = config.get_current_library();
@@ -43,7 +43,7 @@ pub async fn get(core: &Core) -> Result<library::Data, LibraryError> {
}
pub async fn load(ctx: &CoreContext, library_path: &str, library_id: &str) -> Result<()> {
let mut config = state::client::get();
let mut config = state::get();
println!("Initializing library: {} {}", &library_id, library_path);
@@ -58,7 +58,7 @@ pub async fn load(ctx: &CoreContext, library_path: &str, library_id: &str) -> Re
}
pub async fn create(ctx: &CoreContext, name: Option<String>) -> Result<()> {
let mut config = state::client::get();
let mut config = state::get();
let uuid = Uuid::new_v4().to_string();

View File

@@ -1,6 +1,6 @@
use crate::{
node::state,
prisma::{library, library_statistics::*},
state::client,
sys::{self, volumes::Volume},
CoreContext,
};
@@ -53,7 +53,7 @@ impl Default for Statistics {
impl Statistics {
pub async fn retrieve(ctx: &CoreContext) -> Result<Statistics, LibraryError> {
let config = client::get();
let config = state::get();
let db = &ctx.database;
let library_data = config.get_current_library();
@@ -70,7 +70,7 @@ impl Statistics {
Ok(library_statistics_db.into())
}
pub async fn calculate(ctx: &CoreContext) -> Result<Statistics, LibraryError> {
let config = client::get();
let config = state::get();
let db = &ctx.database;
// get library from client state
let library_data = config.get_current_library();
@@ -104,10 +104,11 @@ impl Statistics {
// println!("{:?}", volumes);
let mut available_capacity: u64 = 0;
let mut total_capacity: u64 = 0;
if volumes.is_ok() {
for volume in volumes.unwrap() {
println!("{:?}", volume.available_capacity);
available_capacity += volume.available_capacity
total_capacity += volume.total_capacity;
available_capacity += volume.available_capacity;
}
}
@@ -122,7 +123,8 @@ impl Statistics {
let statistics = Statistics {
library_db_size: library_db_size.to_string(),
total_bytes_capacity: available_capacity.to_string(),
total_bytes_free: available_capacity.to_string(),
total_bytes_capacity: total_capacity.to_string(),
preview_media_bytes: thumbnail_folder_size.unwrap_or(0).to_string(),
..Statistics::default()
};

104
core/src/node/mod.rs Normal file
View File

@@ -0,0 +1,104 @@
use crate::{
prisma::{self, node},
CoreContext, Node,
};
use chrono::{DateTime, Utc};
use int_enum::IntEnum;
use serde::{Deserialize, Serialize};
use std::env;
use thiserror::Error;
use ts_rs::TS;
pub mod state;
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[ts(export)]
pub struct LibraryNode {
pub uuid: String,
pub name: String,
pub platform: Platform,
pub tcp_address: String,
#[ts(type = "string")]
pub last_seen: DateTime<Utc>,
#[ts(type = "string")]
pub last_synchronized: DateTime<Utc>,
}
#[repr(i32)]
#[derive(Debug, Clone, Copy, Serialize, Deserialize, TS, Eq, PartialEq, IntEnum)]
#[ts(export)]
pub enum Platform {
Unknown = 0,
Windows = 1,
MacOS = 2,
Linux = 3,
IOS = 4,
Android = 5,
}
impl LibraryNode {
pub async fn create(node: &Node) -> Result<(), NodeError> {
println!("Creating node...");
let mut config = state::get();
let db = &node.database;
let hostname = match hostname::get() {
Ok(hostname) => hostname.to_str().unwrap_or_default().to_owned(),
Err(_) => "unknown".to_owned(),
};
let platform = match env::consts::OS {
"windows" => Platform::Windows,
"macos" => Platform::MacOS,
"linux" => Platform::Linux,
_ => Platform::Unknown,
};
let _node = match db
.node()
.find_unique(node::pub_id::equals(config.node_pub_id.clone()))
.exec()
.await?
{
Some(node) => node,
None => {
db.node()
.create(
node::pub_id::set(config.node_pub_id.clone()),
node::name::set(hostname.clone()),
vec![
node::platform::set(platform as i32),
node::online::set(Some(true)),
],
)
.exec()
.await?
}
};
config.node_name = hostname;
config.node_id = _node.id;
config.save();
println!("node: {:?}", &_node);
Ok(())
}
pub async fn get_nodes(ctx: &CoreContext) -> Result<Vec<node::Data>, NodeError> {
let db = &ctx.database;
let _node = db.node().find_many(vec![]).exec().await?;
Ok(_node)
}
}
#[derive(Error, Debug)]
pub enum NodeError {
#[error("Database error")]
DatabaseError(#[from] prisma::QueryError),
#[error("Client not found error")]
ClientNotFound,
}

View File

@@ -9,23 +9,21 @@ use uuid::Uuid;
#[derive(Debug, Serialize, Deserialize, Clone, Default, TS)]
#[ts(export)]
pub struct ClientState {
// client id is a uniquely generated UUID
pub client_uuid: String,
pub client_id: i32,
// client_name is the name of the device running the client
pub client_name: String,
pub struct NodeState {
pub node_pub_id: String,
pub node_id: i32,
pub node_name: String,
// config path is stored as struct can exist only in memory during startup and be written to disk later without supplying path
pub data_path: String,
// the port this client uses to listen for incoming connections
// the port this node uses to listen for incoming connections
pub tcp_port: u32,
// all the libraries loaded by this client
// all the libraries loaded by this node
pub libraries: Vec<LibraryState>,
// used to quickly find the default library
pub current_library_uuid: String,
}
pub static CLIENT_STATE_CONFIG_NAME: &str = "client_state.json";
pub static NODE_STATE_CONFIG_NAME: &str = "node_state.json";
#[derive(Debug, Serialize, Deserialize, Clone, Default, TS)]
#[ts(export)]
@@ -36,26 +34,26 @@ pub struct LibraryState {
pub offline: bool,
}
// global, thread-safe storage for client state
// global, thread-safe storage for node state
lazy_static! {
static ref CONFIG: RwLock<Option<ClientState>> = RwLock::new(None);
static ref CONFIG: RwLock<Option<NodeState>> = RwLock::new(None);
}
pub fn get() -> ClientState {
pub fn get() -> NodeState {
match CONFIG.read() {
Ok(guard) => guard.clone().unwrap_or(ClientState::default()),
Err(_) => return ClientState::default(),
Ok(guard) => guard.clone().unwrap_or(NodeState::default()),
Err(_) => return NodeState::default(),
}
}
impl ClientState {
pub fn new(data_path: &str, client_name: &str) -> Result<Self> {
impl NodeState {
pub fn new(data_path: &str, node_name: &str) -> Result<Self> {
let uuid = Uuid::new_v4().to_string();
// create struct and assign defaults
let config = Self {
client_uuid: uuid,
node_pub_id: uuid,
data_path: data_path.to_string(),
client_name: client_name.to_string(),
node_name: node_name.to_string(),
..Default::default()
};
Ok(config)
@@ -65,7 +63,7 @@ impl ClientState {
self.write_memory();
// only write to disk if config path is set
if !&self.data_path.is_empty() {
let config_path = format!("{}/{}", &self.data_path, CLIENT_STATE_CONFIG_NAME);
let config_path = format!("{}/{}", &self.data_path, NODE_STATE_CONFIG_NAME);
let mut file = fs::File::create(config_path).unwrap();
let json = serde_json::to_string(&self).unwrap();
file.write_all(json.as_bytes()).unwrap();
@@ -73,7 +71,7 @@ impl ClientState {
}
pub fn read_disk(&mut self) -> Result<()> {
let config_path = format!("{}/{}", &self.data_path, CLIENT_STATE_CONFIG_NAME);
let config_path = format!("{}/{}", &self.data_path, NODE_STATE_CONFIG_NAME);
// open the file and parse json
let file = fs::File::open(config_path)?;
let reader = BufReader::new(file);

View File

@@ -1 +0,0 @@
pub mod client;

View File

@@ -1,5 +1,5 @@
use crate::{
file::indexer::IndexerJob, prisma::location, state::client, ClientQuery, CoreContext, CoreEvent,
file::indexer::IndexerJob, node::state, prisma::location, ClientQuery, CoreContext, CoreEvent,
};
use anyhow::Result;
use serde::{Deserialize, Serialize};
@@ -109,13 +109,33 @@ pub async fn get_locations(ctx: &CoreContext) -> Result<Vec<LocationResource>, S
pub async fn create_location(ctx: &CoreContext, path: &str) -> Result<LocationResource, SysError> {
let db = &ctx.database;
let config = client::get();
let config = state::get();
// check if we have access to this location
match fs::File::open(&path) {
Ok(_) => println!("Path is valid, creating location for '{}'", &path),
Err(e) => Err(LocationError::FileReadError(e))?,
if !Path::new(path).exists() {
Err(LocationError::NotFound(path.to_string()))?;
}
// if on windows
if cfg!(target_family = "windows") {
// try and create a dummy file to see if we can write to this location
match fs::File::create(format!("{}/{}", path.clone(), ".spacewrite")) {
Ok(file) => file,
Err(e) => Err(LocationError::DotfileWriteFailure(e, path.to_string()))?,
};
match fs::remove_file(format!("{}/{}", path.clone(), ".spacewrite")) {
Ok(_) => (),
Err(e) => Err(LocationError::DotfileWriteFailure(e, path.to_string()))?,
}
} else {
// unix allows us to test this more directly
match fs::File::open(&path) {
Ok(_) => println!("Path is valid, creating location for '{}'", &path),
Err(e) => Err(LocationError::FileReadError(e))?,
}
}
// check if location already exists
let location = match db
.location()

View File

@@ -1,5 +1,5 @@
// use crate::native;
use crate::{prisma::volume::*, state::client};
use crate::{node::state, prisma::volume::*};
use serde::{Deserialize, Serialize};
use ts_rs::TS;
// #[cfg(not(target_os = "macos"))]
@@ -28,24 +28,35 @@ pub struct Volume {
impl Volume {
pub async fn save(ctx: &CoreContext) -> Result<(), SysError> {
let db = &ctx.database;
let config = client::get();
let config = state::get();
let volumes = Self::get_volumes()?;
// enter all volumes associate with this client add to db
for volume in volumes {
db.volume()
.upsert(node_id_mount_point_name(
config.node_id.clone(),
volume.mount_point.to_string(),
volume.name.to_string(),
))
.create(
client_id::set(config.client_id),
node_id::set(config.node_id),
name::set(volume.name),
mount_point::set(volume.mount_point),
vec![
disk_type::set(volume.disk_type),
filesystem::set(volume.file_system),
disk_type::set(volume.disk_type.clone()),
filesystem::set(volume.file_system.clone()),
total_bytes_capacity::set(volume.total_capacity.to_string()),
total_bytes_available::set(volume.available_capacity.to_string()),
],
)
.update(vec![
disk_type::set(volume.disk_type),
filesystem::set(volume.file_system),
total_bytes_capacity::set(volume.total_capacity.to_string()),
total_bytes_available::set(volume.available_capacity.to_string()),
])
.exec()
.await?;
}

View File

@@ -2,7 +2,7 @@
Synchronizing data between clients in a Spacedrive network is accomplished using various forms of [CRDTs](https://en.wikipedia.org/wiki/Conflict-free_replicated_data_type) combined with a hybrid logical clock, ensuring eventual constancy.
Designed for synchronizing data in realtime between a [SQLite](https://www.sqlite.org/) databases potentially in the gigabytes.
Designed for synchronizing data in realtime between [SQLite](https://www.sqlite.org/) databases potentially in the gigabytes.
```rust
mod sync {
@@ -167,7 +167,7 @@ For the sake of compatibility with local relations, some resource properties can
In some cases we are able to create many shared data resources at once and resolve conflicts on the fly by merging where the oldest resource takes priority.
This is intended for the `files` resource. It requires Shared data behaviour as most other shared resources are related at a database level and user defined metadata can be assigned, however it is initially derived from `file_paths` which is Owned data.
This is intended for the `files` resource. It requires Shared data behavior as most other shared resources are related at a database level and user defined metadata can be assigned, however it is initially derived from `file_paths` which is Owned data.
As `files` are created in abundance (hundreds of thousands at a time), it would be inefficient to record these changes in the `pending_operations` table. But we are also unable to sync in the same way as Owned data due to the possibility of conflicts.
@@ -226,7 +226,7 @@ Then inside the `sync` function we send the event to the
}
```
Files also impempent `OperationalMerge` would use
Files also implement `OperationalMerge` would use
# Resources

View File

@@ -34,11 +34,11 @@ struct File {
}
```
- `partial_checksum ` - A SHA256 checksum generated from 5 samples of 10,000 bytes throughout the file data, including the begining and end + total byte count. This is used to identify a file as _likely_ unique in under 100µs.
- `partial_checksum ` - A SHA256 checksum generated from 5 samples of 10,000 bytes throughout the file data, including the beginning and end + total byte count. This is used to identify a file as _likely_ unique in under 100µs.
> ~~It is impossible to have a unique constraint at a database level for the `partial_checksum` however we can asyncronously resolve conflicts by querying for duplicates and generating full checksums at a later date.~~
> ~~It is impossible to have a unique constraint at a database level for the `partial_checksum` however we can asynchronously resolve conflicts by querying for duplicates and generating full checksums at a later date.~~
>
> For synchronization of this resource we can tolerate temporary duplicates, any client can calculate that two files resources are duplicate and merge them into a single resource. In turn, triggering a shared data merge operation, whereby the older record is prioritsed at a property level during the merge.
> For synchronization of this resource we can tolerate temporary duplicates, any client can calculate that two files resources are duplicate and merge them into a single resource. In turn, triggering a shared data merge operation, whereby the older record is prioritised at a property level during the merge.
- `checksum` - A full SHA256 checksum of the file data used to verify uniqueness should a `partial_checksum` conflict occur.

18
docs/developer/.todo Normal file
View File

@@ -0,0 +1,18 @@
# Todo
☐ Landing sections
☐ Client pool
✔ Custom scrollbars
☐ Tag files
☐ Right click menu
☐ File explorer grid view
☐ Albums
☐ Content types screen
☐ Show duplicate files on the inspector
☐ Explorer filter by tag
☐ Library statistics
☐ File viewer
☐ Open file
☐ Media data (started)

View File

@@ -1,16 +0,0 @@
# Todo
- Landing sections
- Client pool
- Custom scrollbars
- Tag files
- Right click menu
- File explorer grid view
- Albums
- Content types screen
- Show duplicate files on the inspector
- Explorer filter by tag
- Library statistics
- File viewer
- Open file
- Media data (started)

View File

@@ -1,12 +1,20 @@
## Spacedrive
_&copy; Copyright 2022-Present Jamie Pine_
### Business contact
hello@jamiepine.com
### Developers
Jamie Pine, Brendonovich, Oscar Beaumont
### Contributors
Haden Fletcher
<a href="https://github.com/spacedriveapp/spacedrive/graphs/contributors">
<img
src="https://contrib.rocks/image?repo=spacedriveapp/spacedrive&columns=8&max=40"
alt="Avatars of the top contributors the the Spacedrive repository. Follow link for names and data."
/>
</a>

View File

@@ -3,9 +3,9 @@
**Complete (Still Testing):**
- **File discovery** - Scan devices, drives and cloud accounts to build a directory of all files with metadata.
- **Preview generation** - Auto generate lower resolution stand-ins for image and video.
- **Statistics** - Total capacity, index size, preview media size, free space etc.
**In progress:**
- **Statistics** - Total capacity, index size, preview media size, free space etc.
- **File Explorer** - Browse online/offline storage locations, view files with metadata, perform basic CRUD.
- **Realtime synchronization** - Data index synchronized in realtime between devices, prioritizing peer-to-peer LAN connections (WiFi sync).
- **Self hosted** - Spacedrive can be deployed as a service, behaving as just another device powering your personal cloud.
@@ -14,7 +14,7 @@
- **Photos** - Photo and video albums similar to Apple/Google photos.
- **Search** - Deep search into your filesystem with a keybind, including offline locations.
- **Tags** - Define routines on custom tags to automate workflows, easily tag files individually, in bulk and automatically via rules.
- **Extensions** - Build tools on top of Spacedrive, extend functionality and integrate third party services. Extension directory on [spacedrive.app/extensions.
- **Extensions** - Build tools on top of Spacedrive, extend functionality and integrate third party services. Extension directory on [spacedrive.com/extensions](/extensions).
**To be developed (Post-MVP):**
- **Spacedrive Cloud** - We'll host an always-on cloud device for you, with pay-as-you-go plans for storage.

View File

@@ -3,15 +3,14 @@
"version": "0.0.0",
"private": true,
"scripts": {
"prep": "pnpm db:gen && pnpm core codegen",
"prep:ci": "pnpm db:gen && pnpm core codegen",
"prep": "pnpm db:gen",
"build": "turbo run build",
"landing-web": "turbo run dev --parallel --filter=@sd/landing --filter=@sd/web",
"db:migrate": "pnpm core prisma migrate dev",
"db:gen": "pnpm core prisma generate",
"lint": "turbo run lint",
"format": "prettier --write \"**/*.{ts,tsx,md}\"",
"desktop": "pnpm --filter @sd/desktop -- ",
"desktop": "pnpm --filter @sd/desktop --",
"mobile": "pnpm --filter @sd/mobile -- ",
"web": "pnpm --filter @sd/web -- ",
"landing": "pnpm --filter @sd/landing -- ",
@@ -24,31 +23,18 @@
},
"devDependencies": {
"prettier": "^2.6.2",
"turbo": "^1.2.1"
"turbo": "^1.2.4"
},
"turbo": {
"pipeline": {
"build": {
"dependsOn": [
"^build"
],
"outputs": [
"dist/**",
".next/**"
]
},
"lint": {
"outputs": []
},
"dev": {
"cache": false
}
}
"overrides": {
"vite-plugin-svgr": "https://github.com/spacedriveapp/vite-plugin-svgr#cb4195b69849429cdb18d1f12381676bf9196a84"
},
"engines": {
"pnpm": ">=6.0.0",
"npm": "pnpm",
"yarn": "pnpm",
"node": ">=14.0.0"
},
"dependencies": {
"zustand": "^3.7.2"
}
}

View File

@@ -1 +0,0 @@
module.exports = require('scripts/eslint-preset');

View File

@@ -14,11 +14,10 @@
"clean": "rm -rf .turbo && rm -rf node_modules && rm -rf dist"
},
"devDependencies": {
"@types/react": "^18.0.8",
"scripts": "*",
"tsconfig": "*",
"typescript": "^4.6.3",
"react": "^18.0.0",
"react-query": "^3.34.19"
"typescript": "^4.6.3"
},
"jest": {
"preset": "scripts/jest/node"
@@ -28,6 +27,7 @@
"@sd/core": "workspace:*",
"eventemitter3": "^4.0.7",
"immer": "^9.0.12",
"react-query": "^3.34.19",
"zustand": "^3.7.2"
},
"peerDependencies": {

View File

@@ -1,11 +1,19 @@
import React from 'react';
import { QueryClientProvider } from 'react-query';
import { QueryClientProvider, QueryClientProviderProps } from 'react-query';
export interface ClientProviderProps extends Omit<QueryClientProviderProps, 'client'> {
children?: React.ReactNode;
}
// The ClientProvider injects the React-query context into the "context store" of the current package. This is needed due to the fact the repository is a monorepo.
// This is a pretty hacky solution and a better solution should probably be found to replace it.
export function ClientProvider({ children }: any) {
export const ClientProvider: React.FC<ClientProviderProps> = ({ children, ...props }) => {
return (
// @ts-ignore: This exists to add the QueryClientProvider to the current subpackage '@sd/client'. The ReactQueryClient is fetched from the window object (which is set in the parent application).
<QueryClientProvider client={window.ReactQueryClient}>{children}</QueryClientProvider>
// This exists to add the QueryClientProvider to the current subpackage '@sd/client'.
// The ReactQueryClient is fetched from the window object (which is set in the parent application).
// @ts-expect-error: react-query depends on the react@^17 FC type which includes children -- once v4.0 is released this will not be necessary
<QueryClientProvider {...props} client={window.ReactQueryClient}>
{children}
</QueryClientProvider>
);
}
};

7
packages/client/src/window.d.ts vendored Normal file
View File

@@ -0,0 +1,7 @@
import type { QueryClient } from 'react-query/types';
declare global {
interface Window {
ReactQueryClient: QueryClient;
}
}

View File

@@ -1,15 +1,6 @@
{
"compilerOptions": {
"lib": ["esnext"],
"module": "esnext",
"outDir": "./dist",
"rootDir": "src",
"jsx": "react"
},
"paths": {
"@sd/core": ["../../core/index.ts"],
},
"extends": "../config/tsconfig/base.json",
"include": ["**/*.ts", "**/*.tsx"],
"exclude": ["node_modules", "dist"]
"extends": "../config/base.tsconfig.json",
"compilerOptions": {},
"include": ["src"],
"exclude": ["node_modules"]
}

View File

@@ -0,0 +1,30 @@
{
"$schema": "https://json.schemastore.org/tsconfig",
"display": "Default",
"compilerOptions": {
"lib": ["DOM", "DOM.Iterable", "ESNext"],
"declaration": false,
"noEmit": true,
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"inlineSources": false,
"isolatedModules": false,
"module": "ESNext",
"target": "ES6",
"moduleResolution": "node",
"noUnusedLocals": false,
"noUnusedParameters": false,
"preserveWatchOutput": true,
"skipLibCheck": false,
"strict": true,
"allowSyntheticDefaultImports": true,
"resolveJsonModule": true,
"jsx": "react",
"paths": {
"@sd/interface": ["../../packages/interface"],
"@sd/ui": ["../../packages/ui"],
"@sd/client": ["../../packages/client"]
}
},
"exclude": ["node_modules"]
}

View File

@@ -0,0 +1,6 @@
{
"extends": "./base.tsconfig.json",
"compilerOptions": {
"types": ["vite-plugin-svgr/client"]
}
}

View File

@@ -1,20 +0,0 @@
{
"$schema": "https://json.schemastore.org/tsconfig",
"display": "Default",
"compilerOptions": {
"composite": false,
"declaration": true,
"declarationMap": true,
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"inlineSources": false,
"isolatedModules": true,
"moduleResolution": "node",
"noUnusedLocals": false,
"noUnusedParameters": false,
"preserveWatchOutput": true,
"skipLibCheck": true,
"strict": true
},
"exclude": ["node_modules"]
}

View File

@@ -1,22 +0,0 @@
{
"$schema": "https://json.schemastore.org/tsconfig",
"display": "Next.js",
"extends": "./base.json",
"compilerOptions": {
"target": "es5",
"lib": ["dom", "dom.iterable", "esnext"],
"allowJs": true,
"skipLibCheck": true,
"strict": false,
"forceConsistentCasingInFileNames": true,
"noEmit": true,
"incremental": true,
"esModuleInterop": true,
"module": "esnext",
"resolveJsonModule": true,
"isolatedModules": true,
"jsx": "preserve"
},
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx"],
"exclude": ["node_modules"]
}

View File

@@ -1,11 +0,0 @@
{
"$schema": "https://json.schemastore.org/tsconfig",
"display": "React Library",
"extends": "./base.json",
"compilerOptions": {
"lib": ["ES2015"],
"module": "ESNext",
"target": "ES6",
"jsx": "react-jsx"
}
}

View File

@@ -4,12 +4,13 @@
"license": "MIT",
"private": true,
"main": "src/index.ts",
"exports": {
".": "./src/index.ts",
"./types": "./src/types"
},
"scripts": {
"icons": "ts-node ./scripts/generateSvgImports.mjs"
},
"resolutions": {
"react-virtualized": "patch:react-virtualized@9.22.3#./path/to/react-virtualized-9.22.3.patch"
},
"dependencies": {
"@apollo/client": "^3.5.10",
"@fontsource/inter": "^4.5.7",
@@ -33,6 +34,7 @@
"phosphor-react": "^1.4.1",
"pretty-bytes": "^6.0.0",
"react": "^18.0.0",
"react-countup": "^6.2.0",
"react-dom": "^18.0.0",
"react-dropzone": "^12.0.4",
"react-error-boundary": "^3.1.4",
@@ -55,14 +57,13 @@
"@trivago/prettier-plugin-sort-imports": "^3.2.0",
"@types/babel-core": "^6.25.7",
"@types/byte-size": "^8.1.0",
"@types/lodash": "^4.14.182",
"@types/node": "^17.0.23",
"@types/pretty-bytes": "^5.2.0",
"@types/react": "^18.0.0",
"@types/react": "^18.0.8",
"@types/react-dom": "^18.0.0",
"@types/react-router-dom": "^5.3.3",
"@types/react-table": "^7.7.10",
"@types/react-virtualized": "^9.21.21",
"@types/react-virtualized-auto-sizer": "^1.0.1",
"@types/react-window": "^1.8.5",
"@types/tailwindcss": "^3.0.10",
"@vitejs/plugin-react": "^1.3.1",

View File

@@ -1,7 +1,16 @@
import '@fontsource/inter/variable.css';
import { BaseTransport, ClientProvider, setTransport } from '@sd/client';
// global window type extensions
// only load at TS compile time
import type {} from '@sd/client/src/window';
import { Button } from '@sd/ui';
import clsx from 'clsx';
import React, { useContext, useEffect, useState } from 'react';
import { ErrorBoundary, FallbackProps } from 'react-error-boundary';
import { QueryClient, QueryClientProvider } from 'react-query';
import {
MemoryRouter,
Location,
MemoryRouter,
Outlet,
Route,
Routes,
@@ -9,29 +18,22 @@ import {
useNavigate
} from 'react-router-dom';
import { Sidebar } from './components/file/Sidebar';
import { SettingsScreen } from './screens/Settings';
import { ExplorerScreen } from './screens/Explorer';
import { useCoreEvents } from './hooks/useCoreEvents';
import { ErrorBoundary, FallbackProps } from 'react-error-boundary';
import { OverviewScreen } from './screens/Overview';
import { DebugScreen } from './screens/Debug';
import { Modal } from './components/layout/Modal';
import GeneralSettings from './screens/settings/GeneralSettings';
import SlideUp from './components/transitions/SlideUp';
import SecuritySettings from './screens/settings/SecuritySettings';
import LocationSettings from './screens/settings/LocationSettings';
import { RedirectPage } from './screens/Redirect';
import { QueryClient, QueryClientProvider } from 'react-query';
import { BaseTransport, ClientProvider, setTransport } from '@sd/client';
import { Button } from '@sd/ui';
import { CoreEvent } from '@sd/core';
import clsx from 'clsx';
import './style.scss';
import { useCoreEvents } from './hooks/useCoreEvents';
import { ContentScreen } from './screens/Content';
import { DebugScreen } from './screens/Debug';
import { ExplorerScreen } from './screens/Explorer';
import { OverviewScreen } from './screens/Overview';
import { RedirectPage } from './screens/Redirect';
import { SettingsScreen } from './screens/Settings';
import ExperimentalSettings from './screens/settings/ExperimentalSettings';
import GeneralSettings from './screens/settings/GeneralSettings';
import LibrarySettings from './screens/settings/LibrarySettings';
import '@fontsource/inter/variable.css';
import LocationSettings from './screens/settings/LocationSettings';
import SecuritySettings from './screens/settings/SecuritySettings';
import { TagScreen } from './screens/Tag';
import './style.scss';
const queryClient = new QueryClient();
@@ -47,7 +49,10 @@ export interface AppProps {
onClose?: () => void;
onMinimize?: () => void;
onFullscreen?: () => void;
onOpen?: (path: string) => void;
isFocused?: boolean;
useMemoryRouter: boolean;
demoMode?: boolean;
}
function AppLayout() {
@@ -96,6 +101,7 @@ function SettingsRoutes({ modal = false }) {
<Route path="general" element={<GeneralSettings />} />
<Route path="security" element={<SecuritySettings />} />
<Route path="appearance" element={<></>} />
<Route path="experimental" element={<ExperimentalSettings />} />
<Route path="locations" element={<LocationSettings />} />
<Route path="library" element={<LibrarySettings />} />
<Route path="media" element={<></>} />
@@ -198,11 +204,10 @@ function BrowserRouterContainer() {
export function bindCoreEvent() {}
export default function App(props: AppProps) {
// @ts-ignore: TODO: This is a hack and a better solution should probably be found. This exists so that the queryClient can be accessed within the subpackage '@sd/client'. Refer to <ClientProvider /> for where this is used.
if (window.ReactQueryClient === undefined) {
// @ts-ignore
window.ReactQueryClient = queryClient;
}
// TODO: This is a hack and a better solution should probably be found.
// This exists so that the queryClient can be accessed within the subpackage '@sd/client'.
// Refer to <ClientProvider /> for where this is used.
window.ReactQueryClient ??= queryClient;
setTransport(props.transport);
@@ -214,7 +219,7 @@ export default function App(props: AppProps) {
<ErrorBoundary FallbackComponent={ErrorFallback} onReset={() => {}}>
{/* @ts-ignore */}
<QueryClientProvider client={queryClient} contextSharing={false}>
<AppPropsContext.Provider value={props}>
<AppPropsContext.Provider value={Object.assign({ isFocused: true }, props)}>
<ClientProvider>
{props.useMemoryRouter ? <MemoryRouterContainer /> : <BrowserRouterContainer />}
</ClientProvider>

View File

@@ -1,3 +1,3 @@
<svg width="87" height="20" viewBox="0 0 87 20" fill="none" xmlns="http://www.w3.org/2000/svg">
<rect width="87" height="20" fill="#985712"/>
<svg width="87" height="16" viewBox="0 0 87 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M87 0H0V16H87V0Z" fill="#985712"/>
</svg>

Before

Width:  |  Height:  |  Size: 149 B

After

Width:  |  Height:  |  Size: 147 B

View File

@@ -21,6 +21,7 @@ export interface DeviceProps {
type: 'laptop' | 'desktop' | 'phone' | 'server';
locations: { name: string }[];
runningJob?: { amount: number; task: string };
removeThisSoon?: boolean;
}
export function Device(props: DeviceProps) {
@@ -38,7 +39,7 @@ export function Device(props: DeviceProps) {
{props.type === 'laptop' && <Laptop weight="fill" size={20} className="mr-2" />}
{props.type === 'desktop' && <Desktop weight="fill" size={20} className="mr-2" />}
{props.type === 'server' && <Cloud weight="fill" size={20} className="mr-2" />}
<h3 className="font-semibold text-md">{props.name}</h3>
<h3 className="font-semibold text-md">{props.name || 'Unnamed Device'}</h3>
<div className="flex flex-row space-x-1.5 mt-0.5">
<span className="font-semibold flex flex-row h-[19px] -mt-0.5 ml-3 py-0.5 px-1.5 text-[10px] rounded bg-gray-500 text-gray-400">
<LockClosedIcon className="w-3 h-3 mr-1 -ml-0.5 m-[1px]" />
@@ -87,6 +88,24 @@ export function Device(props: DeviceProps) {
folder
/>
))}
{props.removeThisSoon && (
<>
<FileItem
selected={selectedFile == 'tsx'}
onClick={() => handleSelect('tsx')}
fileName="App.tsx"
format="tsx"
iconName="reactts"
/>
<FileItem
selected={selectedFile == 'vite'}
onClick={() => handleSelect('vite')}
fileName="vite.config.js"
format="vite"
iconName="vite"
/>
</>
)}
</div>
</div>
);

View File

@@ -0,0 +1,5 @@
import create from 'zustand';

/** Shape of the global app store held in zustand. */
interface AppStore {
	// Gates experimental-feature UI; off by default.
	experimental: boolean;
	// Toggle the experimental flag. Additive API — existing consumers of
	// `experimental` are unaffected.
	setExperimental: (experimental: boolean) => void;
}

// Global zustand store. Typing the `create` call gives consumers a concrete
// state shape instead of an inferred/implicit one, and the setter makes use of
// the previously-unused `set` parameter.
export const useStore = create<AppStore>((set) => ({
	experimental: false,
	setExperimental: (experimental) => set({ experimental })
}));

Some files were not shown because too many files have changed in this diff Show More