SilverBullet pivot to become an offline-first PWA (#403)
.dockerignore (new file, 1 line)
@@ -0,0 +1 @@
.git
.github/workflows/desktop.yml (vendored, deleted file, 79 lines)
@@ -1,79 +0,0 @@
name: Build & Release

on:
  push:
    tags:
      - "*"
jobs:
  build:
    name: Build (${{ matrix.os }} - ${{ matrix.arch }})
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        include:
          - os: macOS-latest
            arch: arm64
          - os: macOS-latest
            arch: x64
          - os: windows-latest
            arch: x64
          - os: ubuntu-latest
            arch: x64

    steps:
      - uses: actions/checkout@v3
      - name: Setup Node.js
        uses: actions/setup-node@v3.5.1
        with:
          node-version: 18.x
          cache: npm
          cache-dependency-path: desktop/package-lock.json
      - name: Setup Deno
        # uses: denoland/setup-deno@v1
        uses: denoland/setup-deno@d4873ceeec10de6275fecd1f94b6985369d40231
        with:
          deno-version: v1.32.5
      - name: Build SilverBullet
        run: deno task build
      - name: Create SilverBullet bundle
        run: deno task bundle
      - name: Set MacOS signing certs
        if: matrix.os == 'macOS-latest'
        run: chmod +x scripts/add-macos-cert.sh && ./scripts/add-macos-cert.sh
        env:
          MACOS_CERT_P12: ${{ secrets.MACOS_CERT_P12 }}
          MACOS_CERT_PASSWORD: ${{ secrets.MACOS_CERT_PASSWORD }}
      # - name: Set Windows signing certificate
      #   if: matrix.os == 'windows-latest'
      #   continue-on-error: true
      #   id: write_file
      #   uses: timheuer/base64-to-file@v1
      #   with:
      #     fileName: 'win-certificate.pfx'
      #     encodedString: ${{ secrets.WINDOWS_CODESIGN_P12 }}
      - name: Install npm dependencies
        run: npm install
        working-directory: desktop
      - name: Build application
        run: npm run make -- --arch=${{ matrix.arch }}
        working-directory: desktop
        env:
          APPLE_ID: ${{ secrets.APPLE_ID }}
          APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          #WINDOWS_CODESIGN_FILE: ${{ steps.write_file.outputs.filePath }}
          #WINDOWS_CODESIGN_PASSWORD: ${{ secrets.WINDOWS_CODESIGN_PASSWORD }}
      - name: Release
        uses: softprops/action-gh-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          draft: true
          files: |
            desktop/out/**/*.deb
            desktop/out/**/*Setup.exe
            desktop/out/**/RELEASES
            desktop/out/**/*.nupkg
            desktop/out/**/*.rpm
            desktop/out/**/*.zip
            dist/silverbullet.js
.github/workflows/docker-s3.yml (vendored, new file, 89 lines)
@@ -0,0 +1,89 @@
name: Docker S3

on:
  push:
    branches:
      - "main"
    tags:
      - "*"
env:
  DENO_VERSION: v1.33
  # Docker & Registries
  ARCHITECTURES: linux/amd64,linux/arm64
  IMAGE_NAME: silverbullet-s3
  NAMESPACE_GITHUB: silverbulletmd
  NAMESPACE_DOCKER: zefhemel
jobs:
  docker-build:
    runs-on: ubuntu-latest

    steps:
      - name: Setup repo
        uses: actions/checkout@v3

      - name: Set up QEMU for multi-arch builds with buildx
        uses: docker/setup-qemu-action@v2
        with:
          platforms: ${{ env.ARCHITECTURES }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
        with:
          platforms: ${{ env.ARCHITECTURES }}

      - name: Setup Deno
        uses: denoland/setup-deno@d4873ceeec10de6275fecd1f94b6985369d40231
        with:
          deno-version: ${{ env.DENO_VERSION }}

      - name: Run bundle build
        run: |
          deno task build
          deno task bundle

      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Log in to the ghcr Container registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      #
      # MetaData Extract Docu: <https://github.com/docker/metadata-action>
      #
      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v4.4.0
        with:
          images: |
            # Set the different image names(paces) for docker-hub & ghcr
            ${{ env.NAMESPACE_DOCKER }}/${{ env.IMAGE_NAME }}
            ghcr.io/${{ env.NAMESPACE_GITHUB }}/${{ env.IMAGE_NAME }}
          tags: |
            # <https://github.com/docker/metadata-action#typeref>
            # minimal (short sha), enable if desired
            # type=sha,enable=true,priority=100,prefix=commit-,suffix=,format=short
            # set latest tag for default branch
            type=raw,value=latest,enable={{is_default_branch}}
            #
            # tag w/ full tag part of git tag: <https://github.com/docker/metadata-action#typesemver>
            # only present for `on.push.tags` !
            type=semver,pattern={{raw}},enable=true
            # type=edge,branch=local # usually this would be the develop branch

      - name: Build and push main docker images
        uses: docker/build-push-action@v4.0.0
        with:
          context: .
          platforms: ${{ env.ARCHITECTURES }}
          push: true
          file: Dockerfile.s3
          # Disable to get rid of unknown architecture in ghcr
          provenance: false
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
.github/workflows/docker.yml (vendored, modified, 9 changed lines)
@@ -5,16 +5,16 @@ on:
    branches:
      - "main"
    tags:
-      - "**"
+      - "*"
env:
-  DENO_VERSION: v1.32.5
+  DENO_VERSION: v1.33
  # Docker & Registries
  ARCHITECTURES: linux/amd64,linux/arm64
  IMAGE_NAME: silverbullet
  NAMESPACE_GITHUB: silverbulletmd
  NAMESPACE_DOCKER: zefhemel
jobs:
-  docker-build-push:
+  docker-main:
    runs-on: ubuntu-latest

    steps:
@@ -76,12 +76,13 @@ jobs:
            type=semver,pattern={{raw}},enable=true
            # type=edge,branch=develop # usually this would be the develop branch

-      - name: Build and push Docker images
+      - name: Build and push main docker images
        uses: docker/build-push-action@v4.0.0
        with:
          context: .
          platforms: ${{ env.ARCHITECTURES }}
          push: true
          file: Dockerfile
          # Disable to get rid of unknown architecture in ghcr
          provenance: false
          tags: ${{ steps.meta.outputs.tags }}
.github/workflows/release.yml (vendored, new file, 29 lines)
@@ -0,0 +1,29 @@
name: Build & Release

on:
  push:
    tags:
      - "*"
jobs:
  release:
    runs-on: ubuntu-latest
    steps:
      - name: Setup repo
        uses: actions/checkout@v3
      - name: Setup Deno
        uses: denoland/setup-deno@v1
        with:
          deno-version: v1.33
      - name: Run build
        run: deno task build
      - name: Bundle
        run: deno task bundle
      - name: Release
        uses: softprops/action-gh-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          draft: true
          files: |
            website/CHANGELOG.md
            dist/silverbullet.js
.github/workflows/test.yml (vendored, modified, 7 changed lines)
@@ -18,10 +18,9 @@ jobs:
        uses: actions/checkout@v3

      - name: Setup Deno
-        # uses: denoland/setup-deno@v1
-        uses: denoland/setup-deno@d4873ceeec10de6275fecd1f94b6985369d40231
+        uses: denoland/setup-deno@v1
        with:
-          deno-version: v1.32.5
+          deno-version: v1.33

      - name: Run build
        run: deno task build
@@ -30,4 +29,4 @@ jobs:
        run: deno task check

      - name: Run tests
-        run: deno task test
+        run: deno task test --trace-ops
.gitignore (vendored, modified, 10 changed lines)
@@ -1,13 +1,11 @@
pages
test_space
.DS_Store
dist_bundle
dist_client_bundle
dist_plug_bundle
dist
*.js.map
website_build
data.db*
publish-data.db
/index.json
.idea
deno.lock
node_modules
fly.toml
env.sh
Dockerfile (modified)
@@ -3,7 +3,7 @@ FROM lukechannings/deno:v1.33.2
# Create a volume first:
# docker volume create myspace
# Then bind-mount it when running the container with the -v flag, e.g.:
-# docker run -v myspace:/space -it zefhemel/silverbullet
+# docker run -v myspace:/space -p3000:3000 -it zefhemel/silverbullet
VOLUME /space

# Accept TARGETARCH as argument
@@ -44,4 +44,4 @@ EXPOSE 3000

# Run the server, allowing to pass in additional argument at run time, e.g.
# docker run -p 3002:3000 -v myspace:/space -it zefhemel/silverbullet --user me:letmein
-ENTRYPOINT ["/tini", "--", "deno", "run", "-A", "--unstable", "/silverbullet.js", "--hostname", "0.0.0.0", "/space"]
+ENTRYPOINT /tini -- deno run -A /silverbullet.js -L0.0.0.0 /space
Dockerfile.s3 (new file, 24 lines)
@@ -0,0 +1,24 @@
# This Dockerfile is used to build a Docker image that runs silverbullet with an S3 bucket as a backend
# Configure it with the following environment variables

# AWS_ACCESS_KEY_ID=XXXX
# AWS_SECRET_ACCESS_KEY=XXXX
# AWS_ENDPOINT=s3.eu-central-1.amazonaws.com
# AWS_REGION=eu-central-1
# AWS_BUCKET=my-sb-bucket

FROM denoland/deno:alpine-1.33.2

# Copy the bundled version of silverbullet into the container
ADD ./dist/silverbullet.js /silverbullet.js

# deno user id is 1000 in alpine image
USER deno

# Expose port 3000
# Port map this when running, e.g. with -p 3002:3000 (where 3002 is the host port)
EXPOSE 3000

# Run the server, allowing to pass in additional argument at run time, e.g.
# docker run -p 3002:3000 -v myspace:/space -it zefhemel/silverbullet --user me:letmein
ENTRYPOINT deno run -A /silverbullet.js -L 0.0.0.0 s3://
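For context on how these variables are consumed: later in this diff, cmd/server.ts switches to an S3-backed space when the folder argument is `s3://`. A simplified sketch of that wiring (not the exact code; the import path is shown relative to the repo root and error handling is omitted):

import { S3SpacePrimitives } from "./server/spaces/s3_space_primitives.ts";

// Reads the AWS_* variables documented above and talks to the bucket
// instead of a local folder (see the serveCommand changes in this PR).
const spacePrimitives = new S3SpacePrimitives({
  accessKey: Deno.env.get("AWS_ACCESS_KEY_ID")!,
  secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
  endPoint: Deno.env.get("AWS_ENDPOINT")!,
  region: Deno.env.get("AWS_REGION")!,
  bucket: Deno.env.get("AWS_BUCKET")!,
});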
build_bundle.ts (new file, 35 lines)
@@ -0,0 +1,35 @@
import { denoPlugins, esbuild } from "./plugos/deps.ts";

await Deno.mkdir("dist", { recursive: true });
await esbuild.build({
  entryPoints: {
    silverbullet: "silverbullet.ts",
  },
  outdir: "dist",
  format: "esm",
  absWorkingDir: Deno.cwd(),
  bundle: true,
  treeShaking: true,
  sourcemap: false,
  minify: false,
  plugins: [
    {
      name: "json",
      setup: (build) =>
        build.onLoad({ filter: /\.json$/ }, () => ({ loader: "json" })),
    },

    ...denoPlugins({
      importMapURL: new URL("./import_map.json", import.meta.url)
        .toString(),
    }),
  ],
});
const bundleJs = await Deno.readTextFile("dist/silverbullet.js");
// Patch output JS with import.meta.main override to avoid ESBuild CLI handling
await Deno.writeTextFile(
  "dist/silverbullet.js",
  "import.meta.main = false;\n" + bundleJs,
);
console.log("Output in dist/silverbullet.js");
esbuild.stop();
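Why the `import.meta.main = false;` patch above (per the comment, it is there to avoid esbuild's CLI handling): in a single-file ESM bundle a dependency's entry-point guard would see the bundle's own import.meta and fire when dist/silverbullet.js is executed directly. An illustrative sketch of the pattern being disabled, not code from this repository:

// Typical Deno/ESM entry-point guard as found in bundled dependencies:
if (import.meta.main) {
  // The dependency's own CLI would start here; the prepended
  // "import.meta.main = false;" line keeps this branch from running
  // when the SilverBullet bundle itself is the main module.
}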
(deleted file, 17 lines)
@@ -1,17 +0,0 @@
import { bundle, esbuild } from "./build_web.ts";
import * as flags from "https://deno.land/std@0.165.0/flags/mod.ts";
import { copy } from "https://deno.land/std@0.165.0/fs/copy.ts";

if (import.meta.main) {
  const args = flags.parse(Deno.args, {
    boolean: ["watch"],
    alias: { w: "watch" },
    default: {
      watch: false,
    },
  });
  await bundle(args.watch, "mobile", "mobile/dist");
  if (!args.watch) {
    esbuild.stop();
  }
}
(deleted file, 3 lines)
@@ -1,3 +0,0 @@
#!/bin/sh

deno run -A --unstable plugos/bin/plugos-bundle.ts $@ --dist dist_bundle/_plug plugs/*/*.plug.yaml
@ -1,35 +1,30 @@
|
||||
import { expandGlobSync, flags, path } from "./plugos/deps.ts";
|
||||
import { bundleRun } from "./plugos/bin/plugos-bundle.ts";
|
||||
import { esbuild } from "./plugos/compile.ts";
|
||||
import { esbuild, flags, path } from "./plugos/deps.ts";
|
||||
import { compileManifests } from "./plugos/compile.ts";
|
||||
import { builtinPlugNames } from "./plugs/builtin_plugs.ts";
|
||||
|
||||
if (import.meta.main) {
|
||||
const args = flags.parse(Deno.args, {
|
||||
boolean: ["debug", "watch", "reload", "info"],
|
||||
string: ["dist", "importmap"],
|
||||
alias: { w: "watch" },
|
||||
});
|
||||
|
||||
if (!args.dist) {
|
||||
args.dist = path.resolve(path.join("dist_bundle", "_plug"));
|
||||
}
|
||||
const manifests = builtinPlugNames.map((name) =>
|
||||
`./plugs/${name}/${name}.plug.yaml`
|
||||
);
|
||||
|
||||
const manifests: string[] = [];
|
||||
const pattern: string = path.join("plugs", "*", "*.plug.yaml");
|
||||
for (const file of expandGlobSync(pattern)) {
|
||||
manifests.push(file.path);
|
||||
}
|
||||
const targetDir = path.join("dist_plug_bundle", "_plug");
|
||||
Deno.mkdirSync(targetDir, { recursive: true });
|
||||
Deno.mkdirSync("dist", { recursive: true });
|
||||
|
||||
await bundleRun(
|
||||
// Build the other plugs
|
||||
await compileManifests(
|
||||
manifests,
|
||||
args.dist,
|
||||
targetDir,
|
||||
args.watch,
|
||||
{
|
||||
debug: args.debug,
|
||||
reload: args.reload,
|
||||
info: args.info,
|
||||
importMap: args.importmap
|
||||
? new URL(args.importmap, `file://${Deno.cwd()}/`)
|
||||
: undefined,
|
||||
},
|
||||
);
|
||||
esbuild.stop();
|
||||
|
126
build_web.ts
@ -1,23 +1,41 @@
|
||||
// -- esbuild --
|
||||
// @deno-types="https://deno.land/x/esbuild@v0.14.54/mod.d.ts"
|
||||
import * as esbuildWasm from "https://deno.land/x/esbuild@v0.14.54/wasm.js";
|
||||
import * as esbuildNative from "https://deno.land/x/esbuild@v0.14.54/mod.js";
|
||||
import { denoPlugin } from "https://deno.land/x/esbuild_deno_loader@0.6.0/mod.ts"; //"./esbuild_deno_loader/mod.ts";
|
||||
import { denoPlugins } from "https://deno.land/x/esbuild_deno_loader@0.7.0/mod.ts";
|
||||
import { copy } from "https://deno.land/std@0.165.0/fs/copy.ts";
|
||||
|
||||
import sass from "https://deno.land/x/denosass@1.0.4/mod.ts";
|
||||
import { bundleFolder } from "./plugos/asset_bundle/builder.ts";
|
||||
import { patchDenoLibJS } from "./plugos/hack.ts";
|
||||
import { bundle as plugOsBundle } from "./plugos/bin/plugos-bundle.ts";
|
||||
|
||||
import * as flags from "https://deno.land/std@0.165.0/flags/mod.ts";
|
||||
import { patchDenoLibJS } from "./plugos/compile.ts";
|
||||
import { esbuild } from "./plugos/deps.ts";
|
||||
|
||||
// @ts-ignore trust me
|
||||
export const esbuild: typeof esbuildWasm = Deno.run === undefined
|
||||
? esbuildWasm
|
||||
: esbuildNative;
|
||||
export async function bundleAll(
|
||||
watch: boolean,
|
||||
): Promise<void> {
|
||||
let building = false;
|
||||
await buildCopyBundleAssets();
|
||||
let timer;
|
||||
if (watch) {
|
||||
const watcher = Deno.watchFs(["web", "dist_plug_bundle"]);
|
||||
for await (const _event of watcher) {
|
||||
if (timer) {
|
||||
clearTimeout(timer);
|
||||
}
|
||||
timer = setTimeout(() => {
|
||||
console.log("Change detected, rebuilding...");
|
||||
if (building) {
|
||||
return;
|
||||
}
|
||||
building = true;
|
||||
buildCopyBundleAssets().finally(() => {
|
||||
building = false;
|
||||
});
|
||||
}, 1000);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function prepareAssets(dist: string) {
|
||||
export async function copyAssets(dist: string) {
|
||||
await Deno.mkdir(dist, { recursive: true });
|
||||
await copy("web/fonts", `${dist}`, { overwrite: true });
|
||||
await copy("web/index.html", `${dist}/index.html`, {
|
||||
overwrite: true,
|
||||
@ -25,12 +43,18 @@ export async function prepareAssets(dist: string) {
|
||||
await copy("web/auth.html", `${dist}/auth.html`, {
|
||||
overwrite: true,
|
||||
});
|
||||
await copy("web/reset.html", `${dist}/reset.html`, {
|
||||
overwrite: true,
|
||||
});
|
||||
await copy("web/images/favicon.png", `${dist}/favicon.png`, {
|
||||
overwrite: true,
|
||||
});
|
||||
await copy("web/images/logo.png", `${dist}/logo.png`, {
|
||||
overwrite: true,
|
||||
});
|
||||
await copy("web/images/logo-dock.png", `${dist}/logo-dock.png`, {
|
||||
overwrite: true,
|
||||
});
|
||||
await copy("web/manifest.json", `${dist}/manifest.json`, {
|
||||
overwrite: true,
|
||||
});
|
||||
@ -44,58 +68,34 @@ export async function prepareAssets(dist: string) {
|
||||
`${dist}/main.css`,
|
||||
compiler.to_string("expanded") as string,
|
||||
);
|
||||
const globalManifest = await plugOsBundle("./plugs/global.plug.yaml");
|
||||
await Deno.writeTextFile(
|
||||
`${dist}/global.plug.json`,
|
||||
JSON.stringify(globalManifest, null, 2),
|
||||
);
|
||||
|
||||
// HACK: Patch the JS by removing an invalid regex
|
||||
let bundleJs = await Deno.readTextFile(`${dist}/client.js`);
|
||||
bundleJs = patchDenoLibJS(bundleJs);
|
||||
await Deno.writeTextFile(`${dist}/client.js`, bundleJs);
|
||||
}
|
||||
async function buildCopyBundleAssets() {
|
||||
await Deno.mkdir("dist_client_bundle", { recursive: true });
|
||||
await Deno.mkdir("dist_plug_bundle", { recursive: true });
|
||||
|
||||
export async function bundle(
|
||||
watch: boolean,
|
||||
type: "web" | "mobile",
|
||||
distDir: string,
|
||||
): Promise<void> {
|
||||
let building = false;
|
||||
await doBuild(`${type}/boot.ts`);
|
||||
let timer;
|
||||
if (watch) {
|
||||
const watcher = Deno.watchFs([type, "dist_bundle/_plug"]);
|
||||
for await (const _event of watcher) {
|
||||
if (timer) {
|
||||
clearTimeout(timer);
|
||||
}
|
||||
timer = setTimeout(() => {
|
||||
console.log("Change detected, rebuilding...");
|
||||
doBuild(`${type}/boot.ts`);
|
||||
}, 1000);
|
||||
}
|
||||
}
|
||||
|
||||
async function doBuild(
|
||||
mainScript: string,
|
||||
) {
|
||||
if (building) {
|
||||
return;
|
||||
}
|
||||
building = true;
|
||||
if (type === "mobile") {
|
||||
await bundleFolder("dist_bundle", "dist/asset_bundle.json");
|
||||
}
|
||||
await bundleFolder(
|
||||
"dist_plug_bundle",
|
||||
"dist/plug_asset_bundle.json",
|
||||
);
|
||||
|
||||
await Promise.all([
|
||||
esbuild.build({
|
||||
entryPoints: {
|
||||
client: mainScript,
|
||||
service_worker: "web/service_worker.ts",
|
||||
worker: "plugos/environments/sandbox_worker.ts",
|
||||
entryPoints: [
|
||||
{
|
||||
in: "web/boot.ts",
|
||||
out: ".client/client",
|
||||
},
|
||||
outdir: distDir,
|
||||
{
|
||||
in: "web/service_worker.ts",
|
||||
out: "service_worker",
|
||||
},
|
||||
],
|
||||
outdir: "dist_client_bundle",
|
||||
absWorkingDir: Deno.cwd(),
|
||||
bundle: true,
|
||||
treeShaking: true,
|
||||
@ -106,22 +106,24 @@ export async function bundle(
|
||||
jsxFragment: "Fragment",
|
||||
jsxImportSource: "https://esm.sh/preact@10.11.1",
|
||||
plugins: [
|
||||
denoPlugin({
|
||||
importMapURL: new URL("./import_map.json", import.meta.url),
|
||||
...denoPlugins({
|
||||
importMapURL: new URL("./import_map.json", import.meta.url)
|
||||
.toString(),
|
||||
}),
|
||||
],
|
||||
}),
|
||||
]);
|
||||
|
||||
await prepareAssets(distDir);
|
||||
if (type === "web") {
|
||||
await bundleFolder("dist_bundle", "dist/asset_bundle.json");
|
||||
}
|
||||
// Patch the service_worker {{CACHE_NAME}}
|
||||
let swCode = await Deno.readTextFile("dist_client_bundle/service_worker.js");
|
||||
swCode = swCode.replaceAll("{{CACHE_NAME}}", `cache-${Date.now()}`);
|
||||
await Deno.writeTextFile("dist_client_bundle/service_worker.js", swCode);
|
||||
|
||||
await copyAssets("dist_client_bundle/.client");
|
||||
await bundleFolder("dist_client_bundle", "dist/client_asset_bundle.json");
|
||||
|
||||
building = false;
|
||||
console.log("Built!");
|
||||
}
|
||||
}
|
||||
|
||||
if (import.meta.main) {
|
||||
const args = flags.parse(Deno.args, {
|
||||
@ -131,7 +133,7 @@ if (import.meta.main) {
|
||||
watch: false,
|
||||
},
|
||||
});
|
||||
await bundle(args.watch, "web", "dist_bundle/web");
|
||||
await bundleAll(args.watch);
|
||||
if (!args.watch) {
|
||||
esbuild.stop();
|
||||
}
|
||||
|
cmd/fix.ts (deleted file, 29 lines)
@@ -1,29 +0,0 @@
import { path } from "../server/deps.ts";

export async function fixCommand(_options: any, folder: string) {
  folder = path.resolve(Deno.cwd(), folder);
  console.log("Now going to attempt to fix", folder);
  console.log(`First, we'll purge the ${folder}/_plug folder...`);
  try {
    await Deno.remove(path.join(folder, "_plug"), { recursive: true });
  } catch (e: any) {
    if (e instanceof Deno.errors.NotFound) {
      console.log("No _plug folder found, nothing to do here.");
    } else {
      console.error("Something went wrong:", e);
    }
  }
  console.log("And now we'll delete data.db");
  try {
    await Deno.remove(path.join(folder, "data.db"));
  } catch (e: any) {
    if (e instanceof Deno.errors.NotFound) {
      console.log("No data.db found, nothing to do here.");
    } else {
      console.error("Something went wrong:", e);
    }
  }
  console.log(
    "Alright then, that should be it. Try running SilverBullet again.",
  );
}
@ -1,33 +0,0 @@
|
||||
import { SpaceSystem } from "../server/space_system.ts";
|
||||
|
||||
import assetBundle from "../dist/asset_bundle.json" assert { type: "json" };
|
||||
import { path } from "../plugos/deps.ts";
|
||||
import { AssetBundle, AssetJson } from "../plugos/asset_bundle/bundle.ts";
|
||||
|
||||
export async function invokeFunction(
|
||||
options: any,
|
||||
pagesPath: string,
|
||||
functionName: string,
|
||||
...args: string[]
|
||||
) {
|
||||
console.log("Going to invoke funciton", functionName, "with args", args);
|
||||
const spaceSystem = new SpaceSystem(
|
||||
new AssetBundle(assetBundle as AssetJson),
|
||||
pagesPath,
|
||||
path.join(pagesPath, options.db),
|
||||
);
|
||||
|
||||
await spaceSystem.start();
|
||||
|
||||
const [plugName, funcName] = functionName.split(".");
|
||||
|
||||
const plug = spaceSystem.system.loadedPlugs.get(plugName);
|
||||
|
||||
if (!plug) {
|
||||
console.error("Plug not found", plugName);
|
||||
Deno.exit(1);
|
||||
}
|
||||
|
||||
await plug.invoke(funcName, args);
|
||||
Deno.exit(0);
|
||||
}
|
@ -1,25 +1,27 @@
|
||||
import { bundleRun } from "../plugos/bin/plugos-bundle.ts";
|
||||
import { esbuild } from "../plugos/compile.ts";
|
||||
import { compileManifests } from "../plugos/compile.ts";
|
||||
import { esbuild } from "../plugos/deps.ts";
|
||||
|
||||
export async function plugCompileCommand(
|
||||
{ watch, dist, debug, info, importmap }: {
|
||||
{ watch, dist, debug, info, importmap, runtimeUrl }: {
|
||||
watch: boolean;
|
||||
dist: string;
|
||||
debug: boolean;
|
||||
info: boolean;
|
||||
importmap?: string;
|
||||
runtimeUrl?: string;
|
||||
},
|
||||
...manifestPaths: string[]
|
||||
) {
|
||||
await bundleRun(
|
||||
await compileManifests(
|
||||
manifestPaths,
|
||||
dist,
|
||||
watch,
|
||||
{
|
||||
debug: debug,
|
||||
info: info,
|
||||
runtimeUrl,
|
||||
importMap: importmap
|
||||
? new URL(importmap, `file://${Deno.cwd()}/`)
|
||||
? new URL(importmap, `file://${Deno.cwd()}/`).toString()
|
||||
: undefined,
|
||||
},
|
||||
);
|
||||
|
@ -1,37 +1,65 @@
|
||||
import { path } from "../server/deps.ts";
|
||||
import { HttpServer } from "../server/http_server.ts";
|
||||
import assetBundle from "../dist/asset_bundle.json" assert { type: "json" };
|
||||
import clientAssetBundle from "../dist/client_asset_bundle.json" assert {
|
||||
type: "json",
|
||||
};
|
||||
import plugAssetBundle from "../dist/plug_asset_bundle.json" assert {
|
||||
type: "json",
|
||||
};
|
||||
import { AssetBundle, AssetJson } from "../plugos/asset_bundle/bundle.ts";
|
||||
import { AssetBundlePlugSpacePrimitives } from "../common/spaces/asset_bundle_space_primitives.ts";
|
||||
import { DiskSpacePrimitives } from "../common/spaces/disk_space_primitives.ts";
|
||||
import { SpacePrimitives } from "../common/spaces/space_primitives.ts";
|
||||
import { S3SpacePrimitives } from "../server/spaces/s3_space_primitives.ts";
|
||||
|
||||
export function serveCommand(options: any, folder: string) {
|
||||
const pagesPath = path.resolve(Deno.cwd(), folder);
|
||||
const hostname = options.hostname || "127.0.0.1";
|
||||
const port = options.port || 3000;
|
||||
const bareMode = options.bare;
|
||||
const maxFileSizeMB = options.maxFileSizeMB || 20;
|
||||
|
||||
console.log(
|
||||
"Going to start SilverBullet binding to",
|
||||
`${hostname}:${port}`,
|
||||
);
|
||||
console.log("Serving pages from", pagesPath);
|
||||
|
||||
if (hostname === "127.0.0.1") {
|
||||
console.log(
|
||||
`_Note:_ SilverBullet will only be available locally (via http://localhost:${port}), to allow outside connections, pass --host 0.0.0.0 as a flag.`,
|
||||
`NOTE: SilverBullet will only be available locally (via http://localhost:${port}).
|
||||
To allow outside connections, pass -L 0.0.0.0 as a flag, and put a TLS terminator on top.`,
|
||||
);
|
||||
}
|
||||
let spacePrimitives: SpacePrimitives | undefined;
|
||||
if (folder === "s3://") {
|
||||
spacePrimitives = new AssetBundlePlugSpacePrimitives(
|
||||
new S3SpacePrimitives({
|
||||
accessKey: Deno.env.get("AWS_ACCESS_KEY_ID")!,
|
||||
secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
|
||||
endPoint: Deno.env.get("AWS_ENDPOINT")!,
|
||||
region: Deno.env.get("AWS_REGION")!,
|
||||
bucket: Deno.env.get("AWS_BUCKET")!,
|
||||
}),
|
||||
new AssetBundle(plugAssetBundle as AssetJson),
|
||||
);
|
||||
console.log("Running in S3 mode");
|
||||
} else {
|
||||
folder = path.resolve(Deno.cwd(), folder);
|
||||
spacePrimitives = new AssetBundlePlugSpacePrimitives(
|
||||
new DiskSpacePrimitives(folder, {
|
||||
maxFileSizeMB: options.maxFileSizeMB,
|
||||
}),
|
||||
new AssetBundle(plugAssetBundle as AssetJson),
|
||||
);
|
||||
}
|
||||
console.log("Serving pages from", folder);
|
||||
|
||||
const httpServer = new HttpServer({
|
||||
const httpServer = new HttpServer(spacePrimitives, {
|
||||
hostname,
|
||||
port: port,
|
||||
pagesPath: pagesPath,
|
||||
dbPath: path.join(pagesPath, options.db),
|
||||
assetBundle: new AssetBundle(assetBundle as AssetJson),
|
||||
pagesPath: folder,
|
||||
clientAssetBundle: new AssetBundle(clientAssetBundle as AssetJson),
|
||||
user: options.user,
|
||||
bareMode,
|
||||
});
|
||||
httpServer.start().catch((e) => {
|
||||
console.error("HTTP Server error", e);
|
||||
Deno.exit(1);
|
||||
keyFile: options.key,
|
||||
certFile: options.cert,
|
||||
maxFileSizeMB: +maxFileSizeMB,
|
||||
});
|
||||
httpServer.start().catch(console.error);
|
||||
}
|
||||
|
common/crypto.ts (new file, 12 lines)
@@ -0,0 +1,12 @@
export function simpleHash(s: string): number {
  let hash = 0,
    i,
    chr;
  if (s.length === 0) return hash;
  for (i = 0; i < s.length; i++) {
    chr = s.charCodeAt(i);
    hash = ((hash << 5) - hash) + chr;
    hash |= 0; // Convert to 32bit integer
  }
  return hash;
}
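A small usage illustration (hypothetical, not part of the diff): the helper returns a deterministic 32-bit signed integer, which makes it a cheap way to detect that a string's content changed.

import { simpleHash } from "./common/crypto.ts";

// Same input, same hash; different input, (almost always) a different hash.
const a = simpleHash("Hello World");
const b = simpleHash("Hello World!");
console.log(a === simpleHash("Hello World")); // true
console.log(a === b); // false in practice (collisions remain possible)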
common/deps.ts (modified)
@@ -122,3 +122,5 @@ export
  javascriptLanguage,
  typescriptLanguage,
} from "https://esm.sh/@codemirror/lang-javascript@6.1.4?external=@codemirror/language,@codemirror/autocomplete,@codemirror/view,@codemirror/state,@codemirror/lint,@lezer/common,@lezer/lr,@lezer/javascript,@codemirror/commands";

export { mime } from "https://deno.land/x/mimetypes@v1.0.0/mod.ts";
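The `mime` export added here is what the reworked space primitives below use to derive a content type from a file name; for example (illustrative, file name made up):

import { mime } from "./common/deps.ts";

// Falls back to a generic binary type when the extension is unknown,
// mirroring the pattern used throughout this PR.
const contentType = mime.getType("notes/index.md") || "application/octet-stream";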
@ -1,5 +1,4 @@
|
||||
import * as plugos from "../plugos/types.ts";
|
||||
import { EndpointHookT } from "../plugos/hooks/endpoint.ts";
|
||||
import { CronHookT } from "../plugos/hooks/cron.ts";
|
||||
import { EventHookT } from "../plugos/hooks/event.ts";
|
||||
import { CommandHookT } from "../web/hooks/command.ts";
|
||||
@ -10,7 +9,6 @@ import { CodeWidgetT } from "../web/hooks/code_widget.ts";
|
||||
export type SilverBulletHooks =
|
||||
& CommandHookT
|
||||
& SlashCommandHookT
|
||||
& EndpointHookT
|
||||
& CronHookT
|
||||
& EventHookT
|
||||
& CodeWidgetT
|
||||
|
common/proxy_fetch.ts (new file, 37 lines)
@@ -0,0 +1,37 @@
import { base64Encode } from "../plugos/asset_bundle/base64.ts";

export type ProxyFetchRequest = {
  method?: string;
  headers?: Record<string, string>;
  body?: string;
};

export type ProxyFetchResponse = {
  ok: boolean;
  status: number;
  headers: Record<string, string>;
  // We base64 encode the body because the body can be binary data that we have to push through the worker boundary
  base64Body: string;
};

export async function performLocalFetch(
  url: string,
  req: ProxyFetchRequest,
): Promise<ProxyFetchResponse> {
  const result = await fetch(
    url,
    req && {
      method: req.method,
      headers: req.headers,
      body: req.body,
    },
  );
  return {
    ok: result.ok,
    status: result.status,
    headers: Object.fromEntries(result.headers.entries()),
    base64Body: base64Encode(
      new Uint8Array(await (await result.blob()).arrayBuffer()),
    ),
  };
}
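On the receiving side of the worker boundary, a caller could turn a ProxyFetchResponse back into a standard Response along these lines — a hypothetical sketch, not code from this PR:

import type { ProxyFetchResponse } from "./common/proxy_fetch.ts";

// Rebuild a Response from the serializable ProxyFetchResponse.
// atob + charCodeAt is the plain-JS way to undo the base64 encoding.
function toResponse(resp: ProxyFetchResponse): Response {
  const body = Uint8Array.from(atob(resp.base64Body), (c) => c.charCodeAt(0));
  return new Response(body, { status: resp.status, headers: resp.headers });
}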
@ -1,7 +1,7 @@
|
||||
import { Plug } from "../../plugos/plug.ts";
|
||||
import { FileMeta } from "../types.ts";
|
||||
import { FileData, FileEncoding, SpacePrimitives } from "./space_primitives.ts";
|
||||
import { SpacePrimitives } from "./space_primitives.ts";
|
||||
import { AssetBundle } from "../../plugos/asset_bundle/bundle.ts";
|
||||
import { mime } from "../deps.ts";
|
||||
|
||||
const bootTime = Date.now();
|
||||
export class AssetBundlePlugSpacePrimitives implements SpacePrimitives {
|
||||
@ -13,10 +13,10 @@ export class AssetBundlePlugSpacePrimitives implements SpacePrimitives {
|
||||
|
||||
async fetchFileList(): Promise<FileMeta[]> {
|
||||
const files = await this.wrapped.fetchFileList();
|
||||
return this.assetBundle.listFiles().filter((p) => p.startsWith("_plug/"))
|
||||
return this.assetBundle.listFiles()
|
||||
.map((p) => ({
|
||||
name: p,
|
||||
contentType: "application/json",
|
||||
contentType: mime.getType(p) || "application/octet-stream",
|
||||
lastModified: bootTime,
|
||||
perm: "ro",
|
||||
size: -1,
|
||||
@ -25,22 +25,21 @@ export class AssetBundlePlugSpacePrimitives implements SpacePrimitives {
|
||||
|
||||
readFile(
|
||||
name: string,
|
||||
encoding: FileEncoding,
|
||||
): Promise<{ data: FileData; meta: FileMeta }> {
|
||||
): Promise<{ data: Uint8Array; meta: FileMeta }> {
|
||||
if (this.assetBundle.has(name)) {
|
||||
const data = this.assetBundle.readFileSync(name);
|
||||
// console.log("Requested encoding", encoding);
|
||||
return Promise.resolve({
|
||||
data: encoding === "utf8" ? new TextDecoder().decode(data) : data,
|
||||
data,
|
||||
meta: {
|
||||
lastModified: bootTime,
|
||||
size: data.byteLength,
|
||||
perm: "ro",
|
||||
contentType: "application/json",
|
||||
contentType: this.assetBundle.getMimeType(name),
|
||||
} as FileMeta,
|
||||
});
|
||||
}
|
||||
return this.wrapped.readFile(name, encoding);
|
||||
return this.wrapped.readFile(name);
|
||||
}
|
||||
|
||||
getFileMeta(name: string): Promise<FileMeta> {
|
||||
@ -50,7 +49,7 @@ export class AssetBundlePlugSpacePrimitives implements SpacePrimitives {
|
||||
lastModified: bootTime,
|
||||
size: data.byteLength,
|
||||
perm: "ro",
|
||||
contentType: "application/json",
|
||||
contentType: this.assetBundle.getMimeType(name),
|
||||
} as FileMeta);
|
||||
}
|
||||
return this.wrapped.getFileMeta(name);
|
||||
@ -58,11 +57,20 @@ export class AssetBundlePlugSpacePrimitives implements SpacePrimitives {
|
||||
|
||||
writeFile(
|
||||
name: string,
|
||||
encoding: FileEncoding,
|
||||
data: FileData,
|
||||
data: Uint8Array,
|
||||
selfUpdate?: boolean,
|
||||
lastModified?: number,
|
||||
): Promise<FileMeta> {
|
||||
return this.wrapped.writeFile(name, encoding, data, selfUpdate);
|
||||
if (this.assetBundle.has(name)) {
|
||||
console.warn("Attempted to write to read-only asset file", name);
|
||||
return this.getFileMeta(name);
|
||||
}
|
||||
return this.wrapped.writeFile(
|
||||
name,
|
||||
data,
|
||||
selfUpdate,
|
||||
lastModified,
|
||||
);
|
||||
}
|
||||
|
||||
deleteFile(name: string): Promise<void> {
|
||||
@ -72,18 +80,4 @@ export class AssetBundlePlugSpacePrimitives implements SpacePrimitives {
|
||||
}
|
||||
return this.wrapped.deleteFile(name);
|
||||
}
|
||||
|
||||
// deno-lint-ignore no-explicit-any
|
||||
proxySyscall(plug: Plug<any>, name: string, args: any[]): Promise<any> {
|
||||
return this.wrapped.proxySyscall(plug, name, args);
|
||||
}
|
||||
|
||||
invokeFunction(
|
||||
plug: Plug<any>,
|
||||
env: string,
|
||||
name: string,
|
||||
args: any[],
|
||||
): Promise<any> {
|
||||
return this.wrapped.invokeFunction(plug, env, name, args);
|
||||
}
|
||||
}
|
||||
|
@ -2,14 +2,9 @@
|
||||
import { path } from "../deps.ts";
|
||||
import { readAll } from "../deps.ts";
|
||||
import { FileMeta } from "../types.ts";
|
||||
import { FileData, FileEncoding, SpacePrimitives } from "./space_primitives.ts";
|
||||
import { Plug } from "../../plugos/plug.ts";
|
||||
import { SpacePrimitives } from "./space_primitives.ts";
|
||||
import { mime } from "https://deno.land/x/mimetypes@v1.0.0/mod.ts";
|
||||
import {
|
||||
base64DecodeDataUrl,
|
||||
base64EncodedDataUrl,
|
||||
} from "../../plugos/asset_bundle/base64.ts";
|
||||
import { walk } from "../../plugos/deps.ts";
|
||||
import { walk } from "https://deno.land/std@0.165.0/fs/walk.ts";
|
||||
|
||||
function lookupContentType(path: string): string {
|
||||
return mime.getType(path) || "application/octet-stream";
|
||||
@ -21,10 +16,14 @@ function normalizeForwardSlashPath(path: string) {
|
||||
|
||||
const excludedFiles = ["data.db", "data.db-journal", "sync.json"];
|
||||
|
||||
export type DiskSpaceOptions = {
|
||||
maxFileSizeMB?: number;
|
||||
};
|
||||
|
||||
export class DiskSpacePrimitives implements SpacePrimitives {
|
||||
rootPath: string;
|
||||
|
||||
constructor(rootPath: string) {
|
||||
constructor(rootPath: string, private options: DiskSpaceOptions = {}) {
|
||||
this.rootPath = Deno.realPathSync(rootPath);
|
||||
}
|
||||
|
||||
@ -46,36 +45,16 @@ export class DiskSpacePrimitives implements SpacePrimitives {
|
||||
|
||||
async readFile(
|
||||
name: string,
|
||||
encoding: FileEncoding,
|
||||
): Promise<{ data: FileData; meta: FileMeta }> {
|
||||
): Promise<{ data: Uint8Array; meta: FileMeta }> {
|
||||
const localPath = this.filenameToPath(name);
|
||||
try {
|
||||
const s = await Deno.stat(localPath);
|
||||
let data: FileData | null = null;
|
||||
const contentType = lookupContentType(name);
|
||||
switch (encoding) {
|
||||
case "utf8":
|
||||
data = await Deno.readTextFile(localPath);
|
||||
break;
|
||||
case "dataurl":
|
||||
{
|
||||
|
||||
const f = await Deno.open(localPath, { read: true });
|
||||
const buf = await readAll(f);
|
||||
const data = await readAll(f);
|
||||
Deno.close(f.rid);
|
||||
|
||||
data = base64EncodedDataUrl(contentType, buf);
|
||||
}
|
||||
break;
|
||||
case "arraybuffer":
|
||||
{
|
||||
const f = await Deno.open(localPath, { read: true });
|
||||
const buf = await readAll(f);
|
||||
Deno.close(f.rid);
|
||||
|
||||
data = buf.buffer;
|
||||
}
|
||||
break;
|
||||
}
|
||||
return {
|
||||
data,
|
||||
meta: {
|
||||
@ -94,29 +73,29 @@ export class DiskSpacePrimitives implements SpacePrimitives {
|
||||
|
||||
async writeFile(
|
||||
name: string,
|
||||
encoding: FileEncoding,
|
||||
data: FileData,
|
||||
data: Uint8Array,
|
||||
_selfUpdate?: boolean,
|
||||
lastModified?: number,
|
||||
): Promise<FileMeta> {
|
||||
const localPath = this.filenameToPath(name);
|
||||
try {
|
||||
// Ensure parent folder exists
|
||||
await Deno.mkdir(path.dirname(localPath), { recursive: true });
|
||||
|
||||
const file = await Deno.open(localPath, {
|
||||
write: true,
|
||||
create: true,
|
||||
truncate: true,
|
||||
});
|
||||
|
||||
// Actually write the file
|
||||
switch (encoding) {
|
||||
case "utf8":
|
||||
await Deno.writeTextFile(`${localPath}`, data as string);
|
||||
break;
|
||||
case "dataurl":
|
||||
await Deno.writeFile(
|
||||
localPath,
|
||||
base64DecodeDataUrl(data as string),
|
||||
);
|
||||
break;
|
||||
case "arraybuffer":
|
||||
await Deno.writeFile(localPath, new Uint8Array(data as ArrayBuffer));
|
||||
break;
|
||||
await Deno.write(file.rid, data);
|
||||
|
||||
if (lastModified) {
|
||||
console.log("Seting mtime to", new Date(lastModified));
|
||||
await Deno.futime(file.rid, new Date(), new Date(lastModified));
|
||||
}
|
||||
file.close();
|
||||
|
||||
// Fetch new metadata
|
||||
const s = await Deno.stat(localPath);
|
||||
@ -171,6 +150,13 @@ export class DiskSpacePrimitives implements SpacePrimitives {
|
||||
const fullPath = file.path;
|
||||
try {
|
||||
const s = await Deno.stat(fullPath);
|
||||
// Don't list files exceeding the maximum file size
|
||||
if (
|
||||
this.options.maxFileSizeMB &&
|
||||
s.size / (1024 * 1024) > this.options.maxFileSizeMB
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
const name = fullPath.substring(this.rootPath.length + 1);
|
||||
if (excludedFiles.includes(name)) {
|
||||
continue;
|
||||
@ -193,20 +179,6 @@ export class DiskSpacePrimitives implements SpacePrimitives {
|
||||
|
||||
return allFiles;
|
||||
}
|
||||
|
||||
// Plugs
|
||||
invokeFunction(
|
||||
plug: Plug<any>,
|
||||
_env: string,
|
||||
name: string,
|
||||
args: any[],
|
||||
): Promise<any> {
|
||||
return plug.invoke(name, args);
|
||||
}
|
||||
|
||||
proxySyscall(plug: Plug<any>, name: string, args: any[]): Promise<any> {
|
||||
return plug.syscall(name, args);
|
||||
}
|
||||
}
|
||||
|
||||
function escapeRegExp(string: string) {
|
||||
|
@ -1,8 +1,7 @@
|
||||
import { EventHook } from "../../plugos/hooks/event.ts";
|
||||
import { Plug } from "../../plugos/plug.ts";
|
||||
|
||||
import { FileMeta } from "../types.ts";
|
||||
import { FileData, FileEncoding, SpacePrimitives } from "./space_primitives.ts";
|
||||
import type { SpacePrimitives } from "./space_primitives.ts";
|
||||
|
||||
export class EventedSpacePrimitives implements SpacePrimitives {
|
||||
constructor(private wrapped: SpacePrimitives, private eventHook: EventHook) {}
|
||||
@ -11,56 +10,30 @@ export class EventedSpacePrimitives implements SpacePrimitives {
|
||||
return this.wrapped.fetchFileList();
|
||||
}
|
||||
|
||||
proxySyscall(plug: Plug<any>, name: string, args: any[]): Promise<any> {
|
||||
return this.wrapped.proxySyscall(plug, name, args);
|
||||
}
|
||||
|
||||
invokeFunction(
|
||||
plug: Plug<any>,
|
||||
env: string,
|
||||
name: string,
|
||||
args: any[],
|
||||
): Promise<any> {
|
||||
return this.wrapped.invokeFunction(plug, env, name, args);
|
||||
}
|
||||
|
||||
readFile(
|
||||
name: string,
|
||||
encoding: FileEncoding,
|
||||
): Promise<{ data: FileData; meta: FileMeta }> {
|
||||
return this.wrapped.readFile(name, encoding);
|
||||
): Promise<{ data: Uint8Array; meta: FileMeta }> {
|
||||
return this.wrapped.readFile(name);
|
||||
}
|
||||
|
||||
async writeFile(
|
||||
name: string,
|
||||
encoding: FileEncoding,
|
||||
data: FileData,
|
||||
data: Uint8Array,
|
||||
selfUpdate?: boolean,
|
||||
lastModified?: number,
|
||||
): Promise<FileMeta> {
|
||||
const newMeta = await this.wrapped.writeFile(
|
||||
name,
|
||||
encoding,
|
||||
data,
|
||||
selfUpdate,
|
||||
lastModified,
|
||||
);
|
||||
// This can happen async
|
||||
if (name.endsWith(".md")) {
|
||||
const pageName = name.substring(0, name.length - 3);
|
||||
let text = "";
|
||||
switch (encoding) {
|
||||
case "utf8":
|
||||
text = data as string;
|
||||
break;
|
||||
case "arraybuffer":
|
||||
{
|
||||
const decoder = new TextDecoder("utf-8");
|
||||
text = decoder.decode(data as ArrayBuffer);
|
||||
}
|
||||
|
||||
break;
|
||||
case "dataurl":
|
||||
throw Error("Data urls not supported in this context");
|
||||
}
|
||||
text = decoder.decode(data);
|
||||
|
||||
this.eventHook
|
||||
.dispatchEvent("page:saved", pageName)
|
||||
@ -74,6 +47,9 @@ export class EventedSpacePrimitives implements SpacePrimitives {
|
||||
console.error("Error dispatching page:saved event", e);
|
||||
});
|
||||
}
|
||||
if (name.endsWith(".plug.js")) {
|
||||
await this.eventHook.dispatchEvent("plug:changed", name);
|
||||
}
|
||||
return newMeta;
|
||||
}
|
||||
|
||||
|
common/spaces/fallback_space_primitives.ts (new file, 45 lines)
@@ -0,0 +1,45 @@
import { FileMeta } from "../types.ts";
import type { SpacePrimitives } from "./space_primitives.ts";

/**
 * FallbackSpacePrimitives is a SpacePrimitives implementation that will try to fall back to another SpacePrimitives implementation for two
 * operations:
 * - readFile
 * - getFileMeta
 * The use case is primarily sync: when sync hasn't completed yet, we can fall back to HttpSpacePrimitives to fetch the file from the server.
 */
export class FallbackSpacePrimitives implements SpacePrimitives {
  constructor(
    private primary: SpacePrimitives,
    private fallback: SpacePrimitives,
  ) {
  }
  fetchFileList(): Promise<FileMeta[]> {
    return this.primary.fetchFileList();
  }
  async readFile(name: string): Promise<{ data: Uint8Array; meta: FileMeta }> {
    try {
      return await this.primary.readFile(name);
    } catch {
      return this.fallback.readFile(name);
    }
  }
  async getFileMeta(name: string): Promise<FileMeta> {
    try {
      return await this.primary.getFileMeta(name);
    } catch {
      return this.fallback.getFileMeta(name);
    }
  }
  writeFile(
    name: string,
    data: Uint8Array,
    selfUpdate?: boolean | undefined,
    lastModified?: number | undefined,
  ): Promise<FileMeta> {
    return this.primary.writeFile(name, data, selfUpdate, lastModified);
  }
  deleteFile(name: string): Promise<void> {
    return this.primary.deleteFile(name);
  }
}
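A sketch of how this could be composed in the offline-first client (hypothetical wiring; the database name and server URL are placeholders, but all three classes appear elsewhere in this PR):

import { FallbackSpacePrimitives } from "./common/spaces/fallback_space_primitives.ts";
import { IndexedDBSpacePrimitives } from "./common/spaces/indexeddb_space_primitives.ts";
import { HttpSpacePrimitives } from "./common/spaces/http_space_primitives.ts";

// Primary: the locally synced copy of the space in the browser's IndexedDB.
const local = new IndexedDBSpacePrimitives("space");
// Fallback: the server, queried in sync mode (which adds the X-Sync-Mode
// header) for files that have not been synced down yet.
const remote = new HttpSpacePrimitives("http://localhost:3000", undefined, true);
const space = new FallbackSpacePrimitives(local, remote);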
@ -1,6 +1,5 @@
|
||||
import { Plug } from "../../plugos/plug.ts";
|
||||
import { FileMeta } from "../types.ts";
|
||||
import { FileData, FileEncoding, SpacePrimitives } from "./space_primitives.ts";
|
||||
import { SpacePrimitives } from "./space_primitives.ts";
|
||||
import type { SysCallMapping } from "../../plugos/system.ts";
|
||||
|
||||
// Enriches the file list listing with custom metadata from the page index
|
||||
@ -40,9 +39,8 @@ export class FileMetaSpacePrimitives implements SpacePrimitives {
|
||||
|
||||
readFile(
|
||||
name: string,
|
||||
encoding: FileEncoding,
|
||||
): Promise<{ data: FileData; meta: FileMeta }> {
|
||||
return this.wrapped.readFile(name, encoding);
|
||||
): Promise<{ data: Uint8Array; meta: FileMeta }> {
|
||||
return this.wrapped.readFile(name);
|
||||
}
|
||||
|
||||
getFileMeta(name: string): Promise<FileMeta> {
|
||||
@ -51,28 +49,19 @@ export class FileMetaSpacePrimitives implements SpacePrimitives {
|
||||
|
||||
writeFile(
|
||||
name: string,
|
||||
encoding: FileEncoding,
|
||||
data: FileData,
|
||||
data: Uint8Array,
|
||||
selfUpdate?: boolean,
|
||||
lastModified?: number,
|
||||
): Promise<FileMeta> {
|
||||
return this.wrapped.writeFile(name, encoding, data, selfUpdate);
|
||||
return this.wrapped.writeFile(
|
||||
name,
|
||||
data,
|
||||
selfUpdate,
|
||||
lastModified,
|
||||
);
|
||||
}
|
||||
|
||||
deleteFile(name: string): Promise<void> {
|
||||
return this.wrapped.deleteFile(name);
|
||||
}
|
||||
|
||||
// deno-lint-ignore no-explicit-any
|
||||
proxySyscall(plug: Plug<any>, name: string, args: any[]): Promise<any> {
|
||||
return this.wrapped.proxySyscall(plug, name, args);
|
||||
}
|
||||
|
||||
invokeFunction(
|
||||
plug: Plug<any>,
|
||||
env: string,
|
||||
name: string,
|
||||
args: any[],
|
||||
): Promise<any> {
|
||||
return this.wrapped.invokeFunction(plug, env, name, args);
|
||||
}
|
||||
}
|
||||
|
@ -1,132 +1,93 @@
|
||||
import { FileMeta } from "../types.ts";
|
||||
import { Plug } from "../../plugos/plug.ts";
|
||||
import { FileData, FileEncoding, SpacePrimitives } from "./space_primitives.ts";
|
||||
import {
|
||||
base64DecodeDataUrl,
|
||||
base64Encode,
|
||||
base64EncodedDataUrl,
|
||||
} from "../../plugos/asset_bundle/base64.ts";
|
||||
import { mime } from "../../plugos/deps.ts";
|
||||
import { SpacePrimitives } from "./space_primitives.ts";
|
||||
import { flushCachesAndUnregisterServiceWorker } from "../sw_util.ts";
|
||||
|
||||
export class HttpSpacePrimitives implements SpacePrimitives {
|
||||
private fsUrl: string;
|
||||
private plugUrl: string;
|
||||
|
||||
constructor(
|
||||
url: string,
|
||||
readonly user?: string,
|
||||
readonly password?: string,
|
||||
readonly base64Put?: boolean,
|
||||
readonly url: string,
|
||||
readonly expectedSpacePath?: string,
|
||||
readonly syncMode = false,
|
||||
) {
|
||||
this.fsUrl = url + "/fs";
|
||||
this.plugUrl = url + "/plug";
|
||||
}
|
||||
|
||||
private async authenticatedFetch(
|
||||
public async authenticatedFetch(
|
||||
url: string,
|
||||
options: Record<string, any>,
|
||||
options: RequestInit,
|
||||
): Promise<Response> {
|
||||
if (this.user && this.password) {
|
||||
// Explicitly set an auth cookie
|
||||
if (!options.headers) {
|
||||
options.headers = {};
|
||||
}
|
||||
options.headers["cookie"] = `auth=${
|
||||
btoa(`${this.user}:${this.password}`)
|
||||
}`;
|
||||
}
|
||||
const result = await fetch(url, options);
|
||||
if (result.status === 401 || result.redirected) {
|
||||
// Invalid credentials, reloading the browser should trigger authentication
|
||||
if (typeof location !== "undefined") {
|
||||
location.reload();
|
||||
if (this.syncMode) {
|
||||
options.headers = { ...options.headers, ...{ "X-Sync-Mode": "true" } };
|
||||
}
|
||||
|
||||
throw Error("Unauthorized");
|
||||
const result = await fetch(url, { ...options });
|
||||
if (
|
||||
result.status === 401
|
||||
) {
|
||||
// Invalid credentials, reloading the browser should trigger authentication
|
||||
console.log("Going to redirect after", url);
|
||||
location.href = "/.auth?refer=" + location.pathname;
|
||||
throw new Error("Invalid credentials");
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
async fetchFileList(): Promise<FileMeta[]> {
|
||||
const req = await this.authenticatedFetch(this.fsUrl, {
|
||||
const resp = await this.authenticatedFetch(this.url, {
|
||||
method: "GET",
|
||||
});
|
||||
|
||||
return req.json();
|
||||
if (
|
||||
resp.status === 200 &&
|
||||
this.expectedSpacePath &&
|
||||
resp.headers.get("X-Space-Path") !== this.expectedSpacePath
|
||||
) {
|
||||
await flushCachesAndUnregisterServiceWorker();
|
||||
alert("Space folder path different on server, reloading the page");
|
||||
location.reload();
|
||||
}
|
||||
|
||||
return resp.json();
|
||||
}
|
||||
|
||||
async readFile(
|
||||
name: string,
|
||||
encoding: FileEncoding,
|
||||
): Promise<{ data: FileData; meta: FileMeta }> {
|
||||
): Promise<{ data: Uint8Array; meta: FileMeta }> {
|
||||
const res = await this.authenticatedFetch(
|
||||
`${this.fsUrl}/${encodeURI(name)}`,
|
||||
`${this.url}/${encodeURI(name)}`,
|
||||
{
|
||||
method: "GET",
|
||||
},
|
||||
);
|
||||
if (res.status === 404) {
|
||||
throw new Error(`Page not found`);
|
||||
}
|
||||
let data: FileData | null = null;
|
||||
switch (encoding) {
|
||||
case "arraybuffer":
|
||||
{
|
||||
data = await res.arrayBuffer();
|
||||
}
|
||||
break;
|
||||
case "dataurl":
|
||||
{
|
||||
data = base64EncodedDataUrl(
|
||||
mime.getType(name) || "application/octet-stream",
|
||||
new Uint8Array(await res.arrayBuffer()),
|
||||
);
|
||||
}
|
||||
break;
|
||||
case "utf8":
|
||||
data = await res.text();
|
||||
break;
|
||||
throw new Error(`Not found`);
|
||||
}
|
||||
return {
|
||||
data: data,
|
||||
data: new Uint8Array(await res.arrayBuffer()),
|
||||
meta: this.responseToMeta(name, res),
|
||||
};
|
||||
}
|
||||
|
||||
async writeFile(
|
||||
name: string,
|
||||
encoding: FileEncoding,
|
||||
data: FileData,
|
||||
data: Uint8Array,
|
||||
_selfUpdate?: boolean,
|
||||
lastModified?: number,
|
||||
): Promise<FileMeta> {
|
||||
let body: any = null;
|
||||
|
||||
switch (encoding) {
|
||||
case "arraybuffer":
|
||||
// actually we want an Uint8Array
|
||||
body = data instanceof ArrayBuffer ? new Uint8Array(data) : data;
|
||||
break;
|
||||
case "utf8":
|
||||
body = data;
|
||||
break;
|
||||
case "dataurl":
|
||||
data = base64DecodeDataUrl(data as string);
|
||||
break;
|
||||
}
|
||||
const headers: Record<string, string> = {
|
||||
"Content-Type": "application/octet-stream",
|
||||
};
|
||||
if (this.base64Put) {
|
||||
headers["X-Content-Base64"] = "true";
|
||||
headers["Content-Type"] = "text/plain";
|
||||
body = base64Encode(body);
|
||||
if (lastModified) {
|
||||
headers["X-Last-Modified"] = "" + lastModified;
|
||||
}
|
||||
|
||||
const res = await this.authenticatedFetch(
|
||||
`${this.fsUrl}/${encodeURI(name)}`,
|
||||
`${this.url}/${encodeURI(name)}`,
|
||||
{
|
||||
method: "PUT",
|
||||
headers,
|
||||
body,
|
||||
body: data,
|
||||
},
|
||||
);
|
||||
const newMeta = this.responseToMeta(name, res);
|
||||
@ -135,7 +96,7 @@ export class HttpSpacePrimitives implements SpacePrimitives {
|
||||
|
||||
async deleteFile(name: string): Promise<void> {
|
||||
const req = await this.authenticatedFetch(
|
||||
`${this.fsUrl}/${encodeURI(name)}`,
|
||||
`${this.url}/${encodeURI(name)}`,
|
||||
{
|
||||
method: "DELETE",
|
||||
},
|
||||
@ -147,13 +108,13 @@ export class HttpSpacePrimitives implements SpacePrimitives {
|
||||
|
||||
async getFileMeta(name: string): Promise<FileMeta> {
|
||||
const res = await this.authenticatedFetch(
|
||||
`${this.fsUrl}/${encodeURI(name)}`,
|
||||
`${this.url}/${encodeURI(name)}`,
|
||||
{
|
||||
method: "OPTIONS",
|
||||
},
|
||||
);
|
||||
if (res.status === 404) {
|
||||
throw new Error(`File not found`);
|
||||
throw new Error(`Not found`);
|
||||
}
|
||||
return this.responseToMeta(name, res);
|
||||
}
|
||||
@ -167,62 +128,4 @@ export class HttpSpacePrimitives implements SpacePrimitives {
|
||||
perm: (res.headers.get("X-Permission") as "rw" | "ro") || "rw",
|
||||
};
|
||||
}
|
||||
|
||||
// Plugs
|
||||
|
||||
async proxySyscall(plug: Plug<any>, name: string, args: any[]): Promise<any> {
|
||||
const req = await this.authenticatedFetch(
|
||||
`${this.plugUrl}/${plug.name}/syscall/${name}`,
|
||||
{
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-type": "application/json",
|
||||
},
|
||||
body: JSON.stringify(args),
|
||||
},
|
||||
);
|
||||
if (req.status !== 200) {
|
||||
const error = await req.text();
|
||||
throw Error(error);
|
||||
}
|
||||
if (req.headers.get("Content-length") === "0") {
|
||||
return;
|
||||
}
|
||||
return await req.json();
|
||||
}
|
||||
|
||||
async invokeFunction(
|
||||
plug: Plug<any>,
|
||||
env: string,
|
||||
name: string,
|
||||
args: any[],
|
||||
): Promise<any> {
|
||||
// Invoke locally
|
||||
if (!env || env === "client") {
|
||||
return plug.invoke(name, args);
|
||||
}
|
||||
// Or dispatch to server
|
||||
const req = await this.authenticatedFetch(
|
||||
`${this.plugUrl}/${plug.name}/function/${name}`,
|
||||
{
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-type": "application/json",
|
||||
},
|
||||
body: JSON.stringify(args),
|
||||
},
|
||||
);
|
||||
if (req.status !== 200) {
|
||||
const error = await req.text();
|
||||
throw Error(error);
|
||||
}
|
||||
if (req.headers.get("Content-length") === "0") {
|
||||
return;
|
||||
}
|
||||
if (req.headers.get("Content-type")?.includes("application/json")) {
|
||||
return await req.json();
|
||||
} else {
|
||||
return await req.text();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
common/spaces/indexeddb_space_primitives.test.ts (new file, 35 lines)
@@ -0,0 +1,35 @@
import { indexedDB } from "https://deno.land/x/indexeddb@v1.1.0/ponyfill_memory.ts";
import { IndexedDBSpacePrimitives } from "./indexeddb_space_primitives.ts";
import { assertEquals } from "../../test_deps.ts";

Deno.test("IndexedDBSpacePrimitives", async () => {
  const space = new IndexedDBSpacePrimitives("test", indexedDB);
  const files = await space.fetchFileList();
  assertEquals(files, []);
  // Write text file
  const fileMeta = await space.writeFile(
    "test.txt",
    stringToBytes("Hello World"),
  );
  assertEquals(
    (await space.readFile("test.txt")).data,
    stringToBytes("Hello World"),
  );
  const fbContent = (await space.readFile("test.txt"))
    .data;
  assertEquals(new TextDecoder().decode(fbContent), "Hello World");
  assertEquals(await space.fetchFileList(), [fileMeta]);
  const buf = new Uint8Array([1, 2, 3, 4, 5]);
  // Write binary file
  await space.writeFile("test.bin", buf);
  const fMeta = await space.getFileMeta("test.bin");
  assertEquals(fMeta.size, 5);
  assertEquals((await space.fetchFileList()).length, 2);

  await space.deleteFile("test.bin");
  assertEquals(await space.fetchFileList(), [fileMeta]);
});

function stringToBytes(str: string): Uint8Array {
  return new TextEncoder().encode(str);
}
common/spaces/indexeddb_space_primitives.ts (new file, 87 lines)
@@ -0,0 +1,87 @@
import type { FileMeta } from "../types.ts";
import type { SpacePrimitives } from "./space_primitives.ts";
import Dexie, { Table } from "dexie";
import { mime } from "../deps.ts";

export type FileContent = {
  name: string;
  data: Uint8Array;
};

export class IndexedDBSpacePrimitives implements SpacePrimitives {
  private db: Dexie;
  filesMetaTable: Table<FileMeta, string>;
  filesContentTable: Table<FileContent, string>;

  constructor(
    dbName: string,
    indexedDB?: any,
  ) {
    this.db = new Dexie(dbName, {
      indexedDB,
    });
    this.db.version(1).stores({
      fileMeta: "name",
      fileContent: "name",
    });
    this.filesMetaTable = this.db.table("fileMeta");
    this.filesContentTable = this.db.table<FileContent, string>("fileContent");
  }

  fetchFileList(): Promise<FileMeta[]> {
    return this.filesMetaTable.toArray();
  }

  async readFile(
    name: string,
  ): Promise<{ data: Uint8Array; meta: FileMeta }> {
    const fileMeta = await this.filesMetaTable.get(name);
    if (!fileMeta) {
      throw new Error("Not found");
    }
    const fileContent = await this.filesContentTable.get(name);
    if (!fileContent) {
      throw new Error("Not found");
    }

    return {
      data: fileContent.data,
      meta: fileMeta,
    };
  }

  async writeFile(
    name: string,
    data: Uint8Array,
    _selfUpdate?: boolean,
    lastModified?: number,
  ): Promise<FileMeta> {
    const fileMeta: FileMeta = {
      name,
      lastModified: lastModified || Date.now(),
      contentType: mime.getType(name) || "application/octet-stream",
      size: data.byteLength,
      perm: "rw",
    };
    await this.filesContentTable.put({ name, data });
    await this.filesMetaTable.put(fileMeta);
    return fileMeta;
  }

  async deleteFile(name: string): Promise<void> {
    const fileMeta = await this.filesMetaTable.get(name);
    if (!fileMeta) {
      throw new Error("Not found");
    }
    await this.filesMetaTable.delete(name);
    await this.filesContentTable.delete(name);
  }

  async getFileMeta(name: string): Promise<FileMeta> {
    const fileMeta = await this.filesMetaTable.get(name);
    if (!fileMeta) {
      throw new Error("Not found");
    }
    return fileMeta;
  }
}
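Since the space content now lives client-side in IndexedDB (via Dexie), wiping the locally cached space during development comes down to deleting the Dexie database — a hypothetical snippet using Dexie's standard delete API, with the database name matching whatever was passed to the constructor:

import Dexie from "dexie";

// Drops the database holding both the fileMeta and fileContent tables above.
await Dexie.delete("space");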
@ -1,15 +1,14 @@
|
||||
import { Plug } from "../../plugos/plug.ts";
|
||||
import {
|
||||
FileData,
|
||||
FileEncoding,
|
||||
SpacePrimitives,
|
||||
} from "../../common/spaces/space_primitives.ts";
|
||||
import { SpacePrimitives } from "../../common/spaces/space_primitives.ts";
|
||||
import { FileMeta } from "../../common/types.ts";
|
||||
import {
|
||||
NamespaceOperation,
|
||||
PageNamespaceHook,
|
||||
} from "../hooks/page_namespace.ts";
|
||||
import { base64DecodeDataUrl } from "../../plugos/asset_bundle/base64.ts";
|
||||
import {
|
||||
base64DecodeDataUrl,
|
||||
base64EncodedDataUrl,
|
||||
} from "../../plugos/asset_bundle/base64.ts";
|
||||
import { mime } from "../deps.ts";
|
||||
|
||||
export class PlugSpacePrimitives implements SpacePrimitives {
|
||||
constructor(
|
||||
@ -18,19 +17,34 @@ export class PlugSpacePrimitives implements SpacePrimitives {
|
||||
private env?: string,
|
||||
) {}
|
||||
|
||||
// Used e.g. by the sync engine to see if it should sync a certain path (likely not the case when we have a plug space override)
|
||||
public isLikelyHandled(path: string): boolean {
|
||||
for (
|
||||
const { pattern, env } of this.hook.spaceFunctions
|
||||
) {
|
||||
if (
|
||||
path.match(pattern) &&
|
||||
(!this.env || (env && env === this.env))
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
performOperation(
|
||||
type: NamespaceOperation,
|
||||
pageName: string,
|
||||
path: string,
|
||||
...args: any[]
|
||||
): Promise<any> | false {
|
||||
for (
|
||||
const { operation, pattern, plug, name, env } of this.hook.spaceFunctions
|
||||
) {
|
||||
if (
|
||||
operation === type && pageName.match(pattern) &&
|
||||
operation === type && path.match(pattern) &&
|
||||
(!this.env || (env && env === this.env))
|
||||
) {
|
||||
return plug.invoke(name, [pageName, ...args]);
|
||||
return plug.invoke(name, [path, ...args]);
|
||||
}
|
||||
}
|
||||
return false;
|
||||
@ -58,26 +72,19 @@ export class PlugSpacePrimitives implements SpacePrimitives {
|
||||
|
||||
async readFile(
|
||||
name: string,
|
||||
encoding: FileEncoding,
|
||||
): Promise<{ data: FileData; meta: FileMeta }> {
|
||||
const wantArrayBuffer = encoding === "arraybuffer";
|
||||
const result: { data: FileData; meta: FileMeta } | false = await this
|
||||
): Promise<{ data: Uint8Array; meta: FileMeta }> {
|
||||
const result: { data: string; meta: FileMeta } | false = await this
|
||||
.performOperation(
|
||||
"readFile",
|
||||
name,
|
||||
wantArrayBuffer ? "dataurl" : encoding,
|
||||
);
|
||||
if (result) {
|
||||
if (wantArrayBuffer) {
|
||||
return {
|
||||
data: base64DecodeDataUrl(result.data as string),
|
||||
data: base64DecodeDataUrl(result.data),
|
||||
meta: result.meta,
|
||||
};
|
||||
} else {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
return this.wrapped.readFile(name, encoding);
|
||||
return this.wrapped.readFile(name);
|
||||
}
|
||||
|
||||
getFileMeta(name: string): Promise<FileMeta> {
|
||||
@ -90,22 +97,29 @@ export class PlugSpacePrimitives implements SpacePrimitives {
|
||||
|
||||
writeFile(
|
||||
name: string,
|
||||
encoding: FileEncoding,
|
||||
data: FileData,
|
||||
data: Uint8Array,
|
||||
selfUpdate?: boolean,
|
||||
lastModified?: number,
|
||||
): Promise<FileMeta> {
|
||||
const result = this.performOperation(
|
||||
"writeFile",
|
||||
name,
|
||||
encoding,
|
||||
base64EncodedDataUrl(
|
||||
mime.getType(name) || "application/octet-stream",
|
||||
data,
|
||||
),
|
||||
selfUpdate,
|
||||
);
|
||||
if (result) {
|
||||
return result;
|
||||
}
|
||||
|
||||
return this.wrapped.writeFile(name, encoding, data, selfUpdate);
|
||||
return this.wrapped.writeFile(
|
||||
name,
|
||||
data,
|
||||
selfUpdate,
|
||||
lastModified,
|
||||
);
|
||||
}
|
||||
|
||||
deleteFile(name: string): Promise<void> {
|
||||
@ -115,17 +129,4 @@ export class PlugSpacePrimitives implements SpacePrimitives {
|
||||
}
|
||||
return this.wrapped.deleteFile(name);
|
||||
}
|
||||
|
||||
proxySyscall(plug: Plug<any>, name: string, args: any[]): Promise<any> {
|
||||
return this.wrapped.proxySyscall(plug, name, args);
|
||||
}
|
||||
|
||||
invokeFunction(
|
||||
plug: Plug<any>,
|
||||
env: string,
|
||||
name: string,
|
||||
args: any[],
|
||||
): Promise<any> {
|
||||
return this.wrapped.invokeFunction(plug, env, name, args);
|
||||
}
|
||||
}
|
||||
|
@@ -1,31 +1,21 @@
import { Plug } from "../../plugos/plug.ts";
import { FileMeta } from "../types.ts";
import type { FileMeta } from "../types.ts";

// export type FileEncoding = "utf8" | "arraybuffer" | "dataurl";
// export type FileData = ArrayBuffer | string;

export type FileEncoding = "utf8" | "arraybuffer" | "dataurl";
export type FileData = ArrayBuffer | string;
export interface SpacePrimitives {
  // Returns a list of file meta data as well as the timestamp of this snapshot
  fetchFileList(): Promise<FileMeta[]>;
  readFile(
    name: string,
    encoding: FileEncoding,
  ): Promise<{ data: FileData; meta: FileMeta }>;
  ): Promise<{ data: Uint8Array; meta: FileMeta }>;
  getFileMeta(name: string): Promise<FileMeta>;
  writeFile(
    name: string,
    encoding: FileEncoding,
    data: FileData,
    data: Uint8Array,
    // Used to decide whether or not to emit change events
    selfUpdate?: boolean,
    lastModified?: number,
  ): Promise<FileMeta>;
  deleteFile(name: string): Promise<void>;

  // Plugs
  proxySyscall(plug: Plug<any>, name: string, args: any[]): Promise<any>;
  invokeFunction(
    plug: Plug<any>,
    env: string,
    name: string,
    args: any[],
  ): Promise<any>;
}
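With the `FileEncoding`/`FileData` pair gone from the interface, callers that previously asked for `"utf8"` or `"dataurl"` now convert at the edges themselves; the updated sync tests do exactly this with a small helper. A sketch of that pattern (helper names are illustrative, not part of the interface):

```typescript
function stringToBytes(s: string): Uint8Array {
  return new TextEncoder().encode(s);
}

function bytesToString(b: Uint8Array): string {
  return new TextDecoder().decode(b);
}

// Hypothetical usage against any SpacePrimitives implementation:
//   await space.writeFile("note.md", stringToBytes("# Hello"));
//   const { data, meta } = await space.readFile("note.md");
//   console.log(meta.contentType, bytesToString(data));
```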
@ -9,23 +9,28 @@ Deno.test("Test store", async () => {
|
||||
console.log("Secondary", secondaryPath);
|
||||
const primary = new DiskSpacePrimitives(primaryPath);
|
||||
const secondary = new DiskSpacePrimitives(secondaryPath);
|
||||
const statusMap = new Map<string, SyncStatusItem>();
|
||||
const sync = new SpaceSync(primary, secondary, statusMap, {});
|
||||
const snapshot = new Map<string, SyncStatusItem>();
|
||||
const sync = new SpaceSync(primary, secondary, {
|
||||
conflictResolver: SpaceSync.primaryConflictResolver,
|
||||
});
|
||||
|
||||
// Write one page to primary
|
||||
await primary.writeFile("index", "utf8", "Hello");
|
||||
await primary.writeFile("index", stringToBytes("Hello"));
|
||||
assertEquals((await secondary.fetchFileList()).length, 0);
|
||||
console.log("Initial sync ops", await doSync());
|
||||
|
||||
assertEquals((await secondary.fetchFileList()).length, 1);
|
||||
assertEquals((await secondary.readFile("index", "utf8")).data, "Hello");
|
||||
assertEquals(
|
||||
(await secondary.readFile("index")).data,
|
||||
stringToBytes("Hello"),
|
||||
);
|
||||
|
||||
// Should be a no-op
|
||||
assertEquals(await doSync(), 0);
|
||||
|
||||
// Now let's make a change on the secondary
|
||||
await secondary.writeFile("index", "utf8", "Hello!!");
|
||||
await secondary.writeFile("test", "utf8", "Test page");
|
||||
await secondary.writeFile("index", stringToBytes("Hello!!"));
|
||||
await secondary.writeFile("test", stringToBytes("Test page"));
|
||||
|
||||
// And sync it
|
||||
await doSync();
|
||||
@ -33,13 +38,16 @@ Deno.test("Test store", async () => {
|
||||
assertEquals((await primary.fetchFileList()).length, 2);
|
||||
assertEquals((await secondary.fetchFileList()).length, 2);
|
||||
|
||||
assertEquals((await primary.readFile("index", "utf8")).data, "Hello!!");
|
||||
assertEquals(
|
||||
(await primary.readFile("index")).data,
|
||||
stringToBytes("Hello!!"),
|
||||
);
|
||||
|
||||
// Let's make some random edits on both ends
|
||||
await primary.writeFile("index", "utf8", "1");
|
||||
await primary.writeFile("index2", "utf8", "2");
|
||||
await secondary.writeFile("index3", "utf8", "3");
|
||||
await secondary.writeFile("index4", "utf8", "4");
|
||||
await primary.writeFile("index", stringToBytes("1"));
|
||||
await primary.writeFile("index2", stringToBytes("2"));
|
||||
await secondary.writeFile("index3", stringToBytes("3"));
|
||||
await secondary.writeFile("index4", stringToBytes("4"));
|
||||
await doSync();
|
||||
|
||||
assertEquals((await primary.fetchFileList()).length, 5);
|
||||
@ -72,16 +80,19 @@ Deno.test("Test store", async () => {
|
||||
// No-op
|
||||
assertEquals(await doSync(), 0);
|
||||
|
||||
await secondary.writeFile("index", "utf8", "I'm back");
|
||||
await secondary.writeFile("index", stringToBytes("I'm back"));
|
||||
|
||||
await doSync();
|
||||
|
||||
assertEquals((await primary.readFile("index", "utf8")).data, "I'm back");
|
||||
assertEquals(
|
||||
(await primary.readFile("index")).data,
|
||||
stringToBytes("I'm back"),
|
||||
);
|
||||
|
||||
// Cause a conflict
|
||||
console.log("Introducing a conflict now");
|
||||
await primary.writeFile("index", "utf8", "Hello 1");
|
||||
await secondary.writeFile("index", "utf8", "Hello 2");
|
||||
await primary.writeFile("index", stringToBytes("Hello 1"));
|
||||
await secondary.writeFile("index", stringToBytes("Hello 2"));
|
||||
|
||||
await doSync();
|
||||
|
||||
@ -89,27 +100,33 @@ Deno.test("Test store", async () => {
|
||||
await doSync();
|
||||
|
||||
// Verify that primary won
|
||||
assertEquals((await primary.readFile("index", "utf8")).data, "Hello 1");
|
||||
assertEquals((await secondary.readFile("index", "utf8")).data, "Hello 1");
|
||||
assertEquals(
|
||||
(await primary.readFile("index")).data,
|
||||
stringToBytes("Hello 1"),
|
||||
);
|
||||
assertEquals(
|
||||
(await secondary.readFile("index")).data,
|
||||
stringToBytes("Hello 1"),
|
||||
);
|
||||
|
||||
// test + index + index.conflicting copy
|
||||
assertEquals((await primary.fetchFileList()).length, 3);
|
||||
assertEquals((await secondary.fetchFileList()).length, 3);
|
||||
|
||||
// Introducing a fake conflict (same content, so not really conflicting)
|
||||
await primary.writeFile("index", "utf8", "Hello 1");
|
||||
await secondary.writeFile("index", "utf8", "Hello 1");
|
||||
await primary.writeFile("index", stringToBytes("Hello 1"));
|
||||
await secondary.writeFile("index", stringToBytes("Hello 1"));
|
||||
|
||||
// And two more files with different bodies, but only within a query directive — shouldn't conflict
|
||||
await primary.writeFile(
|
||||
"index.md",
|
||||
"utf8",
|
||||
stringToBytes(
|
||||
"Hello\n<!-- #query page -->\nHello 1\n<!-- /query -->",
|
||||
),
|
||||
);
|
||||
await secondary.writeFile(
|
||||
"index.md",
|
||||
"utf8",
|
||||
"Hello\n<!-- #query page -->\nHello 2\n<!-- /query -->",
|
||||
stringToBytes("Hello\n<!-- #query page -->\nHello 2\n<!-- /query -->"),
|
||||
);
|
||||
|
||||
await doSync();
|
||||
@ -128,15 +145,17 @@ Deno.test("Test store", async () => {
|
||||
const sync2 = new SpaceSync(
|
||||
secondary,
|
||||
ternary,
|
||||
new Map<string, SyncStatusItem>(),
|
||||
{},
|
||||
{
|
||||
conflictResolver: SpaceSync.primaryConflictResolver,
|
||||
},
|
||||
);
|
||||
const snapshot2 = new Map<string, SyncStatusItem>();
|
||||
console.log(
|
||||
"N ops",
|
||||
await sync2.syncFiles(SpaceSync.primaryConflictResolver),
|
||||
await sync2.syncFiles(snapshot2),
|
||||
);
|
||||
await sleep(2);
|
||||
assertEquals(await sync2.syncFiles(SpaceSync.primaryConflictResolver), 0);
|
||||
assertEquals(await sync2.syncFiles(snapshot2), 0);
|
||||
|
||||
// I had to look up what follows ternary (https://english.stackexchange.com/questions/25116/what-follows-next-in-the-sequence-unary-binary-ternary)
|
||||
const quaternaryPath = await Deno.makeTempDir();
|
||||
@ -144,12 +163,12 @@ Deno.test("Test store", async () => {
|
||||
const sync3 = new SpaceSync(
|
||||
secondary,
|
||||
quaternary,
|
||||
new Map<string, SyncStatusItem>(),
|
||||
{
|
||||
excludePrefixes: ["index"],
|
||||
isSyncCandidate: (path) => !path.startsWith("index"),
|
||||
conflictResolver: SpaceSync.primaryConflictResolver,
|
||||
},
|
||||
);
|
||||
const selectingOps = await sync3.syncFiles(SpaceSync.primaryConflictResolver);
|
||||
const selectingOps = await sync3.syncFiles(new Map());
|
||||
|
||||
assertEquals(selectingOps, 1);
|
||||
|
||||
@ -160,9 +179,7 @@ Deno.test("Test store", async () => {
|
||||
|
||||
async function doSync() {
|
||||
await sleep();
|
||||
const r = await sync.syncFiles(
|
||||
SpaceSync.primaryConflictResolver,
|
||||
);
|
||||
const r = await sync.syncFiles(snapshot);
|
||||
await sleep();
|
||||
return r;
|
||||
}
|
||||
@ -193,3 +210,7 @@ Hello
|
||||
`,
|
||||
);
|
||||
});
|
||||
|
||||
function stringToBytes(s: string): Uint8Array {
|
||||
return new TextEncoder().encode(s);
|
||||
}
|
||||
|
@ -10,52 +10,41 @@ type SyncHash = number;
|
||||
// and the second item the lastModified value of the secondary space
|
||||
export type SyncStatusItem = [SyncHash, SyncHash];
|
||||
|
||||
export interface Logger {
|
||||
log(level: string, ...messageBits: any[]): void;
|
||||
}
|
||||
|
||||
class ConsoleLogger implements Logger {
|
||||
log(_level: string, ...messageBits: any[]) {
|
||||
console.log(...messageBits);
|
||||
}
|
||||
}
|
||||
|
||||
export type SyncOptions = {
|
||||
logger?: Logger;
|
||||
excludePrefixes?: string[];
|
||||
export type SyncStatus = {
|
||||
filesProcessed: number;
|
||||
totalFiles: number;
|
||||
snapshot: Map<string, SyncStatusItem>;
|
||||
};
|
||||
|
||||
// Implementation of this algorithm https://unterwaditzer.net/2016/sync-algorithm.html
|
||||
export class SpaceSync {
|
||||
logger: ConsoleLogger;
|
||||
excludePrefixes: string[];
|
||||
|
||||
constructor(
|
||||
private primary: SpacePrimitives,
|
||||
private secondary: SpacePrimitives,
|
||||
readonly snapshot: Map<string, SyncStatusItem>,
|
||||
readonly options: SyncOptions,
|
||||
) {
|
||||
this.logger = options.logger || new ConsoleLogger();
|
||||
this.excludePrefixes = options.excludePrefixes || [];
|
||||
}
|
||||
|
||||
async syncFiles(
|
||||
export type SyncOptions = {
|
||||
conflictResolver: (
|
||||
name: string,
|
||||
snapshot: Map<string, SyncStatusItem>,
|
||||
primarySpace: SpacePrimitives,
|
||||
secondarySpace: SpacePrimitives,
|
||||
logger: Logger,
|
||||
) => Promise<number>,
|
||||
): Promise<number> {
|
||||
) => Promise<number>;
|
||||
isSyncCandidate?: (path: string) => boolean;
|
||||
// Used to track progress, may want to pass more specific info later
|
||||
onSyncProgress?: (syncStatus: SyncStatus) => void;
|
||||
};
|
||||
|
||||
// Implementation of this algorithm https://unterwaditzer.net/2016/sync-algorithm.html
|
||||
export class SpaceSync {
|
||||
constructor(
|
||||
private primary: SpacePrimitives,
|
||||
private secondary: SpacePrimitives,
|
||||
readonly options: SyncOptions,
|
||||
) {
|
||||
}
|
||||
|
||||
async syncFiles(snapshot: Map<string, SyncStatusItem>): Promise<number> {
|
||||
let operations = 0;
|
||||
this.logger.log("info", "Fetching snapshot from primary");
|
||||
console.log("[sync]", "Fetching snapshot from primary");
|
||||
const primaryAllPages = this.syncCandidates(
|
||||
await this.primary.fetchFileList(),
|
||||
);
|
||||
|
||||
this.logger.log("info", "Fetching snapshot from secondary");
|
||||
console.log("[sync]", "Fetching snapshot from secondary");
|
||||
try {
|
||||
const secondaryAllPages = this.syncCandidates(
|
||||
await this.secondary.fetchFileList(),
|
||||
@ -69,177 +58,188 @@ export class SpaceSync {
|
||||
);
|
||||
|
||||
const allFilesToProcess = new Set([
|
||||
...this.snapshot.keys(),
|
||||
...snapshot.keys(),
|
||||
...primaryFileMap.keys(),
|
||||
...secondaryFileMap.keys(),
|
||||
]);
|
||||
|
||||
this.logger.log("info", "Iterating over all files");
|
||||
for (const name of allFilesToProcess) {
|
||||
const sortedFilenames = [...allFilesToProcess];
|
||||
sortedFilenames.sort((a) => {
|
||||
// Just make sure that _plug/ files appear first
|
||||
// This is important for the initial sync: plugs are loaded the moment they are pulled into the space,
|
||||
// which would activate e.g. any indexing logic for the remaining space content
|
||||
return a.startsWith("_plug/") ? -1 : 1;
|
||||
});
|
||||
// console.log("[sync]", "Iterating over all files");
|
||||
let filesProcessed = 0;
|
||||
for (const name of sortedFilenames) {
|
||||
try {
|
||||
operations += await this.syncFile(
|
||||
snapshot,
|
||||
name,
|
||||
primaryFileMap.get(name),
|
||||
secondaryFileMap.get(name),
|
||||
conflictResolver,
|
||||
);
|
||||
filesProcessed++;
|
||||
// Only report something significant
|
||||
if (operations > 1 && this.options.onSyncProgress) {
|
||||
this.options.onSyncProgress({
|
||||
filesProcessed,
|
||||
totalFiles: sortedFilenames.length,
|
||||
snapshot,
|
||||
});
|
||||
}
|
||||
} catch (e: any) {
|
||||
this.logger.log("error", "Error syncing file", name, e.message);
|
||||
console.log("error", "Error syncing file", name, e.message);
|
||||
}
|
||||
}
|
||||
} catch (e: any) {
|
||||
this.logger.log("error", "General sync error:", e.message);
|
||||
console.log("error", "General sync error:", e.message);
|
||||
throw e;
|
||||
}
|
||||
this.logger.log("info", "Sync complete, operations performed", operations);
|
||||
console.log("[sync]", "Sync complete, operations performed", operations);
|
||||
|
||||
return operations;
|
||||
}
|
||||
|
||||
async syncFile(
|
||||
snapshot: Map<string, SyncStatusItem>,
|
||||
name: string,
|
||||
primaryHash: SyncHash | undefined,
|
||||
secondaryHash: SyncHash | undefined,
|
||||
conflictResolver: (
|
||||
name: string,
|
||||
snapshot: Map<string, SyncStatusItem>,
|
||||
primarySpace: SpacePrimitives,
|
||||
secondarySpace: SpacePrimitives,
|
||||
logger: Logger,
|
||||
) => Promise<number>,
|
||||
): Promise<number> {
|
||||
if (this.options.isSyncCandidate && !this.options.isSyncCandidate(name)) {
|
||||
return 0;
|
||||
}
|
||||
// console.log("Syncing", name, primaryHash, secondaryHash);
|
||||
let operations = 0;
|
||||
|
||||
// Check if not matching one of the excluded prefixes
|
||||
for (const prefix of this.excludePrefixes) {
|
||||
if (name.startsWith(prefix)) {
|
||||
return operations;
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
primaryHash !== undefined && secondaryHash === undefined &&
|
||||
!this.snapshot.has(name)
|
||||
!snapshot.has(name)
|
||||
) {
|
||||
// New file, created on primary, copy from primary to secondary
|
||||
this.logger.log(
|
||||
"info",
|
||||
console.log(
|
||||
"[sync]",
|
||||
"New file created on primary, copying to secondary",
|
||||
name,
|
||||
);
|
||||
const { data } = await this.primary.readFile(name, "arraybuffer");
|
||||
const { data, meta } = await this.primary.readFile(name);
|
||||
const writtenMeta = await this.secondary.writeFile(
|
||||
name,
|
||||
"arraybuffer",
|
||||
data,
|
||||
false,
|
||||
meta.lastModified,
|
||||
);
|
||||
this.snapshot.set(name, [
|
||||
snapshot.set(name, [
|
||||
primaryHash,
|
||||
writtenMeta.lastModified,
|
||||
]);
|
||||
operations++;
|
||||
} else if (
|
||||
secondaryHash !== undefined && primaryHash === undefined &&
|
||||
!this.snapshot.has(name)
|
||||
!snapshot.has(name)
|
||||
) {
|
||||
// New file, created on secondary, copy from secondary to primary
|
||||
this.logger.log(
|
||||
"info",
|
||||
console.log(
|
||||
"[sync]",
|
||||
"New file created on secondary, copying from secondary to primary",
|
||||
name,
|
||||
);
|
||||
const { data } = await this.secondary.readFile(name, "arraybuffer");
|
||||
const { data, meta } = await this.secondary.readFile(name);
|
||||
const writtenMeta = await this.primary.writeFile(
|
||||
name,
|
||||
"arraybuffer",
|
||||
data,
|
||||
false,
|
||||
meta.lastModified,
|
||||
);
|
||||
this.snapshot.set(name, [
|
||||
snapshot.set(name, [
|
||||
writtenMeta.lastModified,
|
||||
secondaryHash,
|
||||
]);
|
||||
operations++;
|
||||
} else if (
|
||||
primaryHash !== undefined && this.snapshot.has(name) &&
|
||||
primaryHash !== undefined && snapshot.has(name) &&
|
||||
secondaryHash === undefined
|
||||
) {
|
||||
// File deleted on B
|
||||
this.logger.log(
|
||||
"info",
|
||||
console.log(
|
||||
"[sync]",
|
||||
"File deleted on secondary, deleting from primary",
|
||||
name,
|
||||
);
|
||||
await this.primary.deleteFile(name);
|
||||
this.snapshot.delete(name);
|
||||
snapshot.delete(name);
|
||||
operations++;
|
||||
} else if (
|
||||
secondaryHash !== undefined && this.snapshot.has(name) &&
|
||||
secondaryHash !== undefined && snapshot.has(name) &&
|
||||
primaryHash === undefined
|
||||
) {
|
||||
// File deleted on A
|
||||
this.logger.log(
|
||||
"info",
|
||||
console.log(
|
||||
"[sync]",
|
||||
"File deleted on primary, deleting from secondary",
|
||||
name,
|
||||
);
|
||||
await this.secondary.deleteFile(name);
|
||||
this.snapshot.delete(name);
|
||||
snapshot.delete(name);
|
||||
operations++;
|
||||
} else if (
|
||||
this.snapshot.has(name) && primaryHash === undefined &&
|
||||
snapshot.has(name) && primaryHash === undefined &&
|
||||
secondaryHash === undefined
|
||||
) {
|
||||
// File deleted on both sides, :shrug:
|
||||
this.logger.log(
|
||||
"info",
|
||||
console.log(
|
||||
"[sync]",
|
||||
"File deleted on both ends, deleting from status",
|
||||
name,
|
||||
);
|
||||
this.snapshot.delete(name);
|
||||
snapshot.delete(name);
|
||||
operations++;
|
||||
} else if (
|
||||
primaryHash !== undefined && secondaryHash !== undefined &&
|
||||
this.snapshot.get(name) &&
|
||||
primaryHash !== this.snapshot.get(name)![0] &&
|
||||
secondaryHash === this.snapshot.get(name)![1]
|
||||
snapshot.get(name) &&
|
||||
primaryHash !== snapshot.get(name)![0] &&
|
||||
secondaryHash === snapshot.get(name)![1]
|
||||
) {
|
||||
// File has changed on primary, but not secondary: copy from primary to secondary
|
||||
this.logger.log(
|
||||
"info",
|
||||
console.log(
|
||||
"[sync]",
|
||||
"File changed on primary, copying to secondary",
|
||||
name,
|
||||
);
|
||||
const { data } = await this.primary.readFile(name, "arraybuffer");
|
||||
const { data, meta } = await this.primary.readFile(name);
|
||||
const writtenMeta = await this.secondary.writeFile(
|
||||
name,
|
||||
"arraybuffer",
|
||||
data,
|
||||
false,
|
||||
meta.lastModified,
|
||||
);
|
||||
this.snapshot.set(name, [
|
||||
snapshot.set(name, [
|
||||
primaryHash,
|
||||
writtenMeta.lastModified,
|
||||
]);
|
||||
operations++;
|
||||
} else if (
|
||||
primaryHash !== undefined && secondaryHash !== undefined &&
|
||||
this.snapshot.get(name) &&
|
||||
secondaryHash !== this.snapshot.get(name)![1] &&
|
||||
primaryHash === this.snapshot.get(name)![0]
|
||||
snapshot.get(name) &&
|
||||
secondaryHash !== snapshot.get(name)![1] &&
|
||||
primaryHash === snapshot.get(name)![0]
|
||||
) {
|
||||
// File has changed on secondary, but not primary: copy from secondary to primary
|
||||
this.logger.log(
|
||||
"info",
|
||||
console.log(
|
||||
"[sync]",
|
||||
"File has changed on secondary, but not primary: copy from secondary to primary",
|
||||
name,
|
||||
);
|
||||
const { data } = await this.secondary.readFile(name, "arraybuffer");
|
||||
const { data, meta } = await this.secondary.readFile(name);
|
||||
const writtenMeta = await this.primary.writeFile(
|
||||
name,
|
||||
"arraybuffer",
|
||||
data,
|
||||
false,
|
||||
meta.lastModified,
|
||||
);
|
||||
this.snapshot.set(name, [
|
||||
snapshot.set(name, [
|
||||
writtenMeta.lastModified,
|
||||
secondaryHash,
|
||||
]);
|
||||
@ -247,26 +247,25 @@ export class SpaceSync {
|
||||
} else if (
|
||||
( // File changed on both ends, but we don't have any info in the snapshot (resync scenario?): have to run through conflict handling
|
||||
primaryHash !== undefined && secondaryHash !== undefined &&
|
||||
!this.snapshot.has(name)
|
||||
!snapshot.has(name)
|
||||
) ||
|
||||
( // File changed on both ends, CONFLICT!
|
||||
primaryHash && secondaryHash &&
|
||||
this.snapshot.get(name) &&
|
||||
secondaryHash !== this.snapshot.get(name)![1] &&
|
||||
primaryHash !== this.snapshot.get(name)![0]
|
||||
snapshot.get(name) &&
|
||||
secondaryHash !== snapshot.get(name)![1] &&
|
||||
primaryHash !== snapshot.get(name)![0]
|
||||
)
|
||||
) {
|
||||
this.logger.log(
|
||||
"info",
|
||||
console.log(
|
||||
"[sync]",
|
||||
"File changed on both ends, potential conflict",
|
||||
name,
|
||||
);
|
||||
operations += await conflictResolver(
|
||||
operations += await this.options.conflictResolver!(
|
||||
name,
|
||||
this.snapshot,
|
||||
snapshot,
|
||||
this.primary,
|
||||
this.secondary,
|
||||
this.logger,
|
||||
);
|
||||
} else {
|
||||
// Nothing needs to happen
|
||||
@ -280,27 +279,29 @@ export class SpaceSync {
|
||||
snapshot: Map<string, SyncStatusItem>,
|
||||
primary: SpacePrimitives,
|
||||
secondary: SpacePrimitives,
|
||||
logger: Logger,
|
||||
): Promise<number> {
|
||||
logger.log("info", "Starting conflict resolution for", name);
|
||||
console.log("[sync]", "Starting conflict resolution for", name);
|
||||
const filePieces = name.split(".");
|
||||
const fileNameBase = filePieces.slice(0, -1).join(".");
|
||||
const fileNameExt = filePieces[filePieces.length - 1];
|
||||
const pageData1 = await primary.readFile(name, "arraybuffer");
|
||||
const pageData2 = await secondary.readFile(name, "arraybuffer");
|
||||
const pageData1 = await primary.readFile(name);
|
||||
const pageData2 = await secondary.readFile(name);
|
||||
|
||||
if (name.endsWith(".md")) {
|
||||
logger.log("info", "File is markdown, using smart conflict resolution");
|
||||
console.log(
|
||||
"[sync]",
|
||||
"File is markdown, using smart conflict resolution",
|
||||
);
|
||||
// Let's use a smarter check for markdown files, ignoring directive bodies
|
||||
const pageText1 = removeDirectiveBody(
|
||||
new TextDecoder().decode(pageData1.data as Uint8Array),
|
||||
new TextDecoder().decode(pageData1.data),
|
||||
);
|
||||
const pageText2 = removeDirectiveBody(
|
||||
new TextDecoder().decode(pageData2.data as Uint8Array),
|
||||
new TextDecoder().decode(pageData2.data),
|
||||
);
|
||||
if (pageText1 === pageText2) {
|
||||
logger.log(
|
||||
"info",
|
||||
console.log(
|
||||
"[sync]",
|
||||
"Files are the same (eliminating the directive bodies), no conflict",
|
||||
);
|
||||
snapshot.set(name, [
|
||||
@ -311,8 +312,8 @@ export class SpaceSync {
|
||||
}
|
||||
} else {
|
||||
let byteWiseMatch = true;
|
||||
const arrayBuffer1 = new Uint8Array(pageData1.data as ArrayBuffer);
|
||||
const arrayBuffer2 = new Uint8Array(pageData2.data as ArrayBuffer);
|
||||
const arrayBuffer1 = pageData1.data;
|
||||
const arrayBuffer2 = pageData2.data;
|
||||
if (arrayBuffer1.byteLength !== arrayBuffer2.byteLength) {
|
||||
byteWiseMatch = false;
|
||||
}
|
||||
@ -326,7 +327,8 @@ export class SpaceSync {
|
||||
}
|
||||
// Byte-wise they're still the same, so no conflict
|
||||
if (byteWiseMatch) {
|
||||
logger.log("info", "Files are the same, no conflict");
|
||||
console.log("[sync]", "Files are the same, no conflict");
|
||||
|
||||
snapshot.set(name, [
|
||||
pageData1.meta.lastModified,
|
||||
pageData2.meta.lastModified,
|
||||
@ -335,11 +337,12 @@ export class SpaceSync {
|
||||
}
|
||||
}
|
||||
}
|
||||
let operations = 0;
|
||||
const revisionFileName = filePieces.length === 1
|
||||
? `${name}.conflicted.${pageData2.meta.lastModified}`
|
||||
: `${fileNameBase}.conflicted.${pageData2.meta.lastModified}.${fileNameExt}`;
|
||||
logger.log(
|
||||
"info",
|
||||
console.log(
|
||||
"[sync]",
|
||||
"Going to create conflicting copy",
|
||||
revisionFileName,
|
||||
);
|
||||
@ -347,14 +350,22 @@ export class SpaceSync {
|
||||
// Copy secondary to conflict copy
|
||||
const localConflictMeta = await primary.writeFile(
|
||||
revisionFileName,
|
||||
"arraybuffer",
|
||||
pageData2.data,
|
||||
);
|
||||
operations++;
|
||||
const remoteConflictMeta = await secondary.writeFile(
|
||||
revisionFileName,
|
||||
"arraybuffer",
|
||||
pageData2.data,
|
||||
);
|
||||
operations++;
|
||||
|
||||
// Write replacement on top
|
||||
const writeMeta = await secondary.writeFile(
|
||||
name,
|
||||
pageData1.data,
|
||||
true,
|
||||
);
|
||||
operations++;
|
||||
|
||||
// Updating snapshot
|
||||
snapshot.set(revisionFileName, [
|
||||
@ -362,22 +373,16 @@ export class SpaceSync {
|
||||
remoteConflictMeta.lastModified,
|
||||
]);
|
||||
|
||||
// Write replacement on top
|
||||
const writeMeta = await secondary.writeFile(
|
||||
name,
|
||||
"arraybuffer",
|
||||
pageData1.data,
|
||||
true,
|
||||
);
|
||||
|
||||
snapshot.set(name, [pageData1.meta.lastModified, writeMeta.lastModified]);
|
||||
return 1;
|
||||
return operations;
|
||||
}
|
||||
|
||||
syncCandidates(files: FileMeta[]): FileMeta[] {
|
||||
return files.filter((f) =>
|
||||
!f.name.startsWith("_plug/") && f.lastModified > 0
|
||||
);
|
||||
if (this.options.isSyncCandidate) {
|
||||
return files.filter((meta) => this.options.isSyncCandidate!(meta.name));
|
||||
} else {
|
||||
return files;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
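To summarize the reshaped sync API in one place: the snapshot is now passed to `syncFiles` (and owned by the caller), while the conflict resolver, candidate filter, and progress callback live in `SyncOptions`. A usage sketch, assuming `localSpace` and `remoteSpace` are existing `SpacePrimitives` instances:

```typescript
const snapshot = new Map<string, SyncStatusItem>();

const sync = new SpaceSync(localSpace, remoteSpace, {
  conflictResolver: SpaceSync.primaryConflictResolver,
  // Optional: skip paths entirely, e.g. plug code
  isSyncCandidate: (path) => !path.startsWith("_plug/"),
  // Optional: progress reporting
  onSyncProgress: ({ filesProcessed, totalFiles }) =>
    console.log(`[sync] ${filesProcessed}/${totalFiles}`),
});

// The caller can persist the (mutated) snapshot between runs
const operations = await sync.syncFiles(snapshot);
console.log("Sync complete, operations performed:", operations);
```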
27
common/sw_util.ts
Normal file
@@ -0,0 +1,27 @@
export function flushCachesAndUnregisterServiceWorker() {
  return new Promise<void>((resolve) => {
    if (!navigator.serviceWorker) {
      console.log("No service worker active");
      return resolve();
    }

    navigator.serviceWorker.addEventListener("message", (event) => {
      if (event.data.type === "cacheFlushed") {
        console.log("Cache flushed");
        // Then unregister all service workers
        navigator.serviceWorker.getRegistrations().then((registrations) => {
          for (const registration of registrations) {
            registration.unregister();
            console.log("Service worker unregistered");
            resolve();
          }
        });
      }
    });

    // First flush active cache
    navigator.serviceWorker.ready.then((registration) => {
      registration.active!.postMessage({ type: "flushCache" });
    });
  });
}
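The service worker side of this handshake is not part of the file above. For illustration only, a hypothetical worker-side handler would respond to the `flushCache` message by clearing its caches and replying with `cacheFlushed`, roughly like this sketch (an assumption, not SilverBullet's actual worker code):

```typescript
// Hypothetical handler running in the service worker's global scope (sketch only)
self.addEventListener("message", (event: any) => {
  if (event.data?.type === "flushCache") {
    caches.keys()
      .then((names) => Promise.all(names.map((name) => caches.delete(name))))
      .then(() => {
        // Tell the page that requested the flush that we're done
        event.source?.postMessage({ type: "cacheFlushed" });
      });
  }
});
```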
@ -1,69 +0,0 @@
|
||||
import { AttachmentMeta, PageMeta } from "../../common/types.ts";
|
||||
import { SysCallMapping } from "../../plugos/system.ts";
|
||||
import { Space } from "../../common/spaces/space.ts";
|
||||
import {
|
||||
FileData,
|
||||
FileEncoding,
|
||||
} from "../../common/spaces/space_primitives.ts";
|
||||
|
||||
import { FileMeta as PlugFileMeta } from "../../plug-api/plugos-syscall/types.ts";
|
||||
|
||||
export default (space: Space): SysCallMapping => {
|
||||
return {
|
||||
"space.listPages": (): PageMeta[] => {
|
||||
return space.listPages();
|
||||
},
|
||||
"space.readPage": async (
|
||||
_ctx,
|
||||
name: string,
|
||||
): Promise<string> => {
|
||||
return (await space.readPage(name)).text;
|
||||
},
|
||||
"space.getPageMeta": (_ctx, name: string): Promise<PageMeta> => {
|
||||
return space.getPageMeta(name);
|
||||
},
|
||||
"space.writePage": (
|
||||
_ctx,
|
||||
name: string,
|
||||
text: string,
|
||||
): Promise<PageMeta> => {
|
||||
return space.writePage(name, text);
|
||||
},
|
||||
"space.deletePage": (_ctx, name: string) => {
|
||||
return space.deletePage(name);
|
||||
},
|
||||
"space.listPlugs": (): Promise<string[]> => {
|
||||
return space.listPlugs();
|
||||
},
|
||||
"space.listAttachments": async (): Promise<AttachmentMeta[]> => {
|
||||
return await space.fetchAttachmentList();
|
||||
},
|
||||
"space.readAttachment": async (
|
||||
_ctx,
|
||||
name: string,
|
||||
): Promise<FileData> => {
|
||||
return (await space.readAttachment(name, "dataurl")).data;
|
||||
},
|
||||
"space.getAttachmentMeta": async (
|
||||
_ctx,
|
||||
name: string,
|
||||
): Promise<AttachmentMeta> => {
|
||||
return await space.getAttachmentMeta(name);
|
||||
},
|
||||
"space.writeAttachment": async (
|
||||
_ctx,
|
||||
name: string,
|
||||
encoding: FileEncoding,
|
||||
data: string,
|
||||
): Promise<AttachmentMeta> => {
|
||||
return await space.writeAttachment(name, encoding, data);
|
||||
},
|
||||
"space.deleteAttachment": async (_ctx, name: string) => {
|
||||
await space.deleteAttachment(name);
|
||||
},
|
||||
|
||||
"space.listFiles": (_ctx, path: string): Promise<PlugFileMeta[]> => {
|
||||
return space.listFiles(path);
|
||||
},
|
||||
};
|
||||
};
|
@ -1,132 +0,0 @@
|
||||
import { SysCallMapping, System } from "../../plugos/system.ts";
|
||||
import type { SyncEndpoint } from "../../plug-api/silverbullet-syscall/sync.ts";
|
||||
import { SpaceSync, SyncStatusItem } from "../spaces/sync.ts";
|
||||
import { HttpSpacePrimitives } from "../spaces/http_space_primitives.ts";
|
||||
import { SpacePrimitives } from "../spaces/space_primitives.ts";
|
||||
|
||||
export function syncSyscalls(
|
||||
localSpace: SpacePrimitives,
|
||||
system: System<any>,
|
||||
): SysCallMapping {
|
||||
return {
|
||||
"sync.syncAll": async (
|
||||
_ctx,
|
||||
endpoint: SyncEndpoint,
|
||||
snapshot: Record<string, SyncStatusItem>,
|
||||
): Promise<
|
||||
{
|
||||
snapshot: Record<string, SyncStatusItem>;
|
||||
operations: number;
|
||||
// The reason to not just throw an Error is so that the partially updated snapshot can still be saved
|
||||
error?: string;
|
||||
}
|
||||
> => {
|
||||
const { spaceSync } = setupSync(endpoint, snapshot);
|
||||
|
||||
try {
|
||||
const operations = await spaceSync.syncFiles(
|
||||
SpaceSync.primaryConflictResolver,
|
||||
);
|
||||
return {
|
||||
// And convert back to JSON
|
||||
snapshot: Object.fromEntries(spaceSync.snapshot),
|
||||
operations,
|
||||
};
|
||||
} catch (e: any) {
|
||||
return {
|
||||
snapshot: Object.fromEntries(spaceSync.snapshot),
|
||||
operations: -1,
|
||||
error: e.message,
|
||||
};
|
||||
}
|
||||
},
|
||||
"sync.syncFile": async (
|
||||
_ctx,
|
||||
endpoint: SyncEndpoint,
|
||||
snapshot: Record<string, SyncStatusItem>,
|
||||
name: string,
|
||||
): Promise<
|
||||
{
|
||||
snapshot: Record<string, SyncStatusItem>;
|
||||
operations: number;
|
||||
// The reason to not just throw an Error is so that the partially updated snapshot can still be saved
|
||||
error?: string;
|
||||
}
|
||||
> => {
|
||||
const { spaceSync, remoteSpace } = setupSync(endpoint, snapshot);
|
||||
try {
|
||||
const localHash = (await localSpace.getFileMeta(name)).lastModified;
|
||||
let remoteHash: number | undefined = undefined;
|
||||
try {
|
||||
remoteHash = (await remoteSpace.getFileMeta(name)).lastModified;
|
||||
} catch (e: any) {
|
||||
if (e.message.includes("File not found")) {
|
||||
// File doesn't exist remotely, that's ok
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
const operations = await spaceSync.syncFile(
|
||||
name,
|
||||
localHash,
|
||||
remoteHash,
|
||||
SpaceSync.primaryConflictResolver,
|
||||
);
|
||||
return {
|
||||
// And convert back to JSON
|
||||
snapshot: Object.fromEntries(spaceSync.snapshot),
|
||||
operations,
|
||||
};
|
||||
} catch (e: any) {
|
||||
return {
|
||||
snapshot: Object.fromEntries(spaceSync.snapshot),
|
||||
operations: -1,
|
||||
error: e.message,
|
||||
};
|
||||
}
|
||||
},
|
||||
"sync.check": async (_ctx, endpoint: SyncEndpoint): Promise<void> => {
|
||||
const syncSpace = new HttpSpacePrimitives(
|
||||
endpoint.url,
|
||||
endpoint.user,
|
||||
endpoint.password,
|
||||
);
|
||||
// Let's just fetch the file list and see if it works
|
||||
try {
|
||||
await syncSpace.fetchFileList();
|
||||
} catch (e: any) {
|
||||
console.error("Sync check failure", e.message);
|
||||
throw e;
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
function setupSync(
|
||||
endpoint: SyncEndpoint,
|
||||
snapshot: Record<string, SyncStatusItem>,
|
||||
) {
|
||||
const remoteSpace = new HttpSpacePrimitives(
|
||||
endpoint.url,
|
||||
endpoint.user,
|
||||
endpoint.password,
|
||||
// Base64 PUTs to support mobile
|
||||
true,
|
||||
);
|
||||
// Convert from JSON to a Map
|
||||
const syncStatusMap = new Map<string, SyncStatusItem>(
|
||||
Object.entries(snapshot),
|
||||
);
|
||||
const spaceSync = new SpaceSync(
|
||||
localSpace,
|
||||
remoteSpace,
|
||||
syncStatusMap,
|
||||
{
|
||||
excludePrefixes: endpoint.excludePrefixes,
|
||||
// Log to the "sync" plug sandbox
|
||||
logger: system.loadedPlugs.get("sync")!.sandbox!,
|
||||
},
|
||||
);
|
||||
return { spaceSync, remoteSpace };
|
||||
}
|
||||
}
|
@@ -1,4 +1,4 @@
export const maximumAttachmentSize = 100 * 1024 * 1024; // 100 MB
export const maximumAttachmentSize = 20 * 1024 * 1024; // 20 MB

export type FileMeta = {
  name: string;
@@ -7,25 +7,3 @@ export type FileMeta = {
  size: number;
  perm: "ro" | "rw";
} & Record<string, any>;

export type PageMeta = {
  name: string;
  lastModified: number;
  lastOpened?: number;
  perm: "ro" | "rw";
} & Record<string, any>;

export type AttachmentMeta = {
  name: string;
  contentType: string;
  lastModified: number;
  size: number;
  perm: "ro" | "rw";
};

// Used by FilterBox
export type FilterOption = {
  name: string;
  orderId?: number;
  hint?: string;
} & Record<string, any>;
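For reference, a `FileMeta` value as the implementations above construct it looks like this; the concrete values are made up:

```typescript
const exampleMeta: FileMeta = {
  name: "index.md",
  lastModified: Date.now(),
  contentType: "text/markdown",
  size: 123,
  perm: "rw",
};
```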
@ -1,6 +1,6 @@
|
||||
import { SETTINGS_TEMPLATE } from "./settings_template.ts";
|
||||
import { YAML } from "./deps.ts";
|
||||
import { Space } from "./spaces/space.ts";
|
||||
import { SpacePrimitives } from "./spaces/space_primitives.ts";
|
||||
|
||||
export function safeRun(fn: () => Promise<void>) {
|
||||
fn().catch((e) => {
|
||||
@ -33,43 +33,31 @@ export function parseYamlSettings(settingsMarkdown: string): {
|
||||
}
|
||||
}
|
||||
|
||||
export async function ensureAndLoadSettings(
|
||||
space: Space,
|
||||
dontCreate: boolean,
|
||||
export async function ensureSettingsAndIndex(
|
||||
space: SpacePrimitives,
|
||||
): Promise<any> {
|
||||
if (dontCreate) {
|
||||
return {
|
||||
indexPage: "index",
|
||||
};
|
||||
}
|
||||
try {
|
||||
await space.getPageMeta("SETTINGS");
|
||||
await space.getFileMeta("SETTINGS.md");
|
||||
} catch {
|
||||
await space.writePage(
|
||||
"SETTINGS",
|
||||
SETTINGS_TEMPLATE,
|
||||
await space.writeFile(
|
||||
"SETTINGS.md",
|
||||
new TextEncoder().encode(SETTINGS_TEMPLATE),
|
||||
true,
|
||||
);
|
||||
}
|
||||
|
||||
const { text: settingsText } = await space.readPage("SETTINGS");
|
||||
const settings = parseYamlSettings(settingsText);
|
||||
if (!settings.indexPage) {
|
||||
settings.indexPage = "index";
|
||||
}
|
||||
|
||||
// Ok, then let's also write the index page
|
||||
try {
|
||||
await space.getPageMeta(settings.indexPage);
|
||||
await space.getFileMeta("index.md");
|
||||
} catch {
|
||||
await space.writePage(
|
||||
settings.indexPage,
|
||||
await space.writeFile(
|
||||
"index.md",
|
||||
new TextEncoder().encode(
|
||||
`Hello! And welcome to your brand new SilverBullet space!
|
||||
|
||||
<!-- #use [[💭 silverbullet.md/Getting Started]] -->
|
||||
Loading some onboarding content for you (but doing so does require a working internet connection)...
|
||||
<!-- /use -->`,
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
return settings;
|
||||
}
|
||||
}
|
||||
|
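For context, this bootstrap helper now works directly against a `SpacePrimitives` implementation. A hedged sketch of a startup call, where the `space` variable is assumed to be any such implementation (for example the IndexedDB space above):

```typescript
// Hypothetical startup call: make sure SETTINGS.md and index.md exist in the space
await ensureSettingsAndIndex(space);
```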
32
deno.jsonc
@ -1,30 +1,20 @@
|
||||
{
|
||||
"tasks": {
|
||||
"clean": "rm -rf dist dist_bundle",
|
||||
"install": "deno install -f -A --unstable --importmap import_map.json silverbullet.ts",
|
||||
"check": "find web common server plugs cmd plug-api plugos -name '*.ts*' | xargs deno check",
|
||||
"clean": "rm -rf dist dist_client_bundle dist_plug_bundle website_build",
|
||||
"install": "deno install -f -A --importmap import_map.json silverbullet.ts",
|
||||
"check": "find . -name '*.ts*' | xargs deno check",
|
||||
"test": "deno test -A --unstable",
|
||||
"build": "deno run -A --unstable build_plugs.ts && deno run -A --unstable build_web.ts",
|
||||
"plugs": "deno run -A --unstable build_plugs.ts",
|
||||
"watch-web": "deno run -A --unstable --check build_web.ts --watch",
|
||||
"server": "deno run -A --unstable --check silverbullet.ts",
|
||||
"watch-server": "deno run -A --unstable --check --watch silverbullet.ts",
|
||||
"build": "deno run -A build_plugs.ts && deno run -A --unstable build_web.ts",
|
||||
"plugs": "deno run -A build_plugs.ts",
|
||||
"watch-web": "deno run -A --check build_web.ts --watch",
|
||||
"server": "deno run -A --check silverbullet.ts",
|
||||
"watch-server": "deno run -A --check --watch silverbullet.ts",
|
||||
// The only reason to run a shell script is that deno task doesn't support globs yet (e.g. *.plug.yaml)
|
||||
"watch-plugs": "deno run -A --unstable --check build_plugs.ts -w",
|
||||
"bundle": "deno bundle silverbullet.ts dist/silverbullet.js",
|
||||
"watch-plugs": "deno run -A --check build_plugs.ts -w",
|
||||
"bundle": "deno run -A build_bundle.ts",
|
||||
// Regenerates some bundle files (checked into the repo)
|
||||
// Install lezer-generator with "npm install -g @lezer/generator"
|
||||
"generate": "deno run -A plugos/gen.ts && lezer-generator common/markdown_parser/query.grammar -o common/markdown_parser/parse-query.js",
|
||||
// Install npm dependencies for desktop app
|
||||
"desktop:deps": "cd desktop && npm install",
|
||||
// Run the desktop app for local development
|
||||
"desktop:run": "cd desktop && npm start",
|
||||
// Build the desktop app as a package for this platform
|
||||
"desktop:build": "deno task build && deno task bundle && cd desktop && npm run make",
|
||||
// Mobile
|
||||
"mobile:deps": "cd mobile && npm install",
|
||||
"mobile:clean-build": "deno task clean && deno task plugs && deno run -A --unstable --check build_mobile.ts && cd mobile && npx cap sync",
|
||||
"mobile:build": "deno run -A --unstable --check build_mobile.ts && cd mobile && npx cap copy"
|
||||
"generate": "deno run -A plugos/gen.ts && lezer-generator common/markdown_parser/query.grammar -o common/markdown_parser/parse-query.js"
|
||||
},
|
||||
|
||||
"compilerOptions": {
|
||||
|
@ -1,16 +0,0 @@
|
||||
{
|
||||
"env": {
|
||||
"browser": true,
|
||||
"es6": true,
|
||||
"node": true
|
||||
},
|
||||
"extends": [
|
||||
"eslint:recommended",
|
||||
"plugin:@typescript-eslint/eslint-recommended",
|
||||
"plugin:@typescript-eslint/recommended",
|
||||
"plugin:import/recommended",
|
||||
"plugin:import/electron",
|
||||
"plugin:import/typescript"
|
||||
],
|
||||
"parser": "@typescript-eslint/parser"
|
||||
}
|
92
desktop/.gitignore
vendored
@ -1,92 +0,0 @@
|
||||
resources
|
||||
deno-download*
|
||||
|
||||
# Logs
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
lerna-debug.log*
|
||||
|
||||
# Diagnostic reports (https://nodejs.org/api/report.html)
|
||||
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
|
||||
|
||||
# Runtime data
|
||||
pids
|
||||
*.pid
|
||||
*.seed
|
||||
*.pid.lock
|
||||
.DS_Store
|
||||
|
||||
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||
lib-cov
|
||||
|
||||
# Coverage directory used by tools like istanbul
|
||||
coverage
|
||||
*.lcov
|
||||
|
||||
# nyc test coverage
|
||||
.nyc_output
|
||||
|
||||
# node-waf configuration
|
||||
.lock-wscript
|
||||
|
||||
# Compiled binary addons (https://nodejs.org/api/addons.html)
|
||||
build/Release
|
||||
|
||||
# Dependency directories
|
||||
node_modules/
|
||||
jspm_packages/
|
||||
|
||||
# TypeScript v1 declaration files
|
||||
typings/
|
||||
|
||||
# TypeScript cache
|
||||
*.tsbuildinfo
|
||||
|
||||
# Optional npm cache directory
|
||||
.npm
|
||||
|
||||
# Optional eslint cache
|
||||
.eslintcache
|
||||
|
||||
# Optional REPL history
|
||||
.node_repl_history
|
||||
|
||||
# Output of 'npm pack'
|
||||
*.tgz
|
||||
|
||||
# Yarn Integrity file
|
||||
.yarn-integrity
|
||||
|
||||
# dotenv environment variables file
|
||||
.env
|
||||
.env.test
|
||||
|
||||
# parcel-bundler cache (https://parceljs.org/)
|
||||
.cache
|
||||
|
||||
# next.js build output
|
||||
.next
|
||||
|
||||
# nuxt.js build output
|
||||
.nuxt
|
||||
|
||||
# vuepress build output
|
||||
.vuepress/dist
|
||||
|
||||
# Serverless directories
|
||||
.serverless/
|
||||
|
||||
# FuseBox cache
|
||||
.fusebox/
|
||||
|
||||
# DynamoDB Local files
|
||||
.dynamodb/
|
||||
|
||||
# Webpack
|
||||
.webpack/
|
||||
|
||||
# Electron-Forge
|
||||
out/
|
@ -1,15 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>com.apple.security.cs.allow-jit</key>
|
||||
<true/>
|
||||
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
|
||||
<true/>
|
||||
<!--
|
||||
<key>com.apple.security.cs.disable-library-validation</key>
|
||||
<true/>
|
||||
<key>com.apple.security.cs.disable-executable-page-protection</key>
|
||||
<true/> -->
|
||||
</dict>
|
||||
</plist>
|
@ -1,145 +0,0 @@
|
||||
import type { ForgeConfig } from "@electron-forge/shared-types";
|
||||
import type { TargetArch } from "electron-packager";
|
||||
import { MakerSquirrel } from "@electron-forge/maker-squirrel";
|
||||
import { MakerZIP } from "@electron-forge/maker-zip";
|
||||
import { MakerDeb } from "@electron-forge/maker-deb";
|
||||
import { MakerRpm } from "@electron-forge/maker-rpm";
|
||||
import { WebpackPlugin } from "@electron-forge/plugin-webpack";
|
||||
|
||||
import { mainConfig } from "./webpack.main.config";
|
||||
import { rendererConfig } from "./webpack.renderer.config";
|
||||
import { platform } from "node:os";
|
||||
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import decompress from "decompress";
|
||||
import { downloadFile } from "./http_util";
|
||||
|
||||
const denoVersion = "v1.29.1";
|
||||
|
||||
const denoZip: Record<string, string> = {
|
||||
"win32-x64": "deno-x86_64-pc-windows-msvc.zip",
|
||||
"darwin-x64": "deno-x86_64-apple-darwin.zip",
|
||||
"darwin-arm64": "deno-aarch64-apple-darwin.zip",
|
||||
"linux-x64": "deno-x86_64-unknown-linux-gnu.zip",
|
||||
};
|
||||
|
||||
const denoExecutableResource = platform() === "win32"
|
||||
? "resources/deno.exe"
|
||||
: "resources/deno";
|
||||
|
||||
async function downloadDeno(platform: string, arch: string): Promise<void> {
|
||||
const folder = fs.mkdtempSync("deno-download");
|
||||
const destFile = path.join(folder, "deno.zip");
|
||||
const zipFile = denoZip[`${platform}-${arch}`];
|
||||
if (!zipFile) {
|
||||
throw new Error(`No deno binary for ${platform}-${arch}`);
|
||||
}
|
||||
await downloadFile(
|
||||
`https://github.com/denoland/deno/releases/download/${denoVersion}/${zipFile}`,
|
||||
destFile,
|
||||
);
|
||||
await decompress(destFile, "resources");
|
||||
fs.rmSync(folder, { recursive: true });
|
||||
}
|
||||
|
||||
const config: ForgeConfig = {
|
||||
packagerConfig: {
|
||||
name: process.platform === "linux" ? "silverbullet" : "SilverBullet",
|
||||
executableName: process.platform === "linux"
|
||||
? "silverbullet"
|
||||
: "SilverBullet",
|
||||
icon: "../web/images/logo",
|
||||
appBundleId: "md.silverbullet",
|
||||
extraResource: [denoExecutableResource, "resources/silverbullet.js", "resources/logo.png"],
|
||||
beforeCopyExtraResources: [(
|
||||
_buildPath: string,
|
||||
_electronVersion: string,
|
||||
platform: TargetArch,
|
||||
arch: TargetArch,
|
||||
callback: (err?: Error | null) => void,
|
||||
) => {
|
||||
if (fs.existsSync(denoExecutableResource)) {
|
||||
fs.rmSync(denoExecutableResource, { force: true });
|
||||
}
|
||||
Promise.resolve().then(async () => {
|
||||
// Download deno
|
||||
await downloadDeno(platform, arch);
|
||||
// Copy silverbullet.js
|
||||
fs.copyFileSync("../dist/silverbullet.js", "resources/silverbullet.js");
|
||||
fs.copyFileSync("../web/images/logo.png", "resources/logo.png");
|
||||
}).then((r) => callback()).catch(callback);
|
||||
}],
|
||||
osxSign: {
|
||||
optionsForFile: (filePath: string) => {
|
||||
// So these entitlements somehow only seem to be needed for the Intel macOS build
|
||||
// Why? No idea. But it works.
|
||||
return {
|
||||
entitlements: "entitlements.plist",
|
||||
};
|
||||
},
|
||||
},
|
||||
},
|
||||
rebuildConfig: {},
|
||||
makers: [
|
||||
new MakerSquirrel({
|
||||
iconUrl: "https://silverbullet.md/logo.ico",
|
||||
setupIcon: "../web/images/logo.ico"
|
||||
}),
|
||||
new MakerZIP({}, ["darwin", "linux"]),
|
||||
new MakerRpm({}),
|
||||
new MakerDeb({
|
||||
options: {
|
||||
icon: "../web/images/logo.png"
|
||||
}
|
||||
}),
|
||||
],
|
||||
|
||||
plugins: [
|
||||
new WebpackPlugin({
|
||||
port: 3001,
|
||||
mainConfig,
|
||||
renderer: {
|
||||
config: rendererConfig,
|
||||
|
||||
entryPoints: [
|
||||
{
|
||||
// html: "./src/index.html",
|
||||
// js: "./src/renderer.ts",
|
||||
name: "main_window",
|
||||
preload: {
|
||||
js: "./src/preload.ts",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
],
|
||||
};
|
||||
|
||||
function notarizeMaybe() {
|
||||
if (process.platform !== "darwin") {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!process.env.CI) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!process.env.APPLE_ID || !process.env.APPLE_ID_PASSWORD) {
|
||||
console.warn(
|
||||
"Should be notarizing, but environment variables APPLE_ID or APPLE_ID_PASSWORD are missing!",
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
config.packagerConfig!.osxNotarize = {
|
||||
appleId: process.env.APPLE_ID!,
|
||||
appleIdPassword: process.env.APPLE_ID_PASSWORD!,
|
||||
teamId: process.env.APPLE_TEAM_ID!,
|
||||
};
|
||||
}
|
||||
|
||||
notarizeMaybe();
|
||||
|
||||
export default config;
|
@ -1,29 +0,0 @@
|
||||
import axios from "axios";
|
||||
import fs from "node:fs";
|
||||
|
||||
export async function downloadFile(
|
||||
url: string,
|
||||
destFile: string,
|
||||
): Promise<void> {
|
||||
const file = fs.createWriteStream(destFile);
|
||||
let response = await axios.request({
|
||||
url: url,
|
||||
method: "GET",
|
||||
responseType: "stream",
|
||||
});
|
||||
return new Promise((resolve, reject) => {
|
||||
response.data.pipe(file);
|
||||
let error: Error | null = null;
|
||||
file.on("error", (e) => {
|
||||
error = e;
|
||||
reject(e);
|
||||
});
|
||||
file.on("close", () => {
|
||||
if (error) {
|
||||
return;
|
||||
}
|
||||
file.close();
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
}
|
19081
desktop/package-lock.json
generated
@ -1,53 +0,0 @@
|
||||
{
|
||||
"name": "silverbullet",
|
||||
"version": "0.2.13",
|
||||
"description": "Markdown as a platform",
|
||||
"main": ".webpack/main",
|
||||
"scripts": {
|
||||
"start": "electron-forge start",
|
||||
"package": "electron-forge package",
|
||||
"make": "electron-forge make",
|
||||
"publish": "electron-forge publish",
|
||||
"lint": "eslint --ext .ts,.tsx .",
|
||||
"clean": "rm -rf out"
|
||||
},
|
||||
"keywords": [],
|
||||
"repository": "github:silverbulletmd/silverbullet",
|
||||
"author": {
|
||||
"name": "Zef Hemel",
|
||||
"email": "zef@zef.me"
|
||||
},
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"@electron-forge/cli": "^6.0.4",
|
||||
"@electron-forge/maker-deb": "^6.0.4",
|
||||
"@electron-forge/maker-rpm": "^6.0.4",
|
||||
"@electron-forge/maker-squirrel": "^6.0.4",
|
||||
"@electron-forge/maker-zip": "^6.0.4",
|
||||
"@electron-forge/plugin-webpack": "^6.0.4",
|
||||
"@types/decompress": "^4.2.4",
|
||||
"@typescript-eslint/eslint-plugin": "^5.47.1",
|
||||
"@typescript-eslint/parser": "^5.47.1",
|
||||
"@vercel/webpack-asset-relocator-loader": "^1.7.3",
|
||||
"css-loader": "^6.7.3",
|
||||
"electron": "22.0.0",
|
||||
"eslint": "^8.31.0",
|
||||
"eslint-plugin-import": "^2.26.0",
|
||||
"fork-ts-checker-webpack-plugin": "^7.2.14",
|
||||
"node-loader": "^2.0.0",
|
||||
"style-loader": "^3.3.1",
|
||||
"ts-loader": "^9.4.2",
|
||||
"ts-node": "^10.9.1",
|
||||
"typescript": "~4.5.4"
|
||||
},
|
||||
"dependencies": {
|
||||
"@electron-forge/publisher-github": "^6.0.4",
|
||||
"axios": "^1.2.2",
|
||||
"decompress": "^4.2.1",
|
||||
"electron-squirrel-startup": "^1.0.0",
|
||||
"electron-store": "^8.1.0",
|
||||
"node-fetch": "^3.3.0",
|
||||
"portfinder": "^1.0.32",
|
||||
"update-electron-app": "^2.0.1"
|
||||
}
|
||||
}
|
@ -1,61 +0,0 @@
|
||||
import { app, BrowserWindow, Menu } from "electron";
|
||||
import { openFolder, openFolderPicker } from "./instance";
|
||||
import { menu } from "./menu";
|
||||
import { getOpenWindows, newWindowState } from "./store";
|
||||
|
||||
// This allows TypeScript to pick up the magic constants that's auto-generated by Forge's Webpack
|
||||
// plugin that tells the Electron app where to look for the Webpack-bundled app code (depending on
|
||||
// whether you're running in development or production).
|
||||
declare const MAIN_WINDOW_WEBPACK_ENTRY: string;
|
||||
declare const MAIN_WINDOW_PRELOAD_WEBPACK_ENTRY: string;
|
||||
|
||||
// Handle creating/removing shortcuts on Windows when installing/uninstalling.
|
||||
if (require("electron-squirrel-startup")) {
|
||||
app.quit();
|
||||
}
|
||||
|
||||
// Auto updater
|
||||
require("update-electron-app")();
|
||||
|
||||
async function boot() {
|
||||
console.log("Process args", process.argv);
|
||||
const openWindows = getOpenWindows();
|
||||
if (openWindows.length === 0) {
|
||||
await openFolderPicker();
|
||||
} else {
|
||||
for (const window of openWindows) {
|
||||
// Doing this sequentially to avoid race conditions in starting servers
|
||||
await openFolder(window);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// This method will be called when Electron has finished
|
||||
// initialization and is ready to create browser windows.
|
||||
// Some APIs can only be used after this event occurs.
|
||||
app.on("ready", () => {
|
||||
Menu.setApplicationMenu(menu);
|
||||
console.log("App data path", app.getPath("userData"));
|
||||
boot().catch(console.error);
|
||||
});
|
||||
|
||||
app.on("open-file", (event, path) => {
|
||||
openFolder(newWindowState(path)).catch(console.error);
|
||||
});
|
||||
|
||||
// Quit when all windows are closed, except on macOS. There, it's common
|
||||
// for applications and their menu bar to stay active until the user quits
|
||||
// explicitly with Cmd + Q.
|
||||
app.on("window-all-closed", () => {
|
||||
if (process.platform !== "darwin") {
|
||||
app.quit();
|
||||
}
|
||||
});
|
||||
|
||||
app.on("activate", () => {
|
||||
// On OS X it's common to re-create a window in the app when the
|
||||
// dock icon is clicked and there are no other windows open.
|
||||
if (BrowserWindow.getAllWindows().length === 0) {
|
||||
boot();
|
||||
}
|
||||
});
|
@ -1,251 +0,0 @@
|
||||
import { ChildProcessWithoutNullStreams, spawn } from "node:child_process";
|
||||
import {
|
||||
app,
|
||||
BrowserWindow,
|
||||
dialog,
|
||||
Menu,
|
||||
MenuItem,
|
||||
nativeImage,
|
||||
shell,
|
||||
} from "electron";
|
||||
import portfinder from "portfinder";
|
||||
import fetch from "node-fetch";
|
||||
import { existsSync } from "node:fs";
|
||||
import { platform } from "node:os";
|
||||
import {
|
||||
newWindowState,
|
||||
persistWindowState,
|
||||
removeWindow,
|
||||
WindowState,
|
||||
} from "./store";
|
||||
|
||||
declare const MAIN_WINDOW_PRELOAD_WEBPACK_ENTRY: string;
|
||||
|
||||
type Instance = {
|
||||
folder: string;
|
||||
port: number;
|
||||
// Increased with "browser-window-created" event, decreased with "close" event
|
||||
refcount: number;
|
||||
proc: ChildProcessWithoutNullStreams;
|
||||
};
|
||||
|
||||
export const runningServers = new Map<string, Instance>();
|
||||
|
||||
// Should work for Linux and Mac
|
||||
let denoPath = `${process.resourcesPath}/deno`;
|
||||
|
||||
// If not...
|
||||
if (!existsSync(denoPath)) {
|
||||
// Windows
|
||||
if (platform() === "win32") {
|
||||
if (existsSync(`${process.resourcesPath}/deno.exe`)) {
|
||||
denoPath = `${process.resourcesPath}/deno.exe`;
|
||||
} else {
|
||||
denoPath = "deno.exe";
|
||||
}
|
||||
} else {
|
||||
// Everything else
|
||||
denoPath = "deno";
|
||||
}
|
||||
}
|
||||
|
||||
async function folderPicker(): Promise<string> {
|
||||
const dialogReturn = await dialog.showOpenDialog({
|
||||
title: "Pick a page folder",
|
||||
properties: ["openDirectory", "createDirectory"],
|
||||
});
|
||||
|
||||
if (dialogReturn.filePaths.length === 1) {
|
||||
return dialogReturn.filePaths[0];
|
||||
}
|
||||
}
|
||||
|
||||
export async function openFolderPicker() {
|
||||
const folderPath = await folderPicker();
|
||||
if (folderPath) {
|
||||
app.addRecentDocument(folderPath);
|
||||
openFolder(newWindowState(folderPath));
|
||||
}
|
||||
}
|
||||
|
||||
export async function openFolder(windowState: WindowState): Promise<void> {
|
||||
const instance = await spawnInstance(windowState.folderPath);
|
||||
newWindow(instance, windowState);
|
||||
}
|
||||
|
||||
function determineSilverBulletScriptPath(): string {
|
||||
let scriptPath = `${process.resourcesPath}/silverbullet.js`;
|
||||
if (!existsSync(scriptPath)) {
|
||||
console.log("Dev mode");
|
||||
// Assumption: we're running in dev mode (npm start)
|
||||
scriptPath = "../silverbullet.ts";
|
||||
}
|
||||
return scriptPath;
|
||||
}
|
||||
|
||||
async function spawnInstance(pagePath: string): Promise<Instance> {
|
||||
let instance = runningServers.get(pagePath);
|
||||
if (instance) {
|
||||
return instance;
|
||||
}
|
||||
|
||||
// Pick random port
|
||||
portfinder.setBasePort(3010);
|
||||
portfinder.setHighestPort(3999);
|
||||
const port = await portfinder.getPortPromise();
|
||||
|
||||
const proc = spawn(denoPath, [
|
||||
"run",
|
||||
"-A",
|
||||
"--unstable",
|
||||
determineSilverBulletScriptPath(),
|
||||
"--port",
|
||||
"" + port,
|
||||
pagePath,
|
||||
]);
|
||||
|
||||
proc.stdout.on("data", (data) => {
|
||||
process.stdout.write(`[SB Out] ${data}`);
|
||||
});
|
||||
|
||||
proc.stderr.on("data", (data) => {
|
||||
process.stderr.write(`[SB Err] ${data}`);
|
||||
});
|
||||
|
||||
proc.on("close", (code) => {
|
||||
if (code) {
|
||||
console.log(`child process exited with code ${code}`);
|
||||
}
|
||||
});
|
||||
|
||||
// Try for 15s to see if SB is live
|
||||
for (let i = 0; i < 30; i++) {
|
||||
try {
|
||||
const result = await fetch(`http://localhost:${port}`);
|
||||
if (result.ok) {
|
||||
console.log("Live!");
|
||||
instance = {
|
||||
folder: pagePath,
|
||||
port: port,
|
||||
refcount: 0,
|
||||
proc: proc,
|
||||
};
|
||||
runningServers.set(pagePath, instance);
|
||||
return instance;
|
||||
}
|
||||
console.log("Still booting...");
|
||||
} catch {
|
||||
console.log("Still booting...");
|
||||
}
|
||||
await new Promise((resolve) => setTimeout(resolve, 500));
|
||||
}
|
||||
// If we get here, the server never came up within the timeout; fail loudly
// instead of silently returning undefined.
throw new Error("SilverBullet server did not start in time");
}
|
||||
|
||||
// TODO: Make more specific
|
||||
export function findInstanceByUrl(url: URL) {
|
||||
for (const instance of runningServers.values()) {
|
||||
if (instance.port === +url.port) {
|
||||
return instance;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
let quitting = false;
|
||||
|
||||
const icon = nativeImage.createFromPath(process.resourcesPath + "/logo.png");
|
||||
export function newWindow(instance: Instance, windowState: WindowState) {
|
||||
const window = new BrowserWindow({
|
||||
height: windowState.height,
|
||||
width: windowState.width,
|
||||
x: windowState.x,
|
||||
y: windowState.y,
|
||||
icon,
|
||||
webPreferences: {
|
||||
preload: MAIN_WINDOW_PRELOAD_WEBPACK_ENTRY,
|
||||
},
|
||||
});
|
||||
|
||||
instance.refcount++;
|
||||
|
||||
persistWindowState(windowState, window);
|
||||
|
||||
window.webContents.setWindowOpenHandler(({ url }) => {
|
||||
const instance = findInstanceByUrl(new URL(url));
|
||||
if (instance) {
|
||||
newWindow(instance, newWindowState(instance.folder));
|
||||
} else {
|
||||
shell.openExternal(url);
|
||||
}
|
||||
return { action: "deny" };
|
||||
});
|
||||
|
||||
window.webContents.on("context-menu", (event, params) => {
|
||||
const menu = new Menu();
|
||||
|
||||
// Allow users to add the misspelled word to the dictionary
|
||||
if (params.misspelledWord) {
|
||||
// Add each spelling suggestion
|
||||
for (const suggestion of params.dictionarySuggestions) {
|
||||
menu.append(
|
||||
new MenuItem({
|
||||
label: suggestion,
|
||||
click: () => window.webContents.replaceMisspelling(suggestion),
|
||||
}),
|
||||
);
|
||||
}
|
||||
if (params.dictionarySuggestions.length > 0) {
|
||||
menu.append(new MenuItem({ type: "separator" }));
|
||||
}
|
||||
menu.append(
|
||||
new MenuItem({
|
||||
label: "Add to dictionary",
|
||||
click: () =>
|
||||
window.webContents.session.addWordToSpellCheckerDictionary(
|
||||
params.misspelledWord,
|
||||
),
|
||||
}),
|
||||
);
|
||||
menu.append(new MenuItem({ type: "separator" }));
|
||||
}
|
||||
|
||||
menu.append(new MenuItem({ label: "Cut", role: "cut" }));
|
||||
menu.append(new MenuItem({ label: "Copy", role: "copy" }));
|
||||
menu.append(new MenuItem({ label: "Paste", role: "paste" }));
|
||||
menu.popup();
|
||||
});
|
||||
|
||||
window.on("resized", () => {
|
||||
console.log("Reized window");
|
||||
persistWindowState(windowState, window);
|
||||
});
|
||||
|
||||
window.on("moved", () => {
|
||||
persistWindowState(windowState, window);
|
||||
});
|
||||
|
||||
window.webContents.on("did-navigate-in-page", () => {
|
||||
persistWindowState(windowState, window);
|
||||
});
|
||||
|
||||
window.once("close", () => {
|
||||
console.log("Closed window");
|
||||
instance.refcount--;
|
||||
console.log("Refcount", instance.refcount);
|
||||
if (!quitting) {
|
||||
removeWindow(windowState);
|
||||
}
|
||||
if (instance.refcount === 0) {
|
||||
console.log("Stopping server");
|
||||
instance.proc.kill();
|
||||
runningServers.delete(instance.folder);
|
||||
}
|
||||
});
|
||||
|
||||
window.loadURL(`http://localhost:${instance.port}${windowState.urlPath}`);
|
||||
}
|
||||
|
||||
app.on("before-quit", () => {
|
||||
console.log("Quitting");
|
||||
quitting = true;
|
||||
});
|
@ -1,198 +0,0 @@
|
||||
import { app, Menu, MenuItemConstructorOptions, shell } from "electron";
|
||||
import { findInstanceByUrl, newWindow, openFolderPicker } from "./instance";
|
||||
import { newWindowState } from "./store";
|
||||
import os from "node:os";
|
||||
|
||||
const template: MenuItemConstructorOptions[] = [
|
||||
{
|
||||
label: "File",
|
||||
role: "fileMenu",
|
||||
submenu: [
|
||||
{
|
||||
label: "New Window",
|
||||
accelerator: "CommandOrControl+N",
|
||||
click: (_item, win) => {
|
||||
const url = new URL(win.webContents.getURL());
|
||||
const instance = findInstanceByUrl(url);
|
||||
if (instance) {
|
||||
newWindow(instance, newWindowState(instance.folder));
|
||||
}
|
||||
},
|
||||
},
|
||||
{
|
||||
label: "Open Space",
|
||||
accelerator: "CommandOrControl+Shift+O",
|
||||
click: () => {
|
||||
openFolderPicker();
|
||||
},
|
||||
},
|
||||
os.platform() === "darwin"
|
||||
? {
|
||||
role: "recentDocuments",
|
||||
submenu: [
|
||||
{
|
||||
role: "clearRecentDocuments",
|
||||
},
|
||||
],
|
||||
}
|
||||
: { type: "separator" },
|
||||
{ type: "separator" },
|
||||
{
|
||||
label: "Quit",
|
||||
accelerator: "CommandOrControl+Q",
|
||||
role: "quit",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
label: "Edit",
|
||||
role: "editMenu",
|
||||
submenu: [
|
||||
{
|
||||
label: "Undo",
|
||||
accelerator: "CommandOrControl+Z",
|
||||
role: "undo",
|
||||
},
|
||||
{
|
||||
label: "Redo",
|
||||
accelerator: "Shift+CommandOrControl+Z",
|
||||
role: "redo",
|
||||
},
|
||||
{ type: "separator" },
|
||||
{
|
||||
label: "Cut",
|
||||
accelerator: "CommandOrControl+X",
|
||||
role: "cut",
|
||||
},
|
||||
{
|
||||
label: "Copy",
|
||||
accelerator: "CommandOrControl+C",
|
||||
role: "copy",
|
||||
},
|
||||
{
|
||||
label: "Paste",
|
||||
accelerator: "CommandOrControl+V",
|
||||
role: "paste",
|
||||
},
|
||||
{
|
||||
label: "Paste and match style",
|
||||
accelerator: "CommandOrControl+Shift+V",
|
||||
role: "pasteAndMatchStyle",
|
||||
},
|
||||
{
|
||||
label: "Select All",
|
||||
accelerator: "CommandOrControl+A",
|
||||
role: "selectAll",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
label: "Navigate",
|
||||
submenu: [
|
||||
{
|
||||
label: "Home",
|
||||
accelerator: "Alt+h",
|
||||
click: (_item, win) => {
|
||||
win.loadURL(new URL(win.webContents.getURL()).origin);
|
||||
},
|
||||
},
|
||||
{
|
||||
label: "Reload",
|
||||
accelerator: "CommandOrControl+r",
|
||||
role: "forceReload",
|
||||
},
|
||||
{
|
||||
label: "Back",
|
||||
accelerator: "CommandOrControl+[",
|
||||
click: (_item, win) => {
|
||||
win.webContents.goBack();
|
||||
},
|
||||
},
|
||||
{
|
||||
label: "Forward",
|
||||
accelerator: "CommandOrControl+]",
|
||||
click: (_item, win) => {
|
||||
win.webContents.goForward();
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
label: "Develop",
|
||||
submenu: [
|
||||
{
|
||||
label: "Open in Browser",
|
||||
click: (_item, win) => {
|
||||
shell.openExternal(win.webContents.getURL());
|
||||
},
|
||||
},
|
||||
{
|
||||
label: "Open Space Folder",
|
||||
click: (_item, win) => {
|
||||
let url = win.webContents.getURL();
|
||||
shell.openPath(findInstanceByUrl(new URL(url)).folder);
|
||||
},
|
||||
},
|
||||
{
|
||||
label: "Toggle Dev Tools",
|
||||
accelerator: "CommandOrControl+Alt+J",
|
||||
role: "toggleDevTools",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
label: "Window",
|
||||
role: "windowMenu",
|
||||
submenu: [
|
||||
{
|
||||
label: "Minimize",
|
||||
accelerator: "CommandOrControl+M",
|
||||
role: "minimize",
|
||||
},
|
||||
{
|
||||
label: "Maximize",
|
||||
click: (_item, win) => {
|
||||
win.maximize();
|
||||
},
|
||||
},
|
||||
{
|
||||
label: "Close",
|
||||
accelerator: "CommandOrControl+W",
|
||||
role: "close",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
label: "Help",
|
||||
submenu: [
|
||||
{
|
||||
label: "Documentation",
|
||||
click: () => {
|
||||
shell.openExternal("https://silverbullet.md");
|
||||
},
|
||||
},
|
||||
{
|
||||
role: "about",
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
if (process.platform === "darwin") {
|
||||
const name = app.getName();
|
||||
template.unshift({
|
||||
label: name,
|
||||
submenu: [
|
||||
{ role: "about" },
|
||||
{ role: "services" },
|
||||
{ type: "separator" },
|
||||
{ role: "hide" },
|
||||
{ role: "hideOthers" },
|
||||
{ role: "unhide" },
|
||||
{ type: "separator" },
|
||||
{ role: "quit" },
|
||||
],
|
||||
});
|
||||
}
|
||||
|
||||
export const menu = Menu.buildFromTemplate(template);
|
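Not part of this diff: the exported `menu` above is built from the template, but it still has to be installed from the main process; that wiring lived in the removed `desktop/src/index.ts`, which is only partially shown here. A minimal sketch of how such an exported menu is typically activated in Electron (illustrative, not the author's exact code):

```ts
import { Menu } from "electron";
import { menu } from "./menu";

// Install the application menu once at startup (standard Electron API).
Menu.setApplicationMenu(menu);
```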
@ -1,3 +0,0 @@
|
||||
// See the Electron documentation for details on how to use preload scripts:
|
||||
// https://www.electronjs.org/docs/latest/tutorial/process-model#preload-scripts
|
||||
console.log("Yo, I'm preload.ts!");
|
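Not part of this diff: the removed preload script was just a logging stub. For context, a preload script in an Electron app with `contextIsolation` enabled usually bridges the renderer and the main process roughly like this; the API and channel names below are hypothetical and not taken from this codebase:

```ts
import { contextBridge, ipcRenderer } from "electron";

// Hypothetical bridge: expose a narrow, explicit API to the renderer
// instead of granting it full Node.js access.
contextBridge.exposeInMainWorld("desktopApi", {
  openFolderPicker: () => ipcRenderer.invoke("open-folder-picker"),
});
```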
@ -1,31 +0,0 @@
|
||||
/**
|
||||
* This file will automatically be loaded by webpack and run in the "renderer" context.
|
||||
* To learn more about the differences between the "main" and the "renderer" context in
|
||||
* Electron, visit:
|
||||
*
|
||||
* https://electronjs.org/docs/latest/tutorial/process-model
|
||||
*
|
||||
* By default, Node.js integration in this file is disabled. When enabling Node.js integration
|
||||
* in a renderer process, please be aware of potential security implications. You can read
|
||||
* more about security risks here:
|
||||
*
|
||||
* https://electronjs.org/docs/tutorial/security
|
||||
*
|
||||
* To enable Node.js integration in this file, open up `main.js` and enable the `nodeIntegration`
|
||||
* flag:
|
||||
*
|
||||
* ```
|
||||
* // Create the browser window.
|
||||
* mainWindow = new BrowserWindow({
|
||||
* width: 800,
|
||||
* height: 600,
|
||||
* webPreferences: {
|
||||
* nodeIntegration: true
|
||||
* }
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
|
||||
import './index.css';
|
||||
|
||||
console.log('👋 This message is being logged by "renderer.js", included via webpack');
|
@ -1,79 +0,0 @@
|
||||
import { BrowserWindow } from "electron";
|
||||
import Store from "electron-store";
|
||||
|
||||
export type WindowState = {
|
||||
id: string; // random GUID
|
||||
width: number;
|
||||
height: number;
|
||||
x?: number;
|
||||
y?: number;
|
||||
folderPath: string;
|
||||
urlPath: string;
|
||||
};
|
||||
|
||||
const store = new Store({
|
||||
defaults: {
|
||||
openWindows: [],
|
||||
},
|
||||
});
|
||||
|
||||
export function getOpenWindows(): WindowState[] {
|
||||
return store.get("openWindows");
|
||||
}
|
||||
|
||||
import crypto from "node:crypto";
|
||||
|
||||
export function newWindowState(folderPath: string): WindowState {
|
||||
return {
|
||||
id: crypto.randomBytes(16).toString("hex"),
|
||||
width: 800,
|
||||
height: 600,
|
||||
x: undefined,
|
||||
y: undefined,
|
||||
folderPath,
|
||||
urlPath: "/",
|
||||
};
|
||||
}
|
||||
|
||||
export function persistWindowState(
|
||||
windowState: WindowState,
|
||||
window: BrowserWindow,
|
||||
) {
|
||||
const [width, height] = window.getSize();
|
||||
const [x, y] = window.getPosition();
|
||||
windowState.height = height;
|
||||
windowState.width = width;
|
||||
windowState.x = x;
|
||||
windowState.y = y;
|
||||
const urlString = window.webContents.getURL();
|
||||
if (urlString) {
|
||||
windowState.urlPath = new URL(urlString).pathname;
|
||||
}
|
||||
|
||||
let found = false;
|
||||
const newWindows = getOpenWindows().map((win) => {
|
||||
if (win.id === windowState.id) {
|
||||
found = true;
|
||||
return windowState;
|
||||
} else {
|
||||
return win;
|
||||
}
|
||||
});
|
||||
if (!found) {
|
||||
newWindows.push(windowState);
|
||||
}
|
||||
store.set(
|
||||
"openWindows",
|
||||
newWindows,
|
||||
);
|
||||
}
|
||||
|
||||
export function removeWindow(windowState: WindowState) {
|
||||
const newWindows = getOpenWindows().filter((win) =>
|
||||
win.id !== windowState.id
|
||||
);
|
||||
store.set(
|
||||
"openWindows",
|
||||
newWindows,
|
||||
);
|
||||
}
|
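Not part of this diff: these helpers were consumed by `instance.ts` above (see the `persistWindowState` calls) and by the removed `index.ts`, which is not shown in full. A hypothetical boot flow that restores previously open windows could look roughly like this, assuming only the exports defined in this file and in `instance.ts`:

```ts
import { getOpenWindows } from "./store";
import { openFolder, openFolderPicker } from "./instance";

// Illustrative startup: reopen remembered windows, or prompt for a folder on first run.
async function boot() {
  const windows = getOpenWindows();
  if (windows.length === 0) {
    await openFolderPicker();
    return;
  }
  for (const windowState of windows) {
    await openFolder(windowState);
  }
}
```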
@ -1,19 +0,0 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES6",
|
||||
"allowJs": true,
|
||||
"module": "commonjs",
|
||||
"skipLibCheck": true,
|
||||
"esModuleInterop": true,
|
||||
"noImplicitAny": true,
|
||||
"sourceMap": true,
|
||||
"baseUrl": ".",
|
||||
"outDir": "dist",
|
||||
"moduleResolution": "node",
|
||||
"resolveJsonModule": true,
|
||||
"paths": {
|
||||
"*": ["node_modules/*"]
|
||||
}
|
||||
},
|
||||
"include": ["src/**/*"]
|
||||
}
|
@ -1,18 +0,0 @@
|
||||
import type { Configuration } from 'webpack';
|
||||
|
||||
import { rules } from './webpack.rules';
|
||||
|
||||
export const mainConfig: Configuration = {
|
||||
/**
|
||||
* This is the main entry point for your application, it's the first file
|
||||
* that runs in the main process.
|
||||
*/
|
||||
entry: './src/index.ts',
|
||||
// Put your normal webpack config below here
|
||||
module: {
|
||||
rules,
|
||||
},
|
||||
resolve: {
|
||||
extensions: ['.js', '.ts', '.jsx', '.tsx', '.css', '.json'],
|
||||
},
|
||||
};
|
@ -1,12 +0,0 @@
|
||||
import type IForkTsCheckerWebpackPlugin from "fork-ts-checker-webpack-plugin";
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const ForkTsCheckerWebpackPlugin: typeof IForkTsCheckerWebpackPlugin = require(
|
||||
"fork-ts-checker-webpack-plugin",
|
||||
);
|
||||
|
||||
export const plugins = [
|
||||
new ForkTsCheckerWebpackPlugin({
|
||||
logger: "webpack-infrastructure",
|
||||
}),
|
||||
];
|
@ -1,19 +0,0 @@
|
||||
import type { Configuration } from "webpack";
|
||||
|
||||
import { rules } from "./webpack.rules";
|
||||
import { plugins } from "./webpack.plugins";
|
||||
|
||||
rules.push({
|
||||
test: /\.css$/,
|
||||
use: [{ loader: "style-loader" }, { loader: "css-loader" }],
|
||||
});
|
||||
|
||||
export const rendererConfig: Configuration = {
|
||||
module: {
|
||||
rules,
|
||||
},
|
||||
plugins,
|
||||
resolve: {
|
||||
extensions: [".js", ".ts", ".jsx", ".tsx", ".css"],
|
||||
},
|
||||
};
|
@ -1,31 +0,0 @@
|
||||
import type { ModuleOptions } from 'webpack';
|
||||
|
||||
export const rules: Required<ModuleOptions>['rules'] = [
|
||||
// Add support for native node modules
|
||||
{
|
||||
// We're specifying native_modules in the test because the asset relocator loader generates a
|
||||
// "fake" .node file which is really a cjs file.
|
||||
test: /native_modules[/\\].+\.node$/,
|
||||
use: 'node-loader',
|
||||
},
|
||||
{
|
||||
test: /[/\\]node_modules[/\\].+\.(m?js|node)$/,
|
||||
parser: { amd: false },
|
||||
use: {
|
||||
loader: '@vercel/webpack-asset-relocator-loader',
|
||||
options: {
|
||||
outputAssetBase: 'native_modules',
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
test: /\.tsx?$/,
|
||||
exclude: /(node_modules|\.webpack)/,
|
||||
use: {
|
||||
loader: 'ts-loader',
|
||||
options: {
|
||||
transpileOnly: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
];
|
@ -2,6 +2,7 @@
|
||||
"imports": {
|
||||
"@codemirror/state": "https://esm.sh/@codemirror/state@6.2.0",
|
||||
"@lezer/common": "https://esm.sh/@lezer/common@1.0.2",
|
||||
"@lezer/lr": "https://esm.sh/@lezer/lr@1.3.3?external=@lezer/common",
|
||||
"@lezer/markdown": "https://esm.sh/@lezer/markdown@1.0.2?external=@lezer/common,@codemirror/language,@lezer/highlight",
|
||||
"@lezer/javascript": "https://esm.sh/@lezer/javascript@1.4.1?external=@lezer/common,@codemirror/language,@lezer/highlight",
|
||||
"@codemirror/language": "https://esm.sh/@codemirror/language@6.6.0?external=@codemirror/state,@lezer/common,@lezer/lr,@codemirror/view,@lezer/highlight",
|
||||
@ -15,12 +16,7 @@
|
||||
"preact": "https://esm.sh/preact@10.11.1",
|
||||
"yjs": "https://esm.sh/yjs@13.5.42",
|
||||
"$sb/": "./plug-api/",
|
||||
"handlebars": "https://esm.sh/handlebars",
|
||||
"@lezer/lr": "https://esm.sh/@lezer/lr@1.3.3?external=@lezer/common",
|
||||
"yaml": "https://deno.land/std@0.184.0/yaml/mod.ts",
|
||||
|
||||
"@capacitor/core": "https://esm.sh/@capacitor/core@4.6.2",
|
||||
"@capacitor/filesystem": "https://esm.sh/@capacitor/filesystem@4.1.4",
|
||||
"@capacitor/app": "https://esm.sh/@capacitor/app@4.1.1"
|
||||
"handlebars": "https://esm.sh/handlebars@4.7.7?target=es2022",
|
||||
"dexie": "https://esm.sh/dexie@3.2.2"
|
||||
}
|
||||
}
|
||||
|
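Not part of this diff: `dexie` (an IndexedDB wrapper) appears in the import map here, which matches the offline-first direction of this PR. A minimal sketch of how Dexie is commonly used to keep content locally; the database and table names below are hypothetical, not taken from the SilverBullet code:

```ts
import Dexie from "dexie";

// Hypothetical local store: a single table of pages keyed by name.
const db = new Dexie("example-offline-store");
db.version(1).stores({ pages: "name" });

export function savePage(name: string, text: string) {
  return db.table("pages").put({ name, text });
}

export function loadPage(name: string) {
  return db.table("pages").get(name);
}
```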
5
mobile/.gitignore
vendored
@ -1,5 +0,0 @@
|
||||
node_modules/
|
||||
*.map
|
||||
.DS_Store
|
||||
.sourcemaps
|
||||
dist/
|
@ -1,6 +0,0 @@
|
||||
## Mobile Capacitor app for SilverBullet
|
||||
|
||||
Current status:
|
||||
|
||||
* iOS fully functional
|
||||
* Android still WIP
|
101
mobile/android/.gitignore
vendored
@ -1,101 +0,0 @@
|
||||
# Using Android gitignore template: https://github.com/github/gitignore/blob/HEAD/Android.gitignore
|
||||
|
||||
# Built application files
|
||||
*.apk
|
||||
*.aar
|
||||
*.ap_
|
||||
*.aab
|
||||
|
||||
# Files for the ART/Dalvik VM
|
||||
*.dex
|
||||
|
||||
# Java class files
|
||||
*.class
|
||||
|
||||
# Generated files
|
||||
bin/
|
||||
gen/
|
||||
out/
|
||||
# Uncomment the following line if you want to ignore release build output in your app
|
||||
# release/
|
||||
|
||||
# Gradle files
|
||||
.gradle/
|
||||
build/
|
||||
|
||||
# Local configuration file (sdk path, etc)
|
||||
local.properties
|
||||
|
||||
# Proguard folder generated by Eclipse
|
||||
proguard/
|
||||
|
||||
# Log Files
|
||||
*.log
|
||||
|
||||
# Android Studio Navigation editor temp files
|
||||
.navigation/
|
||||
|
||||
# Android Studio captures folder
|
||||
captures/
|
||||
|
||||
# IntelliJ
|
||||
*.iml
|
||||
.idea/workspace.xml
|
||||
.idea/tasks.xml
|
||||
.idea/gradle.xml
|
||||
.idea/assetWizardSettings.xml
|
||||
.idea/dictionaries
|
||||
.idea/libraries
|
||||
# Android Studio 3 in .gitignore file.
|
||||
.idea/caches
|
||||
.idea/modules.xml
|
||||
# Comment next line if keeping position of elements in Navigation Editor is relevant for you
|
||||
.idea/navEditor.xml
|
||||
|
||||
# Keystore files
|
||||
# Uncomment the following lines if you do not want to check your keystore files in.
|
||||
#*.jks
|
||||
#*.keystore
|
||||
|
||||
# External native build folder generated in Android Studio 2.2 and later
|
||||
.externalNativeBuild
|
||||
.cxx/
|
||||
|
||||
# Google Services (e.g. APIs or Firebase)
|
||||
# google-services.json
|
||||
|
||||
# Freeline
|
||||
freeline.py
|
||||
freeline/
|
||||
freeline_project_description.json
|
||||
|
||||
# fastlane
|
||||
fastlane/report.xml
|
||||
fastlane/Preview.html
|
||||
fastlane/screenshots
|
||||
fastlane/test_output
|
||||
fastlane/readme.md
|
||||
|
||||
# Version control
|
||||
vcs.xml
|
||||
|
||||
# lint
|
||||
lint/intermediates/
|
||||
lint/generated/
|
||||
lint/outputs/
|
||||
lint/tmp/
|
||||
# lint/reports/
|
||||
|
||||
# Android Profiling
|
||||
*.hprof
|
||||
|
||||
# Cordova plugins for Capacitor
|
||||
capacitor-cordova-android-plugins
|
||||
|
||||
# Copied web assets
|
||||
app/src/main/assets/public
|
||||
|
||||
# Generated Config files
|
||||
app/src/main/assets/capacitor.config.json
|
||||
app/src/main/assets/capacitor.plugins.json
|
||||
app/src/main/res/xml/config.xml
|
2
mobile/android/app/.gitignore
vendored
@ -1,2 +0,0 @@
|
||||
/build/*
|
||||
!/build/.npmkeep
|
@ -1,53 +0,0 @@
|
||||
apply plugin: 'com.android.application'
|
||||
|
||||
android {
|
||||
compileSdkVersion rootProject.ext.compileSdkVersion
|
||||
defaultConfig {
|
||||
applicationId "md.silverbullet"
|
||||
minSdkVersion rootProject.ext.minSdkVersion
|
||||
targetSdkVersion rootProject.ext.targetSdkVersion
|
||||
versionCode 1
|
||||
versionName "1.0"
|
||||
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
|
||||
aaptOptions {
|
||||
// Files and dirs to omit from the packaged assets dir, modified to accommodate modern web apps.
|
||||
// Default: https://android.googlesource.com/platform/frameworks/base/+/282e181b58cf72b6ca770dc7ca5f91f135444502/tools/aapt/AaptAssets.cpp#61
|
||||
ignoreAssetsPattern '!.svn:!.git:!.ds_store:!*.scc:.*:!CVS:!thumbs.db:!picasa.ini:!*~'
|
||||
}
|
||||
}
|
||||
buildTypes {
|
||||
release {
|
||||
minifyEnabled false
|
||||
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
repositories {
|
||||
flatDir{
|
||||
dirs '../capacitor-cordova-android-plugins/src/main/libs', 'libs'
|
||||
}
|
||||
}
|
||||
|
||||
dependencies {
|
||||
implementation fileTree(include: ['*.jar'], dir: 'libs')
|
||||
implementation "androidx.appcompat:appcompat:$androidxAppCompatVersion"
|
||||
implementation "androidx.coordinatorlayout:coordinatorlayout:$androidxCoordinatorLayoutVersion"
|
||||
implementation "androidx.core:core-splashscreen:$coreSplashScreenVersion"
|
||||
implementation project(':capacitor-android')
|
||||
testImplementation "junit:junit:$junitVersion"
|
||||
androidTestImplementation "androidx.test.ext:junit:$androidxJunitVersion"
|
||||
androidTestImplementation "androidx.test.espresso:espresso-core:$androidxEspressoCoreVersion"
|
||||
implementation project(':capacitor-cordova-android-plugins')
|
||||
}
|
||||
|
||||
apply from: 'capacitor.build.gradle'
|
||||
|
||||
try {
|
||||
def servicesJSON = file('google-services.json')
|
||||
if (servicesJSON.text) {
|
||||
apply plugin: 'com.google.gms.google-services'
|
||||
}
|
||||
} catch(Exception e) {
|
||||
logger.info("google-services.json not found, google-services plugin not applied. Push Notifications won't work")
|
||||
}
|
@ -1,23 +0,0 @@
|
||||
// DO NOT EDIT THIS FILE! IT IS GENERATED EACH TIME "capacitor update" IS RUN
|
||||
|
||||
android {
|
||||
compileOptions {
|
||||
sourceCompatibility JavaVersion.VERSION_11
|
||||
targetCompatibility JavaVersion.VERSION_11
|
||||
}
|
||||
}
|
||||
|
||||
apply from: "../capacitor-cordova-android-plugins/cordova.variables.gradle"
|
||||
dependencies {
|
||||
implementation project(':capacitor-community-sqlite')
|
||||
implementation project(':capacitor-app')
|
||||
implementation project(':capacitor-filesystem')
|
||||
implementation project(':capacitor-keyboard')
|
||||
implementation project(':capacitor-splash-screen')
|
||||
|
||||
}
|
||||
|
||||
|
||||
if (hasProperty('postBuildExtras')) {
|
||||
postBuildExtras()
|
||||
}
|
21
mobile/android/app/proguard-rules.pro
vendored
@ -1,21 +0,0 @@
|
||||
# Add project specific ProGuard rules here.
|
||||
# You can control the set of applied configuration files using the
|
||||
# proguardFiles setting in build.gradle.
|
||||
#
|
||||
# For more details, see
|
||||
# http://developer.android.com/guide/developing/tools/proguard.html
|
||||
|
||||
# If your project uses WebView with JS, uncomment the following
|
||||
# and specify the fully qualified class name to the JavaScript interface
|
||||
# class:
|
||||
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
|
||||
# public *;
|
||||
#}
|
||||
|
||||
# Uncomment this to preserve the line number information for
|
||||
# debugging stack traces.
|
||||
#-keepattributes SourceFile,LineNumberTable
|
||||
|
||||
# If you keep the line number information, uncomment this to
|
||||
# hide the original source file name.
|
||||
#-renamesourcefileattribute SourceFile
|
@ -1,20 +0,0 @@
|
||||
{
|
||||
"version": 3,
|
||||
"artifactType": {
|
||||
"type": "APK",
|
||||
"kind": "Directory"
|
||||
},
|
||||
"applicationId": "md.silverbullet",
|
||||
"variantName": "release",
|
||||
"elements": [
|
||||
{
|
||||
"type": "SINGLE",
|
||||
"filters": [],
|
||||
"attributes": [],
|
||||
"versionCode": 1,
|
||||
"versionName": "1.0",
|
||||
"outputFile": "app-release.apk"
|
||||
}
|
||||
],
|
||||
"elementType": "File"
|
||||
}
|
@ -1,26 +0,0 @@
|
||||
package com.getcapacitor.myapp;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import android.content.Context;
|
||||
import androidx.test.ext.junit.runners.AndroidJUnit4;
|
||||
import androidx.test.platform.app.InstrumentationRegistry;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
/**
|
||||
* Instrumented test, which will execute on an Android device.
|
||||
*
|
||||
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
|
||||
*/
|
||||
@RunWith(AndroidJUnit4.class)
|
||||
public class ExampleInstrumentedTest {
|
||||
|
||||
@Test
|
||||
public void useAppContext() throws Exception {
|
||||
// Context of the app under test.
|
||||
Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
|
||||
|
||||
assertEquals("com.getcapacitor.app", appContext.getPackageName());
|
||||
}
|
||||
}
|
@ -1,45 +0,0 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
|
||||
package="md.silverbullet">
|
||||
|
||||
<application
|
||||
android:allowBackup="true"
|
||||
android:icon="@mipmap/ic_launcher"
|
||||
android:label="@string/app_name"
|
||||
android:roundIcon="@mipmap/ic_launcher_round"
|
||||
android:supportsRtl="true"
|
||||
android:usesCleartextTraffic="true"
|
||||
android:theme="@style/AppTheme">
|
||||
|
||||
<activity
|
||||
android:configChanges="orientation|keyboardHidden|keyboard|screenSize|locale|smallestScreenSize|screenLayout|uiMode"
|
||||
android:name="md.silverbullet.MainActivity"
|
||||
android:label="@string/title_activity_main"
|
||||
android:theme="@style/AppTheme.NoActionBarLaunch"
|
||||
android:launchMode="singleTask"
|
||||
android:exported="true">
|
||||
|
||||
<intent-filter>
|
||||
<action android:name="android.intent.action.MAIN" />
|
||||
<category android:name="android.intent.category.LAUNCHER" />
|
||||
</intent-filter>
|
||||
|
||||
</activity>
|
||||
|
||||
<provider
|
||||
android:name="androidx.core.content.FileProvider"
|
||||
android:authorities="${applicationId}.fileprovider"
|
||||
android:exported="false"
|
||||
android:grantUriPermissions="true">
|
||||
<meta-data
|
||||
android:name="android.support.FILE_PROVIDER_PATHS"
|
||||
android:resource="@xml/file_paths"></meta-data>
|
||||
</provider>
|
||||
</application>
|
||||
|
||||
<!-- Permissions -->
|
||||
|
||||
<uses-permission android:name="android.permission.INTERNET" />
|
||||
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
|
||||
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
|
||||
</manifest>
|
@ -1,5 +0,0 @@
|
||||
package md.silverbullet;
|
||||
|
||||
import com.getcapacitor.BridgeActivity;
|
||||
|
||||
public class MainActivity extends BridgeActivity {}
|
(binary diff: 10 image files removed; sizes between 3.9 KiB and 17 KiB)
@ -1,34 +0,0 @@
|
||||
<vector xmlns:android="http://schemas.android.com/apk/res/android"
|
||||
xmlns:aapt="http://schemas.android.com/aapt"
|
||||
android:width="108dp"
|
||||
android:height="108dp"
|
||||
android:viewportHeight="108"
|
||||
android:viewportWidth="108">
|
||||
<path
|
||||
android:fillType="evenOdd"
|
||||
android:pathData="M32,64C32,64 38.39,52.99 44.13,50.95C51.37,48.37 70.14,49.57 70.14,49.57L108.26,87.69L108,109.01L75.97,107.97L32,64Z"
|
||||
android:strokeColor="#00000000"
|
||||
android:strokeWidth="1">
|
||||
<aapt:attr name="android:fillColor">
|
||||
<gradient
|
||||
android:endX="78.5885"
|
||||
android:endY="90.9159"
|
||||
android:startX="48.7653"
|
||||
android:startY="61.0927"
|
||||
android:type="linear">
|
||||
<item
|
||||
android:color="#44000000"
|
||||
android:offset="0.0" />
|
||||
<item
|
||||
android:color="#00000000"
|
||||
android:offset="1.0" />
|
||||
</gradient>
|
||||
</aapt:attr>
|
||||
</path>
|
||||
<path
|
||||
android:fillColor="#FFFFFF"
|
||||
android:fillType="nonZero"
|
||||
android:pathData="M66.94,46.02L66.94,46.02C72.44,50.07 76,56.61 76,64L32,64C32,56.61 35.56,50.11 40.98,46.06L36.18,41.19C35.45,40.45 35.45,39.3 36.18,38.56C36.91,37.81 38.05,37.81 38.78,38.56L44.25,44.05C47.18,42.57 50.48,41.71 54,41.71C57.48,41.71 60.78,42.57 63.68,44.05L69.11,38.56C69.84,37.81 70.98,37.81 71.71,38.56C72.44,39.3 72.44,40.45 71.71,41.19L66.94,46.02ZM62.94,56.92C64.08,56.92 65,56.01 65,54.88C65,53.76 64.08,52.85 62.94,52.85C61.8,52.85 60.88,53.76 60.88,54.88C60.88,56.01 61.8,56.92 62.94,56.92ZM45.06,56.92C46.2,56.92 47.13,56.01 47.13,54.88C47.13,53.76 46.2,52.85 45.06,52.85C43.92,52.85 43,53.76 43,54.88C43,56.01 43.92,56.92 45.06,56.92Z"
|
||||
android:strokeColor="#00000000"
|
||||
android:strokeWidth="1" />
|
||||
</vector>
|
@ -1,170 +0,0 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<vector xmlns:android="http://schemas.android.com/apk/res/android"
|
||||
android:width="108dp"
|
||||
android:height="108dp"
|
||||
android:viewportHeight="108"
|
||||
android:viewportWidth="108">
|
||||
<path
|
||||
android:fillColor="#26A69A"
|
||||
android:pathData="M0,0h108v108h-108z" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M9,0L9,108"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M19,0L19,108"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M29,0L29,108"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M39,0L39,108"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M49,0L49,108"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M59,0L59,108"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M69,0L69,108"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M79,0L79,108"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M89,0L89,108"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M99,0L99,108"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M0,9L108,9"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M0,19L108,19"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M0,29L108,29"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M0,39L108,39"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M0,49L108,49"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M0,59L108,59"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M0,69L108,69"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M0,79L108,79"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M0,89L108,89"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M0,99L108,99"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M19,29L89,29"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M19,39L89,39"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M19,49L89,49"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M19,59L89,59"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M19,69L89,69"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M19,79L89,79"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M29,19L29,89"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M39,19L39,89"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M49,19L49,89"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M59,19L59,89"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M69,19L69,89"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
<path
|
||||
android:fillColor="#00000000"
|
||||
android:pathData="M79,19L79,89"
|
||||
android:strokeColor="#33FFFFFF"
|
||||
android:strokeWidth="0.8" />
|
||||
</vector>
|
(binary diff: 1 image file removed; 3.9 KiB)
@ -1,12 +0,0 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<androidx.coordinatorlayout.widget.CoordinatorLayout xmlns:android="http://schemas.android.com/apk/res/android"
|
||||
xmlns:app="http://schemas.android.com/apk/res-auto"
|
||||
xmlns:tools="http://schemas.android.com/tools"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="match_parent"
|
||||
tools:context=".MainActivity">
|
||||
|
||||
<WebView
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="match_parent" />
|
||||
</androidx.coordinatorlayout.widget.CoordinatorLayout>
|
@ -1,5 +0,0 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
|
||||
<background android:drawable="@color/ic_launcher_background"/>
|
||||
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>
|
||||
</adaptive-icon>
|
@ -1,5 +0,0 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
|
||||
<background android:drawable="@color/ic_launcher_background"/>
|
||||
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>
|
||||
</adaptive-icon>
|
(binary diff: 14 image files removed; sizes between 1.8 KiB and 15 KiB)