parent 60d7cc704a
commit 9f082c83a9
.gitignore (vendored, 5 changes)

@@ -1,3 +1,4 @@
+.env
 .DS_Store
 dist_client_bundle
 dist_plug_bundle
@@ -10,7 +11,7 @@ deno.lock
 fly.toml
 env.sh
 node_modules
-*.db
+*.db*
 test_space
 silverbullet
-.silverbullet.db*
+deploy.json
cmd/server.ts

@@ -11,7 +11,6 @@ import { sleep } from "$sb/lib/async.ts";

 import { determineDatabaseBackend } from "../server/db_backend.ts";
 import { SpaceServerConfig } from "../server/instance.ts";
 import { path } from "../common/deps.ts";

 export async function serveCommand(
   options: {
@@ -22,6 +21,7 @@ export async function serveCommand(
     cert?: string;
     key?: string;
     reindex?: boolean;
+    syncOnly?: boolean;
   },
   folder?: string,
 ) {
@@ -29,7 +29,7 @@ export async function serveCommand(
     "127.0.0.1";
   const port = options.port ||
     (Deno.env.get("SB_PORT") && +Deno.env.get("SB_PORT")!) || 3000;

+  const syncOnly = options.syncOnly || !!Deno.env.get("SB_SYNC_ONLY");
   const app = new Application();

   if (!folder) {
@@ -42,7 +42,6 @@ export async function serveCommand(
       Deno.exit(1);
     }
   }
   folder = path.resolve(Deno.cwd(), folder);

   const baseKvPrimitives = await determineDatabaseBackend(folder);

@@ -59,11 +58,17 @@ To allow outside connections, pass -L 0.0.0.0 as a flag, and put a TLS terminator

   const userAuth = options.user ?? Deno.env.get("SB_USER");

+  let userCredentials: { user: string; pass: string } | undefined;
+  if (userAuth) {
+    const [user, pass] = userAuth.split(":");
+    userCredentials = { user, pass };
+  }
   const configs = new Map<string, SpaceServerConfig>();
   configs.set("*", {
     hostname,
     namespace: "*",
-    auth: userAuth,
+    auth: userCredentials,
     authToken: Deno.env.get("SB_AUTH_TOKEN"),
     pagesPath: folder,
   });

@@ -74,7 +79,7 @@ To allow outside connections, pass -L 0.0.0.0 as a flag, and put a TLS terminator
     clientAssetBundle: new AssetBundle(clientAssetBundle as AssetJson),
     plugAssetBundle: new AssetBundle(plugAssetBundle as AssetJson),
     baseKvPrimitives,
-    syncOnly: baseKvPrimitives === undefined,
+    syncOnly,
     keyFile: options.key,
     certFile: options.cert,
    configs,
cmd/sync.ts (new file, +72)

@@ -0,0 +1,72 @@
+import { SpaceSync, SyncStatusItem } from "../common/spaces/sync.ts";
+import { MemoryKvPrimitives } from "../plugos/lib/memory_kv_primitives.ts";
+import { determineStorageBackend } from "../server/storage_backend.ts";
+
+export async function syncCommand(
+  options: {
+    snapshot?: string;
+    wipeSecondary?: boolean;
+  },
+  primary: string,
+  secondary: string,
+) {
+  const memoryKv = new MemoryKvPrimitives();
+  console.log("Going to synchronize", primary, "and", secondary);
+
+  const primarySpacePrimitives = await determineStorageBackend(
+    memoryKv,
+    primary,
+  );
+  const secondarySpacePrimitives = await determineStorageBackend(
+    memoryKv,
+    secondary,
+  );
+
+  if (options.wipeSecondary) {
+    if (
+      !confirm(
+        `About to wipe the secondary storage at ${secondary}, are you sure?`,
+      )
+    ) {
+      return;
+    }
+    const allFiles = await secondarySpacePrimitives.fetchFileList();
+    for (const file of allFiles) {
+      try {
+        console.log("Deleting", file.name);
+        await secondarySpacePrimitives.deleteFile(file.name);
+      } catch (e: any) {
+        console.warn("Failed to delete file", file.name, e.message);
+      }
+    }
+    console.log("Done wiping secondary storage.");
+  }
+
+  const sync = new SpaceSync(primarySpacePrimitives, secondarySpacePrimitives, {
+    conflictResolver: SpaceSync.primaryConflictResolver,
+    isSyncCandidate: () => true,
+  });
+  let snapshot = new Map<string, SyncStatusItem>();
+  if (options.snapshot) {
+    try {
+      snapshot = new Map(
+        Object.entries(JSON.parse(await Deno.readTextFile(options.snapshot))),
+      );
+    } catch (e) {
+      console.warn(
+        "Failed to read snapshot file",
+        e.message,
+        "using empty snapshot",
+      );
+    }
+  }
+
+  const operations = await sync.syncFiles(snapshot);
+  console.log("Sync completed, operations:", operations);
+  if (options.snapshot) {
+    await Deno.writeTextFile(
+      options.snapshot,
+      JSON.stringify(Object.fromEntries(snapshot.entries())),
+    );
+  }
+}
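(Editorial sketch, not part of the commit: invoking the new command programmatically; the CLI wiring appears in silverbullet.ts further down. The paths and S3 prefix here are made-up examples, and either side accepts any URL that determineStorageBackend understands:)

import { syncCommand } from "./cmd/sync.ts";

// Roughly equivalent to: silverbullet sync --snapshot snapshot.json ./my-space s3://backup
await syncCommand(
  { snapshot: "snapshot.json" }, // persists sync state between runs
  "./my-space",                  // primary, e.g. a local folder
  "s3://backup",                 // secondary, e.g. an S3 prefix
);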
common/spaces/chunked_datastore_space_primitives.test.ts (new file, +18)

@@ -0,0 +1,18 @@
+import { MemoryKvPrimitives } from "../../plugos/lib/memory_kv_primitives.ts";
+import { assertEquals } from "../../test_deps.ts";
+import { ChunkedKvStoreSpacePrimitives } from "./chunked_datastore_space_primitives.ts";
+import { testSpacePrimitives } from "./space_primitives.test.ts";
+
+Deno.test("chunked_datastore_space_primitives", async () => {
+  const memoryKv = new MemoryKvPrimitives();
+  // In memory store and tiny chunks for testing
+  const spacePrimitives = new ChunkedKvStoreSpacePrimitives(memoryKv, 5);
+  await testSpacePrimitives(spacePrimitives);
+  const [deletedChunk] = await memoryKv.batchGet([[
+    "content",
+    "test.bin",
+    "000",
+  ]]);
+  // This one was deleted during the test (but here we're checking the underlying store for content)
+  assertEquals(deletedChunk, undefined);
+});
common/spaces/chunked_datastore_space_primitives.ts (new file, +81)

@@ -0,0 +1,81 @@
+import type { SpacePrimitives } from "./space_primitives.ts";
+import { KvKey } from "$sb/types.ts";
+import { KvPrimitives } from "../../plugos/lib/kv_primitives.ts";
+import { KvMetaSpacePrimitives } from "./kv_meta_space_primitives.ts";
+import { PrefixedKvPrimitives } from "../../plugos/lib/prefixed_kv_primitives.ts";
+
+/**
+ * A space primitives implementation that stores files in chunks in a KV store.
+ * This is useful for KV stores that have a size limit per value, such as DenoKV.
+ * Meta data will be kept with a "meta" prefix and content will be kept with a "content" prefix.
+ * Example use with DenoKV:
+ *   const denoKv = new DenoKvPrimitives(await Deno.openKv());
+ *   const spacePrimitives = new ChunkedDataStoreSpacePrimitives(denoKv, 65536); // max 64kb per chunk
+ */
+export class ChunkedKvStoreSpacePrimitives extends KvMetaSpacePrimitives {
+  /**
+   * @param baseKv the underlying kv primitives (not prefixed with e.g. meta and content)
+   * @param chunkSize
+   * @param metaPrefix
+   * @param contentPrefix
+   */
+  constructor(
+    baseKv: KvPrimitives,
+    chunkSize: number,
+    metaPrefix = ["meta"],
+    contentPrefix = ["content"],
+  ) {
+    // Super call with a metaPrefix for storing the file metadata
+    super(new PrefixedKvPrimitives(baseKv, metaPrefix), {
+      async readFile(name: string, spacePrimitives: SpacePrimitives) {
+        const meta = await spacePrimitives.getFileMeta(name);
+
+        // Buffer to store the concatenated chunks
+        const concatenatedChunks = new Uint8Array(meta.size);
+        let offset = 0;
+        // Implicit assumption, chunks are ordered by chunk id by the underlying store
+        for await (
+          const { value } of baseKv.query({
+            prefix: [...contentPrefix, name],
+          })
+        ) {
+          concatenatedChunks.set(value, offset);
+          offset += value.length;
+        }
+
+        return concatenatedChunks;
+      },
+      async writeFile(
+        name: string,
+        data: Uint8Array,
+      ) {
+        // Persist the data, chunk by chunk
+        let chunkId = 0;
+        for (let i = 0; i < data.byteLength; i += chunkSize) {
+          const chunk = data.slice(i, i + chunkSize);
+          await baseKv.batchSet([{
+            // "3 digits ought to be enough for anybody" — famous last words
+            key: [...contentPrefix, name, String(chunkId).padStart(3, "0")],
+            value: chunk,
+          }]);
+          chunkId++;
+        }
+      },
+      async deleteFile(name: string, spacePrimitives: SpacePrimitives) {
+        const fileMeta = await spacePrimitives.getFileMeta(name);
+        // Using this we can calculate the chunk keys
+        const keysToDelete: KvKey[] = [];
+        let chunkId = 0;
+        for (let i = 0; i < fileMeta.size; i += chunkSize) {
+          keysToDelete.push([
+            ...contentPrefix,
+            name,
+            String(chunkId).padStart(3, "0"),
+          ]);
+          chunkId++;
+        }
+        return baseKv.batchDelete(keysToDelete);
+      },
+    });
+  }
+}
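(Editorial sketch, not part of the commit: the key layout this produces. "Hello world!" is 12 bytes, so a chunk size of 5 yields three content chunks plus one meta entry; the file name is made up:)

import { MemoryKvPrimitives } from "../../plugos/lib/memory_kv_primitives.ts";
import { ChunkedKvStoreSpacePrimitives } from "./chunked_datastore_space_primitives.ts";

const kv = new MemoryKvPrimitives();
const space = new ChunkedKvStoreSpacePrimitives(kv, 5);
await space.writeFile("notes.md", new TextEncoder().encode("Hello world!"));
// Keys now in the store (zero-padded chunk ids keep lexicographic order equal to read order):
//   ["meta", "notes.md"]           -> FileMeta { size: 12, ... }
//   ["content", "notes.md", "000"] -> "Hello"
//   ["content", "notes.md", "001"] -> " worl"
//   ["content", "notes.md", "002"] -> "d!"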
common/spaces/datastore_space_primitives.test.ts

@@ -1,8 +1,8 @@
 import "https://esm.sh/fake-indexeddb@4.0.2/auto";
-import { assertEquals } from "../../test_deps.ts";
 import { DataStore } from "../../plugos/lib/datastore.ts";
 import { IndexedDBKvPrimitives } from "../../plugos/lib/indexeddb_kv_primitives.ts";
 import { DataStoreSpacePrimitives } from "./datastore_space_primitives.ts";
+import { testSpacePrimitives } from "./space_primitives.test.ts";

 Deno.test("DataStoreSpacePrimitives", {
   sanitizeResources: false,
@@ -12,34 +12,6 @@ Deno.test("DataStoreSpacePrimitives", {
   await db.init();

   const space = new DataStoreSpacePrimitives(new DataStore(db));
-  const files = await space.fetchFileList();
-  assertEquals(files, []);
-  // Write text file
-  const fileMeta = await space.writeFile(
-    "test.txt",
-    stringToBytes("Hello World"),
-  );
-  assertEquals(
-    (await space.readFile("test.txt")).data,
-    stringToBytes("Hello World"),
-  );
-  const fbContent = (await space.readFile("test.txt"))
-    .data;
-  assertEquals(new TextDecoder().decode(fbContent), "Hello World");
-  assertEquals(await space.fetchFileList(), [fileMeta]);
-  const buf = new Uint8Array([1, 2, 3, 4, 5]);
-  // Write binary file
-  await space.writeFile("test.bin", buf);
-  const fMeta = await space.getFileMeta("test.bin");
-  assertEquals(fMeta.size, 5);
-  assertEquals((await space.fetchFileList()).length, 2);
-
-  await space.deleteFile("test.bin");
-  assertEquals(await space.fetchFileList(), [fileMeta]);
-
+  await testSpacePrimitives(space);
   db.close();
 });
-
-function stringToBytes(str: string): Uint8Array {
-  return new TextEncoder().encode(str);
-}
common/spaces/datastore_space_primitives.ts

@@ -12,6 +12,9 @@ export type FileContent = {
 const filesMetaPrefix = ["file", "meta"];
 const filesContentPrefix = ["file", "content"];

+/**
+ * TODO: Replace this with ChunkedDatastoreSpacePrimitives
+ */
 export class DataStoreSpacePrimitives implements SpacePrimitives {
   constructor(
     private ds: DataStore,
@@ -46,14 +49,27 @@ export class DataStoreSpacePrimitives implements SpacePrimitives {
     _selfUpdate?: boolean,
     suggestedMeta?: FileMeta,
   ): Promise<FileMeta> {
-    const meta: FileMeta = {
-      name,
-      created: suggestedMeta?.lastModified || Date.now(),
-      lastModified: suggestedMeta?.lastModified || Date.now(),
-      contentType: mime.getType(name) || "application/octet-stream",
-      size: data.byteLength,
-      perm: suggestedMeta?.perm || "rw",
-    };
+    let meta: FileMeta | undefined;
+    try {
+      // Build off of the existing file meta, if file exists
+      meta = await this.getFileMeta(name);
+    } catch {
+      // Not found, that's fine
+    }
+    if (!meta) {
+      meta = {
+        name,
+        created: suggestedMeta?.lastModified || Date.now(),
+        perm: suggestedMeta?.perm || "rw",
+        contentType: mime.getType(name) || "application/octet-stream",
+        // Overwritten in a sec
+        lastModified: 0,
+        size: 0,
+      };
+    }
+    meta.lastModified = suggestedMeta?.lastModified || Date.now();
+    meta.size = data.byteLength;

     await this.ds.batchSet<FileMeta | FileContent>([
       {
         key: [...filesContentPrefix, name],
common/spaces/deno_kv_space_primitives.test.ts

@@ -1,31 +1,12 @@
-import { assertEquals } from "../../test_deps.ts";
-import { DenoKVSpacePrimitives } from "./deno_kv_space_primitives.ts";
+import { DenoKvPrimitives } from "../../plugos/lib/deno_kv_primitives.ts";
+import { ChunkedKvStoreSpacePrimitives } from "./chunked_datastore_space_primitives.ts";
+import { testSpacePrimitives } from "./space_primitives.test.ts";

-Deno.test("deno_kv_space_primitives", async () => {
+Deno.test("deno kv test", async () => {
   const tempFile = await Deno.makeTempFile({ suffix: ".db" });
-  const spacePrimitives = new DenoKVSpacePrimitives();
-  await spacePrimitives.init(tempFile);
-  await spacePrimitives.writeFile("test.txt", new TextEncoder().encode("test"));
-  let result = await spacePrimitives.readFile("test.txt");
-  assertEquals(result.data, new TextEncoder().encode("test"));
-  let listing = await spacePrimitives.fetchFileList();
-  assertEquals(listing.length, 1);
-  await spacePrimitives.writeFile(
-    "test.txt",
-    new TextEncoder().encode("test2"),
-  );
-  result = await spacePrimitives.readFile("test.txt");
-  assertEquals(result.data, new TextEncoder().encode("test2"));
-  await spacePrimitives.deleteFile("test.txt");
-  listing = await spacePrimitives.fetchFileList();
-  try {
-    await spacePrimitives.readFile("test.txt");
-    throw new Error("Should not be here");
-  } catch (e: any) {
-    assertEquals(e.message, "Not found");
-  }
-  assertEquals(listing.length, 0);
-
-  spacePrimitives.close();
+  const denoKv = new DenoKvPrimitives(await Deno.openKv(tempFile));
+  const spacePrimitives = new ChunkedKvStoreSpacePrimitives(denoKv, 65536);
+  await testSpacePrimitives(spacePrimitives);
+  denoKv.close();
   await Deno.remove(tempFile);
 });
common/spaces/deno_kv_space_primitives.ts (deleted)

@@ -1,84 +0,0 @@
-/// <reference lib="deno.unstable" />
-
-import { FileMeta } from "$sb/types.ts";
-import type { SpacePrimitives } from "./space_primitives.ts";
-import { mime } from "https://deno.land/x/mimetypes@v1.0.0/mod.ts";
-
-export class DenoKVSpacePrimitives implements SpacePrimitives {
-  private kv!: Deno.Kv;
-  private dataAttribute = "file";
-  private metaAttribute = "meta";
-
-  async init(path?: string) {
-    this.kv = await Deno.openKv(path);
-  }
-
-  close() {
-    this.kv.close();
-  }
-
-  async fetchFileList(): Promise<FileMeta[]> {
-    const results: FileMeta[] = [];
-    for await (
-      const result of this.kv.list({
-        prefix: [this.metaAttribute],
-      })
-    ) {
-      results.push(result.value as FileMeta);
-    }
-    return results;
-  }
-  async readFile(name: string): Promise<{ data: Uint8Array; meta: FileMeta }> {
-    const [meta, data] = await this.kv.getMany([[this.metaAttribute, name], [
-      this.dataAttribute,
-      name,
-    ]]);
-    if (!meta.value) {
-      throw new Error("Not found");
-    }
-    return {
-      data: data.value as Uint8Array,
-      meta: meta.value as FileMeta,
-    };
-  }
-  async getFileMeta(name: string): Promise<FileMeta> {
-    const result = await this.kv.get([this.metaAttribute, name]);
-    if (result.value) {
-      return result.value as FileMeta;
-    } else {
-      throw new Error("Not found");
-    }
-  }
-  async writeFile(
-    name: string,
-    data: Uint8Array,
-    _selfUpdate?: boolean | undefined,
-    suggestedMeta?: FileMeta | undefined,
-  ): Promise<FileMeta> {
-    const meta: FileMeta = {
-      name,
-      created: suggestedMeta?.created || Date.now(),
-      lastModified: suggestedMeta?.lastModified || Date.now(),
-      contentType: mime.getType(name) || "application/octet-stream",
-      size: data.byteLength,
-      perm: suggestedMeta?.perm || "rw",
-    };
-    const res = await this.kv.atomic()
-      .set([this.dataAttribute, name], data)
-      .set([this.metaAttribute, name], meta)
-      .commit();
-    if (!res.ok) {
-      throw res;
-    }
-    return meta;
-  }
-  async deleteFile(name: string): Promise<void> {
-    const res = await this.kv.atomic()
-      .delete([this.dataAttribute, name])
-      .delete([this.metaAttribute, name])
-      .commit();
-    if (!res.ok) {
-      throw res;
-    }
-  }
-}
common/spaces/evented_space_primitives.ts

@@ -21,7 +21,8 @@ export class EventedSpacePrimitives implements SpacePrimitives {
   constructor(
     private wrapped: SpacePrimitives,
     private eventHook: EventHook,
-  ) {}
+  ) {
+  }

   dispatchEvent(name: string, ...args: any[]): Promise<any[]> {
     return this.eventHook.dispatchEvent(name, ...args);
common/spaces/http_space_primitives.ts

@@ -6,6 +6,7 @@ export class HttpSpacePrimitives implements SpacePrimitives {
   constructor(
     readonly url: string,
     readonly expectedSpacePath?: string,
+    private bearerToken?: string,
   ) {
   }

@@ -20,6 +21,12 @@ export class HttpSpacePrimitives implements SpacePrimitives {
       ...options.headers,
       "X-Sync-Mode": "true",
     };
+    if (this.bearerToken) {
+      options.headers = {
+        ...options.headers,
+        "Authorization": `Bearer ${this.bearerToken}`,
+      };
+    }

     try {
       const result = await fetch(url, options);
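(Editorial sketch, not part of the commit: using the new third constructor argument. The server URL below is made up; determineStorageBackend later in this commit wires SB_AUTH_TOKEN through in exactly this way:)

import { HttpSpacePrimitives } from "./http_space_primitives.ts";

const remote = new HttpSpacePrimitives(
  "https://notes.example.com", // hypothetical SilverBullet server
  undefined,                   // no expected space path
  Deno.env.get("SB_AUTH_TOKEN"),
);
console.log((await remote.fetchFileList()).length, "files on remote");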
common/spaces/kv_meta_space_primitives.ts (new file, +95)

@@ -0,0 +1,95 @@
+import { FileMeta } from "$sb/types.ts";
+import { KvPrimitives } from "../../plugos/lib/kv_primitives.ts";
+import { mime } from "../deps.ts";
+import { SpacePrimitives } from "./space_primitives.ts";
+
+export type KvMetaSpacePrimitivesCallbacks = {
+  readFile: (
+    name: string,
+    spacePrimitives: SpacePrimitives,
+  ) => Promise<Uint8Array>;
+  writeFile: (
+    name: string,
+    data: Uint8Array,
+    spacePrimitives: SpacePrimitives,
+  ) => Promise<void>;
+  deleteFile: (name: string, spacePrimitives: SpacePrimitives) => Promise<void>;
+};
+
+export class KvMetaSpacePrimitives implements SpacePrimitives {
+  constructor(
+    protected kv: KvPrimitives,
+    private callbacks: KvMetaSpacePrimitivesCallbacks,
+  ) {
+  }
+
+  async readFile(
+    name: string,
+  ): Promise<{ data: Uint8Array; meta: FileMeta }> {
+    const [data, [meta]] = await Promise.all([
+      this.callbacks.readFile(name, this),
+      this.kv.batchGet([[name]]),
+    ]);
+    return { data, meta: meta };
+  }
+
+  async writeFile(
+    name: string,
+    data: Uint8Array,
+    _selfUpdate?: boolean | undefined,
+    desiredMeta?: FileMeta | undefined,
+  ): Promise<FileMeta> {
+    let meta: FileMeta | undefined;
+    try {
+      // Build off of the existing file meta, if file exists
+      meta = await this.getFileMeta(name);
+    } catch {
+      // Not found, that's fine
+    }
+    if (!meta) {
+      meta = {
+        name,
+        perm: "rw",
+        created: Date.now(),
+        contentType: mime.getType(name) || "application/octet-stream",
+        // These will be overwritten in a bit
+        lastModified: 0,
+        size: 0,
+      };
+    }
+    meta = {
+      ...meta,
+      lastModified: desiredMeta?.lastModified || Date.now(),
+      size: data.byteLength,
+    };
+    await Promise.all([
+      this.callbacks.writeFile(name, data, this),
+      this.kv.batchSet([{ key: [name], value: meta }]),
+    ]);
+
+    return meta;
+  }
+
+  async deleteFile(name: string): Promise<void> {
+    await Promise.all([
+      this.callbacks.deleteFile(name, this),
+      this.kv.batchDelete([[name]]),
+    ]);
+  }
+
+  async fetchFileList(): Promise<FileMeta[]> {
+    const files: FileMeta[] = [];
+    for await (const meta of this.kv.query({})) {
+      files.push(meta.value);
+    }
+    return files;
+  }
+
+  async getFileMeta(name: string): Promise<FileMeta> {
+    const fileMeta = (await this.kv.batchGet([[name]]))[0];
+    if (!fileMeta) {
+      throw new Error("Not found");
+    }
+    return fileMeta;
+  }
+}
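(Editorial sketch, not part of the commit: the callback design means any blob store can back a space by supplying three functions, which is exactly how the chunked-KV and S3 implementations in this commit work. Purely for illustration, a plain Map standing in as the blob store:)

import { MemoryKvPrimitives } from "../../plugos/lib/memory_kv_primitives.ts";
import { KvMetaSpacePrimitives } from "./kv_meta_space_primitives.ts";

const blobs = new Map<string, Uint8Array>();
const space = new KvMetaSpacePrimitives(new MemoryKvPrimitives(), {
  readFile: async (name) => {
    const data = blobs.get(name);
    if (!data) throw new Error("Not found");
    return data;
  },
  writeFile: async (name, data) => {
    blobs.set(name, data);
  },
  deleteFile: async (name) => {
    blobs.delete(name);
  },
});
await space.writeFile("hello.md", new TextEncoder().encode("Hi"));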
common/spaces/s3_space_primitives.test.ts (new file, +22)

@@ -0,0 +1,22 @@
+import { S3SpacePrimitives } from "./s3_space_primitives.ts";
+import { MemoryKvPrimitives } from "../../plugos/lib/memory_kv_primitives.ts";
+import { testSpacePrimitives } from "./space_primitives.test.ts";
+
+Deno.test("s3_space_primitives", async () => {
+  return;
+  const options = {
+    accessKey: Deno.env.get("AWS_ACCESS_KEY_ID")!,
+    secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
+    endPoint: Deno.env.get("AWS_ENDPOINT")!,
+    region: Deno.env.get("AWS_REGION")!,
+    bucket: Deno.env.get("AWS_BUCKET")!,
+  };
+
+  const primitives = new S3SpacePrimitives(
+    new MemoryKvPrimitives(),
+    ["meta"],
+    "test",
+    options,
+  );
+  await testSpacePrimitives(primitives);
+});
common/spaces/s3_space_primitives.ts (new file, +117)

@@ -0,0 +1,117 @@
+// We're explicitly using 0.4.0 to be able to hijack the path encoding, which is inconsistently broken in 0.5.0
+import { S3Client } from "https://deno.land/x/s3_lite_client@0.4.0/mod.ts";
+import type { ClientOptions } from "https://deno.land/x/s3_lite_client@0.4.0/client.ts";
+import { KvMetaSpacePrimitives } from "./kv_meta_space_primitives.ts";
+import { KvPrimitives } from "../../plugos/lib/kv_primitives.ts";
+import { mime } from "../deps.ts";
+import { KV, KvKey } from "$sb/types.ts";
+import { PrefixedKvPrimitives } from "../../plugos/lib/prefixed_kv_primitives.ts";
+
+export type S3SpacePrimitivesOptions = ClientOptions;
+
+/**
+ * Because S3 cannot store arbitrary metadata (well it can, but you cannot retrieve it when listing objects), we need to store it in a separate KV store
+ */
+export class S3SpacePrimitives extends KvMetaSpacePrimitives {
+  client: S3Client;
+  objectPrefix: string;
+  constructor(
+    baseKv: KvPrimitives,
+    metaPrefix: KvKey,
+    objectPrefix: string,
+    options: S3SpacePrimitivesOptions,
+  ) {
+    const client = new S3Client(options);
+    super(new PrefixedKvPrimitives(baseKv, metaPrefix), {
+      async readFile(
+        name: string,
+      ): Promise<Uint8Array> {
+        try {
+          const obj = await client.getObject(encodePath(objectPrefix + name));
+          return new Uint8Array(await obj.arrayBuffer());
+        } catch (e: any) {
+          console.error("Got S3 error", e.message);
+          if (e.message.includes("does not exist")) {
+            throw new Error(`Not found`);
+          }
+          throw e;
+        }
+      },
+      async writeFile(
+        name: string,
+        data: Uint8Array,
+      ): Promise<void> {
+        await client.putObject(encodePath(objectPrefix + name), data);
+      },
+      async deleteFile(name: string): Promise<void> {
+        await client.deleteObject(encodePath(objectPrefix + name));
+      },
+    });
+    this.client = client;
+    this.objectPrefix = objectPrefix;
+  }
+
+  /**
+   * Fetches all objects from the S3 bucket, finds any missing files and adds them to the KV store.
+   * Doesn't delete items, nor update any existing items.
+   */
+  async syncFileList(): Promise<void> {
+    const currentFiles = await this.fetchFileList();
+    const entriesToAdd: KV[] = [];
+    for await (
+      const objectData of this.client.listObjects({
+        prefix: this.objectPrefix,
+      })
+    ) {
+      // Find the file meta for this object
+      let fileMeta = currentFiles.find((f) =>
+        f.name === decodePath(objectData.key.slice(this.objectPrefix.length))
+      );
+      if (fileMeta) {
+        // Exists, continue
+        continue;
+      }
+      fileMeta = {
+        name: decodePath(objectData.key.slice(this.objectPrefix.length)),
+        created: objectData.lastModified.getTime(),
+        lastModified: objectData.lastModified.getTime(),
+        contentType: mime.getType(objectData.key) || "application/octet-stream",
+        size: objectData.size,
+        perm: "rw",
+      };
+      console.log("Adding file metadata to KV", fileMeta.name);
+      entriesToAdd.push({
+        key: [fileMeta.name],
+        value: fileMeta,
+      });
+    }
+    return this.kv.batchSet(entriesToAdd);
+  }
+}
+
+// Stolen from https://github.com/aws/aws-sdk-js/blob/master/lib/util.js
+
+function uriEscapePath(string: string): string {
+  return string.split("/").map(uriEscape).join("/");
+}
+
+function uriEscape(string: string): string {
+  let output = encodeURIComponent(string);
+  output = output.replace(/[^A-Za-z0-9_.~\-%]+/g, escape);
+
+  // AWS percent-encodes some extra non-standard characters in a URI
+  output = output.replace(/[*]/g, function (ch) {
+    return "%" + ch.charCodeAt(0).toString(16).toUpperCase();
+  });
+
+  return output;
+}
+
+function encodePath(name: string): string {
+  return uriEscapePath(name);
+}
+function decodePath(encoded: string): string {
+  // AWS only returns &#39;, replace with '
+  return encoded.replaceAll("&#39;", "'");
+}
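(Editorial sketch, not part of the commit: wiring the S3 backend up directly, with the same environment variables the new storage_backend.ts uses; the object prefix is a made-up example. Note the meta KV store should be persistent in practice, otherwise syncFileList has to rebuild it on startup:)

import { MemoryKvPrimitives } from "../../plugos/lib/memory_kv_primitives.ts";
import { S3SpacePrimitives } from "./s3_space_primitives.ts";

const s3Space = new S3SpacePrimitives(
  new MemoryKvPrimitives(), // metadata store; use a persistent KvPrimitives in production
  ["meta"],
  "my-space/", // prefix within the bucket
  {
    accessKey: Deno.env.get("AWS_ACCESS_KEY_ID")!,
    secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
    endPoint: Deno.env.get("AWS_ENDPOINT")!,
    region: Deno.env.get("AWS_REGION")!,
    bucket: Deno.env.get("AWS_BUCKET")!,
  },
);
// Populate metadata for objects already in the bucket:
await s3Space.syncFileList();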
common/spaces/space_primitives.test.ts (new file, +69)

@@ -0,0 +1,69 @@
+import { assert, assertEquals } from "../../test_deps.ts";
+import { SpacePrimitives } from "./space_primitives.ts";
+
+export async function testSpacePrimitives(spacePrimitives: SpacePrimitives) {
+  const files = await spacePrimitives.fetchFileList();
+  assertEquals(files, []);
+  // Write text file
+  const fileMeta = await spacePrimitives.writeFile(
+    "test.txt",
+    stringToBytes("Hello World"),
+    false,
+    {
+      name: "test.txt",
+      perm: "rw",
+      created: 10,
+      contentType: "text/plain",
+      lastModified: 20,
+      size: 11,
+    },
+  );
+
+  const { data: retrievedData, meta: retrievedMeta } = await spacePrimitives
+    .readFile("test.txt");
+
+  assertEquals(retrievedData, stringToBytes("Hello World"));
+  // Check that the meta data is persisted
+  assertEquals(retrievedMeta.lastModified, 20);
+
+  const fbContent = (await spacePrimitives.readFile("test.txt"))
+    .data;
+  assertEquals(new TextDecoder().decode(fbContent), "Hello World");
+
+  assertEquals(await spacePrimitives.fetchFileList(), [fileMeta]);
+  const buf = new Uint8Array(1024 * 1024);
+  buf.set([1, 2, 3, 4, 5]);
+  // Write binary file
+  await spacePrimitives.writeFile("test.bin", buf);
+  const fMeta = await spacePrimitives.getFileMeta("test.bin");
+  assertEquals(fMeta.size, 1024 * 1024);
+  assertEquals((await spacePrimitives.fetchFileList()).length, 2);
+  // console.log(spacePrimitives);
+
+  await spacePrimitives.deleteFile("test.bin");
+  assertEquals(await spacePrimitives.fetchFileList(), [fileMeta]);
+
+  // Clean up
+  await spacePrimitives.deleteFile("test.txt");
+  assertEquals(await spacePrimitives.fetchFileList(), []);
+
+  // Test weird file names
+  await spacePrimitives.writeFile("test+'s.txt", stringToBytes("Hello world!"));
+  assertEquals(
+    stringToBytes("Hello world!"),
+    (await spacePrimitives.readFile("test+'s.txt")).data,
+  );
+  await spacePrimitives.deleteFile("test+'s.txt");
+
+  // Check deletion of weird file name
+  try {
+    await spacePrimitives.getFileMeta("test+'s.txt");
+    assert(false);
+  } catch (e: any) {
+    assertEquals(e.message, "Not found");
+  }
+}
+
+function stringToBytes(str: string): Uint8Array {
+  return new TextEncoder().encode(str);
+}
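(Editorial sketch, not part of the commit: this suite doubles as a conformance contract for backends, down to the exact "Not found" error message, so a future backend gets full coverage with a one-line test. MyNewSpacePrimitives below is hypothetical:)

import { testSpacePrimitives } from "./space_primitives.test.ts";
import { MyNewSpacePrimitives } from "./my_new_space_primitives.ts"; // hypothetical

Deno.test("my_new_space_primitives", async () => {
  await testSpacePrimitives(new MyNewSpacePrimitives());
});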
common/spaces/space_primitives.ts

@@ -1,21 +1,26 @@
 // export type FileEncoding = "utf8" | "arraybuffer" | "dataurl";
 // export type FileData = ArrayBuffer | string;

-import { FileMeta } from "$sb/types.ts";
+import type { FileMeta } from "$sb/types.ts";

+/**
+ * A generic interface used by `Space` to interact with the underlying storage, designed to be easy to implement for different storage backends
+ */
 export interface SpacePrimitives {
   // Returns a list of file meta data as well as the timestamp of this snapshot
   fetchFileList(): Promise<FileMeta[]>;

+  // The result of this should be consistent with the result of fetchFileList for this entry
+  getFileMeta(name: string): Promise<FileMeta>;
+
   readFile(
     name: string,
   ): Promise<{ data: Uint8Array; meta: FileMeta }>;
-  getFileMeta(name: string): Promise<FileMeta>;

   writeFile(
     name: string,
     data: Uint8Array,
     // Used to decide whether or not to emit change events
     selfUpdate?: boolean,
     // May be ignored, but ideally should be used to set the lastModified time
     meta?: FileMeta,
   ): Promise<FileMeta>;

   deleteFile(name: string): Promise<void>;
 }
plugos/lib/dynamodb_kv_primitives.ts (new file, +73)

@@ -0,0 +1,73 @@
+import { KV, KvKey } from "../../plug-api/types.ts";
+import { KvPrimitives, KvQueryOptions } from "./kv_primitives.ts";
+import {
+  createClient,
+  DynamoDBClient,
+} from "https://denopkg.com/chiefbiiko/dynamodb@master/mod.ts";
+
+export type AwsOptions = {
+  accessKey: string;
+  secretKey: string;
+  region: string;
+};
+
+const keySeparator = "\0";
+
+const batchReadSize = 100;
+
+/**
+ * Start of an implementation, to be continued at some point
+ */
+export class DynamoDBKvPrimitives implements KvPrimitives {
+  client: DynamoDBClient;
+  partitionKey: string;
+  tableName: string;
+
+  constructor(tableName: string, partitionKey: string, options: AwsOptions) {
+    this.tableName = tableName;
+    this.partitionKey = partitionKey;
+    this.client = createClient({
+      credentials: {
+        accessKeyId: options.accessKey,
+        secretAccessKey: options.secretKey,
+      },
+      region: options.region,
+    });
+  }
+
+  batchGet(keys: KvKey[]): Promise<any[]> {
+    const allResults: any[] = [];
+    const promises: Promise<any>[] = [];
+    for (let i = 0; i < keys.length; i += batchReadSize) {
+      const batch = keys.slice(i, i + batchReadSize);
+      promises.push(
+        this.client.batchGetItem(
+          {
+            RequestItems: {
+              [this.tableName]: {
+                Keys: batch.map((key) => ({
+                  pk: this.partitionKey,
+                  sk: key.join(keySeparator),
+                })),
+              },
+            },
+          },
+        ),
+      );
+    }
+    throw new Error("Method not implemented.");
+  }
+  batchSet(entries: KV[]): Promise<void> {
+    throw new Error("Method not implemented.");
+  }
+  batchDelete(keys: KvKey[]): Promise<void> {
+    throw new Error("Method not implemented.");
+  }
+  query(options: KvQueryOptions): AsyncIterableIterator<KV> {
+    throw new Error("Method not implemented.");
+  }
+  close(): void {
+    throw new Error("Method not implemented.");
+  }
+}
plugos/lib/memory_kv_primitives.ts

@@ -43,12 +43,16 @@ export class MemoryKvPrimitives implements KvPrimitives {
   }

   async *query(options: KvQueryOptions): AsyncIterableIterator<KV> {
-    const prefix = options.prefix?.join("/");
-    for (const [key, value] of this.store) {
+    const prefix = options.prefix?.join(memoryKeySeparator);
+    const sortedKeys = [...this.store.keys()].sort();
+    for (const key of sortedKeys) {
       if (prefix && !key.startsWith(prefix)) {
         continue;
       }
-      yield { key: key.split(memoryKeySeparator), value };
+      yield {
+        key: key.split(memoryKeySeparator),
+        value: this.store.get(key),
+      };
     }
   }
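(Editorial note, not part of the commit: besides fixing the prefix separator, iterating in sorted key order matters here because ChunkedKvStoreSpacePrimitives above reassembles file content in query() order, relying on zero-padded chunk ids sorting numerically:)

// Zero-padding makes lexicographic order equal numeric chunk order:
console.log(["010", "002", "000"].sort()); // ["000", "002", "010"]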
plugos/sandbox.ts

@@ -62,19 +62,23 @@ export class Sandbox<HookT> {
   }

   async onMessage(data: ControllerMessage) {
+    if (!this.worker) {
+      console.warn("Received message for terminated worker, ignoring");
+      return;
+    }
     switch (data.type) {
       case "sys":
         try {
           const result = await this.plug.syscall(data.name!, data.args!);

-          this.worker!.postMessage({
+          this.worker && this.worker!.postMessage({
             type: "sysr",
             id: data.id,
             result: result,
           } as WorkerMessage);
         } catch (e: any) {
           // console.error("Syscall fail", e);
-          this.worker!.postMessage({
+          this.worker && this.worker!.postMessage({
             type: "sysr",
             id: data.id,
             error: e.message,
plugos/system.ts

@@ -139,7 +139,6 @@ export class System<HookT> extends EventEmitter<SystemEvents<HookT>> {
   }

   unload(name: string) {
-    // console.log("Unloading", name);
     const plug = this.plugs.get(name);
     if (!plug) {
       return;
@@ -215,7 +215,16 @@ export async function applyPageTemplateCommand() {
   }
 }

-export async function loadPageObject(pageName: string): Promise<PageMeta> {
+export async function loadPageObject(pageName?: string): Promise<PageMeta> {
+  if (!pageName) {
+    return {
+      ref: "",
+      name: "",
+      tags: ["page"],
+      lastModified: "",
+      created: "",
+    } as PageMeta;
+  }
   return (await getObjectByRef<PageMeta>(
     pageName,
     "page",
server/crypto.ts

@@ -13,6 +13,7 @@ export class JWTIssuer {
   constructor(readonly kv: KvPrimitives) {
   }

+  // authString is only used to compare hashes to see if the auth has changed
   async init(authString: string) {
     const [secret] = await this.kv.batchGet([[jwtSecretKey]]);
     if (!secret) {
server/db_backend.ts

@@ -1,18 +1,17 @@
 import { DenoKvPrimitives } from "../plugos/lib/deno_kv_primitives.ts";
 import { KvPrimitives } from "../plugos/lib/kv_primitives.ts";
+import { MemoryKvPrimitives } from "../plugos/lib/memory_kv_primitives.ts";
 import { path } from "./deps.ts";

 /**
  * Environment variables:
- * - SB_DB_BACKEND: "denokv" or "off" (default: denokv)
- * - SB_KV_DB (denokv only): path to the database file (default .silverbullet.db) or ":cloud:" for cloud storage
+ * - SB_DB_BACKEND: "denokv" or "memory" (default: denokv)
+ * - SB_KV_DB (denokv only): path to the database file (default .silverbullet.db)
  */

 export async function determineDatabaseBackend(
   singleTenantFolder?: string,
-): Promise<
-  KvPrimitives | undefined
-> {
+): Promise<KvPrimitives> {
   const backendConfig = Deno.env.get("SB_DB_BACKEND") || "denokv";
   switch (backendConfig) {
     case "denokv": {
@@ -24,21 +23,19 @@ export async function determineDatabaseBackend(
         dbFile = path.resolve(singleTenantFolder, dbFile);
       }

-      if (dbFile === ":cloud:") {
+      if (Deno.env.get("DENO_DEPLOYMENT_ID") !== undefined) { // We're running in Deno Deploy
         dbFile = undefined; // Deno Deploy will use the default KV store
       }
       const denoDb = await Deno.openKv(dbFile);
       console.info(
-        `Using DenoKV as a database backend (${
-          dbFile || "cloud"
-        }), running in server-processing mode.`,
+        `Using DenoKV as a database backend (${dbFile || "cloud"}).`,
       );
       return new DenoKvPrimitives(denoDb);
     }
     default:
       console.info(
-        "Running in databaseless mode: no server-side indexing and state keeping (beyond space files) will happen.",
+        "Running in in-memory database mode: index data will be flushed on every restart. Not recommended, but to each their own.",
       );
-      return;
+      return new MemoryKvPrimitives();
   }
 }
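(Editorial sketch, not part of the commit: the upshot is that determineDatabaseBackend now always resolves to a KvPrimitives, so callers can drop their undefined branches. Env variable names as documented above; the folder argument below is a made-up example:)

import { determineDatabaseBackend } from "./server/db_backend.ts";

// SB_DB_BACKEND=denokv (default): DenoKV file at SB_KV_DB, or Deno Deploy's
// built-in KV when DENO_DEPLOYMENT_ID is set.
// SB_DB_BACKEND=memory (or any other value): in-memory, flushed on restart.
const kv = await determineDatabaseBackend("./my-space"); // hypothetical folder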
server/http_server.ts

@@ -23,7 +23,7 @@ export type ServerOptions = {
   port: number;
   clientAssetBundle: AssetBundle;
   plugAssetBundle: AssetBundle;
-  baseKvPrimitives?: KvPrimitives;
+  baseKvPrimitives: KvPrimitives;
   syncOnly: boolean;
   certFile?: string;
   keyFile?: string;
@@ -43,7 +43,7 @@ export class HttpServer {

   spaceServers = new Map<string, Promise<SpaceServer>>();
   syncOnly: boolean;
-  baseKvPrimitives?: KvPrimitives;
+  baseKvPrimitives: KvPrimitives;
   configs: Map<string, SpaceServerConfig>;

   constructor(options: ServerOptions) {
@@ -64,11 +64,10 @@ export class HttpServer {
       config,
       determineShellBackend(config.pagesPath),
       this.plugAssetBundle,
-      this.baseKvPrimitives
-        ? new PrefixedKvPrimitives(this.baseKvPrimitives, [
-          config.namespace,
-        ])
-        : undefined,
+      new PrefixedKvPrimitives(this.baseKvPrimitives, [
+        config.namespace,
+      ]),
       this.syncOnly,
     );
     await spaceServer.init();
@@ -140,7 +139,7 @@ export class HttpServer {
       return endpointHook.handleRequest(spaceServer.system!, context, next);
     });

-    this.addPasswordAuth(this.app);
+    this.addAuth(this.app);
     const fsRouter = this.addFsRoutes();
     this.app.use(fsRouter.routes());
     this.app.use(fsRouter.allowedMethods());
@@ -226,7 +225,7 @@ export class HttpServer {
     }
   }

-  private addPasswordAuth(app: Application) {
+  private addAuth(app: Application) {
     const excludedPaths = [
       "/manifest.json",
       "/favicon.png",
@@ -252,20 +251,9 @@ export class HttpServer {
       const values = await request.body({ type: "form" }).value;
       const username = values.get("username")!;
       const password = values.get("password")!;
-
-      const formCSRF = values.get("csrf");
-      const cookieCSRF = await cookies.get("csrf_token");
-
-      if (formCSRF !== cookieCSRF) {
-        response.redirect("/.auth?error=2");
-        console.log("CSRF mismatch", formCSRF, cookieCSRF);
-        return;
-      }
-
-      await cookies.delete("csrf_token");

       const spaceServer = await this.ensureSpaceServer(request);
-      const [expectedUser, expectedPassword] = spaceServer.auth!.split(":");
+      const { user: expectedUser, pass: expectedPassword } = spaceServer
+        .auth!;
       if (username === expectedUser && password === expectedPassword) {
         // Generate a JWT and set it as a cookie
         const jwt = await spaceServer.jwtIssuer.createJWT(
@@ -305,18 +293,35 @@ export class HttpServer {
       }
       const host = request.url.host;
       if (!excludedPaths.includes(request.url.pathname)) {
-        const authCookie = await cookies.get(authCookieName(host));
-        if (!authCookie) {
+        const authToken = await cookies.get(authCookieName(host));
+
+        if (!authToken && spaceServer.authToken) {
+          // Attempt Bearer Authorization based authentication
+          const authHeader = request.headers.get("Authorization");
+          if (authHeader && authHeader.startsWith("Bearer ")) {
+            const authToken = authHeader.slice("Bearer ".length);
+            if (authToken === spaceServer.authToken) {
+              // All good, let's proceed
+              return next();
+            } else {
+              console.log(
+                "Unauthorized token access, redirecting to auth page",
+              );
+              response.status = 401;
+              response.body = "Unauthorized";
+              return;
+            }
+          }
+        }
+        if (!authToken) {
           console.log("Unauthorized access, redirecting to auth page");
           return response.redirect("/.auth");
         }
-        const [expectedUser] = spaceServer.auth!.split(
-          ":",
-        );
+        const { user: expectedUser } = spaceServer.auth!;

         try {
           const verifiedJwt = await spaceServer.jwtIssuer.verifyAndDecodeJWT(
-            authCookie,
+            authToken,
           );
           if (verifiedJwt.username !== expectedUser) {
             throw new Error("Username mismatch");
@@ -329,7 +334,7 @@ export class HttpServer {
         return response.redirect("/.auth");
       }
     }
-    await next();
+    return next();
   });
 }
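(Editorial sketch, not part of the commit: the middleware now accepts either the JWT cookie set by the login form or, when SB_AUTH_TOKEN is configured, a bearer token, which is what sync clients send. The hostname below is made up:)

const resp = await fetch("https://notes.example.com/test.md", {
  headers: {
    "Authorization": `Bearer ${Deno.env.get("SB_AUTH_TOKEN")!}`,
    "X-Sync-Mode": "true", // same header HttpSpacePrimitives sets
  },
});
console.log(resp.status); // 200 on a matching token, 401 on a mismatched one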
server/instance.ts

@@ -5,7 +5,6 @@ import { SpacePrimitives } from "../common/spaces/space_primitives.ts";
 import { ensureSettingsAndIndex } from "../common/util.ts";
 import { AssetBundle } from "../plugos/asset_bundle/bundle.ts";
 import { KvPrimitives } from "../plugos/lib/kv_primitives.ts";
-import { MemoryKvPrimitives } from "../plugos/lib/memory_kv_primitives.ts";
 import { System } from "../plugos/system.ts";
 import { BuiltinSettings } from "../web/types.ts";
 import { JWTIssuer } from "./crypto.ts";
@@ -17,17 +16,21 @@ import { determineStorageBackend } from "./storage_backend.ts";
 export type SpaceServerConfig = {
   hostname: string;
   namespace: string;
-  auth?: string; // username:password
+  // Enable username/password auth
+  auth?: { user: string; pass: string };
+  // Additional API auth token
+  authToken?: string;
   pagesPath: string;
 };

 export class SpaceServer {
   public pagesPath: string;
-  auth?: string;
+  auth?: { user: string; pass: string };
+  authToken?: string;
   hostname: string;

   private settings?: BuiltinSettings;
-  spacePrimitives: SpacePrimitives;
+  spacePrimitives!: SpacePrimitives;

   jwtIssuer: JWTIssuer;

@@ -38,20 +41,24 @@ export class SpaceServer {
   constructor(
     config: SpaceServerConfig,
     public shellBackend: ShellBackend,
-    plugAssetBundle: AssetBundle,
-    private kvPrimitives?: KvPrimitives,
+    private plugAssetBundle: AssetBundle,
+    private kvPrimitives: KvPrimitives,
+    private syncOnly: boolean,
   ) {
     this.pagesPath = config.pagesPath;
     this.hostname = config.hostname;
     this.auth = config.auth;
-    this.jwtIssuer = new JWTIssuer(kvPrimitives || new MemoryKvPrimitives());
+    this.authToken = config.authToken;
+    this.jwtIssuer = new JWTIssuer(kvPrimitives);
+  }
+
+  async init() {
     let fileFilterFn: (s: string) => boolean = () => true;

     this.spacePrimitives = new FilteredSpacePrimitives(
       new AssetBundlePlugSpacePrimitives(
-        determineStorageBackend(this.pagesPath),
-        plugAssetBundle,
+        await determineStorageBackend(this.kvPrimitives, this.pagesPath),
+        this.plugAssetBundle,
       ),
       (meta) => fileFilterFn(meta.name),
       async () => {
@@ -65,25 +72,27 @@ export class SpaceServer {
     );

-    // system = undefined in databaseless mode (no PlugOS instance on the server and no DB)
-    if (kvPrimitives) {
+    if (!this.syncOnly) {
+      // Enable server-side processing
       const serverSystem = new ServerSystem(
         this.spacePrimitives,
-        kvPrimitives,
+        this.kvPrimitives,
       );
       this.serverSystem = serverSystem;
     }
-  }
-
-  async init() {
     if (this.auth) {
       // Initialize JWT issuer
-      await this.jwtIssuer.init(this.auth);
+      await this.jwtIssuer.init(
+        JSON.stringify({ auth: this.auth, authToken: this.authToken }),
+      );
     }

     if (this.serverSystem) {
       await this.serverSystem.init();
       this.system = this.serverSystem.system;
       // Swap in the space primitives from the server system
       this.spacePrimitives = this.serverSystem.spacePrimitives;
     }

     await this.reloadSettings();
server/server_system.ts

@@ -39,7 +39,7 @@ const plugNameExtractRegex = /\/(.+)\.plug\.js$/;

 export class ServerSystem {
   system!: System<SilverBulletHooks>;
-  spacePrimitives!: SpacePrimitives;
+  public spacePrimitives!: SpacePrimitives;
   // denoKv!: Deno.Kv;
   listInterval?: number;
   ds!: DataStore;
server/spaces/s3_space_primitives.test.ts (deleted)

@@ -1,38 +0,0 @@
-import { S3SpacePrimitives } from "./s3_space_primitives.ts";
-import { assert, assertEquals } from "../../test_deps.ts";
-
-Deno.test("s3_space_primitives", async () => {
-  return;
-  const options = {
-    accessKey: Deno.env.get("AWS_ACCESS_KEY_ID")!,
-    secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
-    endPoint: "s3.eu-central-1.amazonaws.com",
-    region: "eu-central-1",
-    bucket: "zef-sb-space",
-    prefix: "test",
-  };
-
-  const primitives = new S3SpacePrimitives(options);
-  console.log(await primitives.fetchFileList());
-  console.log(
-    await primitives.writeFile("test+'s.txt", stringToBytes("Hello world!")),
-  );
-  assertEquals(
-    stringToBytes("Hello world!"),
-    (await primitives.readFile("test+'s.txt")).data,
-  );
-  await primitives.deleteFile("test+'s.txt");
-
-  try {
-    await primitives.getFileMeta("test+'s.txt");
-    assert(false);
-  } catch (e: any) {
-    assertEquals(e.message, "Not found");
-  }
-
-  // console.log(await primitives.readFile("SETTINGS.md", "utf8"));
-});
-
-function stringToBytes(str: string): Uint8Array {
-  return new TextEncoder().encode(str);
-}
server/spaces/s3_space_primitives.ts (deleted)

@@ -1,128 +0,0 @@
-// We're explicitly using 0.4.0 to be able to hijack the path encoding, which is inconsistently broken in 0.5.0
-import { S3Client } from "https://deno.land/x/s3_lite_client@0.4.0/mod.ts";
-import type { ClientOptions } from "https://deno.land/x/s3_lite_client@0.4.0/client.ts";
-import { SpacePrimitives } from "../../common/spaces/space_primitives.ts";
-import { mime } from "../deps.ts";
-import { FileMeta } from "$sb/types.ts";
-
-// TODO: IMPORTANT: This needs a different way to keep meta data (last modified and created dates)
-
-export type S3SpacePrimitivesOptions = ClientOptions & { prefix: string };
-
-export class S3SpacePrimitives implements SpacePrimitives {
-  client: S3Client;
-  prefix: string;
-  constructor(options: S3SpacePrimitivesOptions) {
-    this.client = new S3Client(options);
-    // TODO: Use this
-    this.prefix = options.prefix;
-  }
-
-  private encodePath(name: string): string {
-    return uriEscapePath(name);
-  }
-
-  private decodePath(encoded: string): string {
-    // AWS only returns &#39;, replace with '
-    return encoded.replaceAll("&#39;", "'");
-  }
-
-  async fetchFileList(): Promise<FileMeta[]> {
-    const allFiles: FileMeta[] = [];
-
-    for await (const obj of this.client.listObjects({ prefix: "" })) {
-      allFiles.push({
-        name: this.decodePath(obj.key),
-        perm: "rw",
-        created: 0,
-        lastModified: obj.lastModified.getTime(),
-        contentType: mime.getType(obj.key) || "application/octet-stream",
-        size: obj.size,
-      });
-    }
-    return allFiles;
-  }
-
-  async readFile(
-    name: string,
-  ): Promise<{ data: Uint8Array; meta: FileMeta }> {
-    try {
-      // console.log("Fetching object", encodeURI(name));
-      const obj = await this.client.getObject(this.encodePath(name));
-
-      const contentType = mime.getType(name) || "application/octet-stream";
-      const meta: FileMeta = {
-        name,
-        perm: "rw",
-        created: 0,
-        lastModified: new Date(obj.headers.get("Last-Modified")!).getTime(),
-        contentType,
-        size: parseInt(obj.headers.get("Content-Length")!),
-      };
-
-      return {
-        data: new Uint8Array(await obj.arrayBuffer()),
-        meta,
-      };
-    } catch (e: any) {
-      console.log("Got error", e.message);
-      if (e.message.includes("does not exist")) {
-        throw new Error(`Not found`);
-      }
-      throw e;
-    }
-  }
-  async getFileMeta(name: string): Promise<FileMeta> {
-    try {
-      const stat = await this.client.statObject(this.encodePath(name));
-      return {
-        name,
-        perm: "rw",
-        // TODO: Created is not accurate
-        created: 0,
-        lastModified: new Date(stat.lastModified).getTime(),
-        size: stat.size,
-        contentType: mime.getType(name) || "application/octet-stream",
-      };
-    } catch (e: any) {
-      if (e.message.includes("404")) {
-        throw new Error(`Not found`);
-      }
-      throw e;
-    }
-  }
-  async writeFile(
-    name: string,
-    data: Uint8Array,
-  ): Promise<FileMeta> {
-    if (data.byteLength === 0) {
-      // S3 doesn't like empty files, so we'll put a space in it. Not ideal, but it works. I hope.
-      data = new TextEncoder().encode(" ");
-    }
-    await this.client.putObject(this.encodePath(name), data);
-    // TODO: Dangerous due to eventual consistency? maybe check with etag or versionid?
-    return this.getFileMeta(name);
-  }
-  async deleteFile(name: string): Promise<void> {
-    await this.client.deleteObject(this.encodePath(name));
-  }
-}
-
-// Stolen from https://github.com/aws/aws-sdk-js/blob/master/lib/util.js
-
-export function uriEscapePath(string: string): string {
-  return string.split("/").map(uriEscape).join("/");
-}
-
-function uriEscape(string: string): string {
-  let output = encodeURIComponent(string);
-  output = output.replace(/[^A-Za-z0-9_.~\-%]+/g, escape);
-
-  // AWS percent-encodes some extra non-standard characters in a URI
-  output = output.replace(/[*]/g, function (ch) {
-    return "%" + ch.charCodeAt(0).toString(16).toUpperCase();
-  });
-
-  return output;
-}
server/storage_backend.ts

@@ -1,19 +1,52 @@
 import { DiskSpacePrimitives } from "../common/spaces/disk_space_primitives.ts";
 import { SpacePrimitives } from "../common/spaces/space_primitives.ts";
 import { path } from "./deps.ts";
-import { S3SpacePrimitives } from "./spaces/s3_space_primitives.ts";
+import { S3SpacePrimitives } from "../common/spaces/s3_space_primitives.ts";
+import { KvPrimitives } from "../plugos/lib/kv_primitives.ts";
+import { ChunkedKvStoreSpacePrimitives } from "../common/spaces/chunked_datastore_space_primitives.ts";
+import { HttpSpacePrimitives } from "../common/spaces/http_space_primitives.ts";

-export function determineStorageBackend(folder: string): SpacePrimitives {
-  if (folder === "s3://") {
+export async function determineStorageBackend(
+  kvPrimitives: KvPrimitives,
+  folder: string,
+): Promise<SpacePrimitives> {
+  if (folder.startsWith("s3://")) {
     console.info("Using S3 as a storage backend");
-    return new S3SpacePrimitives({
-      accessKey: Deno.env.get("AWS_ACCESS_KEY_ID")!,
-      secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
-      endPoint: Deno.env.get("AWS_ENDPOINT")!,
-      region: Deno.env.get("AWS_REGION")!,
-      bucket: Deno.env.get("AWS_BUCKET")!,
-      prefix: folder.slice(5),
-    });
+    let objectPrefix = folder.slice("s3://".length);
+    if (objectPrefix !== "") {
+      // Add a suffix /
+      objectPrefix += "/";
+    }
+    const spacePrimitives = new S3SpacePrimitives(
+      kvPrimitives,
+      ["meta"],
+      objectPrefix,
+      {
+        accessKey: Deno.env.get("AWS_ACCESS_KEY_ID")!,
+        secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
+        endPoint: Deno.env.get("AWS_ENDPOINT")!,
+        region: Deno.env.get("AWS_REGION")!,
+        bucket: Deno.env.get("AWS_BUCKET")!,
+      },
+    );
+    if (Deno.env.get("SB_S3_PERFORM_SYNC") === "true") {
+      console.log("Performing S3 file list sync");
+      await spacePrimitives.syncFileList();
+      console.info("S3 file list sync complete");
+    }
+    return spacePrimitives;
+  } else if (folder === "db://") {
+    console.info(`Using the database as a storage backend`);
+    return new ChunkedKvStoreSpacePrimitives(
+      kvPrimitives,
+      65536, // For DenoKV, this is the maximum size of a single value
+    );
+  } else if (folder.startsWith("http://") || folder.startsWith("https://")) {
+    return new HttpSpacePrimitives(
+      folder,
+      undefined,
+      Deno.env.get("SB_AUTH_TOKEN"),
+    );
   } else {
     folder = path.resolve(Deno.cwd(), folder);
     console.info(`Using local disk as a storage backend: ${folder}`);
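(Editorial sketch, not part of the commit: pairing the two backend resolvers end to end, which is roughly what `silverbullet serve db://` now does internally. Import paths assume running from the repo root:)

import { determineDatabaseBackend } from "./server/db_backend.ts";
import { determineStorageBackend } from "./server/storage_backend.ts";

const kv = await determineDatabaseBackend();
const space = await determineStorageBackend(kv, "db://");
await space.writeFile("index.md", new TextEncoder().encode("# Hello"));
console.log(await space.fetchFileList());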
silverbullet.ts

@@ -8,6 +8,7 @@ import { versionCommand } from "./cmd/version.ts";
 import { serveCommand } from "./cmd/server.ts";
 import { plugCompileCommand } from "./cmd/plug_compile.ts";
 import { plugRunCommand } from "./cmd/plug_run.ts";
+import { syncCommand } from "./cmd/sync.ts";

 await new Command()
   .name("silverbullet")
@@ -28,10 +29,6 @@ await new Command()
     "--user <user:string>",
     "'username:password' combo for authentication",
   )
-  .option(
-    "--auth <auth.json:string>",
-    "User authentication file to use for authentication",
-  )
   .option(
     "--cert <certFile:string>",
     "Path to TLS certificate",
@@ -83,6 +80,18 @@ await new Command()
   // upgrade
   .command("upgrade", "Upgrade SilverBullet")
   .action(upgradeCommand)
+  // sync
+  .command("sync", "Synchronize two spaces")
+  .option(
+    "--snapshot <snapshot:string>",
+    "Path to state file to use",
+  )
+  .option(
+    "--wipe-secondary",
+    "Wipe secondary and perform a full sync",
+  )
+  .arguments("<primary:string> <secondary:string>")
+  .action(syncCommand)
   // version
   .command("version", "Get current version")
   .action(versionCommand)
@ -58,7 +58,6 @@
      <h1>Login to <img src="/.client/logo.png" style="height: 1ch;" /> SilverBullet</h1>
    </header>
    <form action="/.auth" method="POST" id="login">
      <input type="hidden" name="csrf" value="" />
      <div class="error-message"></div>
      <div>
        <input type="text" name="username" id="username" autocomplete="off" autocorrect="off" autocapitalize="off"
@ -80,42 +79,7 @@
      const error = params.get('error');
      if (error === "1") {
        document.querySelector('.error-message').innerText = "Invalid username or password";
      } else if (error === "2") {
        document.querySelector('.error-message').innerText = "Invalid CSRF token";
      }

      // Generate CSRF token
      const csrf = generateCSRFToken();

      // Inject CSRF token in form
      document.querySelector('input[name="csrf"]').value = csrf;

      function generateRandomString(length) {
        const characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
        let result = '';
        for (let i = 0; i < length; i++) {
          result += characters.charAt(Math.floor(Math.random() * characters.length));
        }
        return result;
      }

      function generateCSRFToken() {
        // Generate random strings
        const randomPart1 = generateRandomString(16);
        const randomPart2 = generateRandomString(16);

        // Create a timestamp for uniqueness
        const timestamp = new Date().getTime();

        // Combine random strings and timestamp
        const csrfToken = randomPart1 + timestamp + randomPart2;

        // Set cookie
        document.cookie = `csrf_token=${csrfToken}; SameSite=Lax; Secure`;

        return csrfToken;
      }

    </script>
  </body>
@ -812,16 +812,17 @@ export class Client {
      }
    }

    this.ui.viewDispatch({
      type: "page-loaded",
      meta: doc.meta,
    });

    const editorState = createEditorState(
      this,
      pageName,
      doc.text,
      doc.meta.perm === "ro",
    );
    this.ui.viewDispatch({
      type: "page-loaded",
      meta: doc.meta,
    });
    editorView.setState(editorState);
    if (editorView.contentDOM) {
      this.tweakEditorDOM(editorView.contentDOM);
22 web/space.ts
@ -105,7 +105,7 @@ export class Space {
  }

  async listPlugs(): Promise<FileMeta[]> {
    const files = await this.spacePrimitives.fetchFileList();
    const files = await this.deduplicatedFileList();
    return files
      .filter((fileMeta) =>
        fileMeta.name.startsWith(plugPrefix) &&
@ -152,19 +152,35 @@ export class Space {
  }

  async fetchPageList(): Promise<PageMeta[]> {
    return (await this.spacePrimitives.fetchFileList())
    return (await this.deduplicatedFileList())
      .filter(this.isListedPage)
      .map(fileMetaToPageMeta);
  }

  async fetchAttachmentList(): Promise<AttachmentMeta[]> {
    return (await this.spacePrimitives.fetchFileList()).filter(
    return (await this.deduplicatedFileList()).filter(
      (fileMeta) =>
        !this.isListedPage(fileMeta) &&
        !fileMeta.name.endsWith(".plug.js"),
    );
  }

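  // Collapse duplicate file names in the listing, keeping the most
  // recently modified entry for each name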
  async deduplicatedFileList(): Promise<FileMeta[]> {
    const files = await this.spacePrimitives.fetchFileList();
    const fileMap = new Map<string, FileMeta>();
    for (const file of files) {
      if (fileMap.has(file.name)) {
        const existing = fileMap.get(file.name)!;
        if (existing.lastModified < file.lastModified) {
          fileMap.set(file.name, file);
        }
      } else {
        fileMap.set(file.name, file);
      }
    }
    return [...fileMap.values()];
  }

  /**
   * Reads an attachment
   * @param name path of the attachment
@ -4,14 +4,17 @@ release.
---

## Next
* Removed built-in multi-user [[Authentication]], `SB_AUTH` is no longer supported, use `--user` or `SB_USER` instead, or an authentication layer such as [[Authelia]]
* Removed built-in multi-user [[Authentication]]. `SB_AUTH` is no longer supported; use `--user` or `SB_USER` instead, or an authentication layer such as [[Authelia]].
* Work on supporting multiple database back-ends as well as storage back-ends, reviving [[Install/Deno Deploy]] support.
  * This is now documented on the brand new [[Install/Configuration]] page.
* A new `silverbullet sync` command to [[Sync]] spaces.
* Technical refactoring in preparation for multi-tenant deployment support (allowing you to run a single SB instance and serve multiple spaces and users at the same time).
* Lazy everything: plugs are now lazily loaded (after a first load, manifests are cached). On the server side, a whole lot of infrastructure is now only booted once the first HTTP request comes in.

---

## 0.5.8
* Various bugfixes, primarily related to the new way of running docker containers, which broke things for some people. Be sure to have a look at the new [[Install/Local$env|environment variable]] configuration options
* Various bugfixes, primarily related to the new way of running docker containers, which broke things for some people. Be sure to have a look at the new [[Install/Configuration]] options

---

@ -3,3 +3,4 @@ There’s a progressive path in how people tend to install and deploy SilverBull
Instructions:
* [[Install/Local]]: how to set up SilverBullet on your local machine
* [[Install/Network and Internet]]: how to set up SilverBullet and expose it to your network or via the Internet
* [[Install/Configuration]] of your instance
97 website/Install/Configuration.md Normal file
@ -0,0 +1,97 @@
SilverBullet is primarily configured via environment variables. This page gives a comprehensive overview of all configuration options. You can set these ad hoc when running the SilverBullet server, or e.g. in your [[Install/Local$docker|docker-compose file]].

# Network
$network

* `SB_HOSTNAME`: Set to the hostname to bind to (defaults to `127.0.0.1`, set to `0.0.0.0` to accept outside connections for the local deno setup, defaults to `0.0.0.0` for docker)
* `SB_PORT`: Sets the port to listen to, e.g. `SB_PORT=1234`, default is `3000`

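For example, to set these ad hoc on the command line (a sketch; the space path `~/space` is made up):

```shell
SB_HOSTNAME=0.0.0.0 SB_PORT=4000 silverbullet ~/space
```
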
# Run mode
$runmode

* `SB_SYNC_ONLY`: If you want to run SilverBullet in a mode where the server purely functions as a simple file store, and doesn’t index or process content on the server, you can do so by setting this environment variable to `true`. As a result, the client will always run in the Sync [[Client Modes|client mode]].

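A sync-only server can then be started like this (again a sketch, with an assumed space path):

```shell
SB_SYNC_ONLY=true silverbullet ~/space
```
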
# Security
$security

SilverBullet enables plugs to run shell commands. This is used by e.g. the [[🔌 Git]] plug to perform git commands. This is potentially unsafe. If you don’t need this, you can disable this functionality:

* `SB_SHELL_BACKEND`: Enable/disable running of shell commands from plugs, defaults to `local` (enabled), set to `off` to disable. Only enabled when using a local folder for [[$storage]].

# Authentication
$authentication
SilverBullet supports basic authentication for a single user.

* `SB_USER`: Sets single-user credentials, e.g. `SB_USER=pete:1234` allows you to log in with username “pete” and password “1234”.
* `SB_AUTH_TOKEN`: Enables `Authorization: Bearer <token>` style authentication on the [[API]] (useful for [[Sync]] and remote HTTP storage back-ends).

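A client can then authenticate against the [[API]] by passing the token as a header, for example (illustrative only; the host, token, and file path are made up):

```shell
curl -H "Authorization: Bearer mytoken" https://notes.example.com/index.md
```
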
# Storage
$storage
SilverBullet supports multiple storage back-ends for keeping your [[Space]] content.

## Disk storage
This is the default and simplest back-end to use: a folder on disk. It is configured as follows:

* `SB_FOLDER`: Sets the folder to expose. In the docker container this defaults to `/space`.

## AWS S3 bucket storage
It is also possible to use an S3 bucket as storage. For this, you need to create a bucket, create an IAM user, and configure access to it appropriately.

Since S3 doesn’t support an efficient way to store custom metadata, this mode does require a [[$database]] configuration (see below) to keep all file metadata.

S3 is configured as follows:

* `SB_FOLDER`: Set to `s3://prefix`. `prefix` can be empty, but if set, this will prefix all files with `prefix/` to support multiple spaces being connected to a single bucket.
* `AWS_ACCESS_KEY_ID`: an AWS access key with read/write permissions to the S3 bucket
* `AWS_SECRET_ACCESS_KEY`: an AWS secret access key with read/write permissions to the S3 bucket
* `AWS_BUCKET`: the name of the S3 bucket to use (e.g. `my-sb-bucket`)
* `AWS_ENDPOINT`: e.g. `s3.eu-central-1.amazonaws.com`
* `AWS_REGION`: e.g. `eu-central-1`

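Put together, an S3-backed server could be launched like this (a sketch; every value below is a placeholder to replace with your own):

```shell
SB_FOLDER=s3://mynotes \
AWS_ACCESS_KEY_ID=AKIAXXXXXXXX \
AWS_SECRET_ACCESS_KEY=secretsecret \
AWS_BUCKET=my-sb-bucket \
AWS_ENDPOINT=s3.eu-central-1.amazonaws.com \
AWS_REGION=eu-central-1 \
silverbullet
```
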
## Database storage
It is also possible to store space content in the [[$database]]. While not necessarily recommended, it is a viable way to set up a simple deployment of SilverBullet on e.g. [[Install/Deno Deploy]]. Large files will automatically be chunked to avoid any limits the underlying database may have on value size.

This mode is configured as follows:

* `SB_FOLDER`: set to `db://`

The database configured via [[$database]] will be used.

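A minimal database-backed setup might then look like this (a sketch, relying on the default Deno KV back-end described under [[$database]] below):

```shell
SB_FOLDER=db:// silverbullet
```
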
## HTTP storage
While not particularly useful stand-alone (it exists primarily for [[Sync]]), it is possible to store space content on _another_ SilverBullet installation via its [[API]].

This mode is configured as follows:

* `SB_FOLDER`: set to the URL of the other SilverBullet server, e.g. `https://mynotes.mydomain.com`
* `SB_AUTH_TOKEN`: matching the authorization token (configured via [[$authentication]] on the other end) to use for authorization.

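For example (a sketch; the token is made up):

```shell
SB_FOLDER=https://mynotes.mydomain.com SB_AUTH_TOKEN=mytoken silverbullet
```
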
# Database
$database
SilverBullet requires a database back-end to (potentially) keep various types of data:

* Indexes for e.g. [[Objects]]
* Storing some encryption-related secrets (for [[Authentication]])
* Space content, when the “Database storage” back-end is used

Currently, only two databases are supported: [Deno KV](https://deno.com/kv) and a dummy in-memory database.

## Deno KV database
When self-hosting SilverBullet (that is: anywhere other than [[Install/Deno Deploy]]), KV uses a local SQLite file to keep data. This is efficient and performant.

KV can be configured as follows:

* `SB_DB_BACKEND`: `denokv` (default, so can be omitted)
* `SB_KV_DB`: path to the file name of the (SQLite) database to store data in, defaults to `.silverbullet.db` in the space’s folder (when kept on disk).

When SilverBullet runs on [[Install/Deno Deploy]] it automatically uses its cloud implementation of KV.

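For instance, to keep the KV database at an explicit location outside the space folder (hypothetical path):

```shell
SB_KV_DB=/data/silverbullet.db silverbullet ~/space
```
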
## Memory database
The in-memory database is only useful for testing.

* `SB_DB_BACKEND`: `memory`

# Docker
Configuration only relevant to docker deployments:

* `PUID`: Runs the server process with the specified UID (default: whatever user owns the `/space` mapped folder)
* `GUID`: Runs the server process with the specified GID (default: whatever group owns the `/space` mapped folder)
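For example, a docker invocation using these variables might look like this (a sketch: the image name `zefhemel/silverbullet`, the UID/GID values, and the paths are assumptions):

```shell
docker run -p 3000:3000 -e PUID=1000 -e GUID=1000 -v ./space:/space zefhemel/silverbullet
```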
@ -1,33 +1,34 @@
**Note:** This is a highly experimental setup, take this into account.
> **warning** Experimental
> This setup is not battle tested, use at your own risk

You can deploy SilverBullet to [Deno Deploy](https://deno.com/deploy) for free, and store your data (space) in an S3 bucket.
You can deploy SilverBullet to [Deno Deploy](https://deno.com/deploy) for free, and store space content in [Deno KV](https://deno.com/kv).

This guide assumes you know how to set up the S3 bucket part and get appropriate IAM keys and secrets to access it.
# Steps
Sign up for a (free) [Deno Deploy account](https://dash.deno.com/projects) and “Create an empty project” there.

For the Deno Deploy side:
Jump to “Settings”, give your project a nicer name, and configure the following environment variables:

Sign up for a (free) [Deno Deploy account](https://dash.deno.com/projects) and create a project there.
* `SB_FOLDER`: `db://`
* `SB_PORT`: `8000`
* `SB_SYNC_ONLY`: `1` (Deno Deploy does not currently support Workers, so running indexing etc. on the server will not work)
* `SB_USER`: (e.g. `pete:letmein`) — this is **super important**, otherwise your space will be open to anybody without any authentication
* `SB_AUTH_TOKEN`: (Optional) If you would like to migrate existing content from elsewhere (e.g. a local folder) using [[Sync]], you will want to configure an authentication token here (pick something secure).

Set these environment variables in the project:
Make sure you have [installed Deno locally](https://docs.deno.com/runtime/manual/getting_started/installation) on your machine.

* AWS_ACCESS_KEY_ID
* AWS_SECRET_ACCESS_KEY
* AWS_BUCKET (e.g. `my-sb-bucket`)
* AWS_ENDPOINT (e.g. `s3.eu-central-1.amazonaws.com`)
* AWS_REGION (e.g. `eu-central-1`)
* SB_FOLDER (should be `s3://`)
* SB_PORT (should be `8000`)
* SB_USER (e.g. `pete:letmein`) — this is **super important** otherwise your space will be open without any authentication

In your local environment set `DENO_DEPLOY_TOKEN` to your account’s [deploy token](https://dash.deno.com/account#access-tokens).

Install [deployctl](https://deno.com/deploy/docs/deployctl).

Then run:
Then, install `deployctl` via:

```shell

deployctl deploy --prod --include= -p your-project https://silverbullet.md/silverbullet.js
$ deno install -Arf https://deno.land/x/deploy/deployctl.ts
```

And that’s it!
To deploy, run:

```shell
deployctl deploy -p=your-project --entrypoint=https://silverbullet.md/silverbullet.js --include= --prod
```

# Migrating and backing up content
If you want to migrate content _from_ or _to_ your new Deploy-based space, you can use [[Sync]]. For this, be sure to also configure a `SB_AUTH_TOKEN` variable.

For backup purposes, it may be wise to synchronize your content regularly this way.
@ -7,6 +7,8 @@ You have two options here:
1. Installation via [[$deno|Deno]] (the awesome JavaScript runtime)
2. Installation via [[$docker|Docker]] (the awesome container runtime)

After choosing either, be sure to check out all [[Install/Configuration]] options as well.

# Installing using Deno
$deno
This consists of two steps (unless [Deno](https://deno.com/) is already installed — in which case we’re down to one):
@ -126,19 +128,3 @@ docker-compose logs -f

## Building the docker image
To build your own version of the docker image, run `./scripts/build_docker.sh`.

# Configuration
SilverBullet is partially configured via flags (run it with `--help`) or alternatively via environment variables and partially via a [[SETTINGS]] page in your space.

## Environment variables
$env
You can configure SB with environment variables instead of flags, which is probably what you want to do in a docker setup. The following environment variables are supported:

* `PID`: Runs the server process with the specified UID (default: whatever user owns the `/space` mapped folder)
* `GID`: Runs the server process with the specified GID (default: whatever group owns the `/space` mapped folder)
* `SB_USER`: Sets single-user credentials (like `--user`), e.g. `SB_USER=pete:1234`
* `SB_HOSTNAME`: Set to the hostname to bind to (defaults to `127.0.0.0`, set to `0.0.0.0` to accept outside connections)
* `SB_PORT`: Sets the port to listen to, e.g. `SB_PORT=1234`
* `SB_FOLDER`: Sets the folder to expose, e.g. `SB_FOLDER=/space`
* `SB_SHELL_BACKEND`: Enable/disable running of shell commands from plugs, defaults to "local" (enabled), set to "off" to disable
* `SB_SYNC_ONLY`: Runs the server in a "dumb" space store-only mode (not indexing content or keeping other state), e.g. `SB_SYNC_ONLY=1`. This will disable the Online [[Client Modes]] altogether (and not even show the sync icon in the top bar). Conceptually, [silverbullet.md](https://silverbullet.md) runs in this mode.
@ -31,16 +31,9 @@ spaceIgnore: |
# Plug overrides allow you to override any property in a plug manifest at runtime
# The primary use case of this is to override or define keyboard shortcuts. You can use the . notation, to quickly "dive deep" into the structure
plugOverrides:
  core:
  editor:
    # Matching this YAML structure:
    # https://github.com/silverbulletmd/silverbullet/blob/main/plugs/editor/editor.plug.yaml
    # and overriding the "key" for centering the cursor
    functions.centerCursor.command.key: Ctrl-Alt-p
    # However, it's even possible to define custom slash commands this way without building a plug (/today-header in this case):
    functions.todayHeader:
      redirect: insertTemplateText
      slashCommand:
        name: today-header
        value: |
          ## {{today}}
```
@ -2,12 +2,12 @@ A _space_ is SilverBullet terminology for a workspace, or project. [Obsidian](ht

Feel free to back up or manipulate your space’s folder and its files with whatever tool you like — you don’t have to use SilverBullet exclusively. You may want to turn your space’s folder into a git repository, for instance, and do version control and back-ups that way — in which case you may appreciate the [[🔌 Git]] plug.

## Pages
# Pages
All pages in your space are stored as files with a `.md` file extension. While _folders_ are not really a first-class concept in SilverBullet, you can create pages with a `/` in their name, which will (under the hood) automatically create a folder structure to match it. So creating a page named `people/John` will ensure a `people` folder in your space’s root, and put a `John.md` file in it.

## Attachments
# Attachments
Attachments are kept alongside the rest of your files with whatever file extension they have (e.g. `.jpg` for images, or `.pdf` for PDFs).

## Folder Lay-out
# Folder lay-out
Every space in SilverBullet at the very least has an index page (by default named `index.md`) and a SETTINGS page (named `SETTINGS.md`). If you install custom [[🔌 Plugs]], these will be kept under `_plug` in your space’s folder.

41 website/Sync.md Normal file
@ -0,0 +1,41 @@
The SilverBullet CLI has a `sync` command that can be used to synchronize local as well as remote [[Space|spaces]]. This can be useful when migrating between different [[Install/Configuration$storage|storage implementations]]. It can also be used to back up content elsewhere. Under the hood, this sync mechanism uses the exact same sync engine used for the Sync [[Client Modes]].

# Use cases
* **Migration**: you hosted SilverBullet on your local device until now, but have since set up an instance via [[Install/Deno Deploy]] and want to migrate your content there.
* **Backup**: you host SilverBullet on a remote server, but would like to make backups elsewhere from time to time.

# Setup
To use `silverbullet sync` you need a [[Install/Local$deno|local Deno installation of SilverBullet]].

# General use
To perform a sync between two locations:

```shell
silverbullet sync --snapshot snapshot.json <primaryPath> <secondaryPath>
```

Where both `primaryPath` and `secondaryPath` can use any [[Install/Configuration$storage]] configuration.

The `--snapshot` argument is optional; when set, it will read/write a snapshot to the given location. This snapshot will be used to speed up future synchronizations.

To synchronize two local folders named `testspace1` and `testspace2` (not particularly useful, since you may as well use `cp` or `rsync`):

```shell
silverbullet sync --snapshot snapshot.json testspace1 testspace2
```

# Migrate
To synchronize a local folder (the current directory `.`) to a remote server (located at `https://notes.myserver.com`) for which you have set up an [[Install/Configuration$authentication|auth token]] of `1234` via the `SB_AUTH_TOKEN` environment variable:

```shell
SB_AUTH_TOKEN=1234 silverbullet sync . https://notes.myserver.com
```

If you want to perform a “wipe sync”, wiping the destination (secondary) before uploading all files from the primary path there, you can use the `--wipe-secondary` flag. You will be asked for confirmation:

```shell
SB_AUTH_TOKEN=1234 silverbullet sync --wipe-secondary . https://notes.myserver.com
```

# Backup
To perform a backup, you may simply run the `sync` commands mentioned above regularly. Be sure to always specify the `--snapshot` flag in this case, and be sure to actually back up your local copy, e.g. using git.
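
For instance, a scheduled backup could be driven by cron (a sketch; the schedule, paths, and token are hypothetical, and cron may need a full path to the `silverbullet` binary):

```shell
# Hypothetical crontab entry: pull the remote space into ~/backup-space every hour
0 * * * * SB_AUTH_TOKEN=1234 silverbullet sync --snapshot ~/backup-space/snapshot.json https://notes.myserver.com ~/backup-space
```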