1
0

Migrate to Deno (#86)

Big bang migration to Deno 🤯
This commit is contained in:
Zef Hemel 2022-10-10 14:50:21 +02:00 committed by GitHub
parent 78f83c70d8
commit 561aa6891f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
287 changed files with 4577 additions and 25087 deletions

12
.gitignore vendored
View File

@ -1,14 +1,10 @@
pages
test_space
.DS_Store
node_modules
.parcel-cache
dist_bundle
dist
build
generated
.yarnrc.yml
*.test.js
*.js.map
.vscode
website_build
data.db
/index.json
/index.json
.idea

9
.gitpod.Dockerfile vendored
View File

@ -1,7 +1,6 @@
FROM gitpod/workspace-full:latest
RUN bash -c 'VERSION="18" \
&& source $HOME/.nvm/nvm.sh && nvm install $VERSION \
&& nvm use $VERSION && nvm alias default $VERSION'
RUN echo "nvm use default &>/dev/null" >> ~/.bashrc.d/51-nvm-fix
RUN curl -fsSL https://deno.land/x/install/install.sh | sh
RUN /home/gitpod/.deno/bin/deno completions bash > /home/gitpod/.bashrc.d/90-deno && \
echo 'export DENO_INSTALL="/home/gitpod/.deno"' >> /home/gitpod/.bashrc.d/90-deno && \
echo 'export PATH="$DENO_INSTALL/bin:$PATH"' >> /home/gitpod/.bashrc.d/90-deno

View File

@ -1,3 +1,4 @@
image:
file: .gitpod.Dockerfile
@ -15,17 +16,21 @@ github:
tasks:
- name: Setup
init: |
npm install
npm run clean-build
deno task install
deno task build
gp sync-done setup
- name: Run Silverbullet server
- name: Server watcher
init: |
gp sync-await setup
mkdir pages
command: npm run server -- ./pages
- name: Run ParcelJS
command: deno task watch-server -- pages
- name: Web watcher
init: gp sync-await setup
command: npm run watch
- name: Build plugins
command: deno task watch-web
- name: Plug watcher
init: gp sync-await setup
command: npm run plugs
command: deno task watch-plugs
vscode:
extensions:
- denoland.vscode-deno

1
.nvmrc
View File

@ -1 +0,0 @@
18

View File

@ -1,6 +0,0 @@
{
"extends": "@parcel/config-default",
"validators": {
"*.{ts,tsx}": ["@parcel/validator-typescript"]
}
}

7
.vscode/settings.json vendored Normal file
View File

@ -0,0 +1,7 @@
{
"editor.formatOnSave": true,
"typescriptHero.imports.stringQuoteStyle": "\"",
"deno.enable": true,
"deno.importMap": "import_map.json",
"deno.config": "deno.jsonc"
}

112
README.md
View File

@ -1,78 +1,120 @@
# Silver Bullet
Silver Bullet (SB) is an extensible, open source **personal knowledge platform**. At its core its a clean markdown-based writing/note taking application that stores your _pages_ (notes) as plain markdown files in a folder referred to as a _space_. Pages can be cross-linked using the `[[link to other page]]` syntax. This makes it a simple tool for [Personal Knowledge Management](https://en.wikipedia.org/wiki/Personal_knowledge_management). However, once you leverage its various extensions (called _plugs_) it can feel more like a _knowledge platform_, allowing you to annotate, combine and query your accumulated knowledge in creative ways specific to you.
Silver Bullet (SB) is an extensible, open source **personal knowledge
platform**. At its core it's a clean markdown-based writing/note taking
application that stores your _pages_ (notes) as plain markdown files in a folder
referred to as a _space_. Pages can be cross-linked using the
`[[link to other page]]` syntax. This makes it a simple tool for
[Personal Knowledge Management](https://en.wikipedia.org/wiki/Personal_knowledge_management).
However, once you leverage its various extensions (called _plugs_) it can feel
more like a _knowledge platform_, allowing you to annotate, combine and query
your accumulated knowledge in creative ways specific to you.
<img src="https://github.com/silverbulletmd/silverbullet/raw/main/images/silverbullet-pwa.png" height="400"/><img src="https://github.com/silverbulletmd/silverbullet/raw/main/images/silverbullet-ios.png" height="400"/>
For more in-depth information, an interactive demo, and links to more background, check out the [Silver Bullet website](https://silverbullet.md) (published from this repos `website/` folder).
For more in-depth information, an interactive demo, and links to more
background, check out the [Silver Bullet website](https://silverbullet.md)
(published from this repo's `website/` folder).
Or checkout these two videos:
* [A Tour of some of Silver Bullets features](https://youtu.be/RYdc3UF9gok) — spoiler alert: its cool.
* [A look the SilverBullet architecture](https://youtu.be/mXCGau05p5o) — spoiler alert: its plugs all the way down.
- [A Tour of some of Silver Bullet's features](https://youtu.be/RYdc3UF9gok) —
  spoiler alert: it's cool.
- [A look at the SilverBullet architecture](https://youtu.be/mXCGau05p5o) — spoiler
  alert: it's plugs all the way down.
## Features
* **Free and open source**. Silver Bullet is MIT licensed.
* **The truth is in the markdown.** Silver Bullet doesnt use proprietary file formats. It keeps it data as plain markdown files on disk. While SB uses a database for indexing and caching some indexes, all of that can be rebuilt from its markdown source at any time. If SB would ever go away, you can still read your pages with any text editor.
* **The truth is in the markdown.** Silver Bullet doesnt use proprietary file formats. It keeps its data as plain markdown files on disk. While SB uses a database for indexing and caching some indexes, all of that can be rebuilt from its markdown source at any time. If SB would ever go away, you can still read your pages with any text editor.
* **One single, distraction-free mode.** SB doesn't have a separate view and edit mode. It doesn't have a “focus mode.” You're always in focused edit mode, why wouldn't you?
* **Keyboard oriented**. You can use SB fully using the keyboard, typing the keys.
* **Extend it your way**. SB is highly extensible with [plugs](https://silverbullet.md/🔌_Plugs), and you can customize it to your liking and your workflows.
## Installing Silver Bullet
To install Silver Bullet, you will need a recent version of [node.js installed](https://nodejs.org/en/) (16+) installed. Silver Bullet has only been tested on MacOS and Linux thus far. It may run on Windows as well, let me know if it does.
To install and run SB, create a folder for your pages (it can be empty, or be an existing folder with `.md` files) and run the following command in your terminal:
To run Silver Bullet create a folder for your pages (it can be empty, or be an
existing folder with `.md` files) and run the following command in your
terminal:
npx @silverbulletmd/server <path-to-folder>
deno run -A --unstable https://get.silverbullet.md <pages-path>
This will do one of three things:
However, because this command is not super easy to remember, you may install it
as well:
1. If you _dont have_ SB installed, it will download and run the latest version.
2. If you _already have_ SB installed, but there is a newer version available, it will offer to upgrade. Say yes!
3. If you _already have the latest and greatest_ SB installed, it will just run it.
deno install -f --name silverbullet -A --unstable https://get.silverbullet.md
By default, SB will bind to port `3000`, to use a different port use the `--port` flag. By default SB doesnt offer any sort of authentication, to add basic password authentication, pass the `--password` flag.
This will create a `silverbullet` (feel free to replace `silverbullet` in this
command with whatever you like) alias in your `~/.deno/bin` folder. Make sure
this path is in your `PATH` environment variable.
Once downloaded and booted, SB will print out a URL to open SB in your browser (spoiler alert: by default this will be http://localhost:3000 ).
This allows you to install Silver Bullet simply as follows:
#protip: If you have a PWA enabled browser (like any browser based on Chromium) hit that little button right of the location bar to install SB, and give it its own window frame (sans location bar) and desktop/dock icon. At last the PWA has found its killer app.
silverbullet <pages-path>
By default, SB will bind to port `3000`, to use a different port use the
`--port` flag. By default SB doesn't offer any sort of authentication, to add
basic password authentication, pass the `--password` flag.
Once downloaded and booted, SB will print out a URL to open SB in your browser
(spoiler alert: by default this will be http://localhost:3000 ).
#protip: If you have a PWA enabled browser (like any browser based on Chromium)
hit that little button right of the location bar to install SB, and give it its
own window frame (sans location bar) and desktop/dock icon. At last the PWA has
found its killer app.
## Upgrading Silver Bullet
Simply run this:
deno cache --reload https://get.silverbullet.md
And restart Silver Bullet. You should be good to go.
## Developing Silver Bullet
[![Open in Gitpod](https://gitpod.io/button/open-in-gitpod.svg)](https://gitpod.io/#https://github.com/silverbulletmd/silverbullet)
Silver Bullet is written in [TypeScript](https://www.typescriptlang.org/) and built on top of the excellent [CodeMirror 6](https://codemirror.net/) editor component. Additional UI is built using React.js. [ParcelJS](https://parceljs.org/) is used to build both the front-end and back-end bundles. The server backend runs as a HTTP server on node.js using express.
Silver Bullet is written in [TypeScript](https://www.typescriptlang.org/) and
built on top of the excellent [CodeMirror 6](https://codemirror.net/) editor
component. Additional UI is built using React.js.
[ParcelJS](https://parceljs.org/) is used to build both the front-end and
back-end bundles. The server backend runs as an HTTP server on node.js using
express.
This repo is a monorepo using npm's "workspaces" feature. It consists of a number of npm packages under `packages`.
Requirements: node 16+ and npm 8+ as well as C/C++ compilers (for compiling SQLite, on debian/ubuntu style systems you get these via the `build-essential` package).
After cloning the repo, run the following commands to do an initial build:
After cloning the repo, run the following command to install some convenience
scripts (`silverbullet` and `plugos-bundle` into your `~/.deno/bin`):
```shell
npm install
npm run clean-build
deno task install
```
You can then run the server in “watch mode” (automatically restarting when you change source files) with:
To prepare the initial web and plug build run:
```shell
npm run server -- <PATH-TO-YOUR-SPACE>
deno task build
```
`<PATH-TO-YOUR-SPACE>` can be any folder with markdown files (or an empty folder).
After this initial build, I generally run three commands in parallel (in separate terminals):
You can then run the server in “watch mode” (automatically restarting when you
change source files) with:
```shell
# Runs ParcelJS in watch mode, rebuilding the server and webapp continuously on change
npm run watch
# Runs the silverbullet server, restarting when changes are detected
npm run server -- <PATH-TO-YOUR-SPACE>
# Builds (and watches for changes) all builtin plugs (in packages/plugs), still requires you to run Cmd-Shift-p (Mac) or Ctrl-Shift-p (Linux, Windows) in SB to reload these plugs
npm run plugs
deno task watch-server -- <PATH-TO-YOUR-SPACE>
```
After this initial build, it's convenient to run three commands in parallel (in
separate terminals):
```shell
deno task watch-web
deno task watch-server -- <PATH-TO-YOUR-SPACE>
deno task watch-plugs
```
## Feedback
If you (hypothetically) find bugs or have feature requests, post them in [our issue tracker](https://github.com/silverbulletmd/silverbullet/issues). Would you like to contribute? [Check out the code](https://github.com/silverbulletmd/silverbullet), and the issue tracker as well for ideas on what to work on.
If you (hypothetically) find bugs or have feature requests, post them in
[our issue tracker](https://github.com/silverbulletmd/silverbullet/issues).
Would you like to contribute?
[Check out the code](https://github.com/silverbulletmd/silverbullet), and the
issue tracker as well for ideas on what to work on.

110
build.ts Normal file
View File

@ -0,0 +1,110 @@
// -- esbuild --
// @deno-types="https://deno.land/x/esbuild@v0.14.54/mod.d.ts"
import * as esbuildWasm from "https://deno.land/x/esbuild@v0.14.54/wasm.js";
import * as esbuildNative from "https://deno.land/x/esbuild@v0.14.54/mod.js";
import { denoPlugin } from "./esbuild_deno_loader/mod.ts";
import { copy } from "https://deno.land/std@0.158.0/fs/copy.ts";
import sass from "https://deno.land/x/denosass@1.0.4/mod.ts";
import { bundleFolder } from "./plugos/asset_bundle.ts";
import { patchDenoLibJS } from "./common/hack.ts";
import { bundle as plugOsBundle } from "./plugos/bin/plugos-bundle.ts";
import * as flags from "https://deno.land/std@0.158.0/flags/mod.ts";
// @ts-ignore trust me
const esbuild: typeof esbuildWasm = Deno.run === undefined
? esbuildWasm
: esbuildNative;
async function prepareAssets(dist: string) {
await copy("web/fonts", `${dist}/web`, { overwrite: true });
await copy("web/index.html", `${dist}/web/index.html`, {
overwrite: true,
});
await copy("web/images/favicon.gif", `${dist}/web/favicon.gif`, {
overwrite: true,
});
await copy("web/images/logo.png", `${dist}/web/logo.png`, {
overwrite: true,
});
await copy("web/manifest.json", `${dist}/web/manifest.json`, {
overwrite: true,
});
const compiler = sass(
Deno.readTextFileSync("web/styles/main.scss"),
{
load_paths: ["web/styles"],
},
);
await Deno.writeTextFile(
`${dist}/web/main.css`,
compiler.to_string("expanded") as string,
);
const globalManifest = await plugOsBundle(
new URL(`./plugs/global.plug.yaml`, import.meta.url).pathname,
);
await Deno.writeTextFile(
`${dist}/web/global.plug.json`,
JSON.stringify(globalManifest, null, 2),
);
// HACK: Patch the JS by removing an invalid regex
let bundleJs = await Deno.readTextFile(`${dist}/web/client.js`);
bundleJs = patchDenoLibJS(bundleJs);
await Deno.writeTextFile(`${dist}/web/client.js`, bundleJs);
await bundleFolder(dist, "dist/asset_bundle.json");
}
async function bundle(watch: boolean): Promise<void> {
await Promise.all([
esbuild.build({
entryPoints: {
client: "web/boot.ts",
worker: "plugos/environments/sandbox_worker.ts",
service_worker: "web/service_worker.ts",
},
outdir: "./dist_bundle/web",
absWorkingDir: Deno.cwd(),
bundle: true,
treeShaking: true,
sourcemap: "linked",
minify: true,
jsxFactory: "h",
jsx: "automatic",
jsxFragment: "Fragment",
jsxImportSource: "https://esm.sh/preact@10.11.1",
watch: watch && {
onRebuild(error) {
if (error) {
console.error("watch build failed:", error);
} else {
console.log("watch build succeeded.");
}
prepareAssets("dist_bundle").catch(console.error);
},
},
plugins: [
denoPlugin({
importMapURL: new URL("./import_map.json", import.meta.url),
}),
],
}),
]);
await prepareAssets("dist_bundle");
console.log("Built!");
}
const args = flags.parse(Deno.args, {
boolean: ["watch"],
alias: { w: "watch" },
default: {
watch: false,
},
});
await bundle(args.watch);
if (!args.watch) {
esbuild.stop();
}

3
build_plugs.sh Executable file
View File

@ -0,0 +1,3 @@
#!/bin/sh
plugos-bundle --dist dist_bundle/_plug $1 --exclude=https://esm.sh/handlebars,https://deno.land/std/encoding/yaml.ts,https://esm.sh/@lezer/lr plugs/*/*.plug.yaml

View File

@ -1,4 +1,4 @@
import { Tag } from "@lezer/highlight";
import { Tag } from "./deps.ts";
export const WikiLinkTag = Tag.define();
export const WikiLinkPageTag = Tag.define();

95
common/deps.ts Normal file
View File

@ -0,0 +1,95 @@
export {
autocompletion,
CompletionContext,
completionKeymap,
} from "@codemirror/autocomplete";
export type { Completion, CompletionResult } from "@codemirror/autocomplete";
export {
history,
historyKeymap,
indentWithTab,
standardKeymap,
} from "@codemirror/commands";
export {
closeBrackets,
closeBracketsKeymap,
} from "https://esm.sh/@codemirror/autocomplete@6.3.0?external=@codemirror/state,@codemirror/commands,@lezer/common,@codemirror/view";
export { styleTags, Tag, tagHighlighter, tags } from "@lezer/highlight";
export * as YAML from "https://deno.land/std@0.158.0/encoding/yaml.ts";
export * as path from "https://deno.land/std@0.158.0/path/mod.ts";
export { readAll } from "https://deno.land/std@0.158.0/streams/conversion.ts";
export type {
BlockContext,
LeafBlock,
LeafBlockParser,
MarkdownConfig,
MarkdownExtension,
} from "@lezer/markdown";
export {
Emoji,
GFM,
MarkdownParser,
parseCode,
parser as baseParser,
Subscript,
Superscript,
Table,
TaskList,
} from "@lezer/markdown";
export type { SyntaxNode, Tree } from "@lezer/common";
export { searchKeymap } from "https://esm.sh/@codemirror/search@6.2.1?external=@codemirror/state,@codemirror/view";
export {
Decoration,
drawSelection,
dropCursor,
EditorView,
highlightSpecialChars,
keymap,
runScopeHandlers,
ViewPlugin,
ViewUpdate,
WidgetType,
} from "@codemirror/view";
export type { DecorationSet, KeyBinding } from "@codemirror/view";
export { markdown } from "https://esm.sh/@codemirror/lang-markdown@6.0.1?external=@codemirror/state,@lezer/common,@codemirror/language,@lezer/markdown,@codemirror/view,@lezer/highlight";
export {
EditorSelection,
EditorState,
Range,
SelectionRange,
Text,
Transaction,
} from "@codemirror/state";
export type { ChangeSpec, StateCommand } from "@codemirror/state";
export {
defaultHighlightStyle,
defineLanguageFacet,
foldNodeProp,
HighlightStyle,
indentNodeProp,
indentOnInput,
Language,
languageDataProp,
LanguageDescription,
LanguageSupport,
ParseContext,
StreamLanguage,
syntaxHighlighting,
syntaxTree,
} from "@codemirror/language";
export { yaml as yamlLanguage } from "https://esm.sh/@codemirror/legacy-modes@6.1.0/mode/yaml?external=@codemirror/language";
export {
javascriptLanguage,
typescriptLanguage,
} from "https://esm.sh/@codemirror/lang-javascript@6.1.0?external=@codemirror/language,@codemirror/autocomplete,@codemirror/view,@codemirror/state,@codemirror/lint,@lezer/common,@lezer/lr,@lezer/javascript,@codemirror/commands";

4
common/hack.ts Normal file
View File

@ -0,0 +1,4 @@
export function patchDenoLibJS(code: string): string {
// The Deno std lib has one occurence of a regex that Webkit JS doesn't (yet parse), we'll strip it because it's likely never invoked anyway, YOLO
return code.replaceAll("/(?<=\\n)/", "/()/");
}

28
common/manifest.ts Normal file
View File

@ -0,0 +1,28 @@
import * as plugos from "../plugos/types.ts";
import { EndpointHookT } from "../plugos/hooks/endpoint.ts";
import { CronHookT } from "../plugos/hooks/cron.deno.ts";
import { EventHookT } from "../plugos/hooks/event.ts";
import { CommandHookT } from "../web/hooks/command.ts";
import { SlashCommandHookT } from "../web/hooks/slash_command.ts";
import { PageNamespaceHookT } from "../server/hooks/page_namespace.ts";
export type SilverBulletHooks =
& CommandHookT
& SlashCommandHookT
& EndpointHookT
& CronHookT
& EventHookT
& PageNamespaceHookT;
export type SyntaxExtensions = {
syntax?: { [key: string]: NodeDef };
};
export type NodeDef = {
firstCharacters: string[];
regex: string;
styles: { [key: string]: string };
className?: string;
};
export type Manifest = plugos.Manifest<SilverBulletHooks> & SyntaxExtensions;

View File

@ -1,7 +1,7 @@
import { Tag } from "@lezer/highlight";
import type { MarkdownConfig } from "@lezer/markdown";
import { System } from "@plugos/plugos/system";
import { Manifest } from "@silverbulletmd/common/manifest";
import { Tag } from "./deps.ts";
import type { MarkdownConfig } from "./deps.ts";
import { System } from "../plugos/system.ts";
import { Manifest } from "./manifest.ts";
export type MDExt = {
// unicode char code for efficiency .charCodeAt(0)

View File

@ -1,11 +1,11 @@
import type { SyntaxNode } from "@lezer/common";
import type { Language } from "@codemirror/language";
import { ParseTree } from "./tree";
import { ParseTree } from "./tree.ts";
import type { SyntaxNode } from "./deps.ts";
import type { Language } from "./deps.ts";
export function lezerToParseTree(
text: string,
n: SyntaxNode,
offset = 0
offset = 0,
): ParseTree {
let children: ParseTree[] = [];
let nodeText: string | undefined;

View File

@ -1,31 +1,24 @@
import { styleTags, tags as t } from "@lezer/highlight";
import {
BlockContext,
LeafBlock,
LeafBlockParser,
MarkdownConfig,
parseCode,
Table,
TaskList,
} from "@lezer/markdown";
import { markdown } from "./markdown";
import * as ct from "./customtags";
import {
Language,
LanguageDescription,
LanguageSupport,
} from "@codemirror/language";
import { StreamLanguage } from "@codemirror/language";
import { yaml } from "@codemirror/legacy-modes/mode/yaml";
import {
javascriptLanguage,
typescriptLanguage,
} from "@codemirror/lang-javascript";
LeafBlock,
LeafBlockParser,
markdown,
MarkdownConfig,
parseCode,
styleTags,
Table,
tags as t,
TaskList,
} from "./deps.ts";
import * as ct from "./customtags.ts";
import {
MDExt,
mdExtensionStyleTags,
mdExtensionSyntaxConfig,
} from "./markdown_ext";
} from "./markdown_ext.ts";
export const pageLinkRegex = /^\[\[([^\]]+)\]\]/;
@ -45,7 +38,7 @@ const WikiLink: MarkdownConfig = {
return cx.addElement(
cx.elt("WikiLink", pos, pos + match[0].length, [
cx.elt("WikiLinkPage", pos + 2, pos + match[0].length - 2),
])
]),
);
},
after: "Emphasis",
@ -89,7 +82,7 @@ class CommentParser implements LeafBlockParser {
cx.elt("Comment", leaf.start, leaf.start + leaf.content.length, [
// cx.elt("CommentMarker", leaf.start, leaf.start + 3),
...cx.parser.parseInline(leaf.content.slice(3), leaf.start + 3),
])
]),
);
return true;
}

View File

@ -0,0 +1,86 @@
import { Plug } from "../../plugos/plug.ts";
import {
AssetBundle,
assetReadFileSync,
} from "../../plugos/asset_bundle_reader.ts";
import { FileMeta } from "../types.ts";
import { FileData, FileEncoding, SpacePrimitives } from "./space_primitives.ts";
export class AssetBundlePlugSpacePrimitives implements SpacePrimitives {
constructor(
private wrapped: SpacePrimitives,
private assetBundle: AssetBundle,
) {
}
async fetchFileList(): Promise<FileMeta[]> {
const l = await this.wrapped.fetchFileList();
return Object.entries(this.assetBundle).filter(([k]) =>
k.startsWith("_plug/")
).map(([_, v]) => v.meta).concat(l);
}
readFile(
name: string,
encoding: FileEncoding,
): Promise<{ data: FileData; meta: FileMeta }> {
if (this.assetBundle[name]) {
const data = assetReadFileSync(this.assetBundle, name);
// console.log("Requested encoding", encoding);
return Promise.resolve({
data: encoding === "string" ? new TextDecoder().decode(data) : data,
meta: {
lastModified: 0,
size: data.byteLength,
perm: "ro",
contentType: "application/json",
} as FileMeta,
});
}
return this.wrapped.readFile(name, encoding);
}
getFileMeta(name: string): Promise<FileMeta> {
if (this.assetBundle[name]) {
const data = assetReadFileSync(this.assetBundle, name);
return Promise.resolve({
lastModified: 0,
size: data.byteLength,
perm: "ro",
contentType: "application/json",
} as FileMeta);
}
return this.wrapped.getFileMeta(name);
}
writeFile(
name: string,
encoding: FileEncoding,
data: FileData,
selfUpdate?: boolean | undefined,
): Promise<FileMeta> {
return this.wrapped.writeFile(name, encoding, data, selfUpdate);
}
deleteFile(name: string): Promise<void> {
if (this.assetBundle[name]) {
// Quietly ignore
return Promise.resolve();
}
return this.wrapped.deleteFile(name);
}
// deno-lint-ignore no-explicit-any
proxySyscall(plug: Plug<any>, name: string, args: any[]): Promise<any> {
return this.wrapped.proxySyscall(plug, name, args);
}
invokeFunction(
plug: Plug<any>,
env: string,
name: string,
args: any[],
): Promise<any> {
return this.wrapped.invokeFunction(plug, env, name, args);
}
}

View File

@ -1,24 +1,25 @@
import { mkdir, readdir, readFile, stat, unlink, writeFile } from "fs/promises";
import * as path from "path";
import { FileMeta } from "../types";
import { FileData, FileEncoding, SpacePrimitives } from "./space_primitives";
import { Plug } from "@plugos/plugos/plug";
import { realpathSync } from "fs";
import mime from "mime-types";
// import { mkdir, readdir, readFile, stat, unlink, writeFile } from "fs/promises";
import { path } from "../deps.ts";
import { readAll } from "../deps.ts";
import { FileMeta } from "../types.ts";
import { FileData, FileEncoding, SpacePrimitives } from "./space_primitives.ts";
import { Plug } from "../../plugos/plug.ts";
import { mime } from "https://deno.land/x/mimetypes@v1.0.0/mod.ts";
import { base64Decode, base64Encode } from "../../plugos/base64.ts";
function lookupContentType(path: string): string {
return mime.lookup(path) || "application/octet-stream";
return mime.getType(path) || "application/octet-stream";
}
export class DiskSpacePrimitives implements SpacePrimitives {
rootPath: string;
constructor(rootPath: string) {
this.rootPath = realpathSync(rootPath);
this.rootPath = Deno.realPathSync(rootPath);
}
safePath(p: string): string {
let realPath = path.resolve(p);
const realPath = path.resolve(p);
if (!realPath.startsWith(this.rootPath)) {
throw Error(`Path ${p} is not in the space`);
}
@ -35,40 +36,48 @@ export class DiskSpacePrimitives implements SpacePrimitives {
async readFile(
name: string,
encoding: FileEncoding
encoding: FileEncoding,
): Promise<{ data: FileData; meta: FileMeta }> {
const localPath = this.filenameToPath(name);
try {
const s = await stat(localPath);
const s = await Deno.stat(localPath);
let data: FileData | null = null;
let contentType = lookupContentType(name);
const contentType = lookupContentType(name);
switch (encoding) {
case "string":
data = await readFile(localPath, "utf8");
data = await Deno.readTextFile(localPath);
break;
case "dataurl":
let fileBuffer = await readFile(localPath, {
encoding: "base64",
});
data = `data:${contentType};base64,${fileBuffer}`;
{
const f = await Deno.open(localPath, { read: true });
const buf = base64Encode(await readAll(f));
Deno.close(f.rid);
data = `data:${contentType};base64,${buf}`;
}
break;
case "arraybuffer":
let arrayBuffer = await readFile(localPath);
data = arrayBuffer.buffer;
{
const f = await Deno.open(localPath, { read: true });
const buf = await readAll(f);
Deno.close(f.rid);
data = buf.buffer;
}
break;
}
return {
data,
meta: {
name: name,
lastModified: s.mtime.getTime(),
lastModified: s.mtime!.getTime(),
perm: "rw",
size: s.size,
contentType: contentType,
},
};
} catch (e) {
console.error("Error while reading file", name, e);
// console.error("Error while reading file", name, e);
throw Error(`Could not read file ${name}`);
}
}
@ -77,35 +86,36 @@ export class DiskSpacePrimitives implements SpacePrimitives {
name: string,
encoding: FileEncoding,
data: FileData,
selfUpdate?: boolean
selfUpdate?: boolean,
): Promise<FileMeta> {
let localPath = this.filenameToPath(name);
try {
// Ensure parent folder exists
await mkdir(path.dirname(localPath), { recursive: true });
await Deno.mkdir(path.dirname(localPath), { recursive: true });
// Actually write the file
switch (encoding) {
case "string":
await writeFile(localPath, data as string, "utf8");
await Deno.writeTextFile(localPath, data as string);
break;
case "dataurl":
await writeFile(localPath, (data as string).split(",")[1], {
encoding: "base64",
});
await Deno.writeFile(
localPath,
base64Decode((data as string).split(",")[1]),
);
break;
case "arraybuffer":
await writeFile(localPath, Buffer.from(data as ArrayBuffer));
await Deno.writeFile(localPath, new Uint8Array(data as ArrayBuffer));
break;
}
// Fetch new metadata
const s = await stat(localPath);
const s = await Deno.stat(localPath);
return {
name: name,
size: s.size,
contentType: lookupContentType(name),
lastModified: s.mtime.getTime(),
lastModified: s.mtime!.getTime(),
perm: "rw",
};
} catch (e) {
@ -115,14 +125,14 @@ export class DiskSpacePrimitives implements SpacePrimitives {
}
async getFileMeta(name: string): Promise<FileMeta> {
let localPath = this.filenameToPath(name);
const localPath = this.filenameToPath(name);
try {
const s = await stat(localPath);
const s = await Deno.stat(localPath);
return {
name: name,
size: s.size,
contentType: lookupContentType(name),
lastModified: s.mtime.getTime(),
lastModified: s.mtime!.getTime(),
perm: "rw",
};
} catch (e) {
@ -132,30 +142,29 @@ export class DiskSpacePrimitives implements SpacePrimitives {
}
async deleteFile(name: string): Promise<void> {
let localPath = this.filenameToPath(name);
await unlink(localPath);
const localPath = this.filenameToPath(name);
await Deno.remove(localPath);
}
async fetchFileList(): Promise<FileMeta[]> {
let fileList: FileMeta[] = [];
const fileList: FileMeta[] = [];
const walkPath = async (dir: string) => {
let files = await readdir(dir);
for (let file of files) {
if (file.startsWith(".")) {
for await (const file of Deno.readDir(dir)) {
if (file.name.startsWith(".")) {
continue;
}
const fullPath = path.join(dir, file);
let s = await stat(fullPath);
if (s.isDirectory()) {
const fullPath = path.join(dir, file.name);
let s = await Deno.stat(fullPath);
if (file.isDirectory) {
await walkPath(fullPath);
} else {
if (!file.startsWith(".")) {
if (!file.name.startsWith(".")) {
fileList.push({
name: this.pathToFilename(fullPath),
size: s.size,
contentType: lookupContentType(fullPath),
lastModified: s.mtime.getTime(),
lastModified: s.mtime!.getTime(),
perm: "rw",
});
}
@ -171,7 +180,7 @@ export class DiskSpacePrimitives implements SpacePrimitives {
plug: Plug<any>,
env: string,
name: string,
args: any[]
args: any[],
): Promise<any> {
return plug.invoke(name, args);
}

View File

@ -1,8 +1,8 @@
import { EventHook } from "@plugos/plugos/hooks/event";
import { Plug } from "@plugos/plugos/plug";
import { EventHook } from "../../plugos/hooks/event.ts";
import { Plug } from "../../plugos/plug.ts";
import { FileMeta } from "../types";
import { FileData, FileEncoding, SpacePrimitives } from "./space_primitives";
import { FileMeta } from "../types.ts";
import { FileData, FileEncoding, SpacePrimitives } from "./space_primitives.ts";
export class EventedSpacePrimitives implements SpacePrimitives {
constructor(private wrapped: SpacePrimitives, private eventHook: EventHook) {}
@ -52,8 +52,11 @@ export class EventedSpacePrimitives implements SpacePrimitives {
text = data as string;
break;
case "arraybuffer":
const decoder = new TextDecoder("utf-8");
text = decoder.decode(data as ArrayBuffer);
{
const decoder = new TextDecoder("utf-8");
text = decoder.decode(data as ArrayBuffer);
}
break;
case "dataurl":
throw Error("Data urls not supported in this context");

View File

@ -1,6 +1,6 @@
import { AttachmentMeta, FileMeta, PageMeta } from "../types";
import { Plug } from "@plugos/plugos/plug";
import { FileData, FileEncoding, SpacePrimitives } from "./space_primitives";
import { AttachmentMeta, FileMeta, PageMeta } from "../types.ts";
import { Plug } from "../../plugos/plug.ts";
import { FileData, FileEncoding, SpacePrimitives } from "./space_primitives.ts";
export class HttpSpacePrimitives implements SpacePrimitives {
fsUrl: string;
@ -51,12 +51,16 @@ export class HttpSpacePrimitives implements SpacePrimitives {
let data: FileData | null = null;
switch (encoding) {
case "arraybuffer":
let abBlob = await res.blob();
data = await abBlob.arrayBuffer();
{
let abBlob = await res.blob();
data = await abBlob.arrayBuffer();
}
break;
case "dataurl":
let dUBlob = await res.blob();
data = arrayBufferToDataUrl(await dUBlob.arrayBuffer());
{
let dUBlob = await res.blob();
data = arrayBufferToDataUrl(await dUBlob.arrayBuffer());
}
break;
case "string":
data = await res.text();

View File

@ -1,9 +1,9 @@
import { FileData, FileEncoding, SpacePrimitives } from "./space_primitives";
import { AttachmentMeta, FileMeta, PageMeta } from "../types";
import { EventEmitter } from "@plugos/plugos/event";
import { Plug } from "@plugos/plugos/plug";
import { plugPrefix } from "./constants";
import { safeRun } from "../util";
import { FileData, FileEncoding, SpacePrimitives } from "./space_primitives.ts";
import { AttachmentMeta, FileMeta, PageMeta } from "../types.ts";
import { EventEmitter } from "../../plugos/event.ts";
import { Plug } from "../../plugos/plug.ts";
import { plugPrefix } from "./constants.ts";
import { safeRun } from "../util.ts";
const pageWatchInterval = 2000;

View File

@ -1,5 +1,5 @@
import { Plug } from "@plugos/plugos/plug";
import { FileMeta } from "../types";
import { Plug } from "../../plugos/plug.ts";
import { FileMeta } from "../types.ts";
export type FileEncoding = "string" | "arraybuffer" | "dataurl";
export type FileData = ArrayBuffer | string;

View File

@ -0,0 +1,12 @@
import { SysCallMapping } from "../../plugos/system.ts";
import { parse } from "../parse_tree.ts";
import { Language } from "../deps.ts";
import type { ParseTree } from "../tree.ts";
export function markdownSyscalls(lang: Language): SysCallMapping {
  /** Parse `text` as markdown with the supplied language parser. */
  const parseMarkdown = (_ctx: unknown, text: string): ParseTree =>
    parse(lang, text);
  return {
    "markdown.parseMarkdown": parseMarkdown,
  };
}

View File

@ -1,5 +1,4 @@
import { expect, test } from "@jest/globals";
import { parse } from "./parse_tree";
import { parse } from "./parse_tree.ts";
import {
addParentPointers,
collectNodesMatching,
@ -8,8 +7,9 @@ import {
removeParentPointers,
renderToText,
replaceNodesMatching,
} from "./tree";
import wikiMarkdownLang from "@silverbulletmd/common/parser";
} from "./tree.ts";
import wikiMarkdownLang from "./parser.ts";
import { assertEquals, assertNotEquals } from "../test_deps.ts";
const mdTest1 = `
# Heading
@ -46,22 +46,23 @@ name: something
\`\`\`
`;
test("Run a Node sandbox", async () => {
Deno.test("Run a Node sandbox", () => {
const lang = wikiMarkdownLang([]);
let mdTree = parse(lang, mdTest1);
addParentPointers(mdTree);
// console.log(JSON.stringify(mdTree, null, 2));
let wikiLink = nodeAtPos(mdTree, mdTest1.indexOf("Wiki Page"))!;
expect(wikiLink.type).toBe("WikiLink");
expect(
findParentMatching(wikiLink, (n) => n.type === "BulletList")
).toBeDefined();
assertEquals(wikiLink.type, "WikiLink");
assertNotEquals(
findParentMatching(wikiLink, (n) => n.type === "BulletList"),
null,
);
let allTodos = collectNodesMatching(mdTree, (n) => n.type === "Task");
expect(allTodos.length).toBe(2);
assertEquals(allTodos.length, 2);
// Render back into markdown should be equivalent
expect(renderToText(mdTree)).toBe(mdTest1);
assertEquals(renderToText(mdTree), mdTest1);
removeParentPointers(mdTree);
replaceNodesMatching(mdTree, (n) => {

View File

@ -1,4 +1,4 @@
import YAML from "yaml";
import { YAML } from "./deps.ts";
export function safeRun(fn: () => Promise<void>) {
fn().catch((e) => {
@ -33,5 +33,7 @@ export function parseYamlSettings(settingsMarkdown: string): {
return {};
}
const yaml = match[1];
return YAML.parse(yaml);
return YAML.parse(yaml) as {
[key: string]: any;
};
}

29
deno.jsonc Normal file
View File

@ -0,0 +1,29 @@
{
"compilerOptions": {
"lib": ["dom", "dom.iterable", "dom.asynciterable", "deno.ns"],
"jsx": "react-jsx",
"jsxImportSource": "https://esm.sh/preact@10.11.1"
},
"importMap": "import_map.json",
"lint": {
"files": {
"exclude": [
"dist", "dist_bundle"
]
},
"rules": {
"exclude": ["no-explicit-any"]
}
},
"tasks": {
"test": "deno test -A --unstable",
"install": "deno install -f -A --unstable plugos/bin/plugos-bundle.ts && deno install -f -n silverbullet -A --unstable server/server.ts",
"build": "./build_plugs.sh && deno run -A --unstable --check build.ts",
"watch-web": "deno run -A --unstable --check build.ts --watch",
"watch-server": "deno run -A --unstable --check --watch server/server.ts",
// The only reason to run a shell script is that deno task doesn't support globs yet (e.g. *.plug.yaml)
"watch-plugs": "./build_plugs.sh --watch",
"bundle": "deno bundle --importmap import_map.json server/server.ts dist/silverbullet.js"
}
}

View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2021 Luca Casonato
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -0,0 +1,21 @@
# esbuild_deno_loader
Deno module resolution for `esbuild`.
## Example
This example bundles an entrypoint into a single ESM output.
```js
import * as esbuild from "https://deno.land/x/esbuild@v0.14.51/mod.js";
import { denoPlugin } from "https://deno.land/x/esbuild_deno_loader@0.5.2/mod.ts";
await esbuild.build({
plugins: [denoPlugin()],
entryPoints: ["https://deno.land/std@0.150.0/hash/sha1.ts"],
outfile: "./dist/sha1.esm.js",
bundle: true,
format: "esm",
});
esbuild.stop();
```

View File

@ -0,0 +1,13 @@
import type * as esbuild from "https://deno.land/x/esbuild@v0.14.54/mod.d.ts";
export type { esbuild };
export {
fromFileUrl,
resolve,
toFileUrl,
} from "https://deno.land/std@0.150.0/path/mod.ts";
export { basename, extname } from "https://deno.land/std@0.150.0/path/mod.ts";
export {
resolveImportMap,
resolveModuleSpecifier,
} from "https://deno.land/x/importmap@0.2.1/mod.ts";
export type { ImportMap } from "https://deno.land/x/importmap@0.2.1/mod.ts";

View File

@ -0,0 +1,11 @@
import * as esbuild from "https://deno.land/x/esbuild@v0.14.51/mod.js";
import { denoPlugin } from "https://deno.land/x/esbuild_deno_loader@0.5.2/mod.ts";
await esbuild.build({
plugins: [denoPlugin()],
entryPoints: ["https://deno.land/std@0.150.0/hash/sha1.ts"],
outfile: "./dist/sha1.esm.js",
bundle: true,
format: "esm",
});
esbuild.stop();

View File

@ -0,0 +1,11 @@
test:
deno test -A
lint:
deno lint
fmt:
deno fmt
fmt/check:
deno fmt --check

117
esbuild_deno_loader/mod.ts Normal file
View File

@ -0,0 +1,117 @@
import {
esbuild,
ImportMap,
resolveImportMap,
resolveModuleSpecifier,
toFileUrl,
} from "./deps.ts";
import { load as nativeLoad } from "./src/native_loader.ts";
import { load as portableLoad } from "./src/portable_loader.ts";
import { ModuleEntry } from "./src/deno.ts";
import { resolve } from "https://deno.land/std@0.122.0/path/win32.ts";
/** Configuration for {@link denoPlugin}. */
export interface DenoPluginOptions {
  /**
   * Specify the URL to an import map to use when resolving import specifiers.
   * The URL must be fetchable with `fetch`.
   */
  importMapURL?: URL;
  /**
   * Specify which loader to use. By default this will use the `native` loader,
   * unless `Deno.run` is not available.
   *
   * - `native`: Shells out to the Deno executable under the hood to load
   *   files. Requires --allow-read and --allow-run.
   * - `portable`: Do module downloading and caching with only Web APIs.
   *   Requires --allow-net.
   */
  loader?: "native" | "portable";
}
/**
 * The default loader to use: `native` where subprocess spawning (`Deno.run`)
 * exists, `portable` otherwise.
 */
export const DEFAULT_LOADER: "native" | "portable" =
  typeof Deno.run === "function" ? "native" : "portable";
/**
 * Create an esbuild plugin that resolves and loads modules like Deno does:
 * remote (`http:`/`https:`/`data:`) and local (`file:`) specifiers, with
 * optional import-map support.
 *
 * @param options see {@link DenoPluginOptions}
 * @returns an esbuild plugin named "deno"
 */
export function denoPlugin(options: DenoPluginOptions = {}): esbuild.Plugin {
  const loader = options.loader ?? DEFAULT_LOADER;
  return {
    name: "deno",
    setup(build) {
      // Cache of `deno info` module metadata, shared across loads of a build.
      const infoCache = new Map<string, ModuleEntry>();
      let importMap: ImportMap | null = null;

      build.onStart(async function onStart() {
        // (Re)fetch and resolve the import map at the start of every build.
        if (options.importMapURL !== undefined) {
          const resp = await fetch(options.importMapURL.href);
          const txt = await resp.text();
          importMap = resolveImportMap(JSON.parse(txt), options.importMapURL);
        } else {
          importMap = null;
        }
      });

      build.onResolve(
        { filter: /.*/ },
        function onResolve(
          args: esbuild.OnResolveArgs,
        ): esbuild.OnResolveResult | null | undefined {
          const resolveDir = args.resolveDir
            ? `${toFileUrl(args.resolveDir).href}/`
            : "";
          const referrer = args.importer || resolveDir;
          let resolved: URL;
          if (importMap !== null) {
            // BUGFIX: was `new URL(referrer) || undefined` — a constructed URL
            // is always truthy so the `undefined` arm was dead, and
            // `new URL("")` throws when there is no importer/resolveDir.
            // Guard on the empty referrer explicitly instead.
            const res = resolveModuleSpecifier(
              args.path,
              importMap,
              referrer ? new URL(referrer) : undefined,
            );
            resolved = new URL(res);
          } else {
            resolved = new URL(args.path, referrer);
          }
          // Specifiers configured as external are passed through untouched.
          if (build.initialOptions.external) {
            for (const external of build.initialOptions.external) {
              if (resolved.href.startsWith(external)) {
                return { path: resolved.href, external: true };
              }
            }
          }
          const href = resolved.href;
          // Don't use the deno loader for any of the specific loader file
          // extensions configured on the build; hand those back as bare paths.
          const loaderExts = Object.keys(build.initialOptions.loader || {});
          for (const ext of loaderExts) {
            if (href.endsWith(ext)) {
              console.log("Skipping", href);
              return {
                path: resolved.href.substring("file://".length),
              };
            }
          }
          return { path: resolved.href, namespace: "deno" };
        },
      );

      build.onLoad(
        { filter: /.*/ },
        function onLoad(
          args: esbuild.OnLoadArgs,
        ): Promise<esbuild.OnLoadResult | null> {
          // CSS is left for esbuild itself to handle.
          if (args.path.endsWith(".css")) {
            return Promise.resolve(null);
          }
          const url = new URL(args.path);
          switch (loader) {
            case "native":
              return nativeLoad(infoCache, url, options);
            case "portable":
              return portableLoad(url, options);
          }
        },
      );
    },
  };
}

View File

@ -0,0 +1,89 @@
// Lifted from https://raw.githubusercontent.com/denoland/deno_graph/89affe43c9d3d5c9165c8089687c107d53ed8fe1/lib/media_type.ts
/** Media types `deno info` can report for a module. */
export type MediaType =
  | "JavaScript"
  | "Mjs"
  | "Cjs"
  | "JSX"
  | "TypeScript"
  | "Mts"
  | "Cts"
  | "Dts"
  | "Dmts"
  | "Dcts"
  | "TSX"
  | "Json"
  | "Wasm"
  | "TsBuildInfo"
  | "SourceMap"
  | "Unknown";

/** Shape of the JSON emitted by `deno info --json`. */
export interface InfoOutput {
  roots: string[];
  modules: ModuleEntry[];
  // Maps a requested specifier to the specifier it redirected to.
  redirects: Record<string, string>;
}

/** Metadata for a single module in the dependency graph. */
export interface ModuleEntry {
  specifier: string;
  size: number;
  mediaType?: MediaType;
  // Path of the cached copy on disk, once downloaded.
  local?: string;
  checksum?: string;
  emit?: string;
  map?: string;
  error?: string;
}

interface DenoInfoOptions {
  importMap?: string;
}

// Scratch working directory for `deno info`, created lazily and reused.
let tempDir: null | string;
/**
 * Run `deno info --json` on `specifier` and return the parsed output.
 *
 * Runs with a (lazily created, cached) temp directory as cwd — presumably so
 * local config in the caller's directory can't affect resolution; confirm.
 *
 * @param specifier the module URL to inspect
 * @param options optionally pass an import map path to the CLI
 * @throws Error if the subprocess exits unsuccessfully
 */
export async function info(
  specifier: URL,
  options: DenoInfoOptions,
): Promise<InfoOutput> {
  const cmd = [
    Deno.execPath(),
    "info",
    "--json",
  ];
  if (options.importMap !== undefined) {
    cmd.push("--import-map", options.importMap);
  }
  cmd.push(specifier.href);
  if (!tempDir) {
    tempDir = Deno.makeTempDirSync();
  }
  let proc;
  try {
    proc = Deno.run({
      cmd,
      stdout: "piped",
      cwd: tempDir,
    });
    // output() reads stdout to EOF (and closes the pipe when done).
    const raw = await proc.output();
    const status = await proc.status();
    if (!status.success) {
      throw new Error(`Failed to call 'deno info' on '${specifier.href}'`);
    }
    const txt = new TextDecoder().decode(raw);
    return JSON.parse(txt);
  } finally {
    try {
      // stdout may already have been closed by output(); tolerate that.
      proc?.stdout.close();
    } catch (err) {
      if (err instanceof Deno.errors.BadResource) {
        // ignore the error
      } else {
        // deno-lint-ignore no-unsafe-finally
        throw err;
      }
    }
    proc?.close();
  }
}

View File

@ -0,0 +1,65 @@
import { esbuild, fromFileUrl } from "../deps.ts";
import * as deno from "./deno.ts";
import { mediaTypeToLoader, transformRawIntoContent } from "./shared.ts";
export interface LoadOptions {
importMapURL?: URL;
}
/**
 * Load a module by shelling out to the Deno CLI (`deno info`) and reading
 * the cached file from disk. `file:` results are registered as watch files.
 */
export async function load(
  infoCache: Map<string, deno.ModuleEntry>,
  url: URL,
  options: LoadOptions
): Promise<esbuild.OnLoadResult | null> {
  const protocol = url.protocol;
  if (protocol === "http:" || protocol === "https:" || protocol === "data:") {
    return await loadFromCLI(infoCache, url, options);
  }
  if (protocol === "file:") {
    const result = await loadFromCLI(infoCache, url, options);
    // Local files can change between builds; tell esbuild to watch them.
    result.watchFiles = [fromFileUrl(url.href)];
    return result;
  }
  // Unsupported protocol.
  return null;
}
/**
 * Resolve `specifier` via `deno info` (cached in `infoCache`) and read the
 * module's local cache file, returning esbuild contents + loader.
 *
 * @throws if the module errored, or has not been downloaded to disk yet.
 */
async function loadFromCLI(
  infoCache: Map<string, deno.ModuleEntry>,
  specifier: URL,
  options: LoadOptions
): Promise<esbuild.OnLoadResult> {
  const cacheKey = specifier.href;
  if (!infoCache.has(cacheKey)) {
    // Cache miss: ask the Deno CLI for the full module graph.
    const { modules, redirects } = await deno.info(specifier, {
      importMap: options.importMapURL?.href,
    });
    for (const mod of modules) {
      infoCache.set(mod.specifier, mod);
    }
    // Alias each redirected specifier to its target's cache entry.
    for (const [from, to] of Object.entries(redirects)) {
      const target = infoCache.get(to);
      if (!target) {
        throw new TypeError("Unreachable.");
      }
      infoCache.set(from, target);
    }
  }
  const module = infoCache.get(cacheKey);
  if (!module) {
    throw new TypeError("Unreachable.");
  }
  if (module.error) throw new Error(module.error);
  if (!module.local) throw new Error("Module not downloaded yet.");
  const mediaType = module.mediaType ?? "Unknown";
  const loader = mediaTypeToLoader(mediaType);
  const raw = await Deno.readFile(module.local);
  const contents = transformRawIntoContent(raw, mediaType);
  return { contents, loader };
}

View File

@ -0,0 +1,194 @@
import { esbuild, extname, fromFileUrl } from "../deps.ts";
import * as deno from "./deno.ts";
import { mediaTypeToLoader, transformRawIntoContent } from "./shared.ts";
export interface LoadOptions {
importMapURL?: URL;
}
/**
 * Load a module using only Web APIs (`fetch`) plus `Deno.readFile` — no
 * subprocesses. `file:` results are registered as watch files.
 */
export async function load(
  url: URL,
  _options: LoadOptions,
): Promise<esbuild.OnLoadResult | null> {
  const protocol = url.protocol;
  if (protocol === "http:" || protocol === "https:" || protocol === "data:") {
    return await loadWithFetch(url);
  }
  if (protocol === "file:") {
    const result = await loadWithReadFile(url);
    // Local files can change between builds; tell esbuild to watch them.
    result.watchFiles = [fromFileUrl(url.href)];
    return result;
  }
  // Unsupported protocol.
  return null;
}
/**
 * Fetch a remote module over HTTP(S)/data URL and convert the response into
 * esbuild contents + loader.
 *
 * @throws Error when the response status is not ok.
 */
async function loadWithFetch(
  specifier: URL,
): Promise<esbuild.OnLoadResult> {
  const specifierRaw = specifier.href;
  // TODO(lucacasonato): redirects!
  const resp = await fetch(specifierRaw);
  if (!resp.ok) {
    throw new Error(
      `Encountered status code ${resp.status} while fetching ${specifierRaw}.`,
    );
  }
  // Use the final (post-redirect) URL, when available, to map content type.
  const contentType = resp.headers.get("content-type");
  const mediaType = mapContentType(
    new URL(resp.url || specifierRaw),
    contentType,
  );
  const loader = mediaTypeToLoader(mediaType);
  const raw = new Uint8Array(await resp.arrayBuffer());
  const contents = transformRawIntoContent(raw, mediaType);
  return { contents, loader };
}
/** Read a local `file:` module from disk and convert it into a load result. */
async function loadWithReadFile(specifier: URL): Promise<esbuild.OnLoadResult> {
  const path = fromFileUrl(specifier);
  // No content-type header on disk; infer media type from the extension.
  const mediaType = mapContentType(specifier, null);
  const loader = mediaTypeToLoader(mediaType);
  const raw = await Deno.readFile(path);
  const contents = transformRawIntoContent(raw, mediaType);
  return { contents, loader };
}
/**
 * Determine a module's media type from its HTTP `content-type` header,
 * falling back to the specifier's file extension when the header is absent
 * or too generic to be useful.
 */
function mapContentType(
  specifier: URL,
  contentType: string | null,
): deno.MediaType {
  if (contentType === null) {
    // No header at all (e.g. file: URLs): go by the extension.
    return mediaTypeFromSpecifier(specifier);
  }
  // Drop parameters such as ";charset=utf-8" and normalize case.
  const mimeType = contentType.split(";")[0].toLowerCase();
  switch (mimeType) {
    case "application/typescript":
    case "text/typescript":
    case "video/vnd.dlna.mpeg-tts":
    case "video/mp2t":
    case "application/x-typescript":
      return mapJsLikeExtension(specifier, "TypeScript");
    case "application/javascript":
    case "text/javascript":
    case "application/ecmascript":
    case "text/ecmascript":
    case "application/x-javascript":
    case "application/node":
      return mapJsLikeExtension(specifier, "JavaScript");
    case "text/jsx":
      return "JSX";
    case "text/tsx":
      return "TSX";
    case "application/json":
    case "text/json":
      return "Json";
    case "application/wasm":
      return "Wasm";
    case "text/plain":
    case "application/octet-stream":
      // Generic types carry no signal; fall back to the extension.
      return mediaTypeFromSpecifier(specifier);
    default:
      return "Unknown";
  }
}
/**
 * Refine a JS/TS-family media type using the specifier's extension.
 * `.d.*` files map to declaration types; `.m*`/`.c*` pick the module/commonjs
 * flavor matching `defaultType`; unknown extensions keep `defaultType`.
 */
function mapJsLikeExtension(
  specifier: URL,
  defaultType: deno.MediaType,
): deno.MediaType {
  const path = specifier.pathname;
  const ext = extname(path);
  if (ext === ".jsx") return "JSX";
  if (ext === ".mjs") return "Mjs";
  if (ext === ".cjs") return "Cjs";
  if (ext === ".tsx") return "TSX";
  if (ext === ".ts") {
    return path.endsWith(".d.ts") ? "Dts" : defaultType;
  }
  if (ext === ".mts") {
    if (path.endsWith(".d.mts")) return "Dmts";
    return defaultType == "JavaScript" ? "Mjs" : "Mts";
  }
  if (ext === ".cts") {
    if (path.endsWith(".d.cts")) return "Dcts";
    return defaultType == "JavaScript" ? "Cjs" : "Cts";
  }
  return defaultType;
}
/** Infer a media type purely from the specifier's file extension. */
function mediaTypeFromSpecifier(specifier: URL): deno.MediaType {
  const path = specifier.pathname;
  const ext = extname(path);
  if (ext === "") {
    // Extensionless paths are opaque, except the well-known dotfile name.
    return path.endsWith("/.tsbuildinfo") ? "TsBuildInfo" : "Unknown";
  }
  // `.d.*` declaration files need a second look at the full path.
  if (ext === ".ts") return path.endsWith(".d.ts") ? "Dts" : "TypeScript";
  if (ext === ".mts") return path.endsWith(".d.mts") ? "Dmts" : "Mts";
  if (ext === ".cts") return path.endsWith(".d.cts") ? "Dcts" : "Cts";
  const byExtension: Record<string, deno.MediaType> = {
    ".tsx": "TSX",
    ".js": "JavaScript",
    ".jsx": "JSX",
    ".mjs": "Mjs",
    ".cjs": "Cjs",
    ".json": "Json",
    ".wasm": "Wasm",
    ".tsbuildinfo": "TsBuildInfo",
    ".map": "SourceMap",
  };
  return byExtension[ext] ?? "Unknown";
}

View File

@ -0,0 +1,40 @@
import { esbuild } from "../deps.ts";
import { MediaType } from "./deno.ts";
/**
 * Map a Deno media type onto the esbuild loader that should process it.
 *
 * @throws Error for media types esbuild cannot load (e.g. Wasm, SourceMap).
 */
export function mediaTypeToLoader(mediaType: MediaType): esbuild.Loader {
  const loaders: Partial<Record<MediaType, esbuild.Loader>> = {
    JavaScript: "js",
    Mjs: "js",
    JSX: "jsx",
    TypeScript: "ts",
    Mts: "ts",
    TSX: "tsx",
    // JSON is rewritten into an ES module first, so it loads as plain JS.
    Json: "js",
  };
  const loader = loaders[mediaType];
  if (loader === undefined) {
    throw new Error(`Unhandled media type ${mediaType}.`);
  }
  return loader;
}
/**
 * Prepare raw module bytes for esbuild: JSON is rewritten into an ES module
 * default export; every other media type passes through unchanged.
 */
export function transformRawIntoContent(
  raw: Uint8Array,
  mediaType: MediaType
): string | Uint8Array {
  if (mediaType === "Json") {
    return jsonToESM(raw);
  }
  return raw;
}
/** Turn raw JSON bytes into an ES module source that default-exports them. */
function jsonToESM(source: Uint8Array): string {
  const text = new TextDecoder().decode(source);
  // Round-trip through parse/stringify to validate and normalize the JSON.
  const pretty = JSON.stringify(JSON.parse(text), null, 2);
  // A bare `"__proto__":` key in an object literal would set the prototype;
  // bracket it as a computed key so it stays an ordinary own property.
  const safe = pretty.replaceAll(`"__proto__":`, `["__proto__"]:`);
  return `export default ${safe};`;
}

View File

@ -0,0 +1,6 @@
import * as esbuild from "https://deno.land/x/esbuild@v0.14.51/mod.js";
export { esbuild };
export {
assert,
assertEquals,
} from "https://deno.land/std@0.150.0/testing/asserts.ts";

View File

@ -0,0 +1,6 @@
{
"hello": "world",
"__proto__": {
"sky": "universe"
}
}

View File

@ -0,0 +1 @@
export * from "esbuild_deno_loader/testdata/mod.ts";

View File

@ -0,0 +1,5 @@
{
"imports": {
"mod": "./mod.ts"
}
}

2
esbuild_deno_loader/testdata/mod.js vendored Normal file
View File

@ -0,0 +1,2 @@
const bool = "asd";
export { bool };

11
esbuild_deno_loader/testdata/mod.jsx vendored Normal file
View File

@ -0,0 +1,11 @@
function createElement(fn) {
return fn();
}
const React = { createElement };
function Asd() {
return "foo";
}
export default <Asd />;

2
esbuild_deno_loader/testdata/mod.mjs vendored Normal file
View File

@ -0,0 +1,2 @@
const bool = "asd";
export { bool };

4
esbuild_deno_loader/testdata/mod.mts vendored Normal file
View File

@ -0,0 +1,4 @@
let bool: string;
bool = "asd";
bool = "asd2";
export { bool };

4
esbuild_deno_loader/testdata/mod.ts vendored Normal file
View File

@ -0,0 +1,4 @@
let bool: string;
bool = "asd";
bool = "asd2";
export { bool };

11
esbuild_deno_loader/testdata/mod.tsx vendored Normal file
View File

@ -0,0 +1,11 @@
function createElement(fn: () => string) {
return fn();
}
const React = { createElement };
function Asd() {
return "foo";
}
export default <Asd />;

18
import_map.json Normal file
View File

@ -0,0 +1,18 @@
{
"imports": {
"@codemirror/state": "https://esm.sh/@codemirror/state@6.1.2",
"@lezer/common": "https://esm.sh/@lezer/common@1.0.1",
"@lezer/markdown": "https://esm.sh/@lezer/markdown@1.0.2?external=@lezer/common,@codemirror/language,@lezer/highlight",
"@lezer/javascript": "https://esm.sh/@lezer/javascript@1.0.2?external=@lezer/common,@codemirror/language,@lezer/highlight",
"@codemirror/language": "https://esm.sh/@codemirror/language@6.2.1?external=@codemirror/state,@lezer/common,@lezer/lr,@codemirror/view,@lezer/highlight",
"@codemirror/commands": "https://esm.sh/@codemirror/commands@6.1.1?external=@codemirror/state,@codemirror/view",
"@codemirror/view": "https://esm.sh/@codemirror/view@6.3.0?external=@codemirror/state,@lezer/common",
"@lezer/highlight": "https://esm.sh/@lezer/highlight@1.1.1?external=@lezer/common",
"@codemirror/autocomplete": "https://esm.sh/@codemirror/autocomplete@6.3.0?external=@codemirror/state,@lezer/common",
"@codemirror/lint": "https://esm.sh/@codemirror/lint@6.0.0?external=@codemirror/state,@lezer/common",
"$sb/": "./syscall/",
"handlebars": "https://esm.sh/handlebars",
"@lezer/lr": "https://esm.sh/@lezer/lr",
"yaml": "https://deno.land/std/encoding/yaml.ts"
}
}

View File

@ -1,6 +0,0 @@
dist/
.env
# Mac
.DS_Store

View File

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -1,259 +0,0 @@
GO ?= $(shell command -v go 2> /dev/null)
NPM ?= $(shell command -v npm 2> /dev/null)
CURL ?= $(shell command -v curl 2> /dev/null)
MM_DEBUG ?=
MANIFEST_FILE ?= plugin.json
GOPATH ?= $(shell go env GOPATH)
GO_TEST_FLAGS ?= -race
GO_BUILD_FLAGS ?=
MM_UTILITIES_DIR ?= ../mattermost-utilities
DLV_DEBUG_PORT := 2346
DEFAULT_GOOS := $(shell go env GOOS)
DEFAULT_GOARCH := $(shell go env GOARCH)
export GO111MODULE=on
# You can include assets this directory into the bundle. This can be e.g. used to include profile pictures.
ASSETS_DIR ?= assets
## Define the default target (make all)
.PHONY: default
default: all
# Verify environment, and define PLUGIN_ID, PLUGIN_VERSION, HAS_SERVER and HAS_WEBAPP as needed.
include build/setup.mk
include build/legacy.mk
BUNDLE_NAME ?= $(PLUGIN_ID)-$(PLUGIN_VERSION).tar.gz
# Include custom makefile, if present
ifneq ($(wildcard build/custom.mk),)
include build/custom.mk
endif
## Checks the code style, tests, builds and bundles the plugin.
.PHONY: all
all: dist
## Builds the server, if it exists, for all supported architectures, unless MM_SERVICESETTINGS_ENABLEDEVELOPER is set.
.PHONY: server
server:
ifneq ($(HAS_SERVER),)
mkdir -p server/dist;
ifeq ($(MM_DEBUG),)
ifneq ($(MM_SERVICESETTINGS_ENABLEDEVELOPER),)
@echo Building plugin only for $(DEFAULT_GOOS)-$(DEFAULT_GOARCH) because MM_SERVICESETTINGS_ENABLEDEVELOPER is enabled
cd server && $(GO) build $(GO_BUILD_FLAGS) -trimpath -o dist/plugin-$(DEFAULT_GOOS)-$(DEFAULT_GOARCH);
else
cd server && env GOOS=linux GOARCH=amd64 $(GO) build $(GO_BUILD_FLAGS) -trimpath -o dist/plugin-linux-amd64;
cd server && env GOOS=linux GOARCH=arm64 $(GO) build $(GO_BUILD_FLAGS) -trimpath -o dist/plugin-linux-arm64;
cd server && env GOOS=darwin GOARCH=amd64 $(GO) build $(GO_BUILD_FLAGS) -trimpath -o dist/plugin-darwin-amd64;
cd server && env GOOS=darwin GOARCH=arm64 $(GO) build $(GO_BUILD_FLAGS) -trimpath -o dist/plugin-darwin-arm64;
cd server && env GOOS=windows GOARCH=amd64 $(GO) build $(GO_BUILD_FLAGS) -trimpath -o dist/plugin-windows-amd64.exe;
endif
else
$(info DEBUG mode is on; to disable, unset MM_DEBUG)
ifneq ($(MM_SERVICESETTINGS_ENABLEDEVELOPER),)
@echo Building plugin only for $(DEFAULT_GOOS)-$(DEFAULT_GOARCH) because MM_SERVICESETTINGS_ENABLEDEVELOPER is enabled
cd server && $(GO) build $(GO_BUILD_FLAGS) -gcflags "all=-N -l" -trimpath -o dist/plugin-$(DEFAULT_GOOS)-$(DEFAULT_GOARCH);
else
cd server && env GOOS=linux GOARCH=amd64 $(GO) build $(GO_BUILD_FLAGS) -gcflags "all=-N -l" -trimpath -o dist/plugin-linux-amd64;
cd server && env GOOS=linux GOARCH=arm64 $(GO) build $(GO_BUILD_FLAGS) -gcflags "all=-N -l" -trimpath -o dist/plugin-linux-arm64;
cd server && env GOOS=darwin GOARCH=amd64 $(GO) build $(GO_BUILD_FLAGS) -gcflags "all=-N -l" -trimpath -o dist/plugin-darwin-amd64;
cd server && env GOOS=darwin GOARCH=arm64 $(GO) build $(GO_BUILD_FLAGS) -gcflags "all=-N -l" -trimpath -o dist/plugin-darwin-arm64;
cd server && env GOOS=windows GOARCH=amd64 $(GO) build $(GO_BUILD_FLAGS) -gcflags "all=-N -l" -trimpath -o dist/plugin-windows-amd64.exe;
endif
endif
endif
## Ensures NPM dependencies are installed without having to run this all the time.
webapp/node_modules: $(wildcard webapp/package.json)
ifneq ($(HAS_WEBAPP),)
cd webapp && $(NPM) install
touch $@
endif
## Builds the webapp, if it exists.
.PHONY: webapp
webapp: webapp/node_modules
ifneq ($(HAS_WEBAPP),)
ifeq ($(MM_DEBUG),)
cd webapp && $(NPM) run build;
else
cd webapp && $(NPM) run debug;
endif
endif
## Generates a tar bundle of the plugin for install.
.PHONY: bundle
bundle:
rm -rf dist/
mkdir -p dist/$(PLUGIN_ID)
cp $(MANIFEST_FILE) dist/$(PLUGIN_ID)/
ifneq ($(wildcard $(ASSETS_DIR)/.),)
cp -r $(ASSETS_DIR) dist/$(PLUGIN_ID)/
endif
ifneq ($(HAS_PUBLIC),)
cp -r public dist/$(PLUGIN_ID)/
endif
ifneq ($(HAS_SERVER),)
mkdir -p dist/$(PLUGIN_ID)/server
cp -r server/dist dist/$(PLUGIN_ID)/server/
endif
ifneq ($(HAS_WEBAPP),)
mkdir -p dist/$(PLUGIN_ID)/webapp
cp -r webapp/dist dist/$(PLUGIN_ID)/webapp/
endif
cd dist && tar -cvzf $(BUNDLE_NAME) $(PLUGIN_ID)
@echo plugin built at: dist/$(BUNDLE_NAME)
## Builds and bundles the plugin.
.PHONY: dist
dist: server webapp bundle
## Builds and installs the plugin to a server.
.PHONY: deploy
deploy: dist
./build/bin/pluginctl deploy $(PLUGIN_ID) dist/$(BUNDLE_NAME)
## Builds and installs the plugin to a server, updating the webapp automatically when changed.
.PHONY: watch
watch: server bundle
ifeq ($(MM_DEBUG),)
cd webapp && $(NPM) run build:watch
else
cd webapp && $(NPM) run debug:watch
endif
## Installs a previous built plugin with updated webpack assets to a server.
.PHONY: deploy-from-watch
deploy-from-watch: bundle
./build/bin/pluginctl deploy $(PLUGIN_ID) dist/$(BUNDLE_NAME)
## Setup dlv for attaching, identifying the plugin PID for other targets.
.PHONY: setup-attach
setup-attach:
$(eval PLUGIN_PID := $(shell ps aux | grep "plugins/${PLUGIN_ID}" | grep -v "grep" | awk -F " " '{print $$2}'))
$(eval NUM_PID := $(shell echo -n ${PLUGIN_PID} | wc -w))
@if [ ${NUM_PID} -gt 2 ]; then \
echo "** There is more than 1 plugin process running. Run 'make kill reset' to restart just one."; \
exit 1; \
fi
## Check if setup-attach succeeded.
.PHONY: check-attach
check-attach:
@if [ -z ${PLUGIN_PID} ]; then \
echo "Could not find plugin PID; the plugin is not running. Exiting."; \
exit 1; \
else \
echo "Located Plugin running with PID: ${PLUGIN_PID}"; \
fi
## Attach dlv to an existing plugin instance.
.PHONY: attach
attach: setup-attach check-attach
dlv attach ${PLUGIN_PID}
## Attach dlv to an existing plugin instance, exposing a headless instance on $DLV_DEBUG_PORT.
.PHONY: attach-headless
attach-headless: setup-attach check-attach
dlv attach ${PLUGIN_PID} --listen :$(DLV_DEBUG_PORT) --headless=true --api-version=2 --accept-multiclient
## Detach dlv from an existing plugin instance, if previously attached.
.PHONY: detach
detach: setup-attach
@DELVE_PID=$(shell ps aux | grep "dlv attach ${PLUGIN_PID}" | grep -v "grep" | awk -F " " '{print $$2}') && \
if [ "$$DELVE_PID" -gt 0 ] > /dev/null 2>&1 ; then \
echo "Located existing delve process running with PID: $$DELVE_PID. Killing." ; \
kill -9 $$DELVE_PID ; \
fi
## Runs any lints and unit tests defined for the server and webapp, if they exist.
.PHONY: test
test: webapp/node_modules
ifneq ($(HAS_SERVER),)
$(GO) test -v $(GO_TEST_FLAGS) ./server/...
endif
ifneq ($(HAS_WEBAPP),)
cd webapp && $(NPM) run test;
endif
ifneq ($(wildcard ./build/sync/plan/.),)
cd ./build/sync && $(GO) test -v $(GO_TEST_FLAGS) ./...
endif
## Creates a coverage report for the server code.
.PHONY: coverage
coverage: webapp/node_modules
ifneq ($(HAS_SERVER),)
$(GO) test $(GO_TEST_FLAGS) -coverprofile=server/coverage.txt ./server/...
$(GO) tool cover -html=server/coverage.txt
endif
## Extract strings for translation from the source code.
.PHONY: i18n-extract
i18n-extract:
ifneq ($(HAS_WEBAPP),)
ifeq ($(HAS_MM_UTILITIES),)
@echo "You must clone github.com/mattermost/mattermost-utilities repo in .. to use this command"
else
cd $(MM_UTILITIES_DIR) && npm install && npm run babel && node mmjstool/build/index.js i18n extract-webapp --webapp-dir $(PWD)/webapp
endif
endif
## Disable the plugin.
.PHONY: disable
disable: detach
./build/bin/pluginctl disable $(PLUGIN_ID)
## Enable the plugin.
.PHONY: enable
enable:
./build/bin/pluginctl enable $(PLUGIN_ID)
## Reset the plugin, effectively disabling and re-enabling it on the server.
.PHONY: reset
reset: detach
./build/bin/pluginctl reset $(PLUGIN_ID)
## Kill all instances of the plugin, detaching any existing dlv instance.
.PHONY: kill
kill: detach
$(eval PLUGIN_PID := $(shell ps aux | grep "plugins/${PLUGIN_ID}" | grep -v "grep" | awk -F " " '{print $$2}'))
@for PID in ${PLUGIN_PID}; do \
echo "Killing plugin pid $$PID"; \
kill -9 $$PID; \
done; \
## Clean removes all build artifacts.
.PHONY: clean
clean:
rm -fr dist/
ifneq ($(HAS_SERVER),)
rm -fr server/coverage.txt
rm -fr server/dist
endif
ifneq ($(HAS_WEBAPP),)
rm -fr webapp/junit.xml
rm -fr webapp/dist
rm -fr webapp/node_modules
endif
rm -fr build/bin/
## Sync directory with a starter template
sync:
ifndef STARTERTEMPLATE_PATH
@echo STARTERTEMPLATE_PATH is not set.
@echo Set STARTERTEMPLATE_PATH to a local clone of https://github.com/mattermost/mattermost-plugin-starter-template and retry.
@exit 1
endif
cd ${STARTERTEMPLATE_PATH} && go run ./build/sync/main.go ./build/sync/plan.yml $(PWD)
# Help documentation à la https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html
help:
@cat Makefile build/*.mk | grep -v '\.PHONY' | grep -v '\help:' | grep -B1 -E '^[a-zA-Z0-9_.-]+:.*' | sed -e "s/:.*//" | sed -e "s/^## //" | grep -v '\-\-' | sed '1!G;h;$$!d' | awk 'NR%2{printf "\033[36m%-30s\033[0m",$$0;next;}1' | sort

View File

@ -1,138 +0,0 @@
# Plugin Starter Template [![CircleCI branch](https://img.shields.io/circleci/project/github/mattermost/mattermost-plugin-starter-template/master.svg)](https://circleci.com/gh/mattermost/mattermost-plugin-starter-template)
This plugin serves as a starting point for writing a Mattermost plugin. Feel free to base your own plugin off this repository.
To learn more about plugins, see [our plugin documentation](https://developers.mattermost.com/extend/plugins/).
## Getting Started
Use GitHub's template feature to make a copy of this repository by clicking the "Use this template" button.
Alternatively shallow clone the repository matching your plugin name:
```
git clone --depth 1 https://github.com/mattermost/mattermost-plugin-starter-template com.example.my-plugin
```
Note that this project uses [Go modules](https://github.com/golang/go/wiki/Modules). Be sure to locate the project outside of `$GOPATH`.
Edit the following files:
1. `plugin.json` with your `id`, `name`, and `description`:
```
{
"id": "com.example.my-plugin",
"name": "My Plugin",
"description": "A plugin to enhance Mattermost."
}
```
2. `go.mod` with your Go module path, following the `<hosting-site>/<repository>/<module>` convention:
```
module github.com/example/my-plugin
```
3. `.golangci.yml` with your Go module path:
```yml
linters-settings:
# [...]
goimports:
local-prefixes: github.com/example/my-plugin
```
Build your plugin:
```
make
```
This will produce a single plugin file (with support for multiple architectures) for upload to your Mattermost server:
```
dist/com.example.my-plugin.tar.gz
```
## Development
To avoid having to manually install your plugin, build and deploy your plugin using one of the following options. In order for the below options to work, you must first enable plugin uploads via your config.json or API and restart Mattermost.
```json
"PluginSettings" : {
...
"EnableUploads" : true
}
```
### Deploying with Local Mode
If your Mattermost server is running locally, you can enable [local mode](https://docs.mattermost.com/administration/mmctl-cli-tool.html#local-mode) to streamline deploying your plugin. Edit your server configuration as follows:
```json
{
"ServiceSettings": {
...
"EnableLocalMode": true,
"LocalModeSocketLocation": "/var/tmp/mattermost_local.socket"
},
}
```
and then deploy your plugin:
```
make deploy
```
You may also customize the Unix socket path:
```
export MM_LOCALSOCKETPATH=/var/tmp/alternate_local.socket
make deploy
```
If developing a plugin with a webapp, watch for changes and deploy those automatically:
```
export MM_SERVICESETTINGS_SITEURL=http://localhost:8065
export MM_ADMIN_TOKEN=j44acwd8obn78cdcx7koid4jkr
make watch
```
### Deploying with credentials
Alternatively, you can authenticate with the server's API with credentials:
```
export MM_SERVICESETTINGS_SITEURL=http://localhost:8065
export MM_ADMIN_USERNAME=admin
export MM_ADMIN_PASSWORD=password
make deploy
```
or with a [personal access token](https://docs.mattermost.com/developer/personal-access-tokens.html):
```
export MM_SERVICESETTINGS_SITEURL=http://localhost:8065
export MM_ADMIN_TOKEN=j44acwd8obn78cdcx7koid4jkr
make deploy
```
## Q&A
### How do I make a server-only or web app-only plugin?
Simply delete the `server` or `webapp` folders and remove the corresponding sections from `plugin.json`. The build scripts will skip the missing portions automatically.
### How do I include assets in the plugin bundle?
Place them into the `assets` directory. To use an asset at runtime, build the path to your asset and open as a regular file:
```go
bundlePath, err := p.API.GetBundlePath()
if err != nil {
return errors.Wrap(err, "failed to get bundle path")
}
profileImage, err := ioutil.ReadFile(filepath.Join(bundlePath, "assets", "profile_image.png"))
if err != nil {
return errors.Wrap(err, "failed to read profile image")
}
if appErr := p.API.SetProfileImage(userID, profileImage); appErr != nil {
return errors.Wrap(err, "failed to set profile image")
}
```
### How do I build the plugin with unminified JavaScript?
Setting the `MM_DEBUG` environment variable will invoke the debug builds. The simplest way to do this is to include the variable in your calls to `make` (e.g. `make dist MM_DEBUG=1`).

View File

@ -1,14 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="241px" height="240px" viewBox="0 0 241 240" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<!-- Generator: Sketch 46.2 (44496) - http://www.bohemiancoding.com/sketch -->
<title>blue-icon</title>
<desc>Created with Sketch.</desc>
<defs></defs>
<g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
<g id="06" transform="translate(-681.000000, -572.000000)" fill="#1875F0">
<g id="Group-2" transform="translate(626.000000, 517.000000)">
<path d="M216.908181,153.127705 C216.908181,153.127705 217.280588,169.452526 205.928754,180.543035 C194.57546,191.633544 180.631383,190.619887 171.560722,187.557072 C162.488602,184.494256 150.79503,176.85251 148.531381,161.16705 C146.269193,145.480133 156.508188,132.736607 156.508188,132.736607 L178.820463,105.066407 L191.815268,89.2629779 L202.969946,75.4912313 C202.969946,75.4912313 208.088713,68.6534193 209.547671,67.2421648 C209.836834,66.9625354 210.133299,66.7790286 210.423923,66.6377576 L210.635683,66.5299837 L210.673654,66.5154197 C211.28703,66.2518108 211.993873,66.195011 212.675888,66.4251227 C213.343299,66.6508652 213.860288,67.1081757 214.187421,67.6718037 L214.256061,67.7810339 L214.315938,67.9062846 C214.475124,68.2063036 214.608022,68.5485583 214.67082,68.9709151 C214.968745,70.976382 214.870897,79.5094471 214.870897,79.5094471 L215.342613,97.2047434 L216.039232,117.630795 L216.908181,153.127705 Z M245.790587,78.2043261 C287.057212,108.155253 305.982915,162.509669 288.774288,213.346872 C267.594104,275.911031 199.706245,309.46073 137.142925,288.281718 C74.5796048,267.10125 41.031812,199.213937 62.2105402,136.649778 C79.4482947,85.7295603 127.625459,54.0324057 178.690632,55.4145322 L162.322339,74.7541074 C132.028106,80.231639 105.87146,100.919843 95.5908489,131.290215 C80.2944535,176.475117 105.932628,225.982624 152.855846,241.866155 C199.777608,257.751142 250.216536,233.998666 265.512932,188.813764 C275.760046,158.543884 267.634882,126.336988 247.050359,103.595256 L245.790587,78.2043261 Z" id="blue-icon"></path>
</g>
</g>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 2.2 KiB

View File

@ -1,9 +0,0 @@
module github.com/zefhemel/mattermost-silverbullet-plugin
go 1.16
require (
github.com/mattermost/mattermost-server/v6 v6.2.1
github.com/pkg/errors v0.9.1
github.com/stretchr/testify v1.7.0
)

File diff suppressed because it is too large Load Diff

View File

@ -1,18 +0,0 @@
package root
import (
_ "embed" // Need to embed manifest file
"encoding/json"
"strings"
"github.com/mattermost/mattermost-server/v6/model"
)
//go:embed plugin.json
var manifestString string
var Manifest model.Manifest
// init decodes the embedded plugin.json (see the go:embed directive above)
// into Manifest at package load time. A decode error is deliberately
// discarded, leaving Manifest zero-valued.
func init() {
	_ = json.NewDecoder(strings.NewReader(manifestString)).Decode(&Manifest)
}

View File

@ -1,28 +0,0 @@
{
"id": "silverbullet",
"name": "Silver Bullet",
"description": "Silver Bullet integration into Mattermost",
"homepage_url": "https://github.com/silverbulletmd/silverbullet",
"support_url": "https://github.com/silverbulletmd/silverbullet/issues",
"release_notes_url": "https://github.com/silverbulletmd/silverbullet/releases/tag/v0.1.0",
"icon_path": "assets/starter-template-icon.svg",
"version": "0.1.0",
"min_server_version": "6.0.0",
"server": {
"executables": {
"linux-amd64": "server/dist/plugin-linux-amd64",
"linux-arm64": "server/dist/plugin-linux-arm64",
"darwin-amd64": "server/dist/plugin-darwin-amd64",
"darwin-arm64": "server/dist/plugin-darwin-arm64",
"windows-amd64": "server/dist/plugin-windows-amd64.exe"
}
},
"webapp": {
"bundle_path": "webapp/dist/index.js"
},
"settings_schema": {
"header": "",
"footer": "",
"settings": []
}
}

View File

@ -1,2 +0,0 @@
coverage.txt
dist

View File

@ -1,83 +0,0 @@
package main
import (
"reflect"
"github.com/pkg/errors"
)
// configuration captures the plugin's external configuration as exposed in the Mattermost server
// configuration, as well as values computed from the configuration. Any public fields will be
// deserialized from the Mattermost server configuration in OnConfigurationChange.
//
// As plugins are inherently concurrent (hooks being called asynchronously), and the plugin
// configuration can change at any time, access to the configuration must be synchronized. The
// strategy used in this plugin is to guard a pointer to the configuration, and clone the entire
// struct whenever it changes. You may replace this with whatever strategy you choose.
//
// If you add non-reference types to your configuration struct, be sure to rewrite Clone as a deep
// copy appropriate for your types.
type configuration struct {
	// No settings yet. Exported fields added here are populated from the
	// Mattermost server configuration by OnConfigurationChange.
}
// Clone returns a shallow copy of the configuration. Replace this with a
// deep copy if reference-typed fields are ever added to the struct.
func (c *configuration) Clone() *configuration {
	copied := *c
	return &copied
}
// getConfiguration retrieves the active configuration under lock, making it safe to use
// concurrently. The active configuration may change underneath the client of this method, but
// the struct returned by this API call is considered immutable.
func (p *Plugin) getConfiguration() *configuration {
	p.configurationLock.RLock()
	defer p.configurationLock.RUnlock()

	// Before the first OnConfigurationChange there is no stored configuration;
	// hand back an empty value so callers never need a nil check.
	if p.configuration == nil {
		return &configuration{}
	}

	return p.configuration
}
// setConfiguration replaces the active configuration under lock.
//
// Do not call setConfiguration while holding the configurationLock, as sync.Mutex is not
// reentrant. In particular, avoid using the plugin API entirely, as this may in turn trigger a
// hook back into the plugin. If that hook attempts to acquire this lock, a deadlock may occur.
//
// This method panics if setConfiguration is called with the existing configuration. This almost
// certainly means that the configuration was modified without being cloned and may result in
// an unsafe access.
func (p *Plugin) setConfiguration(configuration *configuration) {
	p.configurationLock.Lock()
	defer p.configurationLock.Unlock()

	// Guard against callers re-passing the stored pointer without cloning.
	if configuration != nil && p.configuration == configuration {
		// Ignore assignment if the configuration struct is empty. Go will optimize the
		// allocation for same to point at the same memory address, breaking the check
		// above.
		if reflect.ValueOf(*configuration).NumField() == 0 {
			return
		}

		panic("setConfiguration called with the existing configuration")
	}

	p.configuration = configuration
}
// OnConfigurationChange is invoked when configuration changes may have been
// made; it reloads the plugin's settings from the server configuration and
// installs them as the active configuration.
func (p *Plugin) OnConfigurationChange() error {
	cfg := new(configuration)

	// Pull the exported configuration fields out of the server's config store.
	if err := p.API.LoadPluginConfiguration(cfg); err != nil {
		return errors.Wrap(err, "failed to load plugin configuration")
	}

	p.setConfiguration(cfg)

	return nil
}

View File

@ -1,9 +0,0 @@
package main
import (
"github.com/mattermost/mattermost-server/v6/plugin"
)
// main is the plugin process entry point: it hands control to the Mattermost
// plugin RPC client, which serves the plugin's hooks to the server.
func main() {
	plugin.ClientMain(&Plugin{})
}

View File

@ -1,74 +0,0 @@
package main
import (
"fmt"
"io"
"net/http"
"strings"
"sync"
"github.com/mattermost/mattermost-server/v6/plugin"
"github.com/mattermost/mattermost-server/v6/shared/mlog"
)
// Plugin implements the interface expected by the Mattermost server to communicate between the server and plugin processes.
type Plugin struct {
	plugin.MattermostPlugin

	// configurationLock synchronizes access to the configuration.
	configurationLock sync.RWMutex

	// configuration is the active plugin configuration. Consult getConfiguration and
	// setConfiguration for usage. Guarded by configurationLock; treat the
	// pointed-to struct as immutable once published.
	configuration *configuration
}
// ServeHTTP routes incoming plugin HTTP requests: SilverBullet page, plug,
// and global-manifest paths are forwarded to the local SilverBullet server;
// everything else falls back to fetching the Mattermost root on port 8065
// and echoing the response body.
func (p *Plugin) ServeHTTP(c *plugin.Context, w http.ResponseWriter, r *http.Request) {
	// SilverBullet asset/plug requests go through the proxy.
	if strings.HasPrefix(r.URL.Path, "/page") || strings.HasPrefix(r.URL.Path, "/plug/") ||
		r.URL.Path == "/global.plug.json" {
		p.httpProxy(w, r)
		return
	}
	r2, err := http.Get("http://localhost:8065")
	if err != nil {
		fmt.Println(err)
		return
	}
	// The response body was previously never closed, leaking the connection.
	defer r2.Body.Close()
	if _, err := io.Copy(w, r2.Body); err != nil {
		mlog.Error("Error copying fallback response", mlog.Err(err))
	}
}
// httpProxy forwards the request to the locally running SilverBullet server
// (hard-coded on port 3000), mirroring the method, headers, and body, and
// copying the upstream status code, headers, and body back to the caller.
func (p *Plugin) httpProxy(w http.ResponseWriter, r *http.Request) {
	defer r.Body.Close()
	mlog.Info(fmt.Sprintf("Got HTTP request: %s: %s Headers: %+v", r.Method, r.URL, r.Header))
	req, err := http.NewRequest(r.Method, fmt.Sprintf("http://localhost:%d%s", 3000, r.URL), r.Body)
	if err != nil {
		http.Error(w, fmt.Sprintf("Proxy error: %s", err), http.StatusInternalServerError)
		return
	}
	req.Header = r.Header
	res, err := http.DefaultClient.Do(req)
	if err != nil {
		http.Error(w, fmt.Sprintf("Proxy error: %s", err), http.StatusInternalServerError)
		return
	}
	// Defer the close so the upstream body is released on every path,
	// including a failed copy below (previously closed only at the end).
	defer res.Body.Close()
	// Mirror all upstream headers before committing the status line.
	for k, vs := range res.Header {
		for _, v := range vs {
			w.Header().Add(k, v)
		}
	}
	w.WriteHeader(res.StatusCode)
	if _, err := io.Copy(w, res.Body); err != nil {
		mlog.Error("Error proxying", mlog.Err(err))
	}
}
// See https://developers.mattermost.com/extend/plugins/server/reference/

View File

@ -1,28 +0,0 @@
package main
import (
"io/ioutil"
"net/http"
"net/http/httptest"
"testing"
"github.com/stretchr/testify/assert"
)
// TestServeHTTP exercises the plugin's HTTP handler with a bare GET /.
// NOTE(review): ServeHTTP proxies to a local Mattermost server rather than
// printing a greeting, so the "Hello, world!" expectation below looks stale
// — confirm against the current handler before relying on this test.
func TestServeHTTP(t *testing.T) {
	assert := assert.New(t)
	plugin := Plugin{}
	w := httptest.NewRecorder()
	r := httptest.NewRequest(http.MethodGet, "/", nil)

	plugin.ServeHTTP(nil, w, r)

	result := w.Result()
	assert.NotNil(result)
	defer result.Body.Close()
	bodyBytes, err := ioutil.ReadAll(result.Body)
	assert.Nil(err)
	bodyString := string(bodyBytes)

	assert.Equal("Hello, world!", bodyString)
}

View File

@ -1,3 +0,0 @@
.eslintcache
junit.xml
node_modules

View File

@ -1 +0,0 @@
{}

View File

@ -1,69 +0,0 @@
{
"name": "silverbullet-mattermost-plugin",
"private": true,
"author": {
"name": "Zef Hemel",
"email": "zef@zef.me"
},
"license": "MIT",
"scripts": {
"watch": "rm -rf .parcel-cache && parcel watch",
"build": "parcel build",
"clean": "rm -rf dist"
},
"targets": {
"plugin": {
"source": [
"src/index.tsx"
],
"distDir": "dist",
"context": "browser"
}
},
"alias": {
"react": "./src/hack.js",
"react/jsx-runtime": "react/jsx-runtime"
},
"dependencies": {
"@codemirror/autocomplete": "^6.0.1",
"@codemirror/commands": "^6.0.0",
"@codemirror/lang-javascript": "^6.0.0",
"@codemirror/lang-markdown": "^6.0.0",
"@codemirror/language": "^6.0.0",
"@codemirror/legacy-modes": "^6.0.0",
"@codemirror/search": "^6.0.0",
"@codemirror/state": "^6.0.0",
"@codemirror/view": "^6.0.0",
"@fortawesome/fontawesome-svg-core": "1.3.0",
"@fortawesome/free-solid-svg-icons": "6.0.0",
"@fortawesome/react-fontawesome": "0.1.17",
"@jest/globals": "^27.5.1",
"@lezer/highlight": "1.0.0",
"@lezer/markdown": "1.0.1",
"fake-indexeddb": "^3.1.7",
"fuzzysort": "^1.9.0",
"jest": "^27.5.1",
"knex": "^1.0.4",
"react": "17.0.2",
"react-dom": "17.0.2"
},
"devDependencies": {
"@parcel/packager-raw-url": "2.5.0",
"@parcel/service-worker": "2.5.0",
"@parcel/transformer-inline-string": "2.5.0",
"@parcel/transformer-sass": "2.5.0",
"@parcel/transformer-webmanifest": "2.5.0",
"@parcel/validator-typescript": "2.5.0",
"@types/cors": "^2.8.12",
"@types/events": "^3.0.0",
"@types/jest": "^27.4.1",
"@types/react": "17.0.2",
"@types/react-dom": "17.0.2",
"assert": "^2.0.0",
"nodemon": "^2.0.18",
"parcel": "2.5.0",
"prettier": "^2.5.1",
"typescript": "^4.6.2"
},
"version": "0.0.34"
}

View File

@ -1,3 +0,0 @@
// Re-export the host application's React instance so the bundler's "react"
// alias resolves to the copy Mattermost already loaded on the page.
const { useEffect, useRef } = window.React;

export default window.React;
export { useEffect, useRef };

View File

@ -1,63 +0,0 @@
import manifest from "./manifest";
import React, { useEffect, useRef } from "react";
import { Editor } from "@silverbulletmd/web/editor";
import { HttpSpacePrimitives } from "@silverbulletmd/common/spaces/http_space_primitives";
import { safeRun } from "@plugos/plugos/util";
import { Space } from "@silverbulletmd/common/spaces/space";
import "../../../packages/web/styles/main.scss";
import "./styles.scss";
// Injects a stylesheet served from this plugin's static assets into the
// document head by appending a <link rel="stylesheet"> element.
function loadSheet(file: string) {
  const sbCSS = document.createElement("link");
  sbCSS.rel = "stylesheet";
  sbCSS.type = "text/css";
  sbCSS.href = `/static/plugins/silverbullet/${file}`;
  // document.head is always present and avoids an unchecked [0] index.
  document.head.appendChild(sbCSS);
}
// MainApp mounts the SilverBullet editor into a plain container div on first
// render; the Editor manages the DOM inside the ref'd element itself, so
// React only renders the placeholder.
const MainApp = (): React.ReactElement => {
  let ref = useRef<HTMLDivElement>(null);
  useEffect(() => {
    loadSheet("index.css");
    safeRun(async () => {
      // Space content is fetched over the plugin's HTTP route; the second
      // argument (auth token) is intentionally empty here.
      let httpPrimitives = new HttpSpacePrimitives("/plugins/silverbullet", "");
      const editor = new Editor(
        new Space(httpPrimitives, true),
        ref.current!,
        "/plugins/silverbullet"
      );
      await editor.init();
    });
    // Empty dependency list: initialize the editor exactly once.
  }, []);
  return (
    <div id="sb-root" ref={ref}>
      This is Silver Bullet
    </div>
  );
};
// Mattermost web-app plugin entry point: registers SilverBullet as a product
// surface rooted at /plugins/silverbullet, rendered by MainApp.
export default class Plugin {
  // eslint-disable-next-line @typescript-eslint/no-unused-vars, @typescript-eslint/no-empty-function
  public async initialize(registry: any, store: any) {
    // @see https://developers.mattermost.com/extend/plugins/webapp/reference/
    console.log("SUP YALL SILVER BULLET!!!");
    registry.registerProduct(
      "/plugins/silverbullet",
      "product-boards",
      "Silver Bullet",
      "/plugins/silverbullet",
      MainApp
    );
  }
}
declare global {
interface Window {
registerPlugin(id: string, plugin: Plugin): void;
}
}
window.registerPlugin(manifest.id, new Plugin());

View File

@ -1,5 +0,0 @@
// Surface the plugin manifest — and its id/version fields — from plugin.json.
import manifest from '../../plugin.json';

export default manifest;
export const { id, version } = manifest;

View File

@ -1,17 +0,0 @@
/* Revert Mattermost's global heading overrides inside the SilverBullet
   product so the editor's own typography takes effect. */
h1,
h2,
h3,
h4,
h5,
h6,
.h1,
.h2,
.h3,
.h4,
.h5,
.h6 {
  font-family: revert;
  font-weight: revert;
  line-height: revert;
  color: revert;
}

17831
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -1,35 +0,0 @@
{
"name": "silverbulletmd",
"private": true,
"license": "MIT",
"scripts": {
"watch": "rm -rf .parcel-cache && parcel watch --no-hmr packages/{web,server,plugos,plugs}",
"clean": "rm -rf .parcel-cache packages/*/dist",
"nuke": "rm -rf node_modules && npm run clean",
"build": "parcel build packages/{web,server,plugos}",
"plugs": "cd packages/plugs && npm run watch",
"build-plugs": "cd packages/plugs && npm run build",
"server": "nodemon -w packages/server/dist --exec silverbullet",
"test": "jest packages/*/{dist,build}/test",
"clean-build": "npm run clean && npm run build && npm i && npm run build-plugs",
"publish-all": "npm publish --access public --ws"
},
"devDependencies": {
"@parcel/core": "2.5.0",
"@parcel/packager-raw-url": "2.5.0",
"@parcel/service-worker": "2.5.0",
"@parcel/transformer-inline-string": "2.5.0",
"@parcel/transformer-sass": "2.5.0",
"@parcel/transformer-webmanifest": "2.5.0",
"@parcel/validator-typescript": "2.5.0",
"nodemon": "^2.0.19",
"parcel": "2.5.0",
"prettier": "^2.7.1",
"process": "^0.11.10",
"ts-node": "^10.9.1",
"typescript": "^4.8.3"
},
"workspaces": [
"packages/*"
]
}

View File

@ -1,27 +0,0 @@
import * as plugos from "@plugos/plugos/types";
import { EndpointHookT } from "@plugos/plugos/hooks/endpoint";
import { CronHookT } from "@plugos/plugos/hooks/node_cron";
import { EventHookT } from "@plugos/plugos/hooks/event";
import { CommandHookT } from "@silverbulletmd/web/hooks/command";
import { SlashCommandHookT } from "@silverbulletmd/web/hooks/slash_command";
import { PageNamespaceHookT } from "../server/hooks/page_namespace";
// Intersection of every hook family a SilverBullet plug manifest may declare.
export type SilverBulletHooks = CommandHookT &
  SlashCommandHookT &
  EndpointHookT &
  CronHookT &
  EventHookT &
  PageNamespaceHookT;

// Optional custom syntax extensions, keyed by node name.
export type SyntaxExtensions = {
  syntax?: { [key: string]: NodeDef };
};

// Describes one custom syntax node: how to match it and how to render it.
export type NodeDef = {
  firstCharacters: string[]; // characters a match can start with — presumably a pre-filter; confirm at use site
  regex: string; // match pattern, in string (non-RegExp) form
  styles: { [key: string]: string }; // inline CSS properties to apply
  className?: string; // optional CSS class for matched text
};

// A plugos manifest augmented with SilverBullet hooks and syntax extensions.
export type Manifest = plugos.Manifest<SilverBulletHooks> & SyntaxExtensions;

View File

@ -1,353 +0,0 @@
// Changes made to this file:
// - ignore language facet stuff, always assume markdown
import {
StateCommand,
Text,
EditorSelection,
ChangeSpec,
} from "@codemirror/state";
import { syntaxTree } from "@codemirror/language";
import { SyntaxNode, Tree } from "@lezer/common";
import { markdownLanguage } from "./markdown";
// Returns up to the first 50 characters of the node's text — enough for the
// marker regexes in getContext to match against.
function nodeStart(node: SyntaxNode, doc: Text) {
  const begin = node.from;
  return doc.sliceString(begin, begin + 50);
}
// One level of block context (blockquote or list) enclosing the cursor line,
// recording where its marker sits within the line and how to reproduce it.
class Context {
  constructor(
    readonly node: SyntaxNode,
    readonly from: number, // marker start offset, relative to the line
    readonly to: number, // marker end offset, relative to the line
    readonly spaceBefore: string,
    readonly spaceAfter: string,
    readonly type: string, // marker text, e.g. ">", "-", ")"
    readonly item: SyntaxNode | null // the ListItem node when this level is a list
  ) {}

  // Whitespace (or ">" for blockquotes) that continues this level on a new
  // line without starting a new item.
  blank(trailing: boolean = true) {
    let result = this.spaceBefore;
    if (this.node.name == "Blockquote") result += ">";
    else
      // Pad with spaces to the width of the original marker.
      for (
        let i = this.to - this.from - result.length - this.spaceAfter.length;
        i > 0;
        i--
      )
        result += " ";
    return result + (trailing ? this.spaceAfter : "");
  }

  // Full marker for a new item at this level; ordered lists take the current
  // item's number plus `add`.
  marker(doc: Text, add: number) {
    let number =
      this.node.name == "OrderedList"
        ? String(+itemNumber(this.item!, doc)[2] + add)
        : "";
    return this.spaceBefore + number + this.type + this.spaceAfter;
  }
}
// Builds the stack of blockquote/list Context levels wrapping `node`,
// outermost first, by walking ancestors up to the Document and matching each
// level's marker against the line text (quotes) or the item's own text (lists).
function getContext(node: SyntaxNode, line: string, doc: Text) {
  let nodes = [];
  // Collect ListItem/Blockquote ancestors, innermost first.
  for (
    let cur: SyntaxNode | null = node;
    cur && cur.name != "Document";
    cur = cur.parent
  ) {
    if (cur.name == "ListItem" || cur.name == "Blockquote") nodes.push(cur);
  }
  let context = [],
    pos = 0; // column where the next level's marker should start
  // Walk back outermost-to-innermost, consuming marker text left to right.
  for (let i = nodes.length - 1; i >= 0; i--) {
    let node = nodes[i],
      match,
      start = pos;
    if (
      node.name == "Blockquote" &&
      (match = /^[ \t]*>( ?)/.exec(line.slice(pos)))
    ) {
      pos += match[0].length;
      context.push(new Context(node, start, pos, "", match[1], ">", null));
    } else if (
      node.name == "ListItem" &&
      node.parent!.name == "OrderedList" &&
      (match = /^([ \t]*)\d+([.)])([ \t]*)/.exec(nodeStart(node, doc)))
    ) {
      let after = match[3],
        len = match[0].length;
      // 4+ trailing spaces would start an indented code block; keep them out
      // of the marker.
      if (after.length >= 4) {
        after = after.slice(0, after.length - 4);
        len -= 4;
      }
      pos += len;
      context.push(
        new Context(node.parent!, start, pos, match[1], after, match[2], node)
      );
    } else if (
      node.name == "ListItem" &&
      node.parent!.name == "BulletList" &&
      // Bullet, optionally followed by a task checkbox like "[ ]"/"[x]".
      (match = /^([ \t]*)([-+*])([ \t]{1,4}\[[ xX]\])?([ \t]+)/.exec(
        nodeStart(node, doc)
      ))
    ) {
      let after = match[4],
        len = match[0].length;
      if (after.length > 4) {
        after = after.slice(0, after.length - 4);
        len -= 4;
      }
      let type = match[2];
      // Continue a task item with an unchecked box.
      if (match[3]) type += match[3].replace(/[xX]/, " ");
      pos += len;
      context.push(
        new Context(node.parent!, start, pos, match[1], after, type, node)
      );
    }
  }
  return context;
}
// Matches an ordered-list item's number marker, returning the exec result
// [full, leadingWhitespace, digits]; only the first 10 characters of the
// item are inspected.
function itemNumber(item: SyntaxNode, doc: Text) {
  const head = doc.sliceString(item.from, item.from + 10);
  return /^(\s*)(\d+)(?=[.)])/.exec(head)!;
}
// Walks the siblings after (and including) `after`, pushing changes that
// renumber consecutive ordered-list items; stops early if the numbering is
// already discontinuous. `offset` shifts the new numbers (e.g. -2 when an
// item is being deleted).
function renumberList(
  after: SyntaxNode,
  doc: Text,
  changes: ChangeSpec[],
  offset = 0
) {
  for (let prev = -1, node = after; ; ) {
    if (node.name == "ListItem") {
      let m = itemNumber(node, doc);
      let number = +m[2];
      if (prev >= 0) {
        // A gap means the author numbered deliberately; leave it alone.
        if (number != prev + 1) return;
        changes.push({
          from: node.from + m[1].length,
          to: node.from + m[0].length,
          insert: String(prev + 2 + offset),
        });
      }
      prev = number;
    }
    let next = node.nextSibling;
    if (!next) break;
    node = next;
  }
}
/// This command, when invoked in Markdown context with cursor
/// selection(s), will create a new line with the markup for
/// blockquotes and lists that were active on the old line. If the
/// cursor was directly after the end of the markup for the old line,
/// trailing whitespace and list markers are removed from that line.
///
/// The command does nothing in non-Markdown context, so it should
/// not be used as the only binding for Enter (even in a Markdown
/// document, HTML and code regions might use a different language).
export const insertNewlineContinueMarkup: StateCommand = ({
  state,
  dispatch,
}) => {
  let tree = syntaxTree(state),
    { doc } = state;
  // `dont` is set when any range should fall through to the default Enter.
  let dont = null,
    changes = state.changeByRange((range) => {
      if (!range.empty)
        // TODO: Hack due to languagefacet stuff not working
        // || !markdownLanguage.isActiveAt(state, range.from))
        return (dont = { range });
      let pos = range.from,
        line = doc.lineAt(pos);
      let context = getContext(tree.resolveInner(pos, -1), line.text, doc);
      // Drop context levels whose markers start after the cursor column.
      while (
        context.length &&
        context[context.length - 1].from > pos - line.from
      )
        context.pop();
      if (!context.length) return (dont = { range });
      let inner = context[context.length - 1];
      // Cursor inside the innermost marker itself: do nothing special.
      if (inner.to - inner.spaceAfter.length > pos - line.from)
        return (dont = { range });
      let emptyLine =
        pos >= inner.to - inner.spaceAfter.length &&
        !/\S/.test(line.text.slice(inner.to));
      // Empty line in list
      if (inner.item && emptyLine) {
        // First list item or blank line before: delete a level of markup
        if (
          inner.node.firstChild!.to >= pos ||
          (line.from > 0 && !/[^\s>]/.test(doc.lineAt(line.from - 1).text))
        ) {
          let next = context.length > 1 ? context[context.length - 2] : null;
          let delTo,
            insert = "";
          if (next && next.item) {
            // Re-add marker for the list at the next level
            delTo = line.from + next.from;
            insert = next.marker(doc, 1);
          } else {
            delTo = line.from + (next ? next.to : 0);
          }
          let changes: ChangeSpec[] = [{ from: delTo, to: pos, insert }];
          if (inner.node.name == "OrderedList")
            renumberList(inner.item!, doc, changes, -2);
          if (next && next.node.name == "OrderedList")
            renumberList(next.item!, doc, changes);
          return {
            range: EditorSelection.cursor(delTo + insert.length),
            changes,
          };
        } else {
          // Move this line down
          let insert = "";
          for (let i = 0, e = context.length - 2; i <= e; i++)
            insert += context[i].blank(i < e);
          insert += state.lineBreak;
          return {
            range: EditorSelection.cursor(pos + insert.length),
            changes: { from: line.from, insert },
          };
        }
      }
      if (inner.node.name == "Blockquote" && emptyLine && line.from) {
        let prevLine = doc.lineAt(line.from - 1),
          quoted = />\s*$/.exec(prevLine.text);
        // Two aligned empty quoted lines in a row
        if (quoted && quoted.index == inner.from) {
          let changes = state.changes([
            { from: prevLine.from + quoted.index, to: prevLine.to },
            { from: line.from + inner.from, to: line.to },
          ]);
          return { range: range.map(changes), changes };
        }
      }
      // Normal case: continue the markup onto a new line.
      let changes: ChangeSpec[] = [];
      if (inner.node.name == "OrderedList")
        renumberList(inner.item!, doc, changes);
      let insert = state.lineBreak;
      let continued = inner.item && inner.item.from < line.from;
      // If not dedented
      if (
        !continued ||
        /^[\s\d.)\-+*>]*/.exec(line.text)![0].length >= inner.to
      ) {
        for (let i = 0, e = context.length - 1; i <= e; i++)
          insert +=
            i == e && !continued
              ? context[i].marker(doc, 1)
              : context[i].blank();
      }
      // Eat trailing whitespace before the cursor so it isn't duplicated.
      let from = pos;
      while (
        from > line.from &&
        /\s/.test(line.text.charAt(from - line.from - 1))
      )
        from--;
      changes.push({ from, to: pos, insert });
      return { range: EditorSelection.cursor(from + insert.length), changes };
    });
  if (dont) return false;
  dispatch(state.update(changes, { scrollIntoView: true, userEvent: "input" }));
  return true;
};
/** True when `node` is a block-markup marker (blockquote or list marker). */
function isMark(node: SyntaxNode) {
  return ["QuoteMark", "ListMark"].includes(node.name);
}
// Resolve the syntax node whose markup context applies at `pos`, for use by
// deleteMarkupBackward: walks out of marker nodes and descends into the last
// item of a list that ends directly before the position.
function contextNodeForDelete(tree: Tree, pos: number) {
  let node = tree.resolveInner(pos, -1),
    scan = pos;
  if (isMark(node)) {
    // Cursor is on a marker: use its parent block as the context node.
    scan = node.from;
    node = node.parent!;
  }
  for (let prev; (prev = node.childBefore(scan)); ) {
    if (isMark(prev)) {
      // Skip over further marker nodes before the scan position.
      scan = prev.from;
    } else if (prev.name == "OrderedList" || prev.name == "BulletList") {
      // Descend into the last item of a list ending here.
      node = prev.lastChild!;
      scan = node.to;
    } else {
      break;
    }
  }
  return node;
}
/// This command will, when invoked in a Markdown context with the
/// cursor directly after list or blockquote markup, delete one level
/// of markup. When the markup is for a list, it will be replaced by
/// spaces on the first invocation (a further invocation will delete
/// the spaces), to make it easy to continue a list.
///
/// When not after Markdown block markup, this command will return
/// false, so it is intended to be bound alongside other deletion
/// commands, with a higher precedence than the more generic commands.
export const deleteMarkupBackward: StateCommand = ({ state, dispatch }) => {
  let tree = syntaxTree(state);
  // `dont` marks ranges where no markup deletion applies; the command then
  // returns false so other Backspace bindings can take over.
  let dont = null,
    changes = state.changeByRange((range) => {
      let pos = range.from,
        { doc } = state;
      if (range.empty && markdownLanguage.isActiveAt(state, range.from)) {
        let line = doc.lineAt(pos);
        let context = getContext(
          contextNodeForDelete(tree, pos),
          line.text,
          doc
        );
        if (context.length) {
          let inner = context[context.length - 1];
          // Column just past the innermost markup plus one space, if any.
          let spaceEnd =
            inner.to - inner.spaceAfter.length + (inner.spaceAfter ? 1 : 0);
          // Delete extra trailing space after markup
          if (
            pos - line.from > spaceEnd &&
            !/\S/.test(line.text.slice(spaceEnd, pos - line.from))
          )
            return {
              range: EditorSelection.cursor(line.from + spaceEnd),
              changes: { from: line.from + spaceEnd, to: pos },
            };
          if (pos - line.from == spaceEnd) {
            let start = line.from + inner.from;
            // Replace a list item marker with blank space
            if (
              inner.item &&
              inner.node.from < inner.item.from &&
              /\S/.test(line.text.slice(inner.from, inner.to))
            )
              return {
                range,
                changes: {
                  from: start,
                  to: line.from + inner.to,
                  insert: inner.blank(),
                },
              };
            // Delete one level of indentation
            if (start < pos)
              return {
                range: EditorSelection.cursor(start),
                changes: { from: start, to: pos },
              };
          }
        }
      }
      return (dont = { range });
    });
  if (dont) return false;
  dispatch(
    state.update(changes, { scrollIntoView: true, userEvent: "delete" })
  );
  return true;
};

View File

@ -1,94 +0,0 @@
// Local changes made to this file:
// * Disable HTML tags
import { Prec } from "@codemirror/state";
import { KeyBinding, keymap } from "@codemirror/view";
import {
Language,
LanguageSupport,
LanguageDescription,
} from "@codemirror/language";
import { MarkdownExtension, MarkdownParser, parseCode } from "@lezer/markdown";
// import { html } from "@codemirror/lang-html";
import {
commonmarkLanguage,
markdownLanguage,
mkLang,
getCodeParser,
} from "./markdown";
import { insertNewlineContinueMarkup, deleteMarkupBackward } from "./commands";
export {
commonmarkLanguage,
markdownLanguage,
insertNewlineContinueMarkup,
deleteMarkupBackward,
};
/// A small keymap with Markdown-specific bindings. Binds Enter to
/// [`insertNewlineContinueMarkup`](#lang-markdown.insertNewlineContinueMarkup)
/// and Backspace to
/// [`deleteMarkupBackward`](#lang-markdown.deleteMarkupBackward).
export const markdownKeymap: readonly KeyBinding[] = [
  // Continue lists/blockquotes on Enter.
  { key: "Enter", run: insertNewlineContinueMarkup },
  // Delete one level of block markup on Backspace.
  { key: "Backspace", run: deleteMarkupBackward },
];
// const htmlNoMatch = html({ matchClosingTags: false });
/// Markdown language support.
export function markdown(
  config: {
    /// When given, this language will be used by default to parse code
    /// blocks.
    defaultCodeLanguage?: Language | LanguageSupport;
    /// A source of language support for highlighting fenced code
    /// blocks. When it is an array, the parser will use
    /// [`LanguageDescription.matchLanguageName`](#language.LanguageDescription^matchLanguageName)
    /// with the fenced code info to find a matching language. When it
    /// is a function, will be called with the info string and may
    /// return a language or `LanguageDescription` object.
    codeLanguages?:
      | readonly LanguageDescription[]
      | ((info: string) => Language | LanguageDescription | null);
    /// Set this to false to disable installation of the Markdown
    /// [keymap](#lang-markdown.markdownKeymap).
    addKeymap?: boolean;
    /// Markdown parser
    /// [extensions](https://github.com/lezer-parser/markdown#user-content-markdownextension)
    /// to add to the parser.
    extensions?: MarkdownExtension;
    /// The base language to use. Defaults to
    /// [`commonmarkLanguage`](#lang-markdown.commonmarkLanguage).
    base?: Language;
  } = {}
) {
  const {
    codeLanguages,
    defaultCodeLanguage,
    addKeymap = true,
    base: { parser } = commonmarkLanguage,
  } = config;
  if (!(parser instanceof MarkdownParser)) {
    throw new RangeError(
      "Base parser provided to `markdown` should be a Markdown parser"
    );
  }
  const extensions = config.extensions ? [config.extensions] : [];
  // HTML support is disabled locally, so no htmlNoMatch.support here.
  let support = [];
  let defaultCode;
  if (defaultCodeLanguage instanceof LanguageSupport) {
    // A LanguageSupport bundles a language plus its editor extensions.
    support.push(defaultCodeLanguage.support);
    defaultCode = defaultCodeLanguage.language;
  } else if (defaultCodeLanguage) {
    defaultCode = defaultCodeLanguage;
  }
  // Only build a fenced-code parser resolver when there is something to
  // resolve against; otherwise leave it undefined.
  let codeParser;
  if (codeLanguages || defaultCode) {
    codeParser = getCodeParser(codeLanguages, defaultCode);
  }
  extensions.push(
    parseCode({ codeParser }) // htmlParser intentionally omitted (HTML disabled)
  );
  if (addKeymap) {
    support.push(Prec.high(keymap.of(markdownKeymap)));
  }
  return new LanguageSupport(mkLang(parser.configure(extensions)), support);
}

View File

@ -1,72 +0,0 @@
import {
Language,
defineLanguageFacet,
languageDataProp,
foldNodeProp,
indentNodeProp,
LanguageDescription,
ParseContext,
} from "@codemirror/language";
import {
parser as baseParser,
MarkdownParser,
GFM,
Subscript,
Superscript,
Emoji,
} from "@lezer/markdown";
// Language data shared by all Markdown language instances; declares the
// HTML-comment block syntax used by comment-toggling commands.
const data = defineLanguageFacet({ block: { open: "<!--", close: "-->" } });
export const commonmark = baseParser.configure({
  props: [
    // Fold any block node (except the document itself) from the end of its
    // first line to the node's end.
    foldNodeProp.add((type) => {
      if (!type.is("Block") || type.is("Document")) return undefined;
      return (tree, state) => ({
        from: state.doc.lineAt(tree.from).to,
        to: tree.to,
      });
    }),
    indentNodeProp.add({
      Document: () => null,
    }),
    languageDataProp.add({
      Document: data,
    }),
  ],
});
export function mkLang(parser: MarkdownParser) {
return new Language(data, parser);
}
/// Language support for strict CommonMark.
export const commonmarkLanguage = mkLang(commonmark);
// CommonMark extended with GitHub-flavored Markdown plus extras.
const extended = commonmark.configure([GFM, Subscript, Superscript, Emoji]);
/// Language support for [GFM](https://github.github.com/gfm/) plus
/// subscript, superscript, and emoji syntax.
export const markdownLanguage = mkLang(extended);
/**
 * Build a resolver mapping a fenced-code info string (e.g. "js") to a
 * parser. Falls back to `defaultLanguage`'s parser (or null) when nothing
 * matches or no info string is given.
 */
export function getCodeParser(
  languages:
    | readonly LanguageDescription[]
    | ((info: string) => Language | LanguageDescription | null)
    | undefined,
  defaultLanguage?: Language
) {
  const fallback = () => (defaultLanguage ? defaultLanguage.parser : null);
  return (info: string) => {
    if (!info || !languages) return fallback();
    const found =
      typeof languages == "function"
        ? languages(info)
        : LanguageDescription.matchLanguageName(languages, info, true);
    if (found instanceof LanguageDescription) {
      // A description without loaded support gets a skipping parser that
      // triggers the async load.
      return found.support
        ? found.support.language.parser
        : ParseContext.getSkippingParser(found.load());
    }
    if (found) return found.parser;
    return fallback();
  };
}

View File

@ -1,28 +0,0 @@
{
"name": "@silverbulletmd/common",
"author": {
"name": "Zef Hemel",
"email": "zef@zef.me"
},
"version": "0.0.35",
"license": "MIT",
"dependencies": {
"@codemirror/autocomplete": "^6.1.1",
"@codemirror/commands": "^6.1.0",
"@codemirror/lang-javascript": "^6.0.2",
"@codemirror/lang-markdown": "^6.0.1",
"@codemirror/language": "^6.2.1",
"@codemirror/legacy-modes": "^6.1.0",
"@codemirror/search": "^6.2.0",
"@codemirror/state": "^6.1.1",
"@codemirror/view": "^6.2.3",
"@lezer/common": "^1.0.1",
"@lezer/highlight": "^1.0.0",
"@lezer/markdown": "^1.0.1",
"mime-types": "^2.1.35",
"yaml": "^1.10.2"
},
"devDependencies": {
"@types/mime-types": "^2.1.1"
}
}

View File

@ -1,12 +0,0 @@
import { SysCallMapping } from "@plugos/plugos/system";
import { parse } from "../parse_tree";
import { Language } from "@codemirror/language";
import type { ParseTree } from "../tree";
/** Syscall mapping exposing Markdown parsing (via `lang`) to plug code. */
export function markdownSyscalls(lang: Language): SysCallMapping {
  const mapping: SysCallMapping = {
    "markdown.parseMarkdown": (ctx, text: string): ParseTree =>
      parse(lang, text),
  };
  return mapping;
}

View File

@ -1,13 +0,0 @@
import { syscall } from "./syscall";
// Store `value` under `key` in the client-side store syscall namespace.
export async function set(key: string, value: any): Promise<void> {
  return syscall("clientStore.set", key, value);
}
// Retrieve the value stored under `key`.
// NOTE(review): behavior for a missing key depends on the syscall
// implementation — confirm whether it yields undefined or throws.
export async function get(key: string): Promise<any> {
  return syscall("clientStore.get", key);
}
// Remove the value stored under `key`.
export async function del(key: string): Promise<void> {
  return syscall("clientStore.delete", key);
}

View File

@ -1,7 +0,0 @@
import { syscall } from "./syscall";
import type { ParseTree } from "../common/tree";
// Parse `text` as Markdown via the host syscall, returning a ParseTree.
export async function parseMarkdown(text: string): Promise<ParseTree> {
  return syscall("markdown.parseMarkdown", text);
}

View File

@ -1,9 +0,0 @@
{
"name": "@silverbulletmd/plugos-silverbullet-syscall",
"author": {
"name": "Zef Hemel",
"email": "zef@zef.me"
},
"version": "0.0.35",
"license": "MIT"
}

View File

@ -1,5 +0,0 @@
import type { LogEntry } from "@plugos/plugos/sandbox";
export async function getServerLogs(): Promise<LogEntry[]> {
return syscall("sandbox.getServerLogs");
}

View File

@ -1,54 +0,0 @@
import { syscall } from "./syscall";
import { AttachmentMeta, PageMeta } from "../common/types";
// List pages in the space; `unfiltered` is forwarded to the syscall —
// presumably includes normally-filtered pages (confirm against the host).
export async function listPages(unfiltered = false): Promise<PageMeta[]> {
  return syscall("space.listPages", unfiltered);
}
// Fetch metadata for a single page.
export async function getPageMeta(name: string): Promise<PageMeta> {
  return syscall("space.getPageMeta", name);
}
// Read a page's text content together with its metadata.
export async function readPage(
  name: string
): Promise<{ text: string; meta: PageMeta }> {
  return syscall("space.readPage", name);
}
// Write (create or overwrite) a page, returning its updated metadata.
export async function writePage(name: string, text: string): Promise<PageMeta> {
  return syscall("space.writePage", name, text);
}
// Delete a page from the space.
export async function deletePage(name: string): Promise<void> {
  return syscall("space.deletePage", name);
}
// List the names of installed plugs.
export async function listPlugs(): Promise<string[]> {
  return syscall("space.listPlugs");
}
// List attachments stored in the space.
export async function listAttachments(): Promise<PageMeta[]> {
  return syscall("space.listAttachments");
}
// Fetch metadata for a single attachment.
export async function getAttachmentMeta(name: string): Promise<AttachmentMeta> {
  return syscall("space.getAttachmentMeta", name);
}
// Read an attachment's data together with its metadata.
export async function readAttachment(
  name: string
): Promise<{ data: string; meta: AttachmentMeta }> {
  return syscall("space.readAttachment", name);
}
// Write an attachment; `encoding` selects how `data` is interpreted
// ("string" for plain text, "dataurl" for base64 data URLs).
export async function writeAttachment(
  name: string,
  encoding: "string" | "dataurl",
  data: string
): Promise<AttachmentMeta> {
  return syscall("space.writeAttachment", name, encoding, data);
}
// Delete an attachment from the space.
export async function deleteAttachment(name: string): Promise<void> {
  return syscall("space.deleteAttachment", name);
}

View File

@ -1,13 +0,0 @@
import { syscall } from "./syscall";
// Index `value` under `key` in the full-text search index.
export async function fullTextIndex(key: string, value: string) {
  return syscall("fulltext.index", key, value);
}
// Remove `key` (and its indexed content) from the full-text index.
export async function fullTextDelete(key: string) {
  return syscall("fulltext.delete", key);
}
// Search the index for `phrase`, returning at most `limit` matches.
export async function fullTextSearch(phrase: string, limit: number = 100) {
  return syscall("fulltext.search", phrase, limit);
}

View File

@ -1,9 +0,0 @@
{
"name": "@plugos/plugos-syscall",
"author": {
"name": "Zef Hemel",
"email": "zef@zef.me"
},
"version": "0.0.35",
"license": "MIT"
}

View File

@ -1,5 +0,0 @@
import type { LogEntry } from "@plugos/plugos/sandbox";
export async function getLogs(): Promise<LogEntry[]> {
return syscall("sandbox.getLogs");
}

View File

@ -1,139 +0,0 @@
#!/usr/bin/env node
import { readFile, watch, writeFile } from "fs/promises";
import path from "path";
import yargs from "yargs";
import { hideBin } from "yargs/helpers";
import { Manifest } from "../types";
import YAML from "yaml";
import { mkdirSync } from "fs";
import { compile, sandboxCompileModule } from "../compile";
// Read a plug manifest (YAML), compile each declared dependency module and
// each function entry point into self-contained JS, and return the manifest
// with inline `code` instead of file paths.
async function bundle(
  manifestPath: string,
  debug: boolean,
  excludeModules: string[]
) {
  const rootPath = path.dirname(manifestPath);
  const manifest = YAML.parse(
    (await readFile(manifestPath)).toString()
  ) as Manifest<any>;
  if (!manifest.name) {
    throw new Error(`Missing 'name' in ${manifestPath}`);
  }
  // Dependencies are compiled once and excluded from per-function bundles.
  let allModulesToExclude = excludeModules.slice();
  for (let [name, moduleSpec] of Object.entries(manifest.dependencies || {})) {
    manifest.dependencies![name] = await sandboxCompileModule(moduleSpec);
    allModulesToExclude.push(name);
  }
  for (let [name, def] of Object.entries(manifest.functions || {})) {
    // "file.ts:exportedName" selects a named export; otherwise the
    // default export is used.
    let jsFunctionName = "default",
      filePath = path.join(rootPath, def.path!);
    if (filePath.indexOf(":") !== -1) {
      [filePath, jsFunctionName] = filePath.split(":");
    }
    def.code = await compile(
      filePath,
      jsFunctionName,
      debug,
      allModulesToExclude,
      false
    );
    // The source path is replaced by the inline compiled code.
    delete def.path;
  }
  return manifest;
}
/**
 * Bundle one plug manifest and write the generated JSON into `distPath`,
 * returning both the manifest object and the emitted file path.
 */
async function buildManifest(
  manifestPath: string,
  distPath: string,
  debug: boolean,
  excludeModules: string[]
) {
  const generatedManifest = await bundle(manifestPath, debug, excludeModules);
  // Swap the manifest's extension (e.g. ".yaml") for ".json".
  const extension = path.extname(manifestPath);
  const outFile =
    manifestPath.substring(0, manifestPath.length - extension.length) +
    ".json";
  const outPath = path.join(distPath, path.basename(outFile));
  console.log("Emitting bundle to", outPath);
  await writeFile(outPath, JSON.stringify(generatedManifest, null, 2));
  return { generatedManifest, outPath };
}
// CLI entry: parse options, build all given plug manifests, and optionally
// keep watching the working directory for changes.
async function run() {
  let args = yargs(hideBin(process.argv))
    .option("debug", {
      type: "boolean",
    })
    .option("watch", {
      type: "boolean",
      alias: "w",
    })
    .option("dist", {
      type: "string",
      default: ".",
    })
    .option("exclude", {
      type: "array",
      default: [],
    })
    .parse();
  if (args._.length === 0) {
    console.log(
      "Usage: plugos-bundle [--debug] [--dist <path>] [--exclude package1 package2] -- <manifest.plug.yaml> <manifest2.plug.yaml> ..."
    );
    process.exit(1);
  }
  // console.log("Args", args);
  // Build every manifest given on the command line; a failure in one does
  // not abort the others.
  async function buildAll() {
    mkdirSync(args.dist, { recursive: true });
    for (const plugManifestPath of args._) {
      let manifestPath = plugManifestPath as string;
      try {
        await buildManifest(
          manifestPath,
          args.dist,
          !!args.debug,
          args.exclude
        );
      } catch (e) {
        console.error(`Error building ${manifestPath}:`, e);
      }
    }
  }
  await buildAll();
  if (args.watch) {
    console.log("Watching for changes...");
    // Rebuild everything whenever a source-like file changes.
    for await (const { eventType, filename } of watch(".", {
      recursive: true,
    })) {
      if (
        filename.endsWith(".plug.yaml") ||
        filename.endsWith(".js") ||
        filename.endsWith(".css") ||
        filename.endsWith(".png") ||
        filename.endsWith(".jpg") ||
        filename.endsWith(".gif") ||
        (filename.endsWith(".ts") && !filename.endsWith("_in.ts"))
      ) {
        console.log("Change detected", eventType, filename);
        await buildAll();
      }
    }
  }
}
// Entry point: surface any top-level failure and exit non-zero.
run().catch((e) => {
  console.error(e);
  process.exit(1);
});

View File

@ -1,147 +0,0 @@
import esbuild from "esbuild";
import { mkdir, readFile, rm, symlink, unlink, writeFile } from "fs/promises";
import path from "path";
import { tmpdir } from "os";
import { nodeModulesDir } from "./environments/node_sandbox";
import { promisify } from "util";
import { execFile } from "child_process";
const execFilePromise = promisify(execFile);
// Bundle `filePath` with esbuild into a single IIFE string whose
// evaluation yields the module object. When `functionName` is given, a
// temporary `_in.ts` entry point is generated that re-exports just that
// function as the default export.
export async function compile(
  filePath: string,
  functionName: string | undefined = undefined,
  debug: boolean = false,
  excludeModules: string[] = [],
  meta = false
): Promise<string> {
  let outFile = path.resolve(path.dirname(filePath), "_out.tmp");
  let inFile = filePath;
  if (functionName) {
    // Generate a new file importing just this one function and exporting it
    inFile = path.resolve(path.dirname(filePath), "_in.ts");
    await writeFile(
      inFile,
      `import {${functionName}} from "./${path.basename(
        filePath
      )}";export default ${functionName};`
    );
  }
  try {
    // TODO: Figure out how to make source maps work correctly with eval() code
    let result = await esbuild.build({
      entryPoints: [path.basename(inFile)],
      bundle: true,
      format: "iife",
      globalName: "mod",
      platform: "browser",
      sourcemap: false, //debug ? "inline" : false,
      minify: !debug,
      outfile: outFile,
      metafile: true,
      external: excludeModules,
      // Non-JS assets get inlined as text or data URLs.
      loader: {
        ".css": "text",
        ".md": "text",
        ".txt": "text",
        ".html": "text",
        ".hbs": "text",
        ".png": "dataurl",
        ".gif": "dataurl",
        ".jpg": "dataurl",
      },
      absWorkingDir: path.resolve(path.dirname(inFile)),
    });
    if (meta) {
      let text = await esbuild.analyzeMetafile(result.metafile);
      console.log("Bundle info for", functionName, text);
    }
    let jsCode = (await readFile(outFile)).toString();
    await unlink(outFile);
    // Wrap so eval()-ing the returned string produces the module object.
    return `(() => { ${jsCode} return mod;})()`;
  } finally {
    // Remove the generated entry point, never the caller's own file.
    if (inFile !== filePath) {
      await unlink(inFile);
    }
  }
}
/**
 * Compile a whole npm module into a self-contained bundle by writing a
 * temporary re-export entry point in `cwd` and compiling that.
 */
export async function compileModule(
  cwd: string,
  moduleName: string
): Promise<string> {
  const entryFile = path.resolve(cwd, "_in.ts");
  await writeFile(entryFile, `export * from "${moduleName}";`);
  const bundledCode = await compile(entryFile);
  await unlink(entryFile);
  return bundledCode;
}
// TODO: Reconsider this later
// Modules symlinked into each sandbox compile dir so plug code can import
// the syscall libraries without installing them.
const exposedModules = [
  "@silverbulletmd/plugos-silverbullet-syscall",
  "@plugos/plugos-syscall",
];
/**
 * Compile `code` (written to `filename`) inside a throw-away temp
 * directory, with the plugos syscall modules symlinked in and
 * `installModules` npm-installed, returning the bundled JS.
 *
 * Fixes:
 * - The temp file was written to a literal "$(unknown)" path; the
 *   `filename` parameter (otherwise unused) is clearly what was intended —
 *   callers pass e.g. "module.ts".
 * - The temp directory is now removed in a `finally` block so it is not
 *   leaked when `npm install` or compilation fails.
 */
export async function sandboxCompile(
  filename: string,
  code: string,
  functionName?: string,
  debug: boolean = false,
  installModules: string[] = [],
  globalModules: string[] = []
): Promise<string> {
  let tmpDir = `${tmpdir()}/plugos-${Math.random()}`;
  await mkdir(tmpDir, { recursive: true });
  try {
    const srcNodeModules = `${nodeModulesDir}/node_modules`;
    const targetNodeModules = `${tmpDir}/node_modules`;
    await mkdir(`${targetNodeModules}/@silverbulletmd`, { recursive: true });
    await mkdir(`${targetNodeModules}/@plugos`, { recursive: true });
    // Expose the syscall libraries to the compiled code via symlinks.
    for (const exposedModule of exposedModules) {
      await symlink(
        `${srcNodeModules}/${exposedModule}`,
        `${targetNodeModules}/${exposedModule}`,
        "dir"
      );
    }
    // Install any modules the code depends on into the temp dir.
    for (let moduleName of installModules) {
      await execFilePromise("npm", ["install", moduleName], {
        cwd: tmpDir,
      });
    }
    await writeFile(`${tmpDir}/${filename}`, code);
    return await compile(
      `${tmpDir}/${filename}`,
      functionName,
      debug,
      globalModules
    );
  } finally {
    await rm(tmpDir, { recursive: true });
  }
}
/**
 * Compile an npm module (spec "pkg@version" with an optional ":/sub/path")
 * into a sandboxed CommonJS bundle. The local sub-path variable no longer
 * shadows the imported `path` module.
 */
export async function sandboxCompileModule(
  moduleName: string,
  globalModules: string[] = []
): Promise<string> {
  const [moduleSpec, subPath] = moduleName.split(":");
  // Strip the trailing version segment; splitting on "@" also keeps scoped
  // package names intact ("@scope/pkg@1.0" -> "@scope/pkg").
  const pieces = moduleSpec.split("@");
  const cleanModulesName = pieces.slice(0, pieces.length - 1).join("@");
  return sandboxCompile(
    "module.ts",
    // `export * from "${cleanModulesName}${subPath ? subPath : ""}";`,
    `module.exports = require("${cleanModulesName}${subPath ? subPath : ""}");`,
    undefined,
    true,
    [moduleSpec],
    globalModules
  );
}

View File

@ -1,8 +0,0 @@
<html>
<body>
<script type="module">
      // Bootstrap the sandbox: load the worker script as a module inside this locked-down iframe.
import "./sandbox_worker";
</script>
</body>
</html>

View File

@ -1,55 +0,0 @@
import { safeRun } from "../util";
// @ts-ignore
import sandboxHtml from "bundle-text:./iframe_sandbox.html";
import { Sandbox } from "../sandbox";
import { WorkerLike } from "./worker";
import { Plug } from "../plug";
// WorkerLike implementation that hosts plug code inside a hidden, sandboxed
// iframe, communicating via window.postMessage.
class IFrameWrapper implements WorkerLike {
  private iframe: HTMLIFrameElement;
  onMessage?: (message: any) => Promise<void>;
  // Resolves once the iframe document has loaded.
  ready: Promise<void>;
  private messageListener: (evt: any) => void;
  constructor() {
    const iframe = document.createElement("iframe", {});
    this.iframe = iframe;
    iframe.style.display = "none";
    // Let's lock this down significantly
    iframe.setAttribute("sandbox", "allow-scripts");
    iframe.srcdoc = sandboxHtml;
    this.messageListener = (evt: any) => {
      // Only accept messages originating from our own iframe.
      if (evt.source !== iframe.contentWindow) {
        return;
      }
      let data = evt.data;
      if (!data) return;
      safeRun(async () => {
        await this.onMessage!(data);
      });
    };
    window.addEventListener("message", this.messageListener);
    document.body.appendChild(iframe);
    this.ready = new Promise((resolve) => {
      iframe.onload = () => {
        resolve();
        iframe.onload = null;
      };
    });
  }
  postMessage(message: any): void {
    this.iframe.contentWindow!.postMessage(message, "*");
  }
  terminate() {
    console.log("Terminating iframe sandbox");
    // Detach the listener so a terminated sandbox cannot leak callbacks.
    window.removeEventListener("message", this.messageListener);
    return this.iframe.remove();
  }
}
/** Create a plug sandbox backed by a hidden, locked-down iframe. */
export function createSandbox(plug: Plug<any>) {
  const wrapper = new IFrameWrapper();
  return new Sandbox(plug, wrapper);
}

View File

@ -1,56 +0,0 @@
import { Worker } from "worker_threads";
import { safeRun } from "../util";
// @ts-ignore
import workerCode from "bundle-text:./node_worker.ts";
import { Sandbox } from "../sandbox";
import { WorkerLike } from "./worker";
import { Plug } from "../plug";
import path from "path";
import fs from "fs";
// WorkerLike implementation backed by a node worker_threads Worker.
class NodeWorkerWrapper implements WorkerLike {
  onMessage?: (message: any) => Promise<void>;
  // Resolves once the worker thread reports "online".
  ready: Promise<void>;
  private worker: Worker;
  constructor(worker: Worker) {
    this.worker = worker;
    worker.on("message", (message: any) => {
      safeRun(async () => {
        await this.onMessage!(message);
      });
    });
    this.ready = new Promise((resolve) => {
      worker.once("online", resolve);
    });
  }
  postMessage(message: any): void {
    this.worker.postMessage(message);
  }
  terminate(): void {
    this.worker.terminate();
  }
}
// Look for the node_modules directory, to be passed to the worker to find e.g. the vm2 module
// Walks up from this file's directory until node_modules/vm2 is found (or
// the filesystem root is reached, in which case it stays "/").
export let nodeModulesDir = __dirname;
while (
  !fs.existsSync(nodeModulesDir + "/node_modules/vm2") &&
  nodeModulesDir !== "/"
) {
  nodeModulesDir = path.dirname(nodeModulesDir);
}
/** Spawn a worker_threads-based sandbox for the given plug. */
export function createSandbox(plug: Plug<any>) {
  // The worker evaluates the inlined bundle text and needs the host's
  // node_modules path to locate vm2 and friends.
  const nodeWorker = new Worker(workerCode, {
    eval: true,
    workerData: {
      nodeModulesPath: path.join(nodeModulesDir, "node_modules"),
    },
  });
  const wrapper = new NodeWorkerWrapper(nodeWorker);
  return new Sandbox(plug, wrapper);
}

View File

@ -1,158 +0,0 @@
import { ConsoleLogger } from "./custom_logger";
const {
  parentPort,
  workerData: { nodeModulesPath },
} = require("worker_threads");
// vm2 is resolved from the host's node_modules path passed via workerData.
const { VM, VMScript } = require(`${nodeModulesPath}/vm2`);
// Plug functions loaded into this worker, keyed by name.
let loadedFunctions = new Map<string, Function>();
// Outstanding syscall requests awaiting a response, keyed by request id.
let pendingRequests = new Map<
  number,
  {
    resolve: (result: unknown) => void;
    reject: (e: any) => void;
  }
>();
let syscallReqId = 0;
// Forward console output to the parent thread as log messages.
let consoleLogger = new ConsoleLogger((level, message) => {
  parentPort.postMessage({
    type: "log",
    level,
    message,
  });
}, false);
let loadedModules = new Map<string, any>();
// HACK to make Mattermost client work...
loadedModules.set("form-data", require(`${nodeModulesPath}/form-data`));
let vm = new VM({
  sandbox: {
    // Exposing some "safe" APIs
    console: consoleLogger,
    setTimeout,
    clearTimeout,
    setInterval,
    URL,
    clearInterval,
    TextEncoder,
    TextDecoder,
    fetch: require(`${nodeModulesPath}/node-fetch`),
    WebSocket: require(`${nodeModulesPath}/ws`),
    // This is only going to be called for pre-bundled modules, we won't allow
    // arbitrary requiring of modules
    require: (moduleName: string): any => {
      // console.log("Loading module", moduleName);
      // if (preloadedModules.includes(moduleName)) {
      //   return require(`${nodeModulesPath}/${moduleName}`);
      // } else
      if (loadedModules.has(moduleName)) {
        let mod = loadedModules.get(moduleName);
        // console.log("And it has the value", mod);
        return mod;
      } else {
        throw Error(`Cannot import arbitrary modules like ${moduleName}`);
      }
    },
    self: {
      // Bridge syscalls to the parent thread; resolved when a matching
      // "syscall-response" message comes back (see the message loop).
      syscall: (name: string, ...args: any[]) => {
        return new Promise((resolve, reject) => {
          syscallReqId++;
          pendingRequests.set(syscallReqId, { resolve, reject });
          parentPort.postMessage({
            type: "syscall",
            id: syscallReqId,
            name,
            // TODO: Figure out why this is necessary (to avoide a CloneError)
            args: JSON.parse(JSON.stringify(args)),
          });
        });
      },
    },
  },
});
/** Wrap a bundled IIFE so evaluating it yields the plug's default export. */
function wrapScript(code: string) {
  return "(" + code + ')["default"]';
}
/** Run an async thunk, logging (never propagating) any rejection. */
function safeRun(fn: any) {
  fn().catch((e: any) => console.error(e));
}
// Main message loop: load plug code, load dependency bundles, invoke
// functions inside the vm2 sandbox, and route syscall responses back to
// their pending promises.
parentPort.on("message", (data: any) => {
  safeRun(async () => {
    switch (data.type) {
      case "load":
        // Pre-compile the wrapped function body for later invocation.
        loadedFunctions.set(data.name, new VMScript(wrapScript(data.code)));
        parentPort.postMessage({
          type: "inited",
          name: data.name,
        });
        break;
      case "load-dependency":
        // console.log("Asked to load dep", data.name);
        try {
          let r = vm.run(data.code);
          // console.log("Loaded dependency", r);
          // Make the evaluated module available to the sandbox's require().
          loadedModules.set(data.name, r);
          parentPort.postMessage({
            type: "dependency-inited",
            name: data.name,
          });
        } catch (e: any) {
          console.error("Could not load dependency", e.message);
        }
        break;
      case "invoke":
        let fn = loadedFunctions.get(data.name);
        if (!fn) {
          throw new Error(`Function not loaded: ${data.name}`);
        }
        try {
          let r = vm.run(fn);
          let result = await Promise.resolve(r(...data.args));
          parentPort.postMessage({
            type: "result",
            id: data.id,
            // TOOD: Figure out if this is necessary, because it's expensive
            result: result && JSON.parse(JSON.stringify(result)),
          });
        } catch (e: any) {
          // console.error("Error caught", e, "Stack", e.stack);
          // Report the failure to the controller instead of crashing.
          parentPort.postMessage({
            type: "result",
            id: data.id,
            error: e.message,
            stack: e.stack,
          });
        }
        break;
      case "syscall-response":
        let syscallId = data.id;
        const lookup = pendingRequests.get(syscallId);
        if (!lookup) {
          throw Error("Invalid request id");
        }
        pendingRequests.delete(syscallId);
        if (data.error) {
          // console.log("Got rejection", data.error);
          lookup.reject(new Error(data.error));
        } else {
          lookup.resolve(data.result);
        }
        break;
    }
  });
});
// Keep the worker alive and log anything that escapes the handlers.
process.on("uncaughtException", (e) => {
  console.error("Uncaught error", e);
});

View File

@ -1,127 +0,0 @@
import { safeRun } from "../util";
import { ConsoleLogger } from "./custom_logger";
import { ControllerMessage, WorkerMessage } from "./worker";
// Plug functions loaded into this worker, keyed by name.
let loadedFunctions = new Map<string, Function>();
// Outstanding syscall requests awaiting a controller response, by id.
let pendingRequests = new Map<
  number,
  {
    resolve: (result: unknown) => void;
    reject: (e: any) => void;
  }
>();
/**
 * Send a message to whichever controller hosts this sandbox: the parent
 * window when running inside an iframe, the worker owner otherwise.
 */
function workerPostMessage(msg: ControllerMessage) {
  const inIframe = typeof window !== "undefined" && window.parent !== window;
  if (inIframe) {
    window.parent.postMessage(msg, "*");
    return;
  }
  self.postMessage(msg);
}
declare global {
  function syscall(name: string, ...args: any[]): Promise<any>;
  // function require(moduleName: string): any;
}
// Monotonically increasing id used to correlate syscall responses.
let syscallReqId = 0;
// Proxy syscalls to the controller; resolved when the matching
// "syscall-response" message arrives.
self.syscall = async (name: string, ...args: any[]) => {
  return await new Promise((resolve, reject) => {
    syscallReqId++;
    pendingRequests.set(syscallReqId, { resolve, reject });
    workerPostMessage({
      type: "syscall",
      id: syscallReqId,
      name,
      args,
    });
  });
};
let loadedModules = new Map<string, any>();
// Only returns modules pre-loaded via "load-dependency"; no arbitrary imports.
// @ts-ignore
self.require = (moduleName: string): any => {
  // console.log("Loading", moduleName, loadedModules.get(moduleName));
  return loadedModules.get(moduleName);
};
// Route console output to the controller as log messages.
// @ts-ignore
self.console = new ConsoleLogger((level, message) => {
  workerPostMessage({ type: "log", level, message });
}, false);
/** Wrap a bundled IIFE in a `return` so `new Function` yields its default export. */
function wrapScript(code: string) {
  return "return (" + code + ')["default"]';
}
// Main message loop: load plug code, load dependency bundles, invoke
// functions, and route syscall responses back to their pending promises.
self.addEventListener("message", (event: { data: WorkerMessage }) => {
  safeRun(async () => {
    let data = event.data;
    switch (data.type) {
      case "load":
        // Evaluate the wrapped bundle to obtain the default-export function.
        let fn2 = new Function(wrapScript(data.code!));
        loadedFunctions.set(data.name!, fn2());
        workerPostMessage({
          type: "inited",
          name: data.name,
        });
        break;
      case "load-dependency":
        // console.log("Received dep", data.name);
        // Evaluate the dependency bundle and register it for self.require.
        let fn3 = new Function(`return ${data.code!}`);
        let v = fn3();
        loadedModules.set(data.name!, v);
        // console.log("Dep val", v);
        workerPostMessage({
          type: "dependency-inited",
          name: data.name,
        });
        break;
      case "invoke":
        let fn = loadedFunctions.get(data.name!);
        if (!fn) {
          throw new Error(`Function not loaded: ${data.name}`);
        }
        try {
          let result = await Promise.resolve(fn(...(data.args || [])));
          workerPostMessage({
            type: "result",
            id: data.id,
            result: result,
          } as ControllerMessage);
        } catch (e: any) {
          // Report the failure to the controller instead of crashing.
          workerPostMessage({
            type: "result",
            id: data.id,
            error: e.message,
            stack: e.stack,
          });
          // console.error("Error invoking function", data.name, e.message);
          // throw e;
        }
        break;
      case "syscall-response":
        let syscallId = data.id!;
        const lookup = pendingRequests.get(syscallId);
        if (!lookup) {
          console.log(
            "Current outstanding requests",
            pendingRequests,
            "looking up",
            syscallId
          );
          throw Error("Invalid request id");
        }
        pendingRequests.delete(syscallId);
        if (data.error) {
          lookup.reject(new Error(data.error));
        } else {
          lookup.resolve(data.result);
        }
        break;
    }
  });
});

View File

@ -1,50 +0,0 @@
import { createSandbox } from "../environments/node_sandbox";
import { expect, test } from "@jest/globals";
import { Manifest } from "../types";
import express from "express";
import request from "supertest";
import { EndpointHook, EndpointHookT } from "./endpoint";
import { System } from "../system";
// Integration test: load a plug with an HTTP endpoint function into a
// plugos System, mount the EndpointHook on an express app, and exercise
// the endpoint end to end.
test("Run a plugos endpoint server", async () => {
  let system = new System<EndpointHookT>("server");
  let plug = await system.load(
    {
      name: "test",
      functions: {
        testhandler: {
          http: {
            path: "/",
          },
          // Pre-bundled IIFE, as plugos-bundle would emit it.
          code: `(() => {
          return {
            default: (req) => {
              console.log("Req", req);
              return {status: 200, body: [1, 2, 3], headers: {"Content-type": "application/json"}};
            }
          };
        })()`,
        },
      },
    } as Manifest<EndpointHookT>,
    createSandbox
  );
  const app = express();
  const port = 3123;
  system.addHook(new EndpointHook(app, "/_"));
  let server = app.listen(port, () => {
    console.log(`Listening on port ${port}`);
  });
  // Route shape is /<prefix>/<plug-name>/<endpoint-path>.
  let resp = await request(app)
    .get("/_/test/?name=Pete")
    .expect((resp) => {
      expect(resp.status).toBe(200);
      expect(resp.header["content-type"]).toContain("application/json");
      expect(resp.text).toBe(JSON.stringify([1, 2, 3]));
    });
  server.close();
  await system.unloadAll();
});

View File

@ -1,134 +0,0 @@
import { Hook, Manifest } from "../types";
import { Express, NextFunction, Request, Response } from "express";
import { System } from "../system";
// Request object passed into a plug's HTTP endpoint function.
export type EndpointRequest = {
  method: string;
  path: string;
  query: { [key: string]: string };
  headers: { [key: string]: string };
  body: any;
};
// Response an endpoint function must return.
export type EndpointResponse = {
  status: number;
  headers?: { [key: string]: string };
  body: any;
};
// Hook config: a function may declare one or more HTTP endpoints.
export type EndpointHookT = {
  http?: EndPointDef | EndPointDef[];
};
export type EndPointDef = {
  // Defaults to "GET" when omitted; "ANY" matches every method.
  method?: "GET" | "POST" | "PUT" | "DELETE" | "HEAD" | "OPTIONS" | "ANY";
  path: string;
};
// Express middleware hook routing /<prefix>/<plug>/<path> requests to plug
// functions that declare a matching `http` endpoint definition.
export class EndpointHook implements Hook<EndpointHookT> {
  private app: Express;
  readonly prefix: string;
  constructor(app: Express, prefix: string) {
    this.app = app;
    this.prefix = prefix;
  }
  apply(system: System<EndpointHookT>): void {
    this.app.use((req: Request, res: Response, next: NextFunction) => {
      if (!req.path.startsWith(this.prefix)) {
        return next();
      }
      console.log("Endpoint request", req.path);
      Promise.resolve()
        .then(async () => {
          // Iterate over all loaded plugins
          for (const [plugName, plug] of system.loadedPlugs.entries()) {
            const manifest = plug.manifest;
            if (!manifest) {
              continue;
            }
            const functions = manifest.functions;
            console.log("Checking plug", plugName);
            let prefix = `${this.prefix}/${plugName}`;
            if (!req.path.startsWith(prefix)) {
              continue;
            }
            for (const [name, functionDef] of Object.entries(functions)) {
              if (!functionDef.http) {
                continue;
              }
              // A function may declare one endpoint or a list of them.
              let endpoints = Array.isArray(functionDef.http)
                ? functionDef.http
                : [functionDef.http];
              console.log(endpoints);
              for (const { path, method } of endpoints) {
                let prefixedPath = `${prefix}${path}`;
                if (
                  prefixedPath === req.path &&
                  ((method || "GET") === req.method || method === "ANY")
                ) {
                  try {
                    const response: EndpointResponse = await plug.invoke(name, [
                      {
                        path: req.path,
                        method: req.method,
                        body: req.body,
                        query: req.query,
                        headers: req.headers,
                      } as EndpointRequest,
                    ]);
                    let resp = res.status(response.status);
                    if (response.headers) {
                      for (const [key, value] of Object.entries(
                        response.headers
                      )) {
                        resp = resp.header(key, value);
                      }
                    }
                    resp.send(response.body);
                    return;
                  } catch (e: any) {
                    console.error("Error executing function", e);
                    res.status(500).send(e.message);
                    return;
                  }
                }
              }
            }
          }
          // No endpoint matched: fall through to the next middleware.
          next();
        })
        .catch((e) => {
          console.error(e);
          next(e);
        });
    });
  }
  validateManifest(manifest: Manifest<EndpointHookT>): string[] {
    let errors = [];
    for (const [name, functionDef] of Object.entries(manifest.functions)) {
      if (!functionDef.http) {
        continue;
      }
      let endpoints = Array.isArray(functionDef.http)
        ? functionDef.http
        : [functionDef.http];
      for (let { path, method } of endpoints) {
        if (!path) {
          errors.push("Path not defined for endpoint");
        }
        // NOTE(review): the EndPointDef type also allows "HEAD"/"OPTIONS",
        // which this list rejects — confirm whether that is intentional.
        if (
          method &&
          ["GET", "POST", "PUT", "DELETE", "ANY"].indexOf(method) === -1
        ) {
          errors.push(
            `Invalid method ${method} for end point with with ${path}`
          );
        }
      }
    }
    return errors;
  }
}

View File

@ -1,88 +0,0 @@
{
"name": "@plugos/plugos",
"author": {
"name": "Zef Hemel",
"email": "zef@zef.me"
},
"version": "0.0.35",
"license": "MIT",
"bin": {
"plugos-bundle": "./dist/plugos/plugos-bundle.js",
"plugos-server": "./dist/plugos/plugos-server.js"
},
"scripts": {
"watch": "rm -rf .parcel-cache && parcel watch",
"build": "parcel build",
"clean": "rm -rf dist",
"test": "jest dist/test"
},
"targets": {
"plugos": {
"source": [
"bin/plugos-bundle.ts",
"bin/plugos-server.ts"
],
"outputFormat": "commonjs",
"isLibrary": true,
"context": "node"
},
"test": {
"source": [
"runtime.test.ts",
"hooks/endpoint.test.ts",
"syscalls/store.knex_node.test.ts",
"syscalls/store.dexie_browser.test.ts"
],
"outputFormat": "commonjs",
"isLibrary": true,
"context": "node"
}
},
"dependencies": {
"@jest/globals": "^27.5.1",
"@types/cors": "^2.8.12",
"@types/express": "^4.17.13",
"@types/jsonwebtoken": "^8.5.8",
"better-sqlite3": "^7.5.0",
"body-parser": "^1.19.2",
"cors": "^2.8.5",
"dexie": "^3.2.1",
"esbuild": "^0.14.27",
"express": "^4.17.3",
"fake-indexeddb": "^3.1.7",
"form-data": "^4.0.0",
"jest": "^27.5.1",
"jsonwebtoken": "^8.5.1",
"knex": "^1.0.4",
"node-cron": "^3.0.0",
"node-fetch": "^2.6.7",
"node-watch": "^0.7.3",
"supertest": "^6.2.2",
"typescript": "^4.6.2",
"vm2": "^3.9.9",
"ws": "^8.5.0",
"yaml": "^1.10.2",
"yargs": "^17.3.1"
},
"devDependencies": {
"@lezer/lr": "1.0.0",
"@parcel/optimizer-data-url": "2.5.0",
"@parcel/packager-raw-url": "2.5.0",
"@parcel/service-worker": "2.5.0",
"@parcel/transformer-inline-string": "2.5.0",
"@parcel/transformer-sass": "2.5.0",
"@parcel/transformer-webmanifest": "2.5.0",
"@parcel/validator-typescript": "2.5.0",
"@types/events": "^3.0.0",
"@types/jest": "^27.4.1",
"@types/node": "^17.0.21",
"@types/node-cron": "^3.0.1",
"@types/node-fetch": "^2.6.1",
"@types/supertest": "^2.0.11",
"@types/yaml": "^1.9.7",
"assert": "^2.0.0",
"events": "^3.3.0",
"parcel": "2.5.0",
"prettier": "^2.5.1"
}
}

View File

@ -1,66 +0,0 @@
import fs from "fs/promises";
import watch from "node-watch";
import path from "path";
import { createSandbox } from "./environments/node_sandbox";
import { System } from "./system";
import { Manifest } from "./types";
/**
 * Loads plug manifests (`*.plug.json` files) from a directory into a System,
 * optionally watching the directory for changes.
 */
export class DiskPlugLoader<HookT> {
  private system: System<HookT>;
  private plugPath: string;

  constructor(system: System<HookT>, plugPath: string) {
    this.system = system;
    this.plugPath = plugPath;
  }

  /**
   * Starts watching the plug directory; (re)loads a plug whenever its
   * manifest file changes. Removal is detected but not yet handled.
   */
  watcher() {
    watch(this.plugPath, (eventType, localPath) => {
      if (!localPath.endsWith(".plug.json")) {
        return;
      }
      // Wrap the async work in a promise chain so errors never escape the
      // synchronous watch callback.
      Promise.resolve()
        .then(async () => {
          try {
            console.log("Change detected for", localPath);
            try {
              await fs.stat(localPath);
            } catch (e) {
              // stat failed: the file was most likely removed.
              console.log("Plug removed, TODO: Unload");
              return;
            }
            await this.loadPlugFromFile(localPath);
          } catch (e) {
            console.log("Ignoring something FYI", e);
            // ignore, error handled by loadPlug
          }
        })
        .catch(console.error);
    });
  }

  /** Reads, parses and loads a single plug manifest; rethrows parse errors. */
  private async loadPlugFromFile(localPath: string) {
    const plug = await fs.readFile(localPath, "utf8");
    try {
      const plugDef: Manifest<HookT> = JSON.parse(plug);
      console.log("Now loading plug", plugDef.name);
      await this.system.load(plugDef, createSandbox);
      return plugDef;
    } catch (e) {
      console.error("Could not parse plugin file", e);
      throw e;
    }
  }

  /** Loads every `*.plug.json` manifest currently present in the directory. */
  async loadPlugs() {
    for (const filename of await fs.readdir(this.plugPath)) {
      if (filename.endsWith(".plug.json")) {
        const localPath = path.join(this.plugPath, filename);
        await this.loadPlugFromFile(localPath);
      }
    }
  }
}

View File

@ -1,101 +0,0 @@
import { readdir, readFile, stat, writeFile, unlink, mkdir } from "fs/promises";
import path from "path";
import type { SysCallMapping } from "../system";
// Metadata describing a file, as returned by the fs.* syscalls below.
export type FileMeta = {
  name: string; // path as passed in / relative to the listed directory
  lastModified: number; // mtime in milliseconds since the epoch
};
/**
 * Builds the `fs.*` syscall mapping, sandboxed to `root`: every path is
 * resolved against `root` and must stay inside it.
 */
export default function fileSystemSyscalls(root: string = "/"): SysCallMapping {
  // Resolves `p` against `root`, rejecting anything that escapes it.
  function resolvedPath(p: string): string {
    const resolved = path.resolve(root, p);
    // Use path.relative instead of a raw startsWith prefix check: startsWith
    // would wrongly accept sibling directories sharing the prefix, e.g.
    // "/rootdir-evil" for root "/rootdir".
    const rel = path.relative(root, resolved);
    if (rel.startsWith("..") || path.isAbsolute(rel)) {
      throw Error("Path outside root, not allowed");
    }
    return resolved;
  }
  return {
    // Reads a file as UTF-8 text or as an octet-stream data URL.
    "fs.readFile": async (
      ctx,
      filePath: string,
      encoding: "utf8" | "dataurl" = "utf8"
    ): Promise<{ text: string; meta: FileMeta }> => {
      const p = resolvedPath(filePath);
      let text = "";
      if (encoding === "utf8") {
        text = await readFile(p, "utf8");
      } else {
        text = `data:application/octet-stream,${await readFile(p, "base64")}`;
      }
      const s = await stat(p);
      return {
        text,
        meta: {
          name: filePath,
          lastModified: s.mtime.getTime(),
        },
      };
    },
    "fs.getFileMeta": async (ctx, filePath: string): Promise<FileMeta> => {
      const p = resolvedPath(filePath);
      const s = await stat(p);
      return {
        name: filePath,
        lastModified: s.mtime.getTime(),
      };
    },
    // Writes text (UTF-8) or a data URL (base64 payload after the comma),
    // creating parent directories as needed.
    "fs.writeFile": async (
      ctx,
      filePath: string,
      text: string,
      encoding: "utf8" | "dataurl" = "utf8"
    ): Promise<FileMeta> => {
      const p = resolvedPath(filePath);
      await mkdir(path.dirname(p), { recursive: true });
      if (encoding === "utf8") {
        await writeFile(p, text);
      } else {
        await writeFile(p, text.split(",")[1], {
          encoding: "base64",
        });
      }
      const s = await stat(p);
      return {
        name: filePath,
        lastModified: s.mtime.getTime(),
      };
    },
    "fs.deleteFile": async (ctx, filePath: string): Promise<void> => {
      const p = resolvedPath(filePath);
      await unlink(p);
    },
    // Lists files under dirPath; names are relative to dirPath. Directories
    // themselves are never listed, and are only descended into when
    // `recursive` is true.
    "fs.listFiles": async (
      ctx,
      dirPath: string,
      recursive: boolean
    ): Promise<FileMeta[]> => {
      dirPath = resolvedPath(dirPath);
      const allFiles: FileMeta[] = [];
      async function walkPath(dir: string) {
        const files = await readdir(dir);
        for (const file of files) {
          const fullPath = path.join(dir, file);
          const s = await stat(fullPath);
          if (s.isDirectory() && recursive) {
            await walkPath(fullPath);
          } else {
            allFiles.push({
              name: fullPath.substring(dirPath.length + 1),
              lastModified: s.mtime.getTime(),
            });
          }
        }
      }
      await walkPath(dirPath);
      return allFiles;
    },
  };
}

View File

@ -1,42 +0,0 @@
import { Knex } from "knex";
import { SysCallMapping } from "../system";
type Item = {
key: string;
value: string;
};
// Creates the fts5 virtual table if it does not exist yet.
// NOTE(review): tableName is interpolated into raw SQL — callers must only
// pass trusted, internally-configured table names.
export async function ensureFTSTable(
  db: Knex<any, unknown>,
  tableName: string
) {
  const exists = await db.schema.hasTable(tableName);
  if (exists) {
    return;
  }
  await db.raw(`CREATE VIRTUAL TABLE ${tableName} USING fts5(key, value);`);
  console.log(`Created fts5 table ${tableName}`);
}
// Builds the `fulltext.*` syscall mapping backed by an fts5 table.
export function fullTextSearchSyscalls(
  db: Knex<any, unknown>,
  tableName: string
): SysCallMapping {
  return {
    // Upsert: any previous entry for this key is deleted first.
    "fulltext.index": async (_ctx, key: string, value: string) => {
      await db<Item>(tableName).where({ key }).del();
      await db<Item>(tableName).insert({ key, value });
    },
    "fulltext.delete": async (_ctx, key: string) => {
      await db<Item>(tableName).where({ key }).del();
    },
    // Returns up to `limit` matches ordered by fts5 rank.
    "fulltext.search": async (_ctx, phrase: string, limit: number) => {
      const rows = await db<any>(tableName)
        .whereRaw(`value MATCH ?`, [phrase])
        .select(["key", "rank"])
        .orderBy("rank")
        .limit(limit);
      return rows.map((row) => ({ name: row.key, rank: row.rank }));
    },
  };
}

Some files were not shown because too many files have changed in this diff Show More