Complete redo of content indexing and querying (#517)
Complete redo of data store Introduces live queries and live templates
This commit is contained in:
parent
7af98e7c7b
commit
0313565610
3
.github/workflows/docker.yml
vendored
3
.github/workflows/docker.yml
vendored
@ -33,7 +33,7 @@ jobs:
|
||||
- name: Setup Deno
|
||||
uses: denoland/setup-deno@v1
|
||||
with:
|
||||
deno-version: v1.34
|
||||
deno-version: v1.37
|
||||
|
||||
- name: Run bundle build
|
||||
run: |
|
||||
@ -70,7 +70,6 @@ jobs:
|
||||
type=semver,pattern=latest,enable=true
|
||||
# When pushing to main branch, release as :edge
|
||||
type=edge,branch=main
|
||||
|
||||
- name: Build and push main docker images
|
||||
uses: docker/build-push-action@v4.0.0
|
||||
with:
|
||||
|
2
.github/workflows/release.yml
vendored
2
.github/workflows/release.yml
vendored
@ -13,7 +13,7 @@ jobs:
|
||||
- name: Setup Deno
|
||||
uses: denoland/setup-deno@v1
|
||||
with:
|
||||
deno-version: v1.34
|
||||
deno-version: v1.37
|
||||
- name: Run build
|
||||
run: deno task build
|
||||
- name: Bundle
|
||||
|
2
.github/workflows/server.yml
vendored
2
.github/workflows/server.yml
vendored
@ -16,7 +16,7 @@ jobs:
|
||||
- name: Setup Deno
|
||||
uses: denoland/setup-deno@v1
|
||||
with:
|
||||
deno-version: v1.34
|
||||
deno-version: v1.37
|
||||
|
||||
- name: Build bundles
|
||||
run: |
|
||||
|
2
.github/workflows/test.yml
vendored
2
.github/workflows/test.yml
vendored
@ -20,7 +20,7 @@ jobs:
|
||||
- name: Setup Deno
|
||||
uses: denoland/setup-deno@v1
|
||||
with:
|
||||
deno-version: v1.35
|
||||
deno-version: v1.37
|
||||
|
||||
- name: Run build
|
||||
run: deno task build
|
||||
|
@ -1,4 +1,4 @@
|
||||
FROM lukechannings/deno:v1.36.3
|
||||
FROM lukechannings/deno:v1.37.1
|
||||
# The volume that will keep the space data
|
||||
# Create a volume first:
|
||||
# docker volume create myspace
|
||||
|
@ -1,24 +0,0 @@
|
||||
# This Dockerfile is used to build a Docker image that runs silverbullet with an S3 bucket as a backend
|
||||
# Configure it with the following environment variables
|
||||
|
||||
# AWS_ACCESS_KEY_ID=XXXX
|
||||
# AWS_SECRET_ACCESS_KEY=XXXX
|
||||
# AWS_ENDPOINT=s3.eu-central-1.amazonaws.com
|
||||
# AWS_REGION=eu-central-1
|
||||
# AWS_BUCKET=my-sb-bucket
|
||||
|
||||
FROM denoland/deno:alpine-1.33.2
|
||||
|
||||
# Copy the bundled version of silverbullet into the container
|
||||
ADD ./dist/silverbullet.js /silverbullet.js
|
||||
|
||||
# deno user id is 1000 in alpine image
|
||||
USER deno
|
||||
|
||||
# Expose port 3000
|
||||
# Port map this when running, e.g. with -p 3002:3000 (where 3002 is the host port)
|
||||
EXPOSE 3000
|
||||
|
||||
# Run the server, allowing to pass in additional argument at run time, e.g.
|
||||
# docker run -p 3002:3000 -v myspace:/space -it zefhemel/silverbullet --user me:letmein
|
||||
ENTRYPOINT deno run -A /silverbullet.js -L 0.0.0.0 s3://
|
@ -8,7 +8,10 @@ import assets from "../dist/plug_asset_bundle.json" assert {
|
||||
import { assertEquals } from "../test_deps.ts";
|
||||
import { path } from "../common/deps.ts";
|
||||
|
||||
Deno.test("Test plug run", async () => {
|
||||
Deno.test("Test plug run", {
|
||||
sanitizeResources: false,
|
||||
sanitizeOps: false,
|
||||
}, async () => {
|
||||
// const tempDir = await Deno.makeTempDir();
|
||||
const tempDbFile = await Deno.makeTempFile({ suffix: ".db" });
|
||||
|
||||
|
@ -1,4 +1,3 @@
|
||||
import { path } from "../common/deps.ts";
|
||||
import { DiskSpacePrimitives } from "../common/spaces/disk_space_primitives.ts";
|
||||
import { AssetBundle } from "../plugos/asset_bundle/bundle.ts";
|
||||
|
||||
|
@ -1,7 +1,7 @@
|
||||
import { index } from "$sb/silverbullet-syscall/mod.ts";
|
||||
import { datastore } from "$sb/syscalls.ts";
|
||||
|
||||
export async function run() {
|
||||
console.log("Hello from plug_test.ts");
|
||||
console.log(await index.queryPrefix(`tag:`));
|
||||
await datastore.set(["plug_test"], "Hello");
|
||||
return "Hello";
|
||||
}
|
||||
|
134
common/languages.ts
Normal file
134
common/languages.ts
Normal file
@ -0,0 +1,134 @@
|
||||
import { LRLanguage } from "@codemirror/language";
|
||||
import {
|
||||
cLanguage,
|
||||
cppLanguage,
|
||||
csharpLanguage,
|
||||
dartLanguage,
|
||||
htmlLanguage,
|
||||
javaLanguage,
|
||||
javascriptLanguage,
|
||||
jsonLanguage,
|
||||
kotlinLanguage,
|
||||
Language,
|
||||
objectiveCLanguage,
|
||||
objectiveCppLanguage,
|
||||
postgresqlLanguage,
|
||||
protobufLanguage,
|
||||
pythonLanguage,
|
||||
rustLanguage,
|
||||
scalaLanguage,
|
||||
shellLanguage,
|
||||
sqlLanguage,
|
||||
StreamLanguage,
|
||||
tomlLanguage,
|
||||
typescriptLanguage,
|
||||
xmlLanguage,
|
||||
yamlLanguage,
|
||||
} from "./deps.ts";
|
||||
import { highlightingDirectiveParser } from "./markdown_parser/parser.ts";
|
||||
|
||||
const languageCache = new Map<string, Language>();
|
||||
|
||||
export function languageFor(name: string): Language | null {
|
||||
if (languageCache.has(name)) {
|
||||
return languageCache.get(name)!;
|
||||
}
|
||||
const language = languageLookup(name);
|
||||
if (!language) {
|
||||
return null;
|
||||
}
|
||||
languageCache.set(name, language);
|
||||
return language;
|
||||
}
|
||||
|
||||
function languageLookup(name: string): Language | null {
|
||||
switch (name) {
|
||||
case "meta":
|
||||
case "yaml":
|
||||
case "template":
|
||||
case "embed":
|
||||
case "data":
|
||||
return StreamLanguage.define(yamlLanguage);
|
||||
|
||||
case "javascript":
|
||||
case "js":
|
||||
return javascriptLanguage;
|
||||
case "typescript":
|
||||
case "ts":
|
||||
return typescriptLanguage;
|
||||
case "sql":
|
||||
return StreamLanguage.define(sqlLanguage);
|
||||
case "postgresql":
|
||||
case "pgsql":
|
||||
case "postgres":
|
||||
return StreamLanguage.define(postgresqlLanguage);
|
||||
case "rust":
|
||||
case "rs":
|
||||
return StreamLanguage.define(rustLanguage);
|
||||
case "css":
|
||||
return StreamLanguage.define(sqlLanguage);
|
||||
case "html":
|
||||
return htmlLanguage;
|
||||
case "python":
|
||||
case "py":
|
||||
return StreamLanguage.define(pythonLanguage);
|
||||
case "protobuf":
|
||||
case "proto":
|
||||
return StreamLanguage.define(protobufLanguage);
|
||||
case "shell":
|
||||
case "sh":
|
||||
case "bash":
|
||||
case "zsh":
|
||||
case "fish":
|
||||
return StreamLanguage.define(shellLanguage);
|
||||
case "swift":
|
||||
return StreamLanguage.define(rustLanguage);
|
||||
case "toml":
|
||||
return StreamLanguage.define(tomlLanguage);
|
||||
case "json":
|
||||
return StreamLanguage.define(jsonLanguage);
|
||||
case "xml":
|
||||
return StreamLanguage.define(xmlLanguage);
|
||||
case "c":
|
||||
return StreamLanguage.define(cLanguage);
|
||||
case "cpp":
|
||||
case "c++":
|
||||
case "cxx":
|
||||
return StreamLanguage.define(cppLanguage);
|
||||
case "java":
|
||||
return StreamLanguage.define(javaLanguage);
|
||||
case "csharp":
|
||||
case "cs":
|
||||
case "c#":
|
||||
return StreamLanguage.define(csharpLanguage);
|
||||
case "scala":
|
||||
return StreamLanguage.define(scalaLanguage);
|
||||
case "kotlin":
|
||||
return StreamLanguage.define(kotlinLanguage);
|
||||
case "objc":
|
||||
case "objective-c":
|
||||
case "objectivec":
|
||||
return StreamLanguage.define(objectiveCLanguage);
|
||||
case "objcpp":
|
||||
case "objective-cpp":
|
||||
case "objectivecpp":
|
||||
case "objective-c++":
|
||||
case "objectivec++":
|
||||
return StreamLanguage.define(objectiveCppLanguage);
|
||||
|
||||
case "dart":
|
||||
return StreamLanguage.define(dartLanguage);
|
||||
|
||||
case "query":
|
||||
return LRLanguage.define({
|
||||
name: "query",
|
||||
parser: highlightingDirectiveParser,
|
||||
});
|
||||
|
||||
default:
|
||||
if (name.startsWith("#")) {
|
||||
return StreamLanguage.define(yamlLanguage);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
20
common/limited_map.test.ts
Normal file
20
common/limited_map.test.ts
Normal file
@ -0,0 +1,20 @@
|
||||
import { sleep } from "$sb/lib/async.ts";
|
||||
import { assertEquals } from "../test_deps.ts";
|
||||
import { LimitedMap } from "./limited_map.ts";
|
||||
|
||||
Deno.test("limited map", async () => {
|
||||
const mp = new LimitedMap<string>(3);
|
||||
mp.set("a", "a");
|
||||
mp.set("b", "b");
|
||||
mp.set("c", "c");
|
||||
await sleep(2);
|
||||
assertEquals(mp.get("a"), "a");
|
||||
await sleep(2);
|
||||
assertEquals(mp.get("b"), "b");
|
||||
await sleep(2);
|
||||
assertEquals(mp.get("c"), "c");
|
||||
// Drops the first key
|
||||
mp.set("d", "d");
|
||||
await sleep(2);
|
||||
assertEquals(mp.get("a"), undefined);
|
||||
});
|
50
common/limited_map.ts
Normal file
50
common/limited_map.ts
Normal file
@ -0,0 +1,50 @@
|
||||
type LimitedMapRecord<V> = Record<string, { value: V; la: number }>;
|
||||
|
||||
export class LimitedMap<V> {
|
||||
constructor(private maxSize: number, private map: LimitedMapRecord<V> = {}) {
|
||||
}
|
||||
|
||||
set(key: string, value: V) {
|
||||
if (Object.keys(this.map).length >= this.maxSize) {
|
||||
// Remove the oldest key before adding a new one
|
||||
const oldestKey = this.getOldestKey();
|
||||
delete this.map[oldestKey!];
|
||||
}
|
||||
this.map[key] = { value, la: Date.now() };
|
||||
}
|
||||
|
||||
get(key: string): V | undefined {
|
||||
const entry = this.map[key];
|
||||
if (entry) {
|
||||
// Update the last accessed timestamp
|
||||
entry.la = Date.now();
|
||||
return entry.value;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
remove(key: string) {
|
||||
delete this.map[key];
|
||||
}
|
||||
|
||||
toJSON() {
|
||||
return this.map;
|
||||
}
|
||||
|
||||
private getOldestKey(): string | undefined {
|
||||
let oldestKey: string | undefined;
|
||||
let oldestTimestamp: number | undefined;
|
||||
|
||||
for (const key in this.map) {
|
||||
if (Object.prototype.hasOwnProperty.call(this.map, key)) {
|
||||
const entry = this.map[key];
|
||||
if (!oldestTimestamp || entry.la < oldestTimestamp) {
|
||||
oldestKey = key;
|
||||
oldestTimestamp = entry.la;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return oldestKey;
|
||||
}
|
||||
}
|
@ -22,5 +22,5 @@ export const AttributeNameTag = Tag.define();
|
||||
export const AttributeValueTag = Tag.define();
|
||||
|
||||
export const TaskTag = Tag.define();
|
||||
export const TaskMarkerTag = Tag.define();
|
||||
export const TaskMarkTag = Tag.define();
|
||||
export const TaskStateTag = Tag.define();
|
||||
|
119
common/markdown_parser/expression.grammar
Normal file
119
common/markdown_parser/expression.grammar
Normal file
@ -0,0 +1,119 @@
|
||||
@top Program { Expression }
|
||||
|
||||
@precedence {
|
||||
mulop @left
|
||||
addop @left
|
||||
binop @left
|
||||
and @left
|
||||
or @left
|
||||
}
|
||||
|
||||
@skip {
|
||||
space
|
||||
}
|
||||
|
||||
commaSep<content> { content ("," content)* }
|
||||
|
||||
kw<term> { @specialize[@name={term}]<Identifier, term> }
|
||||
|
||||
|
||||
Query {
|
||||
TagIdentifier ( WhereClause | LimitClause | OrderClause | SelectClause | RenderClause )*
|
||||
}
|
||||
|
||||
WhereClause { kw<"where"> Expression }
|
||||
LimitClause { kw<"limit"> Expression }
|
||||
OrderClause { Order commaSep<OrderBy> }
|
||||
OrderBy { Expression OrderDirection? }
|
||||
SelectClause { kw<"select"> commaSep<Select> }
|
||||
RenderClause { kw<"render"> PageRef }
|
||||
|
||||
Select { Identifier | Expression kw<"as"> Identifier }
|
||||
|
||||
OrderDirection {
|
||||
OrderKW
|
||||
}
|
||||
|
||||
Value { Number | String | Bool | Regex | kw<"null"> | List }
|
||||
|
||||
Attribute {
|
||||
LVal "." Identifier
|
||||
}
|
||||
|
||||
Call {
|
||||
Identifier "(" commaSep<Expression> ")" | Identifier "(" ")"
|
||||
}
|
||||
|
||||
LVal {
|
||||
Identifier
|
||||
| Attribute
|
||||
}
|
||||
|
||||
ParenthesizedExpression { "(" Expression ")" }
|
||||
|
||||
LogicalExpression {
|
||||
Expression !and kw<"and"> Expression
|
||||
| Expression !or kw<"or"> Expression
|
||||
}
|
||||
|
||||
Expression {
|
||||
Value
|
||||
| LVal
|
||||
| ParenthesizedExpression
|
||||
| LogicalExpression
|
||||
| BinExpression
|
||||
| Call
|
||||
}
|
||||
|
||||
BinExpression {
|
||||
Expression !binop "<" Expression
|
||||
| Expression !binop "<=" Expression
|
||||
| Expression !binop "=" Expression
|
||||
| Expression !binop "!=" Expression
|
||||
| Expression !binop ">=" Expression
|
||||
| Expression !binop ">" Expression
|
||||
| Expression !binop "=~" Expression
|
||||
| Expression !binop "!=~" Expression
|
||||
| Expression !binop InKW Expression
|
||||
|
||||
| Expression !mulop "*" Expression
|
||||
| Expression !mulop "/" Expression
|
||||
| Expression !mulop "%" Expression
|
||||
| Expression !addop "+" Expression
|
||||
| Expression !addop "-" Expression
|
||||
}
|
||||
|
||||
List { "[" commaSep<Expression> "]" }
|
||||
|
||||
|
||||
Bool {
|
||||
BooleanKW
|
||||
}
|
||||
|
||||
|
||||
@tokens {
|
||||
space { std.whitespace+ }
|
||||
|
||||
TagIdentifier { @asciiLetter (@asciiLetter | @digit | "-" | "_" | "/" )* }
|
||||
|
||||
Identifier { @asciiLetter (@asciiLetter | @digit | "-" | "_")* }
|
||||
|
||||
String {
|
||||
("\"" | "“" | "”") ![\"”“]* ("\"" | "“" | "”")
|
||||
}
|
||||
PageRef {
|
||||
"[" "[" ![\]]* "]" "]"
|
||||
}
|
||||
Order { "order by" }
|
||||
Regex { "/" ( ![/\\\n\r] | "\\" _ )* "/"? }
|
||||
|
||||
Number { std.digit+ }
|
||||
|
||||
BooleanKW { "true" | "false" }
|
||||
|
||||
InKW { "in" }
|
||||
|
||||
OrderKW { "asc" | "desc" }
|
||||
|
||||
@precedence { Order, BooleanKW, InKW, OrderKW, Identifier, Number }
|
||||
}
|
@ -23,9 +23,9 @@ class MultiStatusTaskParser implements LeafBlockParser {
|
||||
leaf,
|
||||
cx.elt("Task", leaf.start, leaf.start + leaf.content.length, [
|
||||
cx.elt("TaskState", leaf.start, leaf.start + 2 + this.status.length, [
|
||||
cx.elt("TaskMarker", leaf.start, leaf.start + 1),
|
||||
cx.elt("TaskMark", leaf.start, leaf.start + 1),
|
||||
cx.elt(
|
||||
"TaskMarker",
|
||||
"TaskMark",
|
||||
leaf.start + 1 + this.status.length,
|
||||
leaf.start + 2 + this.status.length,
|
||||
),
|
||||
@ -43,7 +43,7 @@ class MultiStatusTaskParser implements LeafBlockParser {
|
||||
export const TaskList: MarkdownConfig = {
|
||||
defineNodes: [
|
||||
{ name: "Task", block: true, style: t.list },
|
||||
{ name: "TaskMarker", style: t.atom },
|
||||
{ name: "TaskMark", style: t.atom },
|
||||
{ name: "TaskState", style: TaskStateTag },
|
||||
],
|
||||
parseBlock: [{
|
||||
|
18
common/markdown_parser/parse-expression.js
Normal file
18
common/markdown_parser/parse-expression.js
Normal file
@ -0,0 +1,18 @@
|
||||
// This file was generated by lezer-generator. You probably shouldn't edit it.
|
||||
import {LRParser} from "@lezer/lr"
|
||||
const spec_Identifier = {__proto__:null,null:20, and:32, or:34}
|
||||
export const parser = LRParser.deserialize({
|
||||
version: 14,
|
||||
states: "%jOVQPOOOOQO'#Cb'#CbOVQPO'#CgOOQO'#C_'#C_OqQQO'#C^O#UQQO'#ChOOQO'#Ch'#ChOVQPO'#CjQ$lQQOOOOQO'#C^'#C^O%pQQO'#CvO%}QPO,59RO&SQPO,59TO&XQPO,59[O&`QQO,59UOVQPO,59YOVQPO,59YOVQPO,59YOVQPO,59VOVQPO,59VOVQPO'#CqO&gQPO,59bOOQO1G.m1G.mOOQO1G.o1G.oO&rQPO1G.vOOQO1G.v1G.vOOQO1G.p1G.pO(RQQO1G.tOOQO1G.t1G.tO(]QQO1G.tO)tQQO1G.qO){QQO1G.qO*SQQO,59]OOQO-E6o-E6oOOQO7+$b7+$b",
|
||||
stateData: "*g~OhOS~OSROTROVPOWROXTOYROiQOnVO~Om[O`QXaQXcQXfQXpQXqQXrQXsQXtQXuQXvQXwQXxQXyQXzQX{QX|QXkQXlQXoQX~On]O`[Xa[Xc[Xf[Xm[Xp[Xq[Xr[Xs[Xt[Xu[Xv[Xw[Xx[Xy[Xz[X{[X|[Xk[Xl[Xo[X~O`bOacOc_Op_Oq_Or_Os_Ot_Ou_Ov_Ow_Ox`Oy`Oz`O{aO|aO~OkdOljXojX~P$lOlfO~OXgO~OoiO~PVOojO~P$lOkdOljaoja~OorO~Ox`Oy`Oz`O`biabicbifbipbiqbirbisbitbiubivbiwbikbilbiobi~O{aO|aO~P&wO{bi|bi~P&wOc_Op_Oq_Or_Os_Ot_Ou_Ov_Ow_Ox`Oy`Oz`O{aO|aOa_if_ik_il_io_i~O`_i~P(gO`bO~P(gOkealeaoea~P$lOVcXSX~",
|
||||
goto: "#hkPPl!YPP!fPPPP!f!r#O!Y!YPP!YP!Y#[PPPP#bQWOSYQ]Q^VQk_Ql`QmaQnbQocRpdeXOQV]_`abcdeROQV]_`abcdeSOQV]_`abcdeUOQV]_`abcdQeYRqeQZQRh]",
|
||||
nodeNames: "⚠ Program Expression Value Number String Bool BooleanKW Regex Identifier null List LVal Attribute ParenthesizedExpression LogicalExpression and or BinExpression InKW Call",
|
||||
maxTerm: 44,
|
||||
skippedNodes: [0],
|
||||
repeatNodeCount: 1,
|
||||
tokenData: "/j~RuX^#fpq#fqr$Zrs$nuv%fxy%kyz%pz{%u{|%z|}&P}!O&U!O!P&Z!P!Q&`!Q![)Q!^!_)Y!_!`)g!`!a)t!c!}*R!}#O*g#P#Q*l#T#Y*R#Y#Z*q#Z#]*R#]#^-b#^#h*R#h#i.d#i#o*R#y#z#f$f$g#f#BY#BZ#f$IS$I_#f$Ip$Iq$n$Iq$Ir$n$I|$JO#f$JT$JU#f$KV$KW#f&FU&FV#f~#kYh~X^#fpq#f#y#z#f$f$g#f#BY#BZ#f$IS$I_#f$I|$JO#f$JT$JU#f$KV$KW#f&FU&FV#f~$^P!_!`$a~$fPs~#r#s$i~$nOw~~$qWOr$nrs%Zs$Ip$n$Ip$Iq%Z$Iq$Ir%Z$Ir;'S$n;'S;=`%`<%lO$n~%`OT~~%cP;=`<%l$n~%kOz~~%pOn~~%uOo~~%zOx~~&PO{~~&UOk~~&ZO|~~&`Om~R&gXyQWPOY'SZ]'S^!P'S!P!Q't!Q#O'S#O#P'y#P;'S'S;'S;=`(z<%lO'SP'XXWPOY'SZ]'S^!P'S!P!Q't!Q#O'S#O#P'y#P;'S'S;'S;=`(z<%lO'SP'yOWPP'|RO;'S'S;'S;=`(V;=`O'SP([YWPOY'SZ]'S^!P'S!P!Q't!Q#O'S#O#P'y#P;'S'S;'S;=`(z;=`<%l'S<%lO'SP(}P;=`<%l'S~)VPS~!Q![)Q~)_Pp~!_!`)b~)gOq~~)lPr~#r#s)o~)tOv~~)yPu~!_!`)|~*ROt~~*WTX~}!O*R!Q![*R!c!}*R#R#S*R#T#o*R~*lOi~~*qOl~~*vUX~}!O*R!Q![*R!c!}*R#R#S*R#T#U+Y#U#o*R~+_VX~}!O*R!Q![*R!c!}*R#R#S*R#T#`*R#`#a+t#a#o*R~+yVX~}!O*R!Q![*R!c!}*R#R#S*R#T#g*R#g#h,`#h#o*R~,eVX~}!O*R!Q![*R!c!}*R#R#S*R#T#X*R#X#Y,z#Y#o*R~-RTV~X~}!O*R!Q![*R!c!}*R#R#S*R#T#o*R~-gVX~}!O*R!Q![*R!c!}*R#R#S*R#T#b*R#b#c-|#c#o*R~.TTc~X~}!O*R!Q![*R!c!}*R#R#S*R#T#o*R~.iVX~}!O*R!Q![*R!c!}*R#R#S*R#T#f*R#f#g/O#g#o*R~/TVX~}!O*R!Q![*R!c!}*R#R#S*R#T#i*R#i#j,`#j#o*R",
|
||||
tokenizers: [0, 1],
|
||||
topRules: {"Program":[0,1]},
|
||||
specialized: [{term: 9, get: value => spec_Identifier[value] || -1}],
|
||||
tokenPrec: 431
|
||||
})
|
19
common/markdown_parser/parse-expression.terms.js
Normal file
19
common/markdown_parser/parse-expression.terms.js
Normal file
@ -0,0 +1,19 @@
|
||||
// This file was generated by lezer-generator. You probably shouldn't edit it.
|
||||
export const
|
||||
Program = 1,
|
||||
Expression = 2,
|
||||
Value = 3,
|
||||
Number = 4,
|
||||
String = 5,
|
||||
Bool = 6,
|
||||
BooleanKW = 7,
|
||||
Regex = 8,
|
||||
Identifier = 9,
|
||||
List = 11,
|
||||
LVal = 12,
|
||||
Attribute = 13,
|
||||
ParenthesizedExpression = 14,
|
||||
LogicalExpression = 15,
|
||||
BinExpression = 18,
|
||||
InKW = 19,
|
||||
Call = 20
|
@ -1,16 +1,18 @@
|
||||
// This file was generated by lezer-generator. You probably shouldn't edit it.
|
||||
import {LRParser} from "@lezer/lr"
|
||||
const spec_Identifier = {__proto__:null,where:12, null:28, and:40, or:42, limit:52, select:66, as:70, render:74}
|
||||
export const parser = LRParser.deserialize({
|
||||
version: 14,
|
||||
states: "&`OVQPOOOmQQO'#C^QOQPOOOtQPO'#C`OyQPO'#ClO!OQPO'#CnO!TQPO'#CqO!YQPO'#CsOOQO'#Cv'#CvO!bQQO,58xO!iQQO'#CcO#WQQO'#CbOOQO,58z,58zOOQO,59W,59WO#oQQO,59YO$ZQQO'#D`OOQO,59],59]OOQO,59_,59_OOQO-E6t-E6tO$rQQO,58}OtQPO'#CxO%ZQQO,58|OOQO'#Cp'#CpOOQO1G.t1G.tO%rQPO'#CyO%wQQO,59zOOQO'#Cg'#CgO$rQQO'#CjOOQO'#Cd'#CdOOQO1G.i1G.iOOQO,59d,59dOOQO-E6v-E6vOOQO,59e,59eOOQO-E6w-E6wO&`QPO'#DRO&hQPO,59UO$rQQO'#CwO&mQPO,59mOOQO1G.p1G.pOOQO,59c,59cOOQO-E6u-E6u",
|
||||
stateData: "&u~OpOS~ORPO~OTROaSOcTOfUOhVO~OnQX~P[ORYO~OX]O~OR^O~OR_O~OYaOiaO~OnQa~P[OqcOxcOycOzcO{cO|cO}cO!OcO!PcO~O_dOTUXaUXcUXfUXhUXnUX~O!QfO!RfOTbaabacbafbahbanba~OvhOT!SXa!SXc!SXf!SXh!SXn!SX~OXlOYlO[lO]lOrjOsjOtkO~O_dOTUaaUacUafUahUanUa~ORpO~OvhOT!Saa!Sac!Saf!Sah!San!Sa~OvtOwuX~OwvO~OvtOwua~O",
|
||||
goto: "#g!TPP!UP!XP!]!`!fPP!oPP!oP!XP!XP!t!XP!XPP!w!}#T#ZPPPPPPP#aPPPPPPPPPPPP#dRQOTWPXR[RQZRRndQmcQrkRwtVlcktRg^QXPRbXQurRxuQeZRoeQi_RqiRskR`U",
|
||||
nodeNames: "⚠ Program Query Name WhereClause Where LogicalExpr FilterExpr Value Number String Bool Regex Null List And LimitClause Limit OrderClause Order OrderDirection SelectClause Select RenderClause Render PageRef",
|
||||
maxTerm: 50,
|
||||
states: ")lOVQPOOOmQQO'#C^QOQPOOOtQQO'#CwOOQO'#DT'#DTO!`QQO,58xOtQQO'#C`OtQQO'#CuO!gQQO'#C|O#RQPO'#DQOOQO'#Cg'#CgOtQQO'#CkOOQO'#Cd'#CdO#WQSO'#CcO&_QSO'#ClOOQO'#Cl'#ClOtQQO'#CnO(VQSO'#CyOOQO'#Cc'#CcO(sQQO'#DpOOQO,59c,59cOOQO-E7R-E7RO)[QSO,58zO)rQSO,59aO*YQSO'#ClO*sQSO'#DOO*zQQO'#DqOOQO,59h,59hOOQO,59l,59lO+cQSO'#D]O+pQPO,59VO+uQQO,59XO+zQQO,59`O,RQSO,59YOtQQO,59^OtQQO,59^OtQQO,59^OOQO'#Cz'#CzOOQO,59e,59eOtQQO,59ZOtQQO,59ZOtQQO'#DVO,YQQO,5:[O,qQQO,59jO!gQQO'#DWO,vQQO,5:]OtQQO'#DUO-_QPO,59wOOQO1G.q1G.qOOQO1G.s1G.sO-jQPO1G.zOOQO1G.z1G.zOOQO1G.t1G.tO/`QSO1G.xOOQO1G.x1G.xO/jQSO1G.xO1hQSO1G.uO1oQSO1G.uOOQO,59q,59qOOQO-E7T-E7TOOQO1G/U1G/UOOQO,59r,59rOOQO-E7U-E7UO1vQSO,59pOOQO-E7S-E7SOOQO7+$f7+$f",
|
||||
stateData: "2]~O}OS~ORPO~OUUOjVOlROqWOuXO~O{QX~P[OT^OX[OY[O[YO][O^[O!OZO!T`O~O{Qa~P[OThOX[OY[O[YO][O^[O!OZO!T`O~OvlO~O!SoOUVXdVXeVXgVXjVXlVXoVXqVXuVX{VX!QVX!VVX!WVX!XVX!YVX!ZVX![VX!]VX!^VX!_VX!`VX!aVX!bVX!cVXsVX!RVX!UVX~O!TpOd`Xe`Xg`X!S`X!V`X!W`X!X`X!Y`X!Z`X![`X!]`X!^`X!_`X!``X!a`X!b`X!c`Xs`X~OU`Xj`Xl`Xo`Xq`Xu`X{`X!Q`X!R`X!U`X~P%QOdwOexOgrO!VrO!WrO!XrO!YrO!ZrO![rO!]rO!^rO!_sO!`sO!asO!btO!ctO~OouOUmXjmXlmXqmXumX{mX!QmX~P'RO!QyOU!dXj!dXl!dXq!dXu!dX{!dX~OUSajSalSaqSauSa{Sa~P'ROUiajialiaqiauia{ia~P'ROUrXjrXlrXqrXurX{rX!QrX~P%QOs{O~P'RO!Q|OU!eXj!eXl!eXq!eXu!eX{!eX~O!Q!OO!R!PX!U!PX~P'RO!R!QO~OT!RO~O!U!TO~PtO!U!UO~P'RO!QyOU!daj!dal!daq!dau!da{!da~OT!^O~O!Q|OU!eaj!eal!eaq!eau!ea{!ea~O!Q!OO!R!Pa!U!Pa~O!U!cO~O!_sO!`sO!asOUfidfiefigfijfilfiofiqfiufi{fi!Qfi!Vfi!Wfi!Xfi!Yfi!Zfi![fi!]fi!^fisfi!Rfi!Ufi~O!btO!ctO~P-oO!bfi!cfi~P-oOgrO!VrO!WrO!XrO!YrO!ZrO![rO!]rO!^rO!_sO!`sO!asO!btO!ctOUciecijcilciociqciuci{ci!Qcisci!Rci!Uci~Odci~P/tOdwO~P/tO!Qxa!Rxa!Uxa~P'ROl[goTXo~",
|
||||
goto: "&S!fPP!gP!jPP!n#gPP#xPPP#x$Z$l#g#gPP#gP#g!jP!jP$}%TP!jP%WP!jPP%^%d%j%pPPPP%vPPPPPPPPPPPPPPPPPP%|&PRQOTSPTSaRyQfUQgVSiW|SmZpQq`Q!VrQ!WsQ!XtQ!YwQ!ZxR!a!OobRUVWZ`prstwxy|!Oo[RUVWZ`prstwxy|!Oo]RUVWZ`prstwxy|!Oo_RUVWZ`prstwxy|!OQcRR![yRvaQjWR!_|QTPReTQ!PmR!b!PQzcR!]zQ}jR!`}QnZR!SpRdRRkW",
|
||||
nodeNames: "⚠ Program Query TagIdentifier WhereClause Identifier where Expression Value Number String Bool BooleanKW Regex null List LVal Attribute ParenthesizedExpression LogicalExpression and or BinExpression InKW Call LimitClause limit OrderClause Order OrderBy OrderDirection OrderKW SelectClause select Select as RenderClause render PageRef",
|
||||
maxTerm: 67,
|
||||
skippedNodes: [0],
|
||||
repeatNodeCount: 4,
|
||||
tokenData: "C`~R|X^#{pq#{qr$prs%T|}%{}!O&Q!P!Q&c!Q![(a!^!_(i!_!`(v!`!a)T!c!}&Q!}#O)b#P#Q*_#R#S&Q#T#U*d#U#W&Q#W#X,y#X#Y&Q#Y#Z.u#Z#]&Q#]#^1V#^#`&Q#`#a2R#a#b&Q#b#c4f#c#d6b#d#f&Q#f#g9X#g#h<T#h#i?P#i#k&Q#k#l@{#l#o&Q#y#z#{$f$g#{#BY#BZ#{$IS$I_#{$Ip$Iq%T$Iq$Ir%T$I|$JO#{$JT$JU#{$KV$KW#{&FU&FV#{~$QYp~X^#{pq#{#y#z#{$f$g#{#BY#BZ#{$IS$I_#{$I|$JO#{$JT$JU#{$KV$KW#{&FU&FV#{~$sP!_!`$v~${Pz~#r#s%O~%TO!O~~%WWOr%Trs%ps$Ip%T$Ip$Iq%p$Iq$Ir%p$Ir;'S%T;'S;=`%u<%lO%T~%uOY~~%xP;=`<%l%T~&QOv~P&VSRP}!O&Q!c!}&Q#R#S&Q#T#o&Q~&hX[~OY&cZ]&c^!P&c!P!Q'T!Q#O&c#O#P'Y#P;'S&c;'S;=`(Z<%lO&c~'YO[~~']RO;'S&c;'S;=`'f;=`O&c~'kY[~OY&cZ]&c^!P&c!P!Q'T!Q#O&c#O#P'Y#P;'S&c;'S;=`(Z;=`<%l&c<%lO&c~(^P;=`<%l&c~(fPX~!Q![(a~(nPq~!_!`(q~(vOx~~({Py~#r#s)O~)TO}~~)YP|~!_!`)]~)bO{~R)gPtQ!}#O)jP)mTO#P)j#P#Q)|#Q;'S)j;'S;=`*X<%lO)jP*PP#P#Q*SP*XOiPP*[P;=`<%l)j~*dOw~R*iWRP}!O&Q!c!}&Q#R#S&Q#T#b&Q#b#c+R#c#g&Q#g#h+}#h#o&QR+WURP}!O&Q!c!}&Q#R#S&Q#T#W&Q#W#X+j#X#o&QR+qS_QRP}!O&Q!c!}&Q#R#S&Q#T#o&QR,SURP}!O&Q!c!}&Q#R#S&Q#T#V&Q#V#W,f#W#o&QR,mS!RQRP}!O&Q!c!}&Q#R#S&Q#T#o&QR-OURP}!O&Q!c!}&Q#R#S&Q#T#X&Q#X#Y-b#Y#o&QR-gURP}!O&Q!c!}&Q#R#S&Q#T#g&Q#g#h-y#h#o&QR.OURP}!O&Q!c!}&Q#R#S&Q#T#V&Q#V#W.b#W#o&QR.iS!QQRP}!O&Q!c!}&Q#R#S&Q#T#o&QR.zTRP}!O&Q!c!}&Q#R#S&Q#T#U/Z#U#o&QR/`URP}!O&Q!c!}&Q#R#S&Q#T#`&Q#`#a/r#a#o&QR/wURP}!O&Q!c!}&Q#R#S&Q#T#g&Q#g#h0Z#h#o&QR0`URP}!O&Q!c!}&Q#R#S&Q#T#X&Q#X#Y0r#Y#o&QR0ySsQRP}!O&Q!c!}&Q#R#S&Q#T#o&QR1[URP}!O&Q!c!}&Q#R#S&Q#T#b&Q#b#c1n#c#o&QR1uS!PQRP}!O&Q!c!}&Q#R#S&Q#T#o&QR2WURP}!O&Q!c!}&Q#R#S&Q#T#]&Q#]#^2j#^#o&QR2oURP}!O&Q!c!}&Q#R#S&Q#T#a&Q#a#b3R#b#o&QR3WURP}!O&Q!c!}&Q#R#S&Q#T#]&Q#]#^3j#^#o&QR3oURP}!O&Q!c!}&Q#R#S&Q#T#h&Q#h#i4R#i#o&QR4YSaQRP}!O&Q!c!}&Q#R#S&Q#T#o&QR4kURP}!O&Q!c!}&Q#R#S&Q#T#i&Q#i#j4}#j#o&QR5SURP}!O&Q!c!}&Q#R#S&Q#T#`&Q#`#a5f#a#o&QR5kURP}!O&Q!c!}&Q#R#S&Q#T#`&Q#`#a5}#a#o&QR6USRP]Q}!O&Q!c!}&Q#R#S&Q#T#o&QR6gURP}!O&Q!c!}&Q#R#S&Q#T#f&Q#f#g6y#g#o&QR7OURP}!O&Q!c!}&Q#R#S&Q#T#W&Q#W#X7b#X#o&QR7gURP}!O&Q!c!}&Q#R#S&Q#T#X&Q#X#Y7y#Y#o&QR8OURP}!O&Q!c!}&Q#R#S&Q#T#f&Q#f#g8b#g#o&QR8gTRPpq8v}!O&Q!c!}&Q#R#S&Q#T#o&QQ8yP#U#V8|Q9PP#m#n9SQ9XOcQR
9^URP}!O&Q!c!}&Q#R#S&Q#T#X&Q#X#Y9p#Y#o&QR9uURP}!O&Q!c!}&Q#R#S&Q#T#b&Q#b#c:X#c#o&QR:^URP}!O&Q!c!}&Q#R#S&Q#T#W&Q#W#X:p#X#o&QR:uURP}!O&Q!c!}&Q#R#S&Q#T#X&Q#X#Y;X#Y#o&QR;^URP}!O&Q!c!}&Q#R#S&Q#T#f&Q#f#g;p#g#o&QR;wSRPhQ}!O&Q!c!}&Q#R#S&Q#T#o&QR<YURP}!O&Q!c!}&Q#R#S&Q#T#X&Q#X#Y<l#Y#o&QR<qURP}!O&Q!c!}&Q#R#S&Q#T#`&Q#`#a=T#a#o&QR=YURP}!O&Q!c!}&Q#R#S&Q#T#X&Q#X#Y=l#Y#o&QR=qURP}!O&Q!c!}&Q#R#S&Q#T#V&Q#V#W>T#W#o&QR>YURP}!O&Q!c!}&Q#R#S&Q#T#h&Q#h#i>l#i#o&QR>sSRPfQ}!O&Q!c!}&Q#R#S&Q#T#o&QR?UURP}!O&Q!c!}&Q#R#S&Q#T#f&Q#f#g?h#g#o&QR?mURP}!O&Q!c!}&Q#R#S&Q#T#i&Q#i#j@P#j#o&QR@UURP}!O&Q!c!}&Q#R#S&Q#T#X&Q#X#Y@h#Y#o&QR@oSrQRP}!O&Q!c!}&Q#R#S&Q#T#o&QRAQURP}!O&Q!c!}&Q#R#S&Q#T#[&Q#[#]Ad#]#o&QRAiURP}!O&Q!c!}&Q#R#S&Q#T#X&Q#X#YA{#Y#o&QRBQURP}!O&Q!c!}&Q#R#S&Q#T#f&Q#f#gBd#g#o&QRBiURP}!O&Q!c!}&Q#R#S&Q#T#X&Q#X#YB{#Y#o&QRCSSRPTQ}!O&Q!c!}&Q#R#S&Q#T#o&Q",
|
||||
tokenizers: [0, 1],
|
||||
tokenData: "8k~RzX^#upq#uqr$jrs$}uv%uxy%zyz&Pz{&U{|&Z|}&`}!O&e!O!P&j!P!Q&o!Q![)a!^!_)i!_!`)v!`!a*T!c!}*b!}#O+d#P#Q,a#T#U,f#U#W*b#W#X.c#X#Y*b#Y#Z/S#Z#]*b#]#^2^#^#c*b#c#d3j#d#h*b#h#i7Z#i#o*b#y#z#u$f$g#u#BY#BZ#u$IS$I_#u$Ip$Iq$}$Iq$Ir$}$I|$JO#u$JT$JU#u$KV$KW#u&FU&FV#u~#zY}~X^#upq#u#y#z#u$f$g#u#BY#BZ#u$IS$I_#u$I|$JO#u$JT$JU#u$KV$KW#u&FU&FV#u~$mP!_!`$p~$uP!Y~#r#s$x~$}O!^~~%QWOr$}rs%js$Ip$}$Ip$Iq%j$Iq$Ir%j$Ir;'S$};'S;=`%o<%lO$}~%oOY~~%rP;=`<%l$}~%zO!a~~&PO!T~~&UO!U~~&ZO!_~~&`O!b~~&eO!Q~~&jO!c~~&oO!S~U&vX!`S]QOY'cZ]'c^!P'c!P!Q(T!Q#O'c#O#P(Y#P;'S'c;'S;=`)Z<%lO'cQ'hX]QOY'cZ]'c^!P'c!P!Q(T!Q#O'c#O#P(Y#P;'S'c;'S;=`)Z<%lO'cQ(YO]QQ(]RO;'S'c;'S;=`(f;=`O'cQ(kY]QOY'cZ]'c^!P'c!P!Q(T!Q#O'c#O#P(Y#P;'S'c;'S;=`)Z;=`<%l'c<%lO'cQ)^P;=`<%l'c~)fPX~!Q![)a~)nP!V~!_!`)q~)vO!W~~){P!X~#r#s*O~*TO!]~~*YP![~!_!`*]~*bO!Z~V*iURPTU}!O*b!P!Q*{!Q![*b!c!}*b#R#S*b#T#o*bP+QURP}!O*{!P!Q*{!Q![*{!c!}*{#R#S*{#T#o*{R+iP!OQ!}#O+lP+oTO#P+l#P#Q,O#Q;'S+l;'S;=`,Z<%lO+lP,RP#P#Q,UP,ZOvPP,^P;=`<%l+l~,fO!R~V,mWRPTU}!O*b!P!Q*{!Q![*b!c!}*b#R#S*b#T#g*b#g#h-V#h#o*bV-^WRPTU}!O*b!P!Q*{!Q![*b!c!}*b#R#S*b#T#V*b#V#W-v#W#o*bV.PURPoSTU}!O*b!P!Q*{!Q![*b!c!}*b#R#S*b#T#o*bV.jWRPTU}!O*b!P!Q*{!Q![*b!c!}*b#R#S*b#T#X*b#X#Y,f#Y#o*bV/ZVRPTU}!O*b!P!Q*{!Q![*b!c!}*b#R#S*b#T#U/p#U#o*bV/wWRPTU}!O*b!P!Q*{!Q![*b!c!}*b#R#S*b#T#`*b#`#a0a#a#o*bV0hWRPTU}!O*b!P!Q*{!Q![*b!c!}*b#R#S*b#T#g*b#g#h1Q#h#o*bV1XWRPTU}!O*b!P!Q*{!Q![*b!c!}*b#R#S*b#T#X*b#X#Y1q#Y#o*bV1zURP[QTU}!O*b!P!Q*{!Q![*b!c!}*b#R#S*b#T#o*bV2eWRPTU}!O*b!P!Q*{!Q![*b!c!}*b#R#S*b#T#b*b#b#c2}#c#o*bV3WURPgSTU}!O*b!P!Q*{!Q![*b!c!}*b#R#S*b#T#o*bV3qWRPTU}!O*b!P!Q*{!Q![*b!c!}*b#R#S*b#T#f*b#f#g4Z#g#o*bV4bWRPTU}!O*b!P!Q*{!Q![*b!c!}*b#R#S*b#T#W*b#W#X4z#X#o*bV5RWRPTU}!O*b!P!Q*{!Q![*b!c!}*b#R#S*b#T#X*b#X#Y5k#Y#o*bV5rWRPTU}!O*b!P!Q*{!Q![*b!c!}*b#R#S*b#T#f*b#f#g6[#g#o*bV6cVRPTUpq6x}!O*b!P!Q*{!Q![*b!c!}*b#R#S*b#T#o*bU6{P#U#V7OU7RP#m#n7UU7ZOlUV7bWRPTU}!O*b!P!Q*{!Q![*b!c!}*b#R#S*b#T#f*b#f#g7z#g#o*bV8RWRPTU}!O*b!P!Q*{!Q![*b!c!}*b#R#S*b#T#i*b#i#j1Q#j#o*b",
|
||||
tokenizers: [0, 1, 2],
|
||||
topRules: {"Program":[0,1]},
|
||||
tokenPrec: 0
|
||||
specialized: [{term: 5, get: value => spec_Identifier[value] || -1}],
|
||||
tokenPrec: 787
|
||||
})
|
||||
|
@ -2,26 +2,31 @@
|
||||
export const
|
||||
Program = 1,
|
||||
Query = 2,
|
||||
Name = 3,
|
||||
TagIdentifier = 3,
|
||||
WhereClause = 4,
|
||||
Where = 5,
|
||||
LogicalExpr = 6,
|
||||
FilterExpr = 7,
|
||||
Identifier = 5,
|
||||
Expression = 7,
|
||||
Value = 8,
|
||||
Number = 9,
|
||||
String = 10,
|
||||
Bool = 11,
|
||||
Regex = 12,
|
||||
Null = 13,
|
||||
List = 14,
|
||||
And = 15,
|
||||
LimitClause = 16,
|
||||
Limit = 17,
|
||||
OrderClause = 18,
|
||||
Order = 19,
|
||||
OrderDirection = 20,
|
||||
SelectClause = 21,
|
||||
Select = 22,
|
||||
RenderClause = 23,
|
||||
Render = 24,
|
||||
PageRef = 25
|
||||
BooleanKW = 12,
|
||||
Regex = 13,
|
||||
List = 15,
|
||||
LVal = 16,
|
||||
Attribute = 17,
|
||||
ParenthesizedExpression = 18,
|
||||
LogicalExpression = 19,
|
||||
BinExpression = 22,
|
||||
InKW = 23,
|
||||
Call = 24,
|
||||
LimitClause = 25,
|
||||
OrderClause = 27,
|
||||
Order = 28,
|
||||
OrderBy = 29,
|
||||
OrderDirection = 30,
|
||||
OrderKW = 31,
|
||||
SelectClause = 32,
|
||||
Select = 34,
|
||||
RenderClause = 36,
|
||||
PageRef = 38
|
||||
|
@ -73,23 +73,6 @@ Before
|
||||
End
|
||||
`;
|
||||
|
||||
Deno.test("Test directive parser", () => {
|
||||
const lang = buildMarkdown([]);
|
||||
let tree = parse(lang, directiveSample);
|
||||
// console.log("tree", JSON.stringify(tree, null, 2));
|
||||
assertEquals(renderToText(tree), directiveSample);
|
||||
|
||||
tree = parse(lang, nestedDirectiveExample);
|
||||
// console.log("tree", JSON.stringify(tree, null, 2));
|
||||
assertEquals(renderToText(tree), nestedDirectiveExample);
|
||||
|
||||
const orderByExample = `<!-- #query page order by lastModified -->
|
||||
|
||||
<!-- /query -->`;
|
||||
tree = parse(lang, orderByExample);
|
||||
console.log("Tree", JSON.stringify(tree, null, 2));
|
||||
});
|
||||
|
||||
const inlineAttributeSample = `
|
||||
Hello there [a link](http://zef.plus)
|
||||
[age: 100]
|
||||
|
@ -142,7 +142,7 @@ export const Highlight: MarkdownConfig = {
|
||||
],
|
||||
};
|
||||
|
||||
export const attributeStartRegex = /^\[(\w+)(::?\s*)/;
|
||||
export const attributeStartRegex = /^\[([\w\$]+)(::?\s*)/;
|
||||
|
||||
export const Attribute: MarkdownConfig = {
|
||||
defineNodes: [
|
||||
@ -252,16 +252,17 @@ const directiveStart = /^\s*<!--\s*#([a-z]+)\s*(.*?)-->\s*/;
|
||||
const directiveEnd = /^\s*<!--\s*\/(.*?)-->\s*/;
|
||||
|
||||
import { parser as directiveParser } from "./parse-query.js";
|
||||
import { parser as expressionParser } from "./parse-expression.js";
|
||||
import { Table } from "./table_parser.ts";
|
||||
|
||||
const highlightingDirectiveParser = directiveParser.configure({
|
||||
export const highlightingDirectiveParser = directiveParser.configure({
|
||||
props: [
|
||||
styleTags({
|
||||
"Name": t.variableName,
|
||||
"String": t.string,
|
||||
"Number": t.number,
|
||||
"PageRef": ct.WikiLinkTag,
|
||||
"Where Limit Select Render Order OrderDirection And": t.keyword,
|
||||
"where limit select render Order OrderKW and or as InKW": t.keyword,
|
||||
}),
|
||||
],
|
||||
});
|
||||
@ -294,6 +295,14 @@ export const Directive: MarkdownConfig = {
|
||||
cx.parsedPos + line.text.length + 1,
|
||||
[cx.elt(queryParseTree, frontStart + fullMatch.indexOf(arg))],
|
||||
));
|
||||
} else if (directive === "eval") {
|
||||
const expressionParseTree = expressionParser.parse(arg);
|
||||
elts.push(cx.elt(
|
||||
"DirectiveStart",
|
||||
cx.parsedPos,
|
||||
cx.parsedPos + line.text.length + 1,
|
||||
[cx.elt(expressionParseTree, frontStart + fullMatch.indexOf(arg))],
|
||||
));
|
||||
} else {
|
||||
elts.push(cx.elt(
|
||||
"DirectiveStart",
|
||||
@ -432,7 +441,7 @@ export default function buildMarkdown(mdExtensions: MDExt[]): Language {
|
||||
props: [
|
||||
styleTags({
|
||||
Task: ct.TaskTag,
|
||||
TaskMarker: ct.TaskMarkerTag,
|
||||
TaskMark: ct.TaskMarkTag,
|
||||
Comment: ct.CommentTag,
|
||||
"TableDelimiter SubscriptMark SuperscriptMark StrikethroughMark":
|
||||
t.processingInstruction,
|
||||
|
@ -1,58 +1,102 @@
|
||||
@precedence { logic @left }
|
||||
@top Program { Query }
|
||||
@skip { space }
|
||||
|
||||
Query {
|
||||
Name ( WhereClause | LimitClause | OrderClause | SelectClause | RenderClause )*
|
||||
@precedence {
|
||||
mulop @left
|
||||
addop @left
|
||||
binop @left
|
||||
and @left
|
||||
or @left
|
||||
}
|
||||
|
||||
@skip {
|
||||
space
|
||||
}
|
||||
|
||||
commaSep<content> { content ("," content)* }
|
||||
|
||||
WhereClause { Where LogicalExpr }
|
||||
LimitClause { Limit Number }
|
||||
OrderClause { Order Name OrderDirection? }
|
||||
SelectClause { Select commaSep<Name> }
|
||||
RenderClause { Render (PageRef | String) }
|
||||
kw<term> { @specialize[@name={term}]<Identifier, term> }
|
||||
|
||||
|
||||
Query {
|
||||
TagIdentifier ( WhereClause | LimitClause | OrderClause | SelectClause | RenderClause )*
|
||||
}
|
||||
|
||||
WhereClause { kw<"where"> Expression }
|
||||
LimitClause { kw<"limit"> Expression }
|
||||
OrderClause { Order commaSep<OrderBy> }
|
||||
OrderBy { Expression OrderDirection? }
|
||||
SelectClause { kw<"select"> commaSep<Select> }
|
||||
RenderClause { kw<"render"> PageRef }
|
||||
|
||||
Select { Identifier | Expression kw<"as"> Identifier }
|
||||
|
||||
OrderDirection {
|
||||
"desc" | "asc"
|
||||
OrderKW
|
||||
}
|
||||
|
||||
Value { Number | String | Bool | Regex | Null | List }
|
||||
Value { Number | String | Bool | Regex | kw<"null"> | List }
|
||||
|
||||
LogicalExpr { FilterExpr (And FilterExpr)* }
|
||||
|
||||
FilterExpr {
|
||||
Name "<" Value
|
||||
| Name "<=" Value
|
||||
| Name "=" Value
|
||||
| Name "!=" Value
|
||||
| Name ">=" Value
|
||||
| Name ">" Value
|
||||
| Name "=~" Value
|
||||
| Name "!=~" Value
|
||||
| Name "in" Value
|
||||
Attribute {
|
||||
LVal "." Identifier
|
||||
}
|
||||
|
||||
List { "[" commaSep<Value> "]" }
|
||||
Call {
|
||||
Identifier "(" commaSep<Expression> ")" | Identifier "(" ")"
|
||||
}
|
||||
|
||||
LVal {
|
||||
Identifier
|
||||
| Attribute
|
||||
}
|
||||
|
||||
ParenthesizedExpression { "(" Expression ")" }
|
||||
|
||||
LogicalExpression {
|
||||
Expression !and kw<"and"> Expression
|
||||
| Expression !or kw<"or"> Expression
|
||||
}
|
||||
|
||||
Expression {
|
||||
Value
|
||||
| LVal
|
||||
| ParenthesizedExpression
|
||||
| LogicalExpression
|
||||
| BinExpression
|
||||
| Call
|
||||
}
|
||||
|
||||
BinExpression {
|
||||
Expression !binop "<" Expression
|
||||
| Expression !binop "<=" Expression
|
||||
| Expression !binop "=" Expression
|
||||
| Expression !binop "!=" Expression
|
||||
| Expression !binop ">=" Expression
|
||||
| Expression !binop ">" Expression
|
||||
| Expression !binop "=~" Expression
|
||||
| Expression !binop "!=~" Expression
|
||||
| Expression !binop InKW Expression
|
||||
|
||||
| Expression !mulop "*" Expression
|
||||
| Expression !mulop "/" Expression
|
||||
| Expression !mulop "%" Expression
|
||||
| Expression !addop "+" Expression
|
||||
| Expression !addop "-" Expression
|
||||
}
|
||||
|
||||
List { "[" commaSep<Expression> "]" }
|
||||
|
||||
|
||||
Bool {
|
||||
"true" | "false"
|
||||
BooleanKW
|
||||
}
|
||||
|
||||
|
||||
@tokens {
|
||||
space { std.whitespace+ }
|
||||
Name { (std.asciiLetter | "-" | "_")+ }
|
||||
|
||||
Where { "where" }
|
||||
Order { "order by" }
|
||||
Select { "select" }
|
||||
Render { "render" }
|
||||
Limit { "limit" }
|
||||
And { "and" }
|
||||
Null { "null" }
|
||||
TagIdentifier { @asciiLetter (@asciiLetter | @digit | "-" | "_" | "/" )* }
|
||||
|
||||
Identifier { @asciiLetter (@asciiLetter | @digit | "-" | "_")* }
|
||||
|
||||
String {
|
||||
("\"" | "“" | "”") ![\"”“]* ("\"" | "“" | "”")
|
||||
@ -60,9 +104,16 @@ Bool {
|
||||
PageRef {
|
||||
"[" "[" ![\]]* "]" "]"
|
||||
}
|
||||
Order { "order by" }
|
||||
Regex { "/" ( ![/\\\n\r] | "\\" _ )* "/"? }
|
||||
|
||||
Number { std.digit+ }
|
||||
|
||||
// @precedence { Where, Sort, Select, Render, Limit, And, Null, Name }
|
||||
BooleanKW { "true" | "false" }
|
||||
|
||||
InKW { "in" }
|
||||
|
||||
OrderKW { "asc" | "desc" }
|
||||
|
||||
@precedence { Order, BooleanKW, InKW, OrderKW, Identifier, Number }
|
||||
}
|
||||
|
@ -1,9 +1,17 @@
|
||||
import { indexedDB } from "https://deno.land/x/indexeddb@1.3.5/ponyfill_memory.ts";
|
||||
import { IndexedDBSpacePrimitives } from "./indexeddb_space_primitives.ts";
|
||||
import "https://esm.sh/fake-indexeddb@4.0.2/auto";
|
||||
import { assertEquals } from "../../test_deps.ts";
|
||||
import { DataStore } from "../../plugos/lib/datastore.ts";
|
||||
import { IndexedDBKvPrimitives } from "../../plugos/lib/indexeddb_kv_primitives.ts";
|
||||
import { DataStoreSpacePrimitives } from "./datastore_space_primitives.ts";
|
||||
|
||||
Deno.test("IndexedDBSpacePrimitives", async () => {
|
||||
const space = new IndexedDBSpacePrimitives("test", indexedDB);
|
||||
Deno.test("DataStoreSpacePrimitives", {
|
||||
sanitizeResources: false,
|
||||
sanitizeOps: false,
|
||||
}, async () => {
|
||||
const db = new IndexedDBKvPrimitives("test");
|
||||
await db.init();
|
||||
|
||||
const space = new DataStoreSpacePrimitives(new DataStore(db));
|
||||
const files = await space.fetchFileList();
|
||||
assertEquals(files, []);
|
||||
// Write text file
|
||||
@ -28,6 +36,8 @@ Deno.test("IndexedDBSpacePrimitives", async () => {
|
||||
|
||||
await space.deleteFile("test.bin");
|
||||
assertEquals(await space.fetchFileList(), [fileMeta]);
|
||||
|
||||
db.close();
|
||||
});
|
||||
|
||||
function stringToBytes(str: string): Uint8Array {
|
@ -1,7 +1,7 @@
|
||||
import type { SpacePrimitives } from "./space_primitives.ts";
|
||||
import Dexie, { Table } from "dexie";
|
||||
import { mime } from "../deps.ts";
|
||||
import { FileMeta } from "$sb/types.ts";
|
||||
import { DataStore } from "../../plugos/lib/datastore.ts";
|
||||
|
||||
export type FileContent = {
|
||||
name: string;
|
||||
@ -9,34 +9,27 @@ export type FileContent = {
|
||||
data: Uint8Array;
|
||||
};
|
||||
|
||||
export class IndexedDBSpacePrimitives implements SpacePrimitives {
|
||||
private db: Dexie;
|
||||
filesMetaTable: Table<FileMeta, string>;
|
||||
filesContentTable: Table<FileContent, string>;
|
||||
const filesMetaPrefix = ["file", "meta"];
|
||||
const filesContentPrefix = ["file", "content"];
|
||||
|
||||
export class DataStoreSpacePrimitives implements SpacePrimitives {
|
||||
constructor(
|
||||
dbName: string,
|
||||
indexedDB?: any,
|
||||
private ds: DataStore,
|
||||
) {
|
||||
this.db = new Dexie(dbName, {
|
||||
indexedDB,
|
||||
});
|
||||
this.db.version(1).stores({
|
||||
fileMeta: "name",
|
||||
fileContent: "name",
|
||||
});
|
||||
this.filesMetaTable = this.db.table("fileMeta");
|
||||
this.filesContentTable = this.db.table<FileContent, string>("fileContent");
|
||||
}
|
||||
|
||||
fetchFileList(): Promise<FileMeta[]> {
|
||||
return this.filesMetaTable.toArray();
|
||||
async fetchFileList(): Promise<FileMeta[]> {
|
||||
return (await this.ds.query<FileMeta>({ prefix: filesMetaPrefix }))
|
||||
.map((kv) => kv.value);
|
||||
}
|
||||
|
||||
async readFile(
|
||||
name: string,
|
||||
): Promise<{ data: Uint8Array; meta: FileMeta }> {
|
||||
const fileContent = await this.filesContentTable.get(name);
|
||||
const fileContent = await this.ds.get<FileContent>([
|
||||
...filesContentPrefix,
|
||||
name,
|
||||
]);
|
||||
if (!fileContent) {
|
||||
throw new Error("Not found");
|
||||
}
|
||||
@ -60,22 +53,35 @@ export class IndexedDBSpacePrimitives implements SpacePrimitives {
|
||||
size: data.byteLength,
|
||||
perm: suggestedMeta?.perm || "rw",
|
||||
};
|
||||
await this.filesContentTable.put({ name, data, meta });
|
||||
await this.filesMetaTable.put(meta);
|
||||
await this.ds.batchSet<FileMeta | FileContent>([
|
||||
{
|
||||
key: [...filesContentPrefix, name],
|
||||
value: { name, data, meta },
|
||||
},
|
||||
{
|
||||
key: [...filesMetaPrefix, name],
|
||||
value: meta,
|
||||
},
|
||||
]);
|
||||
return meta;
|
||||
}
|
||||
|
||||
async deleteFile(name: string): Promise<void> {
|
||||
const fileMeta = await this.filesMetaTable.get(name);
|
||||
const fileMeta = await this.ds.get<FileMeta>([
|
||||
...filesMetaPrefix,
|
||||
name,
|
||||
]);
|
||||
if (!fileMeta) {
|
||||
throw new Error("Not found");
|
||||
}
|
||||
await this.filesMetaTable.delete(name);
|
||||
await this.filesContentTable.delete(name);
|
||||
return this.ds.batchDelete([
|
||||
[...filesMetaPrefix, name],
|
||||
[...filesContentPrefix, name],
|
||||
]);
|
||||
}
|
||||
|
||||
async getFileMeta(name: string): Promise<FileMeta> {
|
||||
const fileMeta = await this.filesMetaTable.get(name);
|
||||
const fileMeta = await this.ds.get([...filesMetaPrefix, name]);
|
||||
if (!fileMeta) {
|
||||
throw new Error("Not found");
|
||||
}
|
@ -93,14 +93,7 @@ export class DiskSpacePrimitives implements SpacePrimitives {
|
||||
file.close();
|
||||
|
||||
// Fetch new metadata
|
||||
const s = await Deno.stat(localPath);
|
||||
return {
|
||||
name: name,
|
||||
size: s.size,
|
||||
contentType: lookupContentType(name),
|
||||
lastModified: s.mtime!.getTime(),
|
||||
perm: "rw",
|
||||
};
|
||||
return this.getFileMeta(name);
|
||||
} catch (e) {
|
||||
console.error("Error while writing file", name, e);
|
||||
throw Error(`Could not write ${name}`);
|
||||
|
@ -47,7 +47,7 @@ export class EventedSpacePrimitives implements SpacePrimitives {
|
||||
oldHash !== newHash
|
||||
)
|
||||
) {
|
||||
this.dispatchEvent("file:changed", meta.name);
|
||||
await this.dispatchEvent("file:changed", meta.name);
|
||||
}
|
||||
// Page found, not deleted
|
||||
deletedFiles.delete(meta.name);
|
||||
@ -58,7 +58,7 @@ export class EventedSpacePrimitives implements SpacePrimitives {
|
||||
|
||||
for (const deletedFile of deletedFiles) {
|
||||
delete this.spaceSnapshot[deletedFile];
|
||||
this.dispatchEvent("file:deleted", deletedFile);
|
||||
await this.dispatchEvent("file:deleted", deletedFile);
|
||||
|
||||
if (deletedFile.endsWith(".md")) {
|
||||
const pageName = deletedFile.substring(0, deletedFile.length - 3);
|
||||
@ -66,7 +66,7 @@ export class EventedSpacePrimitives implements SpacePrimitives {
|
||||
}
|
||||
}
|
||||
|
||||
this.dispatchEvent("file:listed", newFileList);
|
||||
await this.dispatchEvent("file:listed", newFileList);
|
||||
this.alreadyFetching = false;
|
||||
this.initialFileListLoad = false;
|
||||
return newFileList;
|
||||
@ -93,7 +93,7 @@ export class EventedSpacePrimitives implements SpacePrimitives {
|
||||
meta,
|
||||
);
|
||||
if (!selfUpdate) {
|
||||
this.dispatchEvent("file:changed", name, true);
|
||||
await this.dispatchEvent("file:changed", name, true);
|
||||
}
|
||||
this.spaceSnapshot[name] = newMeta.lastModified;
|
||||
|
||||
@ -104,16 +104,11 @@ export class EventedSpacePrimitives implements SpacePrimitives {
|
||||
const decoder = new TextDecoder("utf-8");
|
||||
text = decoder.decode(data);
|
||||
|
||||
this.dispatchEvent("page:saved", pageName, newMeta)
|
||||
.then(() => {
|
||||
return this.dispatchEvent("page:index_text", {
|
||||
await this.dispatchEvent("page:saved", pageName, newMeta);
|
||||
await this.dispatchEvent("page:index_text", {
|
||||
name: pageName,
|
||||
text,
|
||||
});
|
||||
})
|
||||
.catch((e) => {
|
||||
console.error("Error dispatching page:saved event", e);
|
||||
});
|
||||
}
|
||||
return newMeta;
|
||||
}
|
||||
@ -134,9 +129,9 @@ export class EventedSpacePrimitives implements SpacePrimitives {
|
||||
this.triggerEventsAndCache(name, newMeta.lastModified);
|
||||
return newMeta;
|
||||
} catch (e: any) {
|
||||
console.log("Checking error", e, name);
|
||||
// console.log("Checking error", e, name);
|
||||
if (e.message === "Not found") {
|
||||
this.dispatchEvent("file:deleted", name);
|
||||
await this.dispatchEvent("file:deleted", name);
|
||||
if (name.endsWith(".md")) {
|
||||
const pageName = name.substring(0, name.length - 3);
|
||||
await this.dispatchEvent("page:deleted", pageName);
|
||||
@ -154,6 +149,6 @@ export class EventedSpacePrimitives implements SpacePrimitives {
|
||||
// await this.getPageMeta(name); // Check if page exists, if not throws Error
|
||||
await this.wrapped.deleteFile(name);
|
||||
delete this.spaceSnapshot[name];
|
||||
this.dispatchEvent("file:deleted", name);
|
||||
await this.dispatchEvent("file:deleted", name);
|
||||
}
|
||||
}
|
||||
|
@ -1,86 +0,0 @@
|
||||
import { SpacePrimitives } from "./space_primitives.ts";
|
||||
import type { SysCallMapping } from "../../plugos/system.ts";
|
||||
import { FileMeta } from "$sb/types.ts";
|
||||
|
||||
// Enriches the file list listing with custom metadata from the page index
|
||||
export class FileMetaSpacePrimitives implements SpacePrimitives {
|
||||
constructor(
|
||||
private wrapped: SpacePrimitives,
|
||||
private indexSyscalls: SysCallMapping,
|
||||
) {
|
||||
}
|
||||
|
||||
async fetchFileList(): Promise<FileMeta[]> {
|
||||
const files = await this.wrapped.fetchFileList();
|
||||
// Enrich the file list with custom meta data (for pages)
|
||||
const allFilesMap: Map<string, any> = new Map(
|
||||
files.map((fm) => [fm.name, fm]),
|
||||
);
|
||||
for (
|
||||
const { page, value } of await this.indexSyscalls["index.queryPrefix"](
|
||||
{} as any,
|
||||
"meta:",
|
||||
)
|
||||
) {
|
||||
const p = allFilesMap.get(`${page}.md`);
|
||||
if (p) {
|
||||
for (const [k, v] of Object.entries(value)) {
|
||||
if (
|
||||
["name", "lastModified", "size", "perm", "contentType"].includes(k)
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
p[k] = v;
|
||||
}
|
||||
}
|
||||
}
|
||||
return [...allFilesMap.values()];
|
||||
}
|
||||
|
||||
readFile(
|
||||
name: string,
|
||||
): Promise<{ data: Uint8Array; meta: FileMeta }> {
|
||||
return this.wrapped.readFile(name);
|
||||
}
|
||||
|
||||
async getFileMeta(name: string): Promise<FileMeta> {
|
||||
const meta = await this.wrapped.getFileMeta(name);
|
||||
if (name.endsWith(".md")) {
|
||||
const pageName = name.slice(0, -3);
|
||||
const additionalMeta = await this.indexSyscalls["index.get"](
|
||||
{} as any,
|
||||
pageName,
|
||||
"meta:",
|
||||
);
|
||||
if (additionalMeta) {
|
||||
for (const [k, v] of Object.entries(additionalMeta)) {
|
||||
if (
|
||||
["name", "lastModified", "size", "perm", "contentType"].includes(k)
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
meta[k] = v;
|
||||
}
|
||||
}
|
||||
}
|
||||
return meta;
|
||||
}
|
||||
|
||||
writeFile(
|
||||
name: string,
|
||||
data: Uint8Array,
|
||||
selfUpdate?: boolean,
|
||||
meta?: FileMeta,
|
||||
): Promise<FileMeta> {
|
||||
return this.wrapped.writeFile(
|
||||
name,
|
||||
data,
|
||||
selfUpdate,
|
||||
meta,
|
||||
);
|
||||
}
|
||||
|
||||
deleteFile(name: string): Promise<void> {
|
||||
return this.wrapped.deleteFile(name);
|
||||
}
|
||||
}
|
@ -1,6 +1,6 @@
|
||||
import { niceDate } from "$sb/lib/dates.ts";
|
||||
|
||||
export function handlebarHelpers(_pageName: string) {
|
||||
export function handlebarHelpers() {
|
||||
return {
|
||||
json: (v: any) => JSON.stringify(v),
|
||||
niceDate: (ts: any) => niceDate(new Date(ts)),
|
23
common/syscalls/handlebars.ts
Normal file
23
common/syscalls/handlebars.ts
Normal file
@ -0,0 +1,23 @@
|
||||
import { SysCallMapping } from "../../plugos/system.ts";
|
||||
import { handlebarHelpers } from "./handlebar_helpers.ts";
|
||||
import Handlebars from "handlebars";
|
||||
|
||||
export function handlebarsSyscalls(): SysCallMapping {
|
||||
return {
|
||||
"handlebars.renderTemplate": (
|
||||
_ctx,
|
||||
template: string,
|
||||
obj: any,
|
||||
globals: Record<string, any> = {},
|
||||
): string => {
|
||||
const templateFn = Handlebars.compile(
|
||||
template,
|
||||
{ noEscape: true },
|
||||
);
|
||||
return templateFn(obj, {
|
||||
helpers: handlebarHelpers(),
|
||||
data: globals,
|
||||
});
|
||||
},
|
||||
};
|
||||
}
|
20
common/syscalls/language.ts
Normal file
20
common/syscalls/language.ts
Normal file
@ -0,0 +1,20 @@
|
||||
import { SysCallMapping } from "../../plugos/system.ts";
|
||||
import { parse } from "../markdown_parser/parse_tree.ts";
|
||||
import type { ParseTree } from "$sb/lib/tree.ts";
|
||||
import { languageFor } from "../languages.ts";
|
||||
|
||||
export function languageSyscalls(): SysCallMapping {
|
||||
return {
|
||||
"language.parseLanguage": (
|
||||
_ctx,
|
||||
language: string,
|
||||
code: string,
|
||||
): ParseTree => {
|
||||
const lang = languageFor(language);
|
||||
if (!lang) {
|
||||
throw new Error(`Unknown language ${language}`);
|
||||
}
|
||||
return parse(lang, code);
|
||||
},
|
||||
};
|
||||
}
|
@ -1,6 +1,6 @@
|
||||
import { SysCallMapping } from "../../plugos/system.ts";
|
||||
import { parse } from "../../common/markdown_parser/parse_tree.ts";
|
||||
import { Language } from "../deps.ts";
|
||||
import { parse } from "../markdown_parser/parse_tree.ts";
|
||||
import { Language } from "../../web/deps.ts";
|
||||
import type { ParseTree } from "$sb/lib/tree.ts";
|
||||
|
||||
export function markdownSyscalls(lang: Language): SysCallMapping {
|
@ -1,5 +1,5 @@
|
||||
import { SysCallMapping } from "../../plugos/system.ts";
|
||||
import { YAML } from "../deps.ts";
|
||||
import { YAML } from "../../web/deps.ts";
|
||||
|
||||
export function yamlSyscalls(): SysCallMapping {
|
||||
return {
|
@ -15,8 +15,7 @@
|
||||
|
||||
"bundle": "deno run -A build_bundle.ts",
|
||||
// Regenerates some bundle files (checked into the repo)
|
||||
// Install lezer-generator with "npm install -g @lezer/generator"
|
||||
"generate": "lezer-generator common/markdown_parser/query.grammar -o common/markdown_parser/parse-query.js",
|
||||
"generate": "./scripts/generate.sh",
|
||||
|
||||
// Compile
|
||||
"compile": "deno task bundle && deno compile -A --unstable -o silverbullet dist/silverbullet.js",
|
||||
|
@ -19,7 +19,6 @@
|
||||
|
||||
"preact": "https://esm.sh/preact@10.11.1",
|
||||
"$sb/": "./plug-api/",
|
||||
"handlebars": "https://esm.sh/handlebars@4.7.7?target=es2022",
|
||||
"dexie": "https://esm.sh/dexie@3.2.2?target=es2022"
|
||||
"handlebars": "https://esm.sh/handlebars@4.7.7?target=es2022"
|
||||
}
|
||||
}
|
||||
|
@ -1,6 +1,6 @@
|
||||
import type { ParseTree } from "$sb/lib/tree.ts";
|
||||
import { ParsedQuery } from "$sb/lib/query.ts";
|
||||
import { TextChange } from "$sb/lib/change.ts";
|
||||
import { Query } from "$sb/types.ts";
|
||||
|
||||
export type AppEvent =
|
||||
| "page:click"
|
||||
@ -16,7 +16,7 @@ export type AppEvent =
|
||||
| "editor:pageModified";
|
||||
|
||||
export type QueryProviderEvent = {
|
||||
query: ParsedQuery;
|
||||
query: Query;
|
||||
pageName: string;
|
||||
};
|
||||
|
||||
|
39
plug-api/lib/builtin_query_functions.ts
Normal file
39
plug-api/lib/builtin_query_functions.ts
Normal file
@ -0,0 +1,39 @@
|
||||
import type { FunctionMap } from "$sb/types.ts";
|
||||
import { niceDate } from "$sb/lib/dates.ts";
|
||||
|
||||
export const builtinFunctions: FunctionMap = {
|
||||
today() {
|
||||
return niceDate(new Date());
|
||||
},
|
||||
max(...args: number[]) {
|
||||
return Math.max(...args);
|
||||
},
|
||||
min(...args: number[]) {
|
||||
return Math.min(...args);
|
||||
},
|
||||
toJSON(obj: any) {
|
||||
return JSON.stringify(obj);
|
||||
},
|
||||
// Note: these assume Monday as the first day of the week
|
||||
firstDayOfWeek(dateString: string): string {
|
||||
const date = new Date(dateString);
|
||||
const dayOfWeek = date.getDay();
|
||||
const daysToSubtract = (dayOfWeek + 7 - 1) % 7;
|
||||
const firstDayOfWeek = new Date(date);
|
||||
firstDayOfWeek.setDate(date.getDate() - daysToSubtract);
|
||||
return niceDate(firstDayOfWeek);
|
||||
},
|
||||
lastDayOfWeek(dateString: string): string {
|
||||
const date = new Date(dateString);
|
||||
const dayOfWeek = date.getDay();
|
||||
const daysToAdd = (7 - dayOfWeek) % 7;
|
||||
const lastDayOfWeek = new Date(date);
|
||||
lastDayOfWeek.setDate(date.getDate() + daysToAdd);
|
||||
return niceDate(lastDayOfWeek);
|
||||
},
|
||||
addDays(dateString: string, daysToAdd: number): string {
|
||||
const date = new Date(dateString);
|
||||
date.setDate(date.getDate() + daysToAdd);
|
||||
return niceDate(date);
|
||||
},
|
||||
};
|
@ -2,7 +2,7 @@ import { YAML } from "$sb/plugos-syscall/mod.ts";
|
||||
|
||||
import {
|
||||
addParentPointers,
|
||||
findNodeOfType,
|
||||
collectNodesOfType,
|
||||
ParseTree,
|
||||
renderToText,
|
||||
replaceNodesMatchingAsync,
|
||||
@ -18,21 +18,24 @@ export async function extractFrontmatter(
|
||||
): Promise<any> {
|
||||
let data: any = {};
|
||||
addParentPointers(tree);
|
||||
let paragraphCounter = 0;
|
||||
|
||||
await replaceNodesMatchingAsync(tree, async (t) => {
|
||||
// Find top-level hash tags
|
||||
if (t.type === "Hashtag") {
|
||||
// Check if if nested directly into a Paragraph
|
||||
if (t.parent && t.parent.type === "Paragraph") {
|
||||
const tagname = t.children![0].text!.substring(1);
|
||||
if (t.type === "Paragraph") {
|
||||
paragraphCounter++;
|
||||
// Only attach hashtags in the first paragraph to the page
|
||||
if (paragraphCounter !== 1) {
|
||||
return;
|
||||
}
|
||||
collectNodesOfType(t, "Hashtag").forEach((h) => {
|
||||
if (!data.tags) {
|
||||
data.tags = [];
|
||||
}
|
||||
const tagname = h.children![0].text!.substring(1);
|
||||
if (Array.isArray(data.tags) && !data.tags.includes(tagname)) {
|
||||
data.tags.push(tagname);
|
||||
}
|
||||
}
|
||||
return;
|
||||
});
|
||||
}
|
||||
// Find FrontMatter and parse it
|
||||
if (t.type === "FrontMatter") {
|
||||
@ -64,43 +67,6 @@ export async function extractFrontmatter(
|
||||
}
|
||||
}
|
||||
|
||||
// Find a fenced code block with `meta` as the language type
|
||||
if (t.type !== "FencedCode") {
|
||||
return;
|
||||
}
|
||||
const codeInfoNode = findNodeOfType(t, "CodeInfo");
|
||||
if (!codeInfoNode) {
|
||||
return;
|
||||
}
|
||||
if (codeInfoNode.children![0].text !== "meta") {
|
||||
return;
|
||||
}
|
||||
const codeTextNode = findNodeOfType(t, "CodeText");
|
||||
if (!codeTextNode) {
|
||||
// Honestly, this shouldn't happen
|
||||
return;
|
||||
}
|
||||
const codeText = codeTextNode.children![0].text!;
|
||||
const parsedData: any = YAML.parse(codeText);
|
||||
const newData = { ...parsedData };
|
||||
data = { ...data, ...parsedData };
|
||||
if (removeKeys.length > 0) {
|
||||
let removedOne = false;
|
||||
for (const key of removeKeys) {
|
||||
if (key in newData) {
|
||||
delete newData[key];
|
||||
removedOne = true;
|
||||
}
|
||||
}
|
||||
if (removedOne) {
|
||||
codeTextNode.children![0].text = (await YAML.stringify(newData)).trim();
|
||||
}
|
||||
}
|
||||
// If nothing is left, let's just delete this whole block
|
||||
if (Object.keys(newData).length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return undefined;
|
||||
});
|
||||
|
||||
|
180
plug-api/lib/parse-query.ts
Normal file
180
plug-api/lib/parse-query.ts
Normal file
@ -0,0 +1,180 @@
|
||||
import type { AST } from "$sb/lib/tree.ts";
|
||||
import type { Query, QueryExpression } from "$sb/types.ts";
|
||||
|
||||
export function astToKvQuery(
|
||||
node: AST,
|
||||
): Query {
|
||||
const query: Query = {
|
||||
querySource: "",
|
||||
};
|
||||
const [queryType, querySource, ...clauses] = node;
|
||||
if (queryType !== "Query") {
|
||||
throw new Error(`Expected query type, got ${queryType}`);
|
||||
}
|
||||
query.querySource = querySource[1] as string;
|
||||
for (const clause of clauses) {
|
||||
const [clauseType] = clause;
|
||||
switch (clauseType) {
|
||||
case "WhereClause": {
|
||||
if (query.filter) {
|
||||
query.filter = [
|
||||
"and",
|
||||
query.filter,
|
||||
expressionToKvQueryFilter(clause[2]),
|
||||
];
|
||||
} else {
|
||||
query.filter = expressionToKvQueryFilter(clause[2]);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case "OrderClause": {
|
||||
if (!query.orderBy) {
|
||||
query.orderBy = [];
|
||||
}
|
||||
for (const orderBy of clause.slice(2)) {
|
||||
if (orderBy[0] === "OrderBy") {
|
||||
// console.log("orderBy", orderBy);
|
||||
const expr = orderBy[1][1];
|
||||
if (orderBy[2]) {
|
||||
query.orderBy.push({
|
||||
expr: expressionToKvQueryExpression(expr),
|
||||
desc: orderBy[2][1][1] === "desc",
|
||||
});
|
||||
} else {
|
||||
query.orderBy.push({
|
||||
expr: expressionToKvQueryExpression(expr),
|
||||
desc: false,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
case "LimitClause": {
|
||||
query.limit = expressionToKvQueryExpression(clause[2][1]);
|
||||
break;
|
||||
}
|
||||
case "SelectClause": {
|
||||
for (const select of clause.slice(2)) {
|
||||
if (select[0] === "Select") {
|
||||
if (!query.select) {
|
||||
query.select = [];
|
||||
}
|
||||
if (select.length === 2) {
|
||||
query.select.push({ name: select[1][1] as string });
|
||||
} else {
|
||||
query.select.push({
|
||||
name: select[3][1] as string,
|
||||
expr: expressionToKvQueryExpression(select[1]),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
case "RenderClause": {
|
||||
query.render = (clause[2][1] as string).slice(2, -2);
|
||||
break;
|
||||
}
|
||||
default:
|
||||
throw new Error(`Unknown clause type: ${clauseType}`);
|
||||
}
|
||||
}
|
||||
return query;
|
||||
}
|
||||
|
||||
export function expressionToKvQueryExpression(node: AST): QueryExpression {
|
||||
if (["LVal", "Expression", "Value"].includes(node[0])) {
|
||||
return expressionToKvQueryExpression(node[1]);
|
||||
}
|
||||
// console.log("Got expression", node);
|
||||
switch (node[0]) {
|
||||
case "Attribute": {
|
||||
return [
|
||||
"attr",
|
||||
expressionToKvQueryExpression(node[1]),
|
||||
node[3][1] as string,
|
||||
];
|
||||
}
|
||||
case "Identifier":
|
||||
return ["attr", node[1] as string];
|
||||
case "String":
|
||||
return ["string", (node[1] as string).slice(1, -1)];
|
||||
case "Number":
|
||||
return ["number", +(node[1])];
|
||||
case "Bool":
|
||||
return ["boolean", node[1][1] === "true"];
|
||||
case "null":
|
||||
return ["null"];
|
||||
case "Regex":
|
||||
return ["regexp", (node[1] as string).slice(1, -1), "i"];
|
||||
case "List": {
|
||||
const exprs: AST[] = [];
|
||||
for (const expr of node.slice(2)) {
|
||||
if (expr[0] === "Expression") {
|
||||
exprs.push(expr);
|
||||
}
|
||||
}
|
||||
return ["array", exprs.map(expressionToKvQueryExpression)];
|
||||
}
|
||||
case "BinExpression": {
|
||||
const lval = expressionToKvQueryExpression(node[1]);
|
||||
const binOp = (node[2] as string).trim();
|
||||
const val = expressionToKvQueryExpression(node[3]);
|
||||
return [binOp as any, lval, val];
|
||||
}
|
||||
case "LogicalExpression": {
|
||||
const op1 = expressionToKvQueryFilter(node[1]);
|
||||
const op = node[2];
|
||||
const op2 = expressionToKvQueryFilter(node[3]);
|
||||
return [op[1] as any, op1, op2];
|
||||
}
|
||||
case "ParenthesizedExpression": {
|
||||
return expressionToKvQueryFilter(node[2]);
|
||||
}
|
||||
case "Call": {
|
||||
// console.log("Call", node);
|
||||
const fn = node[1][1] as string;
|
||||
const args: AST[] = [];
|
||||
for (const expr of node.slice(2)) {
|
||||
if (expr[0] === "Expression") {
|
||||
args.push(expr);
|
||||
}
|
||||
}
|
||||
return ["call", fn, args.map(expressionToKvQueryExpression)];
|
||||
}
|
||||
default:
|
||||
throw new Error(`Not supported: ${node[0]}`);
|
||||
}
|
||||
}
|
||||
|
||||
function expressionToKvQueryFilter(
|
||||
node: AST,
|
||||
): QueryExpression {
|
||||
const [expressionType] = node;
|
||||
if (expressionType === "Expression") {
|
||||
return expressionToKvQueryFilter(node[1]);
|
||||
}
|
||||
switch (expressionType) {
|
||||
case "BinExpression": {
|
||||
const lval = expressionToKvQueryExpression(node[1]);
|
||||
const binOp = node[2][0] === "InKW" ? "in" : (node[2] as string).trim();
|
||||
const val = expressionToKvQueryExpression(node[3]);
|
||||
return [binOp as any, lval, val];
|
||||
}
|
||||
case "LogicalExpression": {
|
||||
// console.log("Logical expression", node);
|
||||
// 0 = first operand, 1 = whitespace, 2 = operator, 3 = whitespace, 4 = second operand
|
||||
const op1 = expressionToKvQueryFilter(node[1]);
|
||||
const op = node[2]; // 1 is whitespace
|
||||
const op2 = expressionToKvQueryFilter(node[3]);
|
||||
return [op[1] as any, op1, op2];
|
||||
}
|
||||
case "ParenthesizedExpression": {
|
||||
return expressionToKvQueryFilter(node[2]);
|
||||
}
|
||||
default:
|
||||
throw new Error(`Unknown expression type: ${expressionType}`);
|
||||
}
|
||||
}
|
193
plug-api/lib/parser-query.test.ts
Normal file
193
plug-api/lib/parser-query.test.ts
Normal file
@ -0,0 +1,193 @@
|
||||
import { parse } from "../../common/markdown_parser/parse_tree.ts";
|
||||
import buildMarkdown from "../../common/markdown_parser/parser.ts";
|
||||
import { AST, findNodeOfType, parseTreeToAST } from "$sb/lib/tree.ts";
|
||||
import { assertEquals } from "../../test_deps.ts";
|
||||
import { astToKvQuery } from "$sb/lib/parse-query.ts";
|
||||
|
||||
const lang = buildMarkdown([]);
|
||||
|
||||
function wrapQueryParse(query: string): AST | null {
|
||||
const tree = parse(lang, `<!-- #query ${query} -->\n$\n<!-- /query -->`);
|
||||
return parseTreeToAST(findNodeOfType(tree, "Query")!);
|
||||
}
|
||||
|
||||
Deno.test("Test directive parser", () => {
|
||||
// const query = ;
|
||||
// console.log("query", query);
|
||||
assertEquals(
|
||||
astToKvQuery(wrapQueryParse(`page where name = "test"`)!),
|
||||
{
|
||||
querySource: "page",
|
||||
filter: ["=", ["attr", "name"], ["string", "test"]],
|
||||
},
|
||||
);
|
||||
|
||||
assertEquals(
|
||||
astToKvQuery(wrapQueryParse(`page where name =~ /test/`)!),
|
||||
{
|
||||
querySource: "page",
|
||||
filter: ["=~", ["attr", "name"], ["regexp", "test", "i"]],
|
||||
},
|
||||
);
|
||||
|
||||
assertEquals(
|
||||
astToKvQuery(wrapQueryParse(`page where parent.name = "test"`)!),
|
||||
{
|
||||
querySource: "page",
|
||||
filter: ["=", ["attr", ["attr", "parent"], "name"], ["string", "test"]],
|
||||
},
|
||||
);
|
||||
|
||||
assertEquals(
|
||||
astToKvQuery(
|
||||
wrapQueryParse(`page where name = "test" and age > 20`)!,
|
||||
),
|
||||
{
|
||||
querySource: "page",
|
||||
filter: ["and", ["=", ["attr", "name"], ["string", "test"]], [">", [
|
||||
"attr",
|
||||
"age",
|
||||
], ["number", 20]]],
|
||||
},
|
||||
);
|
||||
|
||||
assertEquals(
|
||||
astToKvQuery(
|
||||
wrapQueryParse(`page where name = "test" and age > 20 or done = true`)!,
|
||||
),
|
||||
{
|
||||
querySource: "page",
|
||||
filter: ["or", ["and", ["=", ["attr", "name"], ["string", "test"]], [
|
||||
">",
|
||||
[
|
||||
"attr",
|
||||
"age",
|
||||
],
|
||||
["number", 20],
|
||||
]], ["=", ["attr", "done"], ["boolean", true]]],
|
||||
},
|
||||
);
|
||||
|
||||
assertEquals(
|
||||
astToKvQuery(
|
||||
wrapQueryParse(`page where (age <= 20) or task.done = null`)!,
|
||||
),
|
||||
{
|
||||
querySource: "page",
|
||||
filter: ["or", ["<=", ["attr", "age"], ["number", 20]], [
|
||||
"=",
|
||||
[
|
||||
"attr",
|
||||
[
|
||||
"attr",
|
||||
"task",
|
||||
],
|
||||
"done",
|
||||
],
|
||||
["null"],
|
||||
]],
|
||||
},
|
||||
);
|
||||
|
||||
assertEquals(
|
||||
astToKvQuery(
|
||||
wrapQueryParse(`task order by lastModified asc`)!,
|
||||
),
|
||||
{
|
||||
querySource: "task",
|
||||
orderBy: [{ expr: ["attr", "lastModified"], desc: false }],
|
||||
},
|
||||
);
|
||||
assertEquals(
|
||||
astToKvQuery(
|
||||
wrapQueryParse(`task order by lastModified`)!,
|
||||
),
|
||||
{
|
||||
querySource: "task",
|
||||
orderBy: [{ expr: ["attr", "lastModified"], desc: false }],
|
||||
},
|
||||
);
|
||||
assertEquals(
|
||||
astToKvQuery(
|
||||
wrapQueryParse(`task order by lastModified desc, name, age asc`)!,
|
||||
),
|
||||
{
|
||||
querySource: "task",
|
||||
orderBy: [{ expr: ["attr", "lastModified"], desc: true }, {
|
||||
expr: ["attr", "name"],
|
||||
desc: false,
|
||||
}, { expr: ["attr", "age"], desc: false }],
|
||||
},
|
||||
);
|
||||
assertEquals(
|
||||
astToKvQuery(
|
||||
wrapQueryParse(`task order by lastModified desc limit 5`)!,
|
||||
),
|
||||
{
|
||||
querySource: "task",
|
||||
orderBy: [{ expr: ["attr", "lastModified"], desc: true }],
|
||||
limit: ["number", 5],
|
||||
},
|
||||
);
|
||||
assertEquals(
|
||||
astToKvQuery(
|
||||
wrapQueryParse(`task select name, lastModified + 20 as modified`)!,
|
||||
),
|
||||
{
|
||||
querySource: "task",
|
||||
select: [{ name: "name" }, {
|
||||
name: "modified",
|
||||
expr: ["+", ["attr", "lastModified"], ["number", 20]],
|
||||
}],
|
||||
},
|
||||
);
|
||||
|
||||
assertEquals(
|
||||
astToKvQuery(
|
||||
wrapQueryParse(`task render [[my/page]]`)!,
|
||||
),
|
||||
{
|
||||
querySource: "task",
|
||||
render: "my/page",
|
||||
},
|
||||
);
|
||||
|
||||
assertEquals(
|
||||
astToKvQuery(
|
||||
wrapQueryParse(`task where name in ["hello", 1]`)!,
|
||||
),
|
||||
{
|
||||
querySource: "task",
|
||||
filter: ["in", ["attr", "name"], ["array", [["string", "hello"], [
|
||||
"number",
|
||||
1,
|
||||
]]]],
|
||||
},
|
||||
);
|
||||
|
||||
assertEquals(
|
||||
astToKvQuery(
|
||||
wrapQueryParse(`task select today() as today2`)!,
|
||||
),
|
||||
{
|
||||
querySource: "task",
|
||||
select: [{
|
||||
name: "today2",
|
||||
expr: ["call", "today", []],
|
||||
}],
|
||||
},
|
||||
);
|
||||
|
||||
assertEquals(
|
||||
astToKvQuery(
|
||||
wrapQueryParse(`task select today(1, 2, 3) as today`)!,
|
||||
),
|
||||
{
|
||||
querySource: "task",
|
||||
select: [{
|
||||
name: "today",
|
||||
expr: ["call", "today", [["number", 1], ["number", 2], ["number", 3]]],
|
||||
}],
|
||||
},
|
||||
);
|
||||
});
|
@ -1,145 +1,227 @@
|
||||
import { ParseTree, renderToText, replaceNodesMatching } from "$sb/lib/tree.ts";
|
||||
import { FunctionMap, KV, Query, QueryExpression } from "$sb/types.ts";
|
||||
|
||||
export const queryRegex =
|
||||
/(<!--\s*#query\s+(.+?)-->)(.+?)(<!--\s*\/query\s*-->)/gs;
|
||||
|
||||
export const directiveStartRegex = /<!--\s*#([\w\-]+)\s+(.+?)-->/s;
|
||||
|
||||
export const directiveEndRegex = /<!--\s*\/([\w\-]+)\s*-->/s;
|
||||
|
||||
export type QueryFilter = {
|
||||
op: string;
|
||||
prop: string;
|
||||
value: any;
|
||||
};
|
||||
export function evalQueryExpression(
|
||||
val: QueryExpression,
|
||||
obj: any,
|
||||
functionMap: FunctionMap = {},
|
||||
): any {
|
||||
const [type, op1] = val;
|
||||
|
||||
export type QueryOrdering = {
|
||||
orderBy: string;
|
||||
orderDesc: boolean;
|
||||
};
|
||||
|
||||
export type ParsedQuery = {
|
||||
table: string;
|
||||
limit?: number;
|
||||
ordering: QueryOrdering[];
|
||||
/** @deprecated Please use ordering.
|
||||
* Deprecated due to PR #387
|
||||
* Currently holds ordering[0] if exists
|
||||
*/
|
||||
orderBy?: string;
|
||||
/** @deprecated Please use ordering.
|
||||
* Deprecated due to PR #387
|
||||
* Currently holds ordering[0] if exists
|
||||
*/
|
||||
orderDesc?: boolean;
|
||||
filter: QueryFilter[];
|
||||
select?: string[];
|
||||
render?: string;
|
||||
};
|
||||
|
||||
export function applyQuery<T>(parsedQuery: ParsedQuery, records: T[]): T[] {
|
||||
let resultRecords: any[] = [];
|
||||
if (parsedQuery.filter.length === 0) {
|
||||
resultRecords = records.slice();
|
||||
switch (type) {
|
||||
// Logical operators
|
||||
case "and":
|
||||
return evalQueryExpression(op1, obj, functionMap) &&
|
||||
evalQueryExpression(val[2], obj, functionMap);
|
||||
case "or":
|
||||
return evalQueryExpression(op1, obj, functionMap) ||
|
||||
evalQueryExpression(val[2], obj, functionMap);
|
||||
// Value types
|
||||
case "null":
|
||||
return null;
|
||||
case "number":
|
||||
case "string":
|
||||
case "boolean":
|
||||
return op1;
|
||||
case "regexp":
|
||||
return [op1, val[2]];
|
||||
case "attr": {
|
||||
let attributeVal = obj;
|
||||
if (val.length === 3) {
|
||||
attributeVal = evalQueryExpression(val[1], obj, functionMap);
|
||||
if (attributeVal) {
|
||||
return attributeVal[val[2]];
|
||||
} else {
|
||||
recordLoop:
|
||||
for (const record of records) {
|
||||
const recordAny: any = record;
|
||||
for (const { op, prop, value } of parsedQuery.filter) {
|
||||
switch (op) {
|
||||
return null;
|
||||
}
|
||||
} else if (!val[1]) {
|
||||
return obj;
|
||||
} else {
|
||||
return attributeVal[val[1]];
|
||||
}
|
||||
}
|
||||
case "array": {
|
||||
return op1.map((v) => evalQueryExpression(v, obj, functionMap));
|
||||
}
|
||||
case "object":
|
||||
return obj;
|
||||
case "call": {
|
||||
const fn = functionMap[op1];
|
||||
if (!fn) {
|
||||
throw new Error(`Unknown function: ${op1}`);
|
||||
}
|
||||
return fn(
|
||||
...val[2].map((v) => evalQueryExpression(v, obj, functionMap)),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Binary operators, here we can pre-calculate the two operand values
|
||||
const val1 = evalQueryExpression(op1, obj, functionMap);
|
||||
const val2 = evalQueryExpression(val[2], obj, functionMap);
|
||||
|
||||
switch (type) {
|
||||
case "+":
|
||||
return val1 + val2;
|
||||
case "-":
|
||||
return val1 - val2;
|
||||
case "*":
|
||||
return val1 * val2;
|
||||
case "/":
|
||||
return val1 / val2;
|
||||
case "%":
|
||||
return val1 % val2;
|
||||
case "=": {
|
||||
const recordPropVal = recordAny[prop];
|
||||
if (Array.isArray(recordPropVal) && !Array.isArray(value)) {
|
||||
if (Array.isArray(val1) && !Array.isArray(val2)) {
|
||||
// Record property is an array, and value is a scalar: find the value in the array
|
||||
if (!recordPropVal.includes(value)) {
|
||||
continue recordLoop;
|
||||
if (val1.includes(val2)) {
|
||||
return true;
|
||||
}
|
||||
} else if (Array.isArray(recordPropVal) && Array.isArray(value)) {
|
||||
} else if (Array.isArray(val1) && Array.isArray(val2)) {
|
||||
// Record property is an array, and value is an array: find the value in the array
|
||||
if (!recordPropVal.some((v) => value.includes(v))) {
|
||||
continue recordLoop;
|
||||
if (val1.some((v) => val2.includes(v))) {
|
||||
return true;
|
||||
}
|
||||
} else if (!(recordPropVal == value)) {
|
||||
// Both are scalars: exact value
|
||||
continue recordLoop;
|
||||
}
|
||||
break;
|
||||
return val1 == val2;
|
||||
}
|
||||
case "!=":
|
||||
if (!(recordAny[prop] != value)) {
|
||||
continue recordLoop;
|
||||
return val1 != val2;
|
||||
case "=~": {
|
||||
if (!Array.isArray(val2)) {
|
||||
throw new Error(`Invalid regexp: ${val2}`);
|
||||
}
|
||||
const r = new RegExp(val2[0], val2[1]);
|
||||
return r.test(val1);
|
||||
}
|
||||
case "!=~": {
|
||||
if (!Array.isArray(val2)) {
|
||||
throw new Error(`Invalid regexp: ${val2}`);
|
||||
}
|
||||
const r = new RegExp(val2[0], val2[1]);
|
||||
return !r.test(val1);
|
||||
}
|
||||
break;
|
||||
case "<":
|
||||
if (!(recordAny[prop] < value)) {
|
||||
continue recordLoop;
|
||||
}
|
||||
break;
|
||||
return val1 < val2;
|
||||
case "<=":
|
||||
if (!(recordAny[prop] <= value)) {
|
||||
continue recordLoop;
|
||||
}
|
||||
break;
|
||||
return val1 <= val2;
|
||||
case ">":
|
||||
if (!(recordAny[prop] > value)) {
|
||||
continue recordLoop;
|
||||
}
|
||||
break;
|
||||
return val1 > val2;
|
||||
case ">=":
|
||||
if (!(recordAny[prop] >= value)) {
|
||||
continue recordLoop;
|
||||
}
|
||||
break;
|
||||
case "=~":
|
||||
// TODO: Cache regexps somehow
|
||||
if (!new RegExp(value).exec(recordAny[prop])) {
|
||||
continue recordLoop;
|
||||
}
|
||||
break;
|
||||
case "!=~":
|
||||
if (new RegExp(value).exec(recordAny[prop])) {
|
||||
continue recordLoop;
|
||||
}
|
||||
break;
|
||||
return val1 >= val2;
|
||||
case "in":
|
||||
if (!value.includes(recordAny[prop])) {
|
||||
continue recordLoop;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
resultRecords.push(recordAny);
|
||||
return val2.includes(val1);
|
||||
default:
|
||||
throw new Error(`Unupported operator: ${type}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (parsedQuery.ordering.length > 0) {
|
||||
resultRecords = resultRecords.sort((a: any, b: any) => {
|
||||
for (const { orderBy, orderDesc } of parsedQuery.ordering) {
|
||||
if (a[orderBy] < b[orderBy] || a[orderBy] === undefined) {
|
||||
return orderDesc ? 1 : -1;
|
||||
/**
|
||||
* Looks for an attribute assignment in the expression, and returns the expression assigned to the attribute or throws an error when not found
|
||||
* Side effect: effectively removes the attribute assignment from the expression (by replacing it with true = true)
|
||||
*/
|
||||
export function liftAttributeFilter(
|
||||
expression: QueryExpression | undefined,
|
||||
attributeName: string,
|
||||
): QueryExpression {
|
||||
if (!expression) {
|
||||
throw new Error(`Cannot find attribute assignment for ${attributeName}`);
|
||||
}
|
||||
if (a[orderBy] > b[orderBy] || b[orderBy] === undefined) {
|
||||
return orderDesc ? -1 : 1;
|
||||
switch (expression[0]) {
|
||||
case "=": {
|
||||
if (expression[1][0] === "attr" && expression[1][1] === attributeName) {
|
||||
const val = expression[2];
|
||||
// Remove the filter by changing it to true = true
|
||||
expression[1] = ["boolean", true];
|
||||
expression[2] = ["boolean", true];
|
||||
return val;
|
||||
}
|
||||
// Consider them equal. This way helps with comparing arrays (like tags)
|
||||
break;
|
||||
}
|
||||
case "and":
|
||||
case "or": {
|
||||
const newOp1 = liftAttributeFilter(expression[1], attributeName);
|
||||
if (newOp1) {
|
||||
return newOp1;
|
||||
}
|
||||
const newOp2 = liftAttributeFilter(expression[2], attributeName);
|
||||
if (newOp2) {
|
||||
return newOp2;
|
||||
}
|
||||
throw new Error(`Cannot find attribute assignment for ${attributeName}`);
|
||||
}
|
||||
}
|
||||
throw new Error(`Cannot find attribute assignment for ${attributeName}`);
|
||||
}
|
||||
|
||||
export function applyQuery<T>(query: Query, allItems: T[]): T[] {
|
||||
// Filter
|
||||
if (query.filter) {
|
||||
allItems = allItems.filter((item) =>
|
||||
evalQueryExpression(query.filter!, item)
|
||||
);
|
||||
}
|
||||
// Add dummy keys, then remove them
|
||||
return applyQueryNoFilterKV(
|
||||
query,
|
||||
allItems.map((v) => ({ key: [], value: v })),
|
||||
).map((v) => v.value);
|
||||
}
|
||||
|
||||
export function applyQueryNoFilterKV(
|
||||
query: Query,
|
||||
allItems: KV[],
|
||||
functionMap: FunctionMap = {}, // TODO: Figure this out later
|
||||
): KV[] {
|
||||
// Order by
|
||||
if (query.orderBy) {
|
||||
allItems.sort((a, b) => {
|
||||
const aVal = a.value;
|
||||
const bVal = b.value;
|
||||
for (const { expr, desc } of query.orderBy!) {
|
||||
const evalA = evalQueryExpression(expr, aVal, functionMap);
|
||||
const evalB = evalQueryExpression(expr, bVal, functionMap);
|
||||
if (
|
||||
evalA < evalB || evalA === undefined
|
||||
) {
|
||||
return desc ? 1 : -1;
|
||||
}
|
||||
if (
|
||||
evalA > evalB || evalB === undefined
|
||||
) {
|
||||
return desc ? -1 : 1;
|
||||
}
|
||||
}
|
||||
// Consider them equal. This helps with comparing arrays (like tags)
|
||||
return 0;
|
||||
});
|
||||
}
|
||||
|
||||
if (parsedQuery.limit) {
|
||||
resultRecords = resultRecords.slice(0, parsedQuery.limit);
|
||||
}
|
||||
if (parsedQuery.select) {
|
||||
resultRecords = resultRecords.map((rec) => {
|
||||
if (query.select) {
|
||||
for (let i = 0; i < allItems.length; i++) {
|
||||
const rec = allItems[i].value;
|
||||
const newRec: any = {};
|
||||
for (const k of parsedQuery.select!) {
|
||||
newRec[k] = rec[k];
|
||||
for (const { name, expr } of query.select) {
|
||||
newRec[name] = expr
|
||||
? evalQueryExpression(expr, rec, functionMap)
|
||||
: rec[name];
|
||||
}
|
||||
return newRec;
|
||||
});
|
||||
allItems[i].value = newRec;
|
||||
}
|
||||
return resultRecords;
|
||||
}
|
||||
if (query.limit) {
|
||||
const limit = evalQueryExpression(query.limit, {}, functionMap);
|
||||
if (allItems.length > limit) {
|
||||
allItems = allItems.slice(0, limit);
|
||||
}
|
||||
}
|
||||
return allItems;
|
||||
}
|
||||
|
||||
export function removeQueries(pt: ParseTree) {
|
||||
|
@ -4,6 +4,7 @@ import {
|
||||
collectNodesMatching,
|
||||
findParentMatching,
|
||||
nodeAtPos,
|
||||
parseTreeToAST,
|
||||
removeParentPointers,
|
||||
renderToText,
|
||||
replaceNodesMatching,
|
||||
@ -77,3 +78,9 @@ Deno.test("Test parsing", () => {
|
||||
let mdTree3 = parse(lang, mdTest3);
|
||||
// console.log(JSON.stringify(mdTree3, null, 2));
|
||||
});
|
||||
|
||||
Deno.test("AST functions", () => {
|
||||
const lang = wikiMarkdownLang([]);
|
||||
const mdTree = parse(lang, mdTest1);
|
||||
console.log(JSON.stringify(parseTreeToAST(mdTree), null, 2));
|
||||
});
|
||||
|
@ -8,6 +8,8 @@ export type ParseTree = {
|
||||
parent?: ParseTree;
|
||||
};
|
||||
|
||||
export type AST = [string, ...AST[]] | string;
|
||||
|
||||
export function addParentPointers(tree: ParseTree) {
|
||||
if (!tree.children) {
|
||||
return;
|
||||
@ -208,3 +210,19 @@ export function cloneTree(tree: ParseTree): ParseTree {
|
||||
delete newTree.parent;
|
||||
return newTree;
|
||||
}
|
||||
|
||||
export function parseTreeToAST(tree: ParseTree): AST {
|
||||
if (tree.text !== undefined) {
|
||||
return tree.text;
|
||||
}
|
||||
const ast: AST = [tree.type!];
|
||||
for (const node of tree.children!) {
|
||||
if (node.type && !node.type.endsWith("Mark")) {
|
||||
ast.push(parseTreeToAST(node));
|
||||
}
|
||||
if (node.text && node.text.trim()) {
|
||||
ast.push(node.text);
|
||||
}
|
||||
}
|
||||
return ast;
|
||||
}
|
||||
|
38
plug-api/plugos-syscall/datastore.ts
Normal file
38
plug-api/plugos-syscall/datastore.ts
Normal file
@ -0,0 +1,38 @@
|
||||
import { syscall } from "$sb/plugos-syscall/syscall.ts";
|
||||
import { KV, KvKey, KvQuery } from "$sb/types.ts";
|
||||
|
||||
export function set(key: KvKey, value: any): Promise<void> {
|
||||
return syscall("datastore.set", key, value);
|
||||
}
|
||||
|
||||
export function batchSet(kvs: KV[]): Promise<void> {
|
||||
return syscall("datastore.batchSet", kvs);
|
||||
}
|
||||
|
||||
export function get(key: KvKey): Promise<any> {
|
||||
return syscall("datastore.get", key);
|
||||
}
|
||||
|
||||
export function batchGet(keys: KvKey[]): Promise<(any | undefined)[]> {
|
||||
return syscall("datastore.batchGet", keys);
|
||||
}
|
||||
|
||||
export function del(key: KvKey): Promise<void> {
|
||||
return syscall("datastore.delete", key);
|
||||
}
|
||||
|
||||
export function batchDel(keys: KvKey[]): Promise<void> {
|
||||
return syscall("datastore.batchDelete", keys);
|
||||
}
|
||||
|
||||
export function query(
|
||||
query: KvQuery,
|
||||
): Promise<KV[]> {
|
||||
return syscall("datastore.query", query);
|
||||
}
|
||||
|
||||
export function queryDelete(
|
||||
query: KvQuery,
|
||||
): Promise<void> {
|
||||
return syscall("datastore.queryDelete", query);
|
||||
}
|
@ -1,7 +1,7 @@
|
||||
export * as asset from "./asset.ts";
|
||||
export * as events from "./event.ts";
|
||||
export * as shell from "./shell.ts";
|
||||
export * as store from "./store.ts";
|
||||
export * as YAML from "./yaml.ts";
|
||||
export * as mq from "./mq.ts";
|
||||
export * from "./syscall.ts";
|
||||
export * as datastore from "./datastore.ts";
|
||||
|
@ -1,62 +0,0 @@
|
||||
import { syscall } from "./syscall.ts";
|
||||
|
||||
export type KV = {
|
||||
key: string;
|
||||
value: any;
|
||||
};
|
||||
|
||||
export type Query = {
|
||||
filter?: Filter[];
|
||||
orderBy?: string;
|
||||
orderDesc?: boolean;
|
||||
limit?: number;
|
||||
select?: string[];
|
||||
};
|
||||
|
||||
export type Filter = {
|
||||
op: string;
|
||||
prop: string;
|
||||
value: any;
|
||||
};
|
||||
|
||||
export function set(key: string, value: any): Promise<void> {
|
||||
return syscall("store.set", key, value);
|
||||
}
|
||||
|
||||
export function batchSet(kvs: KV[]): Promise<void> {
|
||||
return syscall("store.batchSet", kvs);
|
||||
}
|
||||
|
||||
export function get(key: string): Promise<any> {
|
||||
return syscall("store.get", key);
|
||||
}
|
||||
|
||||
export function batchGet(keys: string[]): Promise<(any | undefined)[]> {
|
||||
return syscall("store.batchGet", keys);
|
||||
}
|
||||
|
||||
export function has(key: string): Promise<boolean> {
|
||||
return syscall("store.has", key);
|
||||
}
|
||||
|
||||
export function del(key: string): Promise<void> {
|
||||
return syscall("store.delete", key);
|
||||
}
|
||||
|
||||
export function batchDel(keys: string[]): Promise<void> {
|
||||
return syscall("store.batchDelete", keys);
|
||||
}
|
||||
|
||||
export function queryPrefix(
|
||||
prefix: string,
|
||||
): Promise<{ key: string; value: any }[]> {
|
||||
return syscall("store.queryPrefix", prefix);
|
||||
}
|
||||
|
||||
export function deletePrefix(prefix: string): Promise<void> {
|
||||
return syscall("store.deletePrefix", prefix);
|
||||
}
|
||||
|
||||
export function deleteAll(): Promise<void> {
|
||||
return syscall("store.deleteAll");
|
||||
}
|
@ -1,5 +1,10 @@
|
||||
import { syscall } from "./syscall.ts";
|
||||
|
||||
/**
|
||||
* Implements a very simple (string) key value store for the client.
|
||||
* Generally should only be used to set some client-specific states, such as preferences.
|
||||
*/
|
||||
|
||||
export function set(key: string, value: any): Promise<void> {
|
||||
return syscall("clientStore.set", key, value);
|
||||
}
|
||||
|
@ -72,7 +72,7 @@ export function filterBox(
|
||||
}
|
||||
|
||||
export function showPanel(
|
||||
id: "lhs" | "rhs" | "bhs" | "modal",
|
||||
id: "lhs" | "rhs" | "bhs" | "modal" | "ps",
|
||||
mode: number,
|
||||
html: string,
|
||||
script = "",
|
||||
@ -80,7 +80,9 @@ export function showPanel(
|
||||
return syscall("editor.showPanel", id, mode, html, script);
|
||||
}
|
||||
|
||||
export function hidePanel(id: "lhs" | "rhs" | "bhs" | "modal"): Promise<void> {
|
||||
export function hidePanel(
|
||||
id: "lhs" | "rhs" | "bhs" | "modal" | "ps",
|
||||
): Promise<void> {
|
||||
return syscall("editor.hidePanel", id);
|
||||
}
|
||||
|
||||
|
16
plug-api/silverbullet-syscall/handlebars.ts
Normal file
16
plug-api/silverbullet-syscall/handlebars.ts
Normal file
@ -0,0 +1,16 @@
|
||||
import { syscall } from "$sb/silverbullet-syscall/syscall.ts";
|
||||
|
||||
/**
|
||||
* Renders
|
||||
* @param template
|
||||
* @param obj
|
||||
* @param globals
|
||||
* @returns
|
||||
*/
|
||||
export function renderTemplate(
|
||||
template: string,
|
||||
obj: any,
|
||||
globals: Record<string, any> = {},
|
||||
): Promise<string> {
|
||||
return syscall("handlebars.renderTemplate", template, obj, globals);
|
||||
}
|
@ -1,54 +0,0 @@
|
||||
import type { Query } from "../plugos-syscall/store.ts";
|
||||
import { syscall } from "./syscall.ts";
|
||||
|
||||
export type KV = {
|
||||
key: string;
|
||||
value: any;
|
||||
};
|
||||
|
||||
export function set(
|
||||
page: string,
|
||||
key: string,
|
||||
value: any,
|
||||
): Promise<void> {
|
||||
return syscall("index.set", page, key, value);
|
||||
}
|
||||
|
||||
export function batchSet(page: string, kvs: KV[]): Promise<void> {
|
||||
return syscall("index.batchSet", page, kvs);
|
||||
}
|
||||
|
||||
export function get(page: string, key: string): Promise<any> {
|
||||
return syscall("index.get", page, key);
|
||||
}
|
||||
|
||||
export function del(page: string, key: string): Promise<void> {
|
||||
return syscall("index.delete", page, key);
|
||||
}
|
||||
|
||||
export function queryPrefix(
|
||||
prefix: string,
|
||||
): Promise<{ key: string; page: string; value: any }[]> {
|
||||
return syscall("index.queryPrefix", prefix);
|
||||
}
|
||||
|
||||
export function query(
|
||||
query: Query,
|
||||
): Promise<{ key: string; page: string; value: any }[]> {
|
||||
return syscall("index.query", query);
|
||||
}
|
||||
|
||||
export function clearPageIndexForPage(page: string): Promise<void> {
|
||||
return syscall("index.clearPageIndexForPage", page);
|
||||
}
|
||||
|
||||
export function deletePrefixForPage(
|
||||
page: string,
|
||||
prefix: string,
|
||||
): Promise<void> {
|
||||
return syscall("index.deletePrefixForPage", page, prefix);
|
||||
}
|
||||
|
||||
export function clearPageIndex(): Promise<void> {
|
||||
return syscall("index.clearPageIndex");
|
||||
}
|
16
plug-api/silverbullet-syscall/language.ts
Normal file
16
plug-api/silverbullet-syscall/language.ts
Normal file
@ -0,0 +1,16 @@
|
||||
import { syscall } from "$sb/silverbullet-syscall/syscall.ts";
|
||||
|
||||
import type { ParseTree } from "$sb/lib/tree.ts";
|
||||
|
||||
/**
|
||||
* Parses a piece of code using any of the supported SB languages, see `common/languages.ts` for a list
|
||||
* @param language the language to parse
|
||||
* @param code the code to parse
|
||||
* @returns a ParseTree representation of the code
|
||||
*/
|
||||
export function parseLanguage(
|
||||
language: string,
|
||||
code: string,
|
||||
): Promise<ParseTree> {
|
||||
return syscall("language.parseLanguage", language, code);
|
||||
}
|
@ -1,8 +1,9 @@
|
||||
export * as editor from "./editor.ts";
|
||||
export * as index from "./index.ts";
|
||||
export * as markdown from "./markdown.ts";
|
||||
export * as space from "./space.ts";
|
||||
export * as system from "./system.ts";
|
||||
export * as clientStore from "./clientStore.ts";
|
||||
export * as sync from "./sync.ts";
|
||||
export * as debug from "./debug.ts";
|
||||
export * as language from "./language.ts";
|
||||
export * as handlebars from "./handlebars.ts";
|
||||
|
@ -1,6 +1,5 @@
|
||||
import { syscall } from "./syscall.ts";
|
||||
import type { AttachmentMeta, PageMeta } from "../../web/types.ts";
|
||||
import { FileMeta } from "$sb/types.ts";
|
||||
import { AttachmentMeta, FileMeta, PageMeta } from "$sb/types.ts";
|
||||
|
||||
export function listPages(unfiltered = false): Promise<PageMeta[]> {
|
||||
return syscall("space.listPages", unfiltered);
|
||||
|
@ -1,13 +0,0 @@
|
||||
import { syscall } from "./syscall.ts";
|
||||
|
||||
export function set(key: string, value: any): Promise<void> {
|
||||
return syscall("store.set", key, value);
|
||||
}
|
||||
|
||||
export function get(key: string): Promise<any> {
|
||||
return syscall("store.get", key);
|
||||
}
|
||||
|
||||
export function del(key: string): Promise<void> {
|
||||
return syscall("store.delete", key);
|
||||
}
|
@ -1,3 +1,28 @@
|
||||
export type FileMeta = {
|
||||
name: string;
|
||||
lastModified: number;
|
||||
contentType: string;
|
||||
size: number;
|
||||
perm: "ro" | "rw";
|
||||
noSync?: boolean;
|
||||
};
|
||||
|
||||
export type PageMeta = {
|
||||
name: string;
|
||||
lastModified: number;
|
||||
lastOpened?: number;
|
||||
perm: "ro" | "rw";
|
||||
};
|
||||
|
||||
export type AttachmentMeta = {
|
||||
name: string;
|
||||
contentType: string;
|
||||
lastModified: number;
|
||||
size: number;
|
||||
perm: "ro" | "rw";
|
||||
};
|
||||
|
||||
// Message Queue related types
|
||||
export type MQMessage = {
|
||||
id: string;
|
||||
queue: string;
|
||||
@ -16,11 +41,76 @@ export type MQSubscribeOptions = {
|
||||
pollInterval?: number;
|
||||
};
|
||||
|
||||
export type FileMeta = {
|
||||
// Key-Value Store related types
|
||||
export type KvKey = string[];
|
||||
|
||||
export type KV<T = any> = {
|
||||
key: KvKey;
|
||||
value: T;
|
||||
};
|
||||
|
||||
export type OrderBy = {
|
||||
expr: QueryExpression;
|
||||
desc: boolean;
|
||||
};
|
||||
|
||||
export type Select = {
|
||||
name: string;
|
||||
lastModified: number;
|
||||
contentType: string;
|
||||
size: number;
|
||||
perm: "ro" | "rw";
|
||||
noSync?: boolean;
|
||||
} & Record<string, any>;
|
||||
expr?: QueryExpression;
|
||||
};
|
||||
|
||||
export type Query = {
|
||||
querySource?: string;
|
||||
filter?: QueryExpression;
|
||||
orderBy?: OrderBy[];
|
||||
select?: Select[];
|
||||
limit?: QueryExpression;
|
||||
render?: string;
|
||||
};
|
||||
|
||||
export type KvQuery = Omit<Query, "querySource"> & {
|
||||
prefix?: KvKey;
|
||||
};
|
||||
|
||||
export type QueryExpression =
|
||||
| ["and", QueryExpression, QueryExpression]
|
||||
| ["or", QueryExpression, QueryExpression]
|
||||
| ["=", QueryExpression, QueryExpression]
|
||||
| ["!=", QueryExpression, QueryExpression]
|
||||
| ["=~", QueryExpression, QueryExpression]
|
||||
| ["!=~", QueryExpression, QueryExpression]
|
||||
| ["<", QueryExpression, QueryExpression]
|
||||
| ["<=", QueryExpression, QueryExpression]
|
||||
| [">", QueryExpression, QueryExpression]
|
||||
| [">=", QueryExpression, QueryExpression]
|
||||
| ["in", QueryExpression, QueryExpression]
|
||||
| ["attr", QueryExpression, string]
|
||||
| ["attr", string]
|
||||
| ["number", number]
|
||||
| ["string", string]
|
||||
| ["boolean", boolean]
|
||||
| ["null"]
|
||||
| ["array", QueryExpression[]]
|
||||
| ["object", Record<string, any>]
|
||||
| ["regexp", string, string] // regex, modifier
|
||||
| ["+", QueryExpression, QueryExpression]
|
||||
| ["-", QueryExpression, QueryExpression]
|
||||
| ["*", QueryExpression, QueryExpression]
|
||||
| ["%", QueryExpression, QueryExpression]
|
||||
| ["/", QueryExpression, QueryExpression]
|
||||
| ["call", string, QueryExpression[]];
|
||||
|
||||
export type FunctionMap = Record<string, (...args: any[]) => any>;
|
||||
|
||||
/**
|
||||
* An ObjectValue that can be indexed by the `index` plug, needs to have a minimum of
|
||||
* of two fields:
|
||||
* - ref: a unique reference (id) for the object, ideally a page reference
|
||||
* - tags: a list of tags that the object belongs to
|
||||
*/
|
||||
export type ObjectValue<T> = {
|
||||
ref: string;
|
||||
tags: string[];
|
||||
} & T;
|
||||
|
||||
export type ObjectQuery = Omit<Query, "prefix">;
|
||||
|
@ -172,7 +172,7 @@ export async function compileManifests(
|
||||
}
|
||||
}
|
||||
console.log("Change detected, rebuilding...");
|
||||
buildAll();
|
||||
await buildAll();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -105,7 +105,7 @@ export class EndpointHook implements Hook<EndpointHookT> {
|
||||
}
|
||||
}
|
||||
// console.log("Shouldn't get here");
|
||||
next();
|
||||
await next();
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -53,7 +53,11 @@ export class EventHook implements Hook<EventHookT> {
|
||||
manifest!.functions,
|
||||
)
|
||||
) {
|
||||
if (functionDef.events && functionDef.events.includes(eventName)) {
|
||||
if (functionDef.events) {
|
||||
for (const event of functionDef.events) {
|
||||
if (
|
||||
event === eventName || eventNameToRegex(event).test(eventName)
|
||||
) {
|
||||
// Only dispatch functions that can run in this environment
|
||||
if (await plug.canInvoke(name)) {
|
||||
const result = await plug.invoke(name, args);
|
||||
@ -64,6 +68,8 @@ export class EventHook implements Hook<EventHookT> {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
const localListeners = this.localListeners.get(eventName);
|
||||
if (localListeners) {
|
||||
for (const localListener of localListeners) {
|
||||
@ -100,3 +106,9 @@ export class EventHook implements Hook<EventHookT> {
|
||||
return errors;
|
||||
}
|
||||
}
|
||||
|
||||
function eventNameToRegex(eventName: string): RegExp {
|
||||
return new RegExp(
|
||||
`^${eventName.replace(/\*/g, ".*").replace(/\//g, "\\/")}$`,
|
||||
);
|
||||
}
|
||||
|
@ -6,15 +6,17 @@ import { KvPrimitives } from "./kv_primitives.ts";
|
||||
import { assertEquals } from "https://deno.land/std@0.165.0/testing/asserts.ts";
|
||||
|
||||
async function test(db: KvPrimitives) {
|
||||
const dataStore = new DataStore(db);
|
||||
await dataStore.set(["user", "peter"], { name: "Peter" });
|
||||
await dataStore.set(["user", "hank"], { name: "Hank" });
|
||||
let results = await dataStore.query({
|
||||
const datastore = new DataStore(db, ["ds"], {
|
||||
count: (arr: any[]) => arr.length,
|
||||
});
|
||||
await datastore.set(["user", "peter"], { name: "Peter" });
|
||||
await datastore.set(["user", "hank"], { name: "Hank" });
|
||||
let results = await datastore.query({
|
||||
prefix: ["user"],
|
||||
filter: ["=", "name", "Peter"],
|
||||
filter: ["=", ["attr", "name"], ["string", "Peter"]],
|
||||
});
|
||||
assertEquals(results, [{ key: ["user", "peter"], value: { name: "Peter" } }]);
|
||||
await dataStore.batchSet([
|
||||
await datastore.batchSet<any>([
|
||||
{ key: ["kv", "name"], value: "Zef" },
|
||||
{ key: ["kv", "data"], value: new Uint8Array([1, 2, 3]) },
|
||||
{
|
||||
@ -29,32 +31,42 @@ async function test(db: KvPrimitives) {
|
||||
},
|
||||
},
|
||||
]);
|
||||
assertEquals(await dataStore.get(["kv", "name"]), "Zef");
|
||||
assertEquals(await dataStore.get(["kv", "data"]), new Uint8Array([1, 2, 3]));
|
||||
results = await dataStore.query({
|
||||
assertEquals(await datastore.get(["kv", "name"]), "Zef");
|
||||
assertEquals(await datastore.get(["kv", "data"]), new Uint8Array([1, 2, 3]));
|
||||
results = await datastore.query({
|
||||
prefix: ["kv"],
|
||||
filter: ["=", "", "Zef"],
|
||||
filter: ["=~", ["attr", ""], ["regexp", "Z.f", "i"]],
|
||||
});
|
||||
assertEquals(results, [{ key: ["kv", "name"], value: "Zef" }]);
|
||||
results = await dataStore.query({
|
||||
results = await datastore.query({
|
||||
prefix: ["kv"],
|
||||
filter: ["and", ["=", "parents", "John"], [
|
||||
filter: ["and", ["=", ["attr", "parents"], ["string", "John"]], [
|
||||
"=",
|
||||
"address.city",
|
||||
"San Francisco",
|
||||
["attr", ["attr", "address"], "city"],
|
||||
["string", "San Francisco"],
|
||||
]],
|
||||
select: ["name"],
|
||||
select: [
|
||||
{ name: "parents" },
|
||||
{
|
||||
name: "name",
|
||||
expr: ["+", ["attr", "name"], ["string", "!"]],
|
||||
},
|
||||
{
|
||||
name: "parentCount",
|
||||
expr: ["call", "count", [["attr", "parents"]]],
|
||||
},
|
||||
],
|
||||
});
|
||||
assertEquals(results.length, 1);
|
||||
assertEquals(results[0], {
|
||||
key: ["kv", "complicated"],
|
||||
value: { name: "Frank" },
|
||||
value: { name: "Frank!", parentCount: 2, parents: ["John", "Jane"] },
|
||||
});
|
||||
}
|
||||
|
||||
Deno.test("Test Deno KV DataStore", async () => {
|
||||
const tmpFile = await Deno.makeTempFile();
|
||||
const db = new DenoKvPrimitives(tmpFile);
|
||||
await db.init();
|
||||
const db = new DenoKvPrimitives(await Deno.openKv(tmpFile));
|
||||
await test(db);
|
||||
db.close();
|
||||
await Deno.remove(tmpFile);
|
||||
|
@ -1,183 +1,116 @@
|
||||
import { KvKey, KvPrimitives } from "./kv_primitives.ts";
|
||||
|
||||
export type { KvKey };
|
||||
|
||||
export type KvValue = any;
|
||||
|
||||
export type KV = {
|
||||
key: KvKey;
|
||||
value: KvValue;
|
||||
};
|
||||
|
||||
export type KvOrderBy = {
|
||||
attribute: string;
|
||||
desc: boolean;
|
||||
};
|
||||
|
||||
export type KvQuery = {
|
||||
prefix: KvKey;
|
||||
filter?: KvQueryFilter;
|
||||
orderBy?: KvOrderBy[];
|
||||
limit?: number;
|
||||
select?: string[];
|
||||
};
|
||||
|
||||
export type KvQueryFilter =
|
||||
| ["=", string, any]
|
||||
| ["!=", string, any]
|
||||
| ["=~", string, RegExp]
|
||||
| ["!=~", string, RegExp]
|
||||
| ["prefix", string, string]
|
||||
| ["<", string, any]
|
||||
| ["<=", string, any]
|
||||
| [">", string, any]
|
||||
| [">=", string, any]
|
||||
| ["in", string, any[]]
|
||||
| ["and", KvQueryFilter, KvQueryFilter]
|
||||
| ["or", KvQueryFilter, KvQueryFilter];
|
||||
|
||||
function filterKvQuery(kvQuery: KvQueryFilter, obj: KvValue): boolean {
|
||||
const [op, op1, op2] = kvQuery;
|
||||
|
||||
if (op === "and") {
|
||||
return filterKvQuery(op1, obj) &&
|
||||
filterKvQuery(op2, obj);
|
||||
} else if (op === "or") {
|
||||
return filterKvQuery(op1, obj) || filterKvQuery(op2, obj);
|
||||
}
|
||||
|
||||
// Look up the value of the attribute, supporting nested attributes via `attr.attr2.attr3`, and empty attribute value signifies the root object
|
||||
let attributeVal = obj;
|
||||
for (const part of op1.split(".")) {
|
||||
if (!part) {
|
||||
continue;
|
||||
}
|
||||
if (attributeVal === undefined) {
|
||||
return false;
|
||||
}
|
||||
attributeVal = attributeVal[part];
|
||||
}
|
||||
|
||||
// And apply the operator
|
||||
switch (op) {
|
||||
case "=": {
|
||||
if (Array.isArray(attributeVal) && !Array.isArray(op2)) {
|
||||
// Record property is an array, and value is a scalar: find the value in the array
|
||||
if (attributeVal.includes(op2)) {
|
||||
return true;
|
||||
}
|
||||
} else if (Array.isArray(attributeVal) && Array.isArray(obj)) {
|
||||
// Record property is an array, and value is an array: find the value in the array
|
||||
if (attributeVal.some((v) => obj.includes(v))) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return attributeVal === op2;
|
||||
}
|
||||
case "!=":
|
||||
return attributeVal !== op2;
|
||||
case "=~":
|
||||
return op2.test(attributeVal);
|
||||
case "!=~":
|
||||
return !op2.test(attributeVal);
|
||||
case "prefix":
|
||||
return attributeVal.startsWith(op2);
|
||||
case "<":
|
||||
return attributeVal < op2;
|
||||
case "<=":
|
||||
return attributeVal <= op2;
|
||||
case ">":
|
||||
return attributeVal > op2;
|
||||
case ">=":
|
||||
return attributeVal >= op2;
|
||||
case "in":
|
||||
return op2.includes(attributeVal);
|
||||
default:
|
||||
throw new Error(`Unupported operator: ${op}`);
|
||||
}
|
||||
}
|
||||
import { applyQueryNoFilterKV, evalQueryExpression } from "$sb/lib/query.ts";
|
||||
import { FunctionMap, KV, KvKey, KvQuery } from "$sb/types.ts";
|
||||
import { builtinFunctions } from "$sb/lib/builtin_query_functions.ts";
|
||||
import { KvPrimitives } from "./kv_primitives.ts";
|
||||
|
||||
/**
|
||||
* This is the data store class you'll actually want to use, wrapping the primitives
|
||||
* in a more user-friendly way
|
||||
*/
|
||||
export class DataStore {
|
||||
constructor(private kv: KvPrimitives) {
|
||||
constructor(
|
||||
private kv: KvPrimitives,
|
||||
private prefix: KvKey = [],
|
||||
private functionMap: FunctionMap = builtinFunctions,
|
||||
) {
|
||||
}
|
||||
|
||||
async get(key: KvKey): Promise<KvValue> {
|
||||
return (await this.kv.batchGet([key]))[0];
|
||||
prefixed(prefix: KvKey): DataStore {
|
||||
return new DataStore(
|
||||
this.kv,
|
||||
[...this.prefix, ...prefix],
|
||||
this.functionMap,
|
||||
);
|
||||
}
|
||||
|
||||
batchGet(keys: KvKey[]): Promise<KvValue[]> {
|
||||
return this.kv.batchGet(keys);
|
||||
async get<T = any>(key: KvKey): Promise<T | null> {
|
||||
return (await this.batchGet([key]))[0];
|
||||
}
|
||||
|
||||
set(key: KvKey, value: KvValue): Promise<void> {
|
||||
return this.kv.batchSet([{ key, value }]);
|
||||
batchGet<T = any>(keys: KvKey[]): Promise<(T | null)[]> {
|
||||
return this.kv.batchGet(keys.map((key) => this.applyPrefix(key)));
|
||||
}
|
||||
|
||||
batchSet(entries: KV[]): Promise<void> {
|
||||
return this.kv.batchSet(entries);
|
||||
set(key: KvKey, value: any): Promise<void> {
|
||||
return this.batchSet([{ key, value }]);
|
||||
}
|
||||
|
||||
batchSet<T = any>(entries: KV<T>[]): Promise<void> {
|
||||
const allKeyStrings = new Set<string>();
|
||||
const uniqueEntries: KV[] = [];
|
||||
for (const { key, value } of entries) {
|
||||
const keyString = JSON.stringify(key);
|
||||
if (allKeyStrings.has(keyString)) {
|
||||
console.warn(`Duplicate key ${keyString} in batchSet, skipping`);
|
||||
} else {
|
||||
allKeyStrings.add(keyString);
|
||||
uniqueEntries.push({ key: this.applyPrefix(key), value });
|
||||
}
|
||||
}
|
||||
return this.kv.batchSet(uniqueEntries);
|
||||
}
|
||||
|
||||
delete(key: KvKey): Promise<void> {
|
||||
return this.kv.batchDelete([key]);
|
||||
return this.batchDelete([key]);
|
||||
}
|
||||
|
||||
batchDelete(keys: KvKey[]): Promise<void> {
|
||||
return this.kv.batchDelete(keys);
|
||||
return this.kv.batchDelete(keys.map((key) => this.applyPrefix(key)));
|
||||
}
|
||||
|
||||
async query(query: KvQuery): Promise<KV[]> {
|
||||
const results: KV[] = [];
|
||||
async query<T = any>(query: KvQuery): Promise<KV<T>[]> {
|
||||
const results: KV<T>[] = [];
|
||||
let itemCount = 0;
|
||||
// Accumuliate results
|
||||
for await (const entry of this.kv.query({ prefix: query.prefix })) {
|
||||
// Accumulate results
|
||||
let limit = Infinity;
|
||||
const prefixedQuery: KvQuery = {
|
||||
...query,
|
||||
prefix: query.prefix ? this.applyPrefix(query.prefix) : undefined,
|
||||
};
|
||||
if (query.limit) {
|
||||
limit = evalQueryExpression(query.limit, {}, this.functionMap);
|
||||
}
|
||||
for await (
|
||||
const entry of this.kv.query(prefixedQuery)
|
||||
) {
|
||||
// Filter
|
||||
if (query.filter && !filterKvQuery(query.filter, entry.value)) {
|
||||
if (
|
||||
query.filter &&
|
||||
!evalQueryExpression(query.filter, entry.value, this.functionMap)
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
results.push(entry);
|
||||
itemCount++;
|
||||
// Stop when the limit has been reached
|
||||
if (itemCount === query.limit) {
|
||||
if (itemCount === limit) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
// Order by
|
||||
if (query.orderBy) {
|
||||
results.sort((a, b) => {
|
||||
const aVal = a.value;
|
||||
const bVal = b.value;
|
||||
for (const { attribute, desc } of query.orderBy!) {
|
||||
if (
|
||||
aVal[attribute] < bVal[attribute] || aVal[attribute] === undefined
|
||||
) {
|
||||
return desc ? 1 : -1;
|
||||
}
|
||||
if (
|
||||
aVal[attribute] > bVal[attribute] || bVal[attribute] === undefined
|
||||
) {
|
||||
return desc ? -1 : 1;
|
||||
}
|
||||
}
|
||||
// Consider them equal. This helps with comparing arrays (like tags)
|
||||
return 0;
|
||||
});
|
||||
// Apply order by, limit, and select
|
||||
return applyQueryNoFilterKV(prefixedQuery, results, this.functionMap).map((
|
||||
{ key, value },
|
||||
) => ({ key: this.stripPrefix(key), value }));
|
||||
}
|
||||
|
||||
if (query.select) {
|
||||
for (let i = 0; i < results.length; i++) {
|
||||
const rec = results[i].value;
|
||||
const newRec: any = {};
|
||||
for (const k of query.select) {
|
||||
newRec[k] = rec[k];
|
||||
async queryDelete(query: KvQuery): Promise<void> {
|
||||
const keys: KvKey[] = [];
|
||||
for (
|
||||
const { key } of await this.query({
|
||||
...query,
|
||||
prefix: query.prefix ? this.applyPrefix(query.prefix) : undefined,
|
||||
})
|
||||
) {
|
||||
keys.push(key);
|
||||
}
|
||||
results[i].value = newRec;
|
||||
}
|
||||
}
|
||||
return results;
|
||||
return this.batchDelete(keys);
|
||||
}
|
||||
|
||||
private applyPrefix(key: KvKey): KvKey {
|
||||
return [...this.prefix, ...(key ? key : [])];
|
||||
}
|
||||
|
||||
private stripPrefix(key: KvKey): KvKey {
|
||||
return key.slice(this.prefix.length);
|
||||
}
|
||||
}
|
||||
|
@ -3,8 +3,7 @@ import { allTests } from "./kv_primitives.test.ts";
|
||||
|
||||
Deno.test("Test Deno KV Primitives", async () => {
|
||||
const tmpFile = await Deno.makeTempFile();
|
||||
const db = new DenoKvPrimitives(tmpFile);
|
||||
await db.init();
|
||||
const db = new DenoKvPrimitives(await Deno.openKv(tmpFile));
|
||||
await allTests(db);
|
||||
db.close();
|
||||
await Deno.remove(tmpFile);
|
||||
|
@ -1,15 +1,12 @@
|
||||
/// <reference lib="deno.unstable" />
|
||||
|
||||
import { KV, KvKey, KvPrimitives, KvQueryOptions } from "./kv_primitives.ts";
|
||||
const kvBatchSize = 10;
|
||||
import { KV, KvKey } from "$sb/types.ts";
|
||||
import { KvPrimitives, KvQueryOptions } from "./kv_primitives.ts";
|
||||
|
||||
const kvBatchSize = 100;
|
||||
|
||||
export class DenoKvPrimitives implements KvPrimitives {
|
||||
db!: Deno.Kv;
|
||||
constructor(private path?: string) {
|
||||
}
|
||||
|
||||
async init() {
|
||||
this.db = await Deno.openKv(this.path);
|
||||
constructor(private db: Deno.Kv) {
|
||||
}
|
||||
|
||||
async batchGet(keys: KvKey[]): Promise<any[]> {
|
||||
|
@ -1,32 +1,33 @@
|
||||
import { KV, KvKey, KvPrimitives, KvQueryOptions } from "./kv_primitives.ts";
|
||||
import { KV, KvKey } from "$sb/types.ts";
|
||||
import { KvPrimitives, KvQueryOptions } from "./kv_primitives.ts";
|
||||
import { IDBPDatabase, openDB } from "https://esm.sh/idb@7.1.1/with-async-ittr";
|
||||
|
||||
const sep = "\0";
|
||||
const objectStoreName = "data";
|
||||
|
||||
export class IndexedDBKvPrimitives implements KvPrimitives {
|
||||
db!: IDBPDatabase<any>;
|
||||
|
||||
constructor(
|
||||
private dbName: string,
|
||||
private objectStoreName: string = "data",
|
||||
) {
|
||||
}
|
||||
|
||||
async init() {
|
||||
this.db = await openDB(this.dbName, 1, {
|
||||
upgrade: (db) => {
|
||||
db.createObjectStore(this.objectStoreName);
|
||||
db.createObjectStore(objectStoreName);
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
batchGet(keys: KvKey[]): Promise<any[]> {
|
||||
const tx = this.db.transaction(this.objectStoreName, "readonly");
|
||||
const tx = this.db.transaction(objectStoreName, "readonly");
|
||||
return Promise.all(keys.map((key) => tx.store.get(this.buildKey(key))));
|
||||
}
|
||||
|
||||
async batchSet(entries: KV[]): Promise<void> {
|
||||
const tx = this.db.transaction(this.objectStoreName, "readwrite");
|
||||
const tx = this.db.transaction(objectStoreName, "readwrite");
|
||||
await Promise.all([
|
||||
...entries.map(({ key, value }) =>
|
||||
tx.store.put(value, this.buildKey(key))
|
||||
@ -36,7 +37,7 @@ export class IndexedDBKvPrimitives implements KvPrimitives {
|
||||
}
|
||||
|
||||
async batchDelete(keys: KvKey[]): Promise<void> {
|
||||
const tx = this.db.transaction(this.objectStoreName, "readwrite");
|
||||
const tx = this.db.transaction(objectStoreName, "readwrite");
|
||||
await Promise.all([
|
||||
...keys.map((key) => tx.store.delete(this.buildKey(key))),
|
||||
tx.done,
|
||||
@ -44,12 +45,12 @@ export class IndexedDBKvPrimitives implements KvPrimitives {
|
||||
}
|
||||
|
||||
async *query({ prefix }: KvQueryOptions): AsyncIterableIterator<KV> {
|
||||
const tx = this.db.transaction(this.objectStoreName, "readonly");
|
||||
const tx = this.db.transaction(objectStoreName, "readonly");
|
||||
prefix = prefix || [];
|
||||
for await (
|
||||
const entry of tx.store.iterate(IDBKeyRange.bound(
|
||||
this.buildKey([...prefix, ""]),
|
||||
this.buildKey([...prefix, "\ufffe"]),
|
||||
this.buildKey([...prefix, "\uffff"]),
|
||||
))
|
||||
) {
|
||||
yield { key: this.extractKey(entry.key), value: entry.value };
|
||||
|
@ -1,5 +1,6 @@
|
||||
import { KV, KvPrimitives } from "./kv_primitives.ts";
|
||||
import { KvPrimitives } from "./kv_primitives.ts";
|
||||
import { assertEquals } from "../../test_deps.ts";
|
||||
import { KV } from "$sb/types.ts";
|
||||
|
||||
export async function allTests(db: KvPrimitives) {
|
||||
await db.batchSet([
|
||||
|
@ -1,17 +1,11 @@
|
||||
export type KvKey = string[];
|
||||
export type KvValue = any;
|
||||
|
||||
export type KV = {
|
||||
key: KvKey;
|
||||
value: KvValue;
|
||||
};
|
||||
import { KV, KvKey } from "$sb/types.ts";
|
||||
|
||||
export type KvQueryOptions = {
|
||||
prefix?: KvKey;
|
||||
};
|
||||
|
||||
export interface KvPrimitives {
|
||||
batchGet(keys: KvKey[]): Promise<(KvValue | undefined)[]>;
|
||||
batchGet(keys: KvKey[]): Promise<(any | undefined)[]>;
|
||||
batchSet(entries: KV[]): Promise<void>;
|
||||
batchDelete(keys: KvKey[]): Promise<void>;
|
||||
query(options: KvQueryOptions): AsyncIterableIterator<KV>;
|
||||
|
@ -1,58 +0,0 @@
|
||||
import { assertEquals } from "../../test_deps.ts";
|
||||
import { DenoKVStore } from "./kv_store.deno_kv.ts";
|
||||
|
||||
Deno.test("Test KV index", async () => {
|
||||
const tmpFile = await Deno.makeTempFile();
|
||||
const denoKv = await Deno.openKv(tmpFile);
|
||||
const kv = new DenoKVStore(denoKv);
|
||||
|
||||
await kv.set("name", "Peter");
|
||||
assertEquals(await kv.get("name"), "Peter");
|
||||
await kv.del("name");
|
||||
assertEquals(await kv.has("name"), false);
|
||||
|
||||
await kv.batchSet([
|
||||
{ key: "page:hello", value: "Hello" },
|
||||
{ key: "page:hello2", value: "Hello 2" },
|
||||
{ key: "page:hello3", value: "Hello 3" },
|
||||
{ key: "something", value: "Something" },
|
||||
{ key: "something1", value: "Something" },
|
||||
{ key: "something2", value: "Something" },
|
||||
{ key: "something3", value: "Something" },
|
||||
{ key: "something4", value: "Something" },
|
||||
{ key: "something5", value: "Something" },
|
||||
{ key: "something6", value: "Something" },
|
||||
{ key: "something7", value: "Something" },
|
||||
{ key: "something8", value: "Something" },
|
||||
{ key: "something9", value: "Something" },
|
||||
{ key: "something10", value: "Something" },
|
||||
{ key: "something11", value: "Something" },
|
||||
{ key: "something12", value: "Something" },
|
||||
{ key: "something13", value: "Something" },
|
||||
{ key: "something14", value: "Something" },
|
||||
{ key: "something15", value: "Something" },
|
||||
{ key: "something16", value: "Something" },
|
||||
{ key: "something17", value: "Something" },
|
||||
{ key: "something18", value: "Something" },
|
||||
{ key: "something19", value: "Something" },
|
||||
]);
|
||||
|
||||
const results = await kv.queryPrefix("page:");
|
||||
assertEquals(results.length, 3);
|
||||
|
||||
assertEquals(await kv.batchGet(["page:hello", "page:hello3"]), [
|
||||
"Hello",
|
||||
"Hello 3",
|
||||
]);
|
||||
|
||||
await kv.deletePrefix("page:");
|
||||
|
||||
assertEquals(await kv.queryPrefix("page:"), []);
|
||||
assertEquals((await kv.queryPrefix("")).length, 20);
|
||||
|
||||
await kv.deletePrefix("");
|
||||
assertEquals(await kv.queryPrefix(""), []);
|
||||
|
||||
denoKv.close();
|
||||
await Deno.remove(tmpFile);
|
||||
});
|
@ -1,112 +0,0 @@
|
||||
/// <reference lib="deno.unstable" />
|
||||
|
||||
import { KV, KVStore } from "./kv_store.ts";
|
||||
|
||||
const kvBatchSize = 10;
|
||||
|
||||
export class DenoKVStore implements KVStore {
|
||||
constructor(private kv: Deno.Kv) {
|
||||
}
|
||||
|
||||
del(key: string): Promise<void> {
|
||||
return this.batchDelete([key]);
|
||||
}
|
||||
async deletePrefix(prefix: string): Promise<void> {
|
||||
const allKeys: string[] = [];
|
||||
for await (
|
||||
const result of this.kv.list(
|
||||
prefix
|
||||
? {
|
||||
start: [prefix],
|
||||
end: [endRange(prefix)],
|
||||
}
|
||||
: { prefix: [] },
|
||||
)
|
||||
) {
|
||||
allKeys.push(result.key[0] as string);
|
||||
}
|
||||
return this.batchDelete(allKeys);
|
||||
}
|
||||
deleteAll(): Promise<void> {
|
||||
return this.deletePrefix("");
|
||||
}
|
||||
set(key: string, value: any): Promise<void> {
|
||||
return this.batchSet([{ key, value }]);
|
||||
}
|
||||
async batchSet(kvs: KV[]): Promise<void> {
|
||||
// Split into batches of kvBatchSize
|
||||
const batches: KV[][] = [];
|
||||
for (let i = 0; i < kvs.length; i += kvBatchSize) {
|
||||
batches.push(kvs.slice(i, i + kvBatchSize));
|
||||
}
|
||||
for (const batch of batches) {
|
||||
let batchOp = this.kv.atomic();
|
||||
for (const { key, value } of batch) {
|
||||
batchOp = batchOp.set([key], value);
|
||||
}
|
||||
const res = await batchOp.commit();
|
||||
if (!res.ok) {
|
||||
throw res;
|
||||
}
|
||||
}
|
||||
}
|
||||
async batchDelete(keys: string[]): Promise<void> {
|
||||
const batches: string[][] = [];
|
||||
for (let i = 0; i < keys.length; i += kvBatchSize) {
|
||||
batches.push(keys.slice(i, i + kvBatchSize));
|
||||
}
|
||||
for (const batch of batches) {
|
||||
let batchOp = this.kv.atomic();
|
||||
for (const key of batch) {
|
||||
batchOp = batchOp.delete([key]);
|
||||
}
|
||||
const res = await batchOp.commit();
|
||||
if (!res.ok) {
|
||||
throw res;
|
||||
}
|
||||
}
|
||||
}
|
||||
async batchGet(keys: string[]): Promise<any[]> {
|
||||
const results: any[] = [];
|
||||
const batches: Deno.KvKey[][] = [];
|
||||
for (let i = 0; i < keys.length; i += kvBatchSize) {
|
||||
batches.push(keys.slice(i, i + kvBatchSize).map((k) => [k]));
|
||||
}
|
||||
for (const batch of batches) {
|
||||
const res = await this.kv.getMany(batch);
|
||||
results.push(...res.map((r) => r.value));
|
||||
}
|
||||
return results;
|
||||
}
|
||||
async get(key: string): Promise<any> {
|
||||
return (await this.kv.get([key])).value;
|
||||
}
|
||||
async has(key: string): Promise<boolean> {
|
||||
return (await this.kv.get([key])).value !== null;
|
||||
}
|
||||
async queryPrefix(keyPrefix: string): Promise<{ key: string; value: any }[]> {
|
||||
const results: { key: string; value: any }[] = [];
|
||||
for await (
|
||||
const result of this.kv.list(
|
||||
keyPrefix
|
||||
? {
|
||||
start: [keyPrefix],
|
||||
end: [endRange(keyPrefix)],
|
||||
}
|
||||
: { prefix: [] },
|
||||
)
|
||||
) {
|
||||
results.push({
|
||||
key: result.key[0] as string,
|
||||
value: result.value as any,
|
||||
});
|
||||
}
|
||||
return results;
|
||||
}
|
||||
}
|
||||
|
||||
function endRange(prefix: string) {
|
||||
const lastChar = prefix[prefix.length - 1];
|
||||
const nextLastChar = String.fromCharCode(lastChar.charCodeAt(0) + 1);
|
||||
return prefix.slice(0, -1) + nextLastChar;
|
||||
}
|
@ -1,82 +0,0 @@
|
||||
import Dexie, { Table } from "dexie";
|
||||
import type { KV, KVStore } from "./kv_store.ts";
|
||||
|
||||
export class DexieKVStore implements KVStore {
|
||||
db: Dexie;
|
||||
items: Table<KV, string>;
|
||||
constructor(
|
||||
dbName: string,
|
||||
tableName: string,
|
||||
indexedDB?: any,
|
||||
IDBKeyRange?: any,
|
||||
) {
|
||||
this.db = new Dexie(dbName, {
|
||||
indexedDB,
|
||||
IDBKeyRange,
|
||||
});
|
||||
this.db.version(1).stores({
|
||||
[tableName]: "key",
|
||||
});
|
||||
this.items = this.db.table<KV, string>(tableName);
|
||||
}
|
||||
|
||||
async del(key: string) {
|
||||
await this.items.delete(key);
|
||||
}
|
||||
|
||||
async deletePrefix(prefix: string) {
|
||||
await this.items.where("key").startsWith(prefix).delete();
|
||||
}
|
||||
|
||||
async deleteAll() {
|
||||
await this.items.clear();
|
||||
}
|
||||
|
||||
async set(key: string, value: any) {
|
||||
await this.items.put({
|
||||
key,
|
||||
value,
|
||||
});
|
||||
}
|
||||
|
||||
async batchSet(kvs: KV[]) {
|
||||
await this.items.bulkPut(
|
||||
kvs.map(({ key, value }) => ({
|
||||
key,
|
||||
value,
|
||||
})),
|
||||
);
|
||||
}
|
||||
|
||||
async batchDelete(keys: string[]) {
|
||||
await this.items.bulkDelete(keys);
|
||||
}
|
||||
|
||||
async batchGet(
|
||||
keys: string[],
|
||||
): Promise<(any | undefined)[]> {
|
||||
return (await this.items.bulkGet(keys)).map((result) => result?.value);
|
||||
}
|
||||
|
||||
async get(key: string): Promise<any | null> {
|
||||
const result = await this.items.get({ key });
|
||||
return result ? result.value : null;
|
||||
}
|
||||
|
||||
async has(key: string): Promise<boolean> {
|
||||
return await this.items.get({
|
||||
key,
|
||||
}) !== undefined;
|
||||
}
|
||||
|
||||
async queryPrefix(
|
||||
keyPrefix: string,
|
||||
): Promise<{ key: string; value: any }[]> {
|
||||
const results = await this.items.where("key").startsWith(keyPrefix)
|
||||
.toArray();
|
||||
return results.map((result) => ({
|
||||
key: result.key,
|
||||
value: result.value,
|
||||
}));
|
||||
}
|
||||
}
|
@ -1,6 +1,6 @@
|
||||
import { KV, KVStore } from "./kv_store.ts";
|
||||
import { KV } from "$sb/types.ts";
|
||||
|
||||
export class JSONKVStore implements KVStore {
|
||||
export class JSONKVStore {
|
||||
private data: { [key: string]: any } = {};
|
||||
|
||||
async load(path: string) {
|
||||
@ -38,21 +38,6 @@ export class JSONKVStore implements KVStore {
|
||||
this.data[key] = value;
|
||||
return Promise.resolve();
|
||||
}
|
||||
batchSet(kvs: KV[]): Promise<void> {
|
||||
for (const kv of kvs) {
|
||||
this.data[kv.key] = kv.value;
|
||||
}
|
||||
return Promise.resolve();
|
||||
}
|
||||
batchDelete(keys: string[]): Promise<void> {
|
||||
for (const key of keys) {
|
||||
delete this.data[key];
|
||||
}
|
||||
return Promise.resolve();
|
||||
}
|
||||
batchGet(keys: string[]): Promise<any[]> {
|
||||
return Promise.resolve(keys.map((key) => this.data[key]));
|
||||
}
|
||||
get(key: string): Promise<any> {
|
||||
return Promise.resolve(this.data[key]);
|
||||
}
|
||||
|
@ -1,59 +0,0 @@
|
||||
export type KV = {
|
||||
key: string;
|
||||
value: any;
|
||||
};
|
||||
|
||||
/**
|
||||
* An interface to any simple key-value store.
|
||||
*/
|
||||
export interface KVStore {
|
||||
/**
|
||||
* Deletes the value associated with a given key.
|
||||
*/
|
||||
del(key: string): Promise<void>;
|
||||
|
||||
/**
|
||||
* Deletes all keys that start with a specific prefix.
|
||||
*/
|
||||
deletePrefix(prefix: string): Promise<void>;
|
||||
|
||||
/**
|
||||
* Deletes all keys in the store.
|
||||
*/
|
||||
deleteAll(): Promise<void>;
|
||||
|
||||
/**
|
||||
* Sets the value for a given key.
|
||||
*/
|
||||
set(key: string, value: any): Promise<void>;
|
||||
|
||||
/**
|
||||
* Sets the values for a list of key-value pairs.
|
||||
*/
|
||||
batchSet(kvs: KV[]): Promise<void>;
|
||||
|
||||
/**
|
||||
* Deletes a list of keys.
|
||||
*/
|
||||
batchDelete(keys: string[]): Promise<void>;
|
||||
|
||||
/**
|
||||
* Gets the values for a list of keys.
|
||||
*/
|
||||
batchGet(keys: string[]): Promise<(any | undefined)[]>;
|
||||
|
||||
/**
|
||||
* Gets the value for a given key.
|
||||
*/
|
||||
get(key: string): Promise<any | null>;
|
||||
|
||||
/**
|
||||
* Checks whether a given key exists in the store.
|
||||
*/
|
||||
has(key: string): Promise<boolean>;
|
||||
|
||||
/**
|
||||
* Gets all key-value pairs where the key starts with a specific prefix.
|
||||
*/
|
||||
queryPrefix(keyPrefix: string): Promise<{ key: string; value: any }[]>;
|
||||
}
|
@ -1,10 +1,14 @@
|
||||
import { IDBKeyRange, indexedDB } from "https://esm.sh/fake-indexeddb@4.0.2";
|
||||
import { DexieMQ } from "./mq.dexie.ts";
|
||||
import { DataStoreMQ } from "./mq.datastore.ts";
|
||||
import { assertEquals } from "../../test_deps.ts";
|
||||
import { sleep } from "$sb/lib/async.ts";
|
||||
import { DenoKvPrimitives } from "./deno_kv_primitives.ts";
|
||||
import { DataStore } from "./datastore.ts";
|
||||
|
||||
Deno.test("Dexie MQ", async () => {
|
||||
const mq = new DexieMQ("test", indexedDB, IDBKeyRange);
|
||||
Deno.test("DataStore MQ", async () => {
|
||||
const tmpFile = await Deno.makeTempFile();
|
||||
const db = new DenoKvPrimitives(await Deno.openKv(tmpFile));
|
||||
|
||||
const mq = new DataStoreMQ(new DataStore(db, ["mq"]));
|
||||
await mq.send("test", "Hello World");
|
||||
let messages = await mq.poll("test", 10);
|
||||
assertEquals(messages.length, 1);
|
||||
@ -28,12 +32,15 @@ Deno.test("Dexie MQ", async () => {
|
||||
let receivedMessage = false;
|
||||
const unsubscribe = mq.subscribe("test123", {}, async (messages) => {
|
||||
assertEquals(messages.length, 1);
|
||||
await mq.ack("test123", messages[0].id);
|
||||
receivedMessage = true;
|
||||
console.log("RECEIVED TEH EMSSSAGE");
|
||||
await mq.ack("test123", messages[0].id);
|
||||
});
|
||||
mq.send("test123", "Hello World");
|
||||
await mq.send("test123", "Hello World");
|
||||
console.log("After send");
|
||||
// Give time to process the message
|
||||
await sleep(1);
|
||||
await sleep(10);
|
||||
console.log("After sleep");
|
||||
assertEquals(receivedMessage, true);
|
||||
unsubscribe();
|
||||
|
||||
@ -50,4 +57,7 @@ Deno.test("Dexie MQ", async () => {
|
||||
assertEquals(await mq.fetchProcessingMessages(), []);
|
||||
// Give time to close the db
|
||||
await sleep(20);
|
||||
|
||||
db.close();
|
||||
await Deno.remove(tmpFile);
|
||||
});
|
276
plugos/lib/mq.datastore.ts
Normal file
276
plugos/lib/mq.datastore.ts
Normal file
@ -0,0 +1,276 @@
|
||||
import { KV, MQMessage, MQStats, MQSubscribeOptions } from "$sb/types.ts";
|
||||
import { MessageQueue } from "./mq.ts";
|
||||
import { DataStore } from "./datastore.ts";
|
||||
|
||||
export type ProcessingMessage = MQMessage & {
|
||||
ts: number;
|
||||
};
|
||||
|
||||
const queuedPrefix = ["mq", "queued"];
|
||||
const processingPrefix = ["mq", "processing"];
|
||||
const dlqPrefix = ["mq", "dlq"];
|
||||
|
||||
export class DataStoreMQ implements MessageQueue {
|
||||
// queue -> set of run() functions
|
||||
localSubscriptions = new Map<string, Set<() => void>>();
|
||||
|
||||
constructor(
|
||||
private ds: DataStore,
|
||||
) {
|
||||
}
|
||||
|
||||
// Internal sequencer for messages, only really necessary when batch sending tons of messages within a millisecond
|
||||
seq = 0;
|
||||
|
||||
async batchSend(queue: string, bodies: any[]): Promise<void> {
|
||||
const messages: KV<MQMessage>[] = bodies.map((body) => {
|
||||
const id = `${Date.now()}-${String(++this.seq).padStart(6, "0")}`;
|
||||
const key = [...queuedPrefix, queue, id];
|
||||
return {
|
||||
key,
|
||||
value: { id, queue, body },
|
||||
};
|
||||
});
|
||||
|
||||
await this.ds.batchSet(messages);
|
||||
|
||||
// See if we can immediately process the message with a local subscription
|
||||
const localSubscriptions = this.localSubscriptions.get(queue);
|
||||
if (localSubscriptions) {
|
||||
for (const run of localSubscriptions) {
|
||||
run();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
send(queue: string, body: any): Promise<void> {
|
||||
return this.batchSend(queue, [body]);
|
||||
}
|
||||
|
||||
async poll(queue: string, maxItems: number): Promise<MQMessage[]> {
|
||||
// Note: this is not happening in a transactional way, so we may get duplicate message delivery
|
||||
// Retrieve a batch of messages
|
||||
const messages = await this.ds.query<MQMessage>({
|
||||
prefix: [...queuedPrefix, queue],
|
||||
limit: ["number", maxItems],
|
||||
});
|
||||
// Put them in the processing queue
|
||||
await this.ds.batchSet(
|
||||
messages.map((m) => ({
|
||||
key: [...processingPrefix, queue, m.value.id],
|
||||
value: {
|
||||
...m.value,
|
||||
ts: Date.now(),
|
||||
},
|
||||
})),
|
||||
);
|
||||
// Delete them from the queued queue
|
||||
await this.ds.batchDelete(messages.map((m) => m.key));
|
||||
|
||||
// Return them
|
||||
return messages.map((m) => m.value);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param queue
|
||||
* @param batchSize
|
||||
* @param callback
|
||||
* @returns a function to be called to unsubscribe
|
||||
*/
|
||||
subscribe(
|
||||
queue: string,
|
||||
options: MQSubscribeOptions,
|
||||
callback: (messages: MQMessage[]) => Promise<void> | void,
|
||||
): () => void {
|
||||
let running = true;
|
||||
let timeout: number | undefined;
|
||||
const batchSize = options.batchSize || 1;
|
||||
const run = async () => {
|
||||
try {
|
||||
if (!running) {
|
||||
return;
|
||||
}
|
||||
const messages = await this.poll(queue, batchSize);
|
||||
if (messages.length > 0) {
|
||||
await callback(messages);
|
||||
}
|
||||
// If we got exactly the batch size, there might be more messages
|
||||
if (messages.length === batchSize) {
|
||||
await run();
|
||||
}
|
||||
if (timeout) {
|
||||
clearTimeout(timeout);
|
||||
}
|
||||
timeout = setTimeout(run, options.pollInterval || 5000);
|
||||
} catch (e: any) {
|
||||
console.error("Error in MQ subscription handler", e);
|
||||
}
|
||||
};
|
||||
|
||||
// Register as a local subscription handler
|
||||
const localSubscriptions = this.localSubscriptions.get(queue);
|
||||
if (!localSubscriptions) {
|
||||
this.localSubscriptions.set(queue, new Set([run]));
|
||||
} else {
|
||||
localSubscriptions.add(run);
|
||||
}
|
||||
|
||||
// Run the first time (which will schedule subsequent polling intervals)
|
||||
run();
|
||||
|
||||
// And return an unsubscribe function
|
||||
return () => {
|
||||
running = false;
|
||||
if (timeout) {
|
||||
clearTimeout(timeout);
|
||||
}
|
||||
// Remove the subscription from localSubscriptions
|
||||
const queueSubscriptions = this.localSubscriptions.get(queue);
|
||||
if (queueSubscriptions) {
|
||||
queueSubscriptions.delete(run);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
ack(queue: string, id: string) {
|
||||
return this.batchAck(queue, [id]);
|
||||
}
|
||||
|
||||
async batchAck(queue: string, ids: string[]) {
|
||||
await this.ds.batchDelete(
|
||||
ids.map((id) => [...processingPrefix, queue, id]),
|
||||
);
|
||||
}
|
||||
|
||||
async requeueTimeouts(
|
||||
timeout: number,
|
||||
maxRetries?: number,
|
||||
disableDLQ?: boolean,
|
||||
) {
|
||||
const now = Date.now();
|
||||
const messages = await this.ds.query<ProcessingMessage>({
|
||||
prefix: processingPrefix,
|
||||
filter: ["<", ["attr", "ts"], ["number", now - timeout]],
|
||||
});
|
||||
await this.ds.batchDelete(messages.map((m) => m.key));
|
||||
const newMessages: KV<ProcessingMessage>[] = [];
|
||||
for (const { value: m } of messages) {
|
||||
const retries = (m.retries || 0) + 1;
|
||||
if (maxRetries && retries > maxRetries) {
|
||||
if (disableDLQ) {
|
||||
console.warn(
|
||||
"[mq]",
|
||||
"Message exceeded max retries, flushing message",
|
||||
m,
|
||||
);
|
||||
} else {
|
||||
console.warn(
|
||||
"[mq]",
|
||||
"Message exceeded max retries, moving to DLQ",
|
||||
m,
|
||||
);
|
||||
newMessages.push({
|
||||
key: [...dlqPrefix, m.queue, m.id],
|
||||
value: {
|
||||
queue: m.queue,
|
||||
id: m.id,
|
||||
body: m.body,
|
||||
ts: Date.now(),
|
||||
retries,
|
||||
},
|
||||
});
|
||||
}
|
||||
} else {
|
||||
console.info("[mq]", "Message ack timed out, requeueing", m);
|
||||
newMessages.push({
|
||||
key: [...queuedPrefix, m.queue, m.id],
|
||||
value: {
|
||||
...m,
|
||||
retries,
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
await this.ds.batchSet(newMessages);
|
||||
}
|
||||
|
||||
async fetchDLQMessages(): Promise<ProcessingMessage[]> {
|
||||
return (await this.ds.query<ProcessingMessage>({ prefix: dlqPrefix })).map((
|
||||
{ value },
|
||||
) => value);
|
||||
}
|
||||
|
||||
async fetchProcessingMessages(): Promise<ProcessingMessage[]> {
|
||||
return (await this.ds.query<ProcessingMessage>({
|
||||
prefix: processingPrefix,
|
||||
})).map((
|
||||
{ value },
|
||||
) => value);
|
||||
}
|
||||
|
||||
flushDLQ(): Promise<void> {
|
||||
return this.ds.queryDelete({ prefix: dlqPrefix });
|
||||
}
|
||||
|
||||
async getQueueStats(queue: string): Promise<MQStats> {
|
||||
const queued =
|
||||
(await (this.ds.query({ prefix: [...queuedPrefix, queue] }))).length;
|
||||
const processing =
|
||||
(await (this.ds.query({ prefix: [...processingPrefix, queue] }))).length;
|
||||
const dlq =
|
||||
(await (this.ds.query({ prefix: [...dlqPrefix, queue] }))).length;
|
||||
return {
|
||||
queued,
|
||||
processing,
|
||||
dlq,
|
||||
};
|
||||
}
|
||||
|
||||
async getAllQueueStats(): Promise<Record<string, MQStats>> {
|
||||
const allStatus: Record<string, MQStats> = {};
|
||||
for (
|
||||
const { value: message } of await this.ds.query<MQMessage>({
|
||||
prefix: queuedPrefix,
|
||||
})
|
||||
) {
|
||||
if (!allStatus[message.queue]) {
|
||||
allStatus[message.queue] = {
|
||||
queued: 0,
|
||||
processing: 0,
|
||||
dlq: 0,
|
||||
};
|
||||
}
|
||||
allStatus[message.queue].queued++;
|
||||
}
|
||||
for (
|
||||
const { value: message } of await this.ds.query<MQMessage>({
|
||||
prefix: processingPrefix,
|
||||
})
|
||||
) {
|
||||
if (!allStatus[message.queue]) {
|
||||
allStatus[message.queue] = {
|
||||
queued: 0,
|
||||
processing: 0,
|
||||
dlq: 0,
|
||||
};
|
||||
}
|
||||
allStatus[message.queue].processing++;
|
||||
}
|
||||
for (
|
||||
const { value: message } of await this.ds.query<MQMessage>({
|
||||
prefix: dlqPrefix,
|
||||
})
|
||||
) {
|
||||
if (!allStatus[message.queue]) {
|
||||
allStatus[message.queue] = {
|
||||
queued: 0,
|
||||
processing: 0,
|
||||
dlq: 0,
|
||||
};
|
||||
}
|
||||
allStatus[message.queue].dlq++;
|
||||
}
|
||||
|
||||
return allStatus;
|
||||
}
|
||||
}
|
@ -1,20 +0,0 @@
|
||||
import { sleep } from "$sb/lib/async.ts";
|
||||
import { DenoKvMQ } from "./mq.deno_kv.ts";
|
||||
|
||||
Deno.test("Deno MQ", async () => {
|
||||
const denoKv = await Deno.openKv("test.db");
|
||||
const mq = new DenoKvMQ(denoKv);
|
||||
const unsub = mq.subscribe("test", {}, (messages) => {
|
||||
console.log("Received on test", messages);
|
||||
});
|
||||
const unsub2 = mq.subscribe("test2", {}, (messages) => {
|
||||
console.log("Received on test2", messages);
|
||||
});
|
||||
await mq.send("test", "Hello World");
|
||||
await mq.batchSend("test2", ["Hello World 2", "Hello World 3"]);
|
||||
|
||||
// Let's avoid a panic here
|
||||
await sleep(20);
|
||||
denoKv.close();
|
||||
await Deno.remove("test.db");
|
||||
});
|
@ -1,93 +0,0 @@
|
||||
/// <reference lib="deno.unstable" />
|
||||
|
||||
import {
|
||||
MQMessage,
|
||||
MQStats,
|
||||
MQSubscribeOptions,
|
||||
} from "../../plug-api/types.ts";
|
||||
import { MessageQueue } from "./mq.ts";
|
||||
|
||||
type QueuedMessage = [string, MQMessage];
|
||||
|
||||
export class DenoKvMQ implements MessageQueue {
|
||||
listeners: Map<string, Set<(messages: MQMessage[]) => void | Promise<void>>> =
|
||||
new Map();
|
||||
|
||||
constructor(private kv: Deno.Kv) {
|
||||
kv.listenQueue(async (message: unknown) => {
|
||||
const [queue, body] = message as QueuedMessage;
|
||||
const listeners = this.listeners.get(queue);
|
||||
if (!listeners) {
|
||||
return;
|
||||
}
|
||||
for (const listener of listeners) {
|
||||
await Promise.resolve(listener([{ id: "_dummyid", queue, body }]));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Dummy implementation
|
||||
getQueueStats(_queue: string): Promise<MQStats> {
|
||||
return Promise.resolve({
|
||||
queued: 0,
|
||||
processing: 0,
|
||||
dlq: 0,
|
||||
});
|
||||
}
|
||||
|
||||
// Dummy implementation
|
||||
getAllQueueStats(): Promise<Record<string, MQStats>> {
|
||||
return Promise.resolve({});
|
||||
}
|
||||
|
||||
async batchSend(queue: string, bodies: any[]): Promise<void> {
|
||||
for (const body of bodies) {
|
||||
const result = await this.kv.enqueue([queue, body]);
|
||||
if (!result.ok) {
|
||||
throw result;
|
||||
}
|
||||
}
|
||||
// const results = await Promise.all(
|
||||
// bodies.map((body) => this.kv.enqueue([queue, body])),
|
||||
// );
|
||||
// for (const result of results) {
|
||||
// if (!result.ok) {
|
||||
// throw result;
|
||||
// }
|
||||
// }
|
||||
}
|
||||
async send(queue: string, body: any): Promise<void> {
|
||||
const result = await this.kv.enqueue([queue, body]);
|
||||
if (!result.ok) {
|
||||
throw result;
|
||||
}
|
||||
}
|
||||
subscribe(
|
||||
queue: string,
|
||||
_options: MQSubscribeOptions,
|
||||
callback: (messages: MQMessage[]) => void | Promise<void>,
|
||||
): () => void {
|
||||
const listeners = this.listeners.get(queue);
|
||||
if (!listeners) {
|
||||
this.listeners.set(queue, new Set([callback]));
|
||||
} else {
|
||||
listeners.add(callback);
|
||||
}
|
||||
|
||||
return () => {
|
||||
const listeners = this.listeners.get(queue);
|
||||
if (!listeners) {
|
||||
return;
|
||||
}
|
||||
listeners.delete(callback);
|
||||
};
|
||||
}
|
||||
ack(_queue: string, _id: string): Promise<void> {
|
||||
// Doesn't apply to this implementation
|
||||
return Promise.resolve();
|
||||
}
|
||||
batchAck(_queue: string, _ids: string[]): Promise<void> {
|
||||
// Doesn't apply to this implementation
|
||||
return Promise.resolve();
|
||||
}
|
||||
}
|
@ -1,279 +0,0 @@
|
||||
import Dexie, { Table } from "dexie";
|
||||
import { MQMessage, MQStats, MQSubscribeOptions } from "$sb/types.ts";
|
||||
import { MessageQueue } from "./mq.ts";
|
||||
|
||||
// A processing message is a queued message plus the timestamp (ts) at which
// it was moved into the "processing" table; used for ack-timeout detection
export type ProcessingMessage = MQMessage & {
  ts: number;
};

/**
 * IndexedDB-backed (via Dexie) implementation of MessageQueue, with three
 * tables: "queued" (pending messages), "processing" (polled but not yet
 * acknowledged) and "dlq" (dead-letter queue for messages that exceeded
 * their max retries). Keys are compound [queue+id] pairs.
 */
export class DexieMQ implements MessageQueue {
  db: Dexie;
  queued: Table<MQMessage, [string, string]>;
  processing: Table<ProcessingMessage, [string, string]>;
  dlq: Table<ProcessingMessage, [string, string]>;

  // queue -> set of run() functions
  localSubscriptions = new Map<string, Set<() => void>>();

  // indexedDB and IDBKeyRange appear to be injectable implementations
  // (e.g. a shim outside the browser) — TODO confirm against callers
  constructor(
    dbName: string,
    indexedDB?: any,
    IDBKeyRange?: any,
  ) {
    this.db = new Dexie(dbName, {
      indexedDB,
      IDBKeyRange,
    });
    // Compound [queue+id] primary key with secondary indexes per field;
    // "processing" additionally indexes ts for timeout scans
    this.db.version(1).stores({
      queued: "[queue+id], queue, id",
      processing: "[queue+id], queue, id, ts",
      dlq: "[queue+id], queue, id",
    });
    this.queued = this.db.table("queued");
    this.processing = this.db.table("processing");
    this.dlq = this.db.table("dlq");
  }

  // Internal sequencer for messages, only really necessary when batch sending tons of messages within a millisecond
  seq = 0;

  /**
   * Enqueues all bodies on the given queue. Message ids are
   * "<timestamp>-<zero-padded seq>", so they sort in send order, and any
   * local subscribers are kicked immediately rather than waiting for the
   * next poll interval.
   */
  async batchSend(queue: string, bodies: any[]) {
    const messages = bodies.map((body) => ({
      id: `${Date.now()}-${String(++this.seq).padStart(6, "0")}`,
      queue,
      body,
    }));

    await this.queued.bulkAdd(messages);

    // See if we can immediately process the message with a local subscription
    const localSubscriptions = this.localSubscriptions.get(queue);
    if (localSubscriptions) {
      for (const run of localSubscriptions) {
        run();
      }
    }
  }

  // Single-message convenience wrapper around batchSend
  send(queue: string, body: any) {
    return this.batchSend(queue, [body]);
  }

  /**
   * Atomically moves up to maxItems of the oldest queued messages for this
   * queue into the "processing" table (stamped with the current time) and
   * returns them. Runs in one read-write transaction so concurrent polls
   * cannot hand out the same message twice.
   */
  poll(queue: string, maxItems: number): Promise<MQMessage[]> {
    return this.db.transaction(
      "rw",
      [this.queued, this.processing],
      async (tx) => {
        const messages =
          (await tx.table<MQMessage, [string, string]>("queued").where({
            queue,
          })
            .sortBy("id")).slice(0, maxItems);
        const ids: [string, string][] = messages.map((m) => [queue, m.id]);
        await tx.table("queued").bulkDelete(ids);
        await tx.table<ProcessingMessage, [string, string]>("processing")
          .bulkPut(
            messages.map((m) => ({
              ...m,
              ts: Date.now(),
            })),
          );
        return messages;
      },
    );
  }

  /**
   * Subscribes a callback to a queue: polls in batches, drains eagerly
   * while full batches keep coming, then falls back to interval polling.
   * @param queue
   * @param batchSize
   * @param callback
   * @returns a function to be called to unsubscribe
   */
  subscribe(
    queue: string,
    options: MQSubscribeOptions,
    callback: (messages: MQMessage[]) => Promise<void> | void,
  ): () => void {
    let running = true;
    let timeout: number | undefined;
    const batchSize = options.batchSize || 1;
    const run = async () => {
      try {
        if (!running) {
          return;
        }
        const messages = await this.poll(queue, batchSize);
        if (messages.length > 0) {
          await callback(messages);
        }
        // If we got exactly the batch size, there might be more messages
        if (messages.length === batchSize) {
          await run();
        }
        // Reschedule the next poll, replacing any pending timer
        if (timeout) {
          clearTimeout(timeout);
        }
        timeout = setTimeout(run, options.pollInterval || 5000);
      } catch (e: any) {
        // Swallow to keep the polling loop alive on callback errors
        console.error("Error in MQ subscription handler", e);
      }
    };

    // Register as a local subscription handler
    const localSubscriptions = this.localSubscriptions.get(queue);
    if (!localSubscriptions) {
      this.localSubscriptions.set(queue, new Set([run]));
    } else {
      localSubscriptions.add(run);
    }

    // Run the first time (which will schedule subsequent polling intervals)
    run();

    // And return an unsubscribe function
    return () => {
      running = false;
      if (timeout) {
        clearTimeout(timeout);
      }
      // Remove the subscription from localSubscriptions
      const queueSubscriptions = this.localSubscriptions.get(queue);
      if (queueSubscriptions) {
        queueSubscriptions.delete(run);
      }
    };
  }

  // Acknowledges (completes) a single processed message
  ack(queue: string, id: string) {
    return this.batchAck(queue, [id]);
  }

  // Acknowledges a batch of processed messages by deleting them from the
  // "processing" table
  async batchAck(queue: string, ids: string[]) {
    await this.processing.bulkDelete(ids.map((id) => [queue, id]));
  }

  /**
   * Scans "processing" for messages not acknowledged within `timeout` ms
   * and requeues them with an incremented retry count. Messages that
   * exceed maxRetries are moved to the DLQ, or dropped entirely when
   * disableDLQ is set.
   */
  async requeueTimeouts(
    timeout: number,
    maxRetries?: number,
    disableDLQ?: boolean,
  ) {
    const now = Date.now();
    const messages = await this.processing.where("ts").below(now - timeout)
      .toArray();
    const ids: [string, string][] = messages.map((m) => [m.queue, m.id]);
    await this.db.transaction(
      "rw",
      [this.queued, this.processing, this.dlq],
      async (tx) => {
        await tx.table("processing").bulkDelete(ids);
        const requeuedMessages: ProcessingMessage[] = [];
        const dlqMessages: ProcessingMessage[] = [];
        for (const m of messages) {
          const retries = (m.retries || 0) + 1;
          if (maxRetries && retries > maxRetries) {
            if (disableDLQ) {
              console.warn(
                "[mq]",
                "Message exceeded max retries, flushing message",
                m,
              );
            } else {
              console.warn(
                "[mq]",
                "Message exceeded max retries, moving to DLQ",
                m,
              );
              dlqMessages.push({
                queue: m.queue,
                id: m.id,
                body: m.body,
                ts: Date.now(),
                retries,
              });
            }
          } else {
            console.info("[mq]", "Message ack timed out, requeueing", m);
            requeuedMessages.push({
              ...m,
              retries,
            });
          }
        }
        await tx.table("queued").bulkPut(requeuedMessages);
        await tx.table("dlq").bulkPut(dlqMessages);
      },
    );
  }

  // Returns all dead-letter messages
  fetchDLQMessages(): Promise<ProcessingMessage[]> {
    return this.dlq.toArray();
  }

  // Returns all messages currently being processed (polled, not yet acked)
  fetchProcessingMessages(): Promise<ProcessingMessage[]> {
    return this.processing.toArray();
  }

  // Drops all dead-letter messages
  flushDLQ(): Promise<void> {
    return this.dlq.clear();
  }

  // Counts queued/processing/dlq messages for one queue in a single
  // read-only transaction
  getQueueStats(queue: string): Promise<MQStats> {
    return this.db.transaction(
      "r",
      [this.queued, this.processing, this.dlq],
      async (tx) => {
        const queued = await tx.table("queued").where({ queue }).count();
        const processing = await tx.table("processing").where({ queue })
          .count();
        const dlq = await tx.table("dlq").where({ queue }).count();
        return {
          queued,
          processing,
          dlq,
        };
      },
    );
  }

  // Aggregates per-queue counts across all three tables by scanning them
  // fully inside one read-only transaction
  async getAllQueueStats(): Promise<Record<string, MQStats>> {
    const allStatus: Record<string, MQStats> = {};
    await this.db.transaction(
      "r",
      [this.queued, this.processing, this.dlq],
      async (tx) => {
        for (const item of await tx.table("queued").toArray()) {
          if (!allStatus[item.queue]) {
            allStatus[item.queue] = {
              queued: 0,
              processing: 0,
              dlq: 0,
            };
          }
          allStatus[item.queue].queued++;
        }
        for (const item of await tx.table("processing").toArray()) {
          if (!allStatus[item.queue]) {
            allStatus[item.queue] = {
              queued: 0,
              processing: 0,
              dlq: 0,
            };
          }
          allStatus[item.queue].processing++;
        }
        for (const item of await tx.table("dlq").toArray()) {
          if (!allStatus[item.queue]) {
            allStatus[item.queue] = {
              queued: 0,
              processing: 0,
              dlq: 0,
            };
          }
          allStatus[item.queue].dlq++;
        }
      },
    );

    return allStatus;
  }
}
|
@ -86,7 +86,7 @@ export class Plug<HookT> {
|
||||
`Function ${name} is not available in ${this.runtimeEnv}`,
|
||||
);
|
||||
}
|
||||
return await sandbox.invoke(name, args);
|
||||
return sandbox.invoke(name, args);
|
||||
}
|
||||
|
||||
stop() {
|
||||
|
75
plugos/syscalls/datastore.ts
Normal file
75
plugos/syscalls/datastore.ts
Normal file
@ -0,0 +1,75 @@
|
||||
import { KV, KvKey, KvQuery } from "$sb/types.ts";
|
||||
import type { DataStore } from "../lib/datastore.ts";
|
||||
import type { SyscallContext, SysCallMapping } from "../system.ts";
|
||||
|
||||
/**
|
||||
* Exposes the datastore API to plugs, but scoping everything to a prefix based on the plug's name
|
||||
* @param ds the datastore to wrap
|
||||
* @param prefix prefix to scope all keys to to which the plug name will be appended
|
||||
*/
|
||||
export function dataStoreSyscalls(
|
||||
ds: DataStore,
|
||||
prefix: KvKey = ["ds"],
|
||||
): SysCallMapping {
|
||||
return {
|
||||
"datastore.delete": (ctx, key: KvKey) => {
|
||||
return ds.delete(applyPrefix(ctx, key));
|
||||
},
|
||||
|
||||
"datastore.set": (ctx, key: KvKey, value: any) => {
|
||||
return ds.set(applyPrefix(ctx, key), value);
|
||||
},
|
||||
|
||||
"datastore.batchSet": (ctx, kvs: KV[]) => {
|
||||
return ds.batchSet(
|
||||
kvs.map((kv) => ({ key: applyPrefix(ctx, kv.key), value: kv.value })),
|
||||
);
|
||||
},
|
||||
|
||||
"datastore.batchDelete": (ctx, keys: KvKey[]) => {
|
||||
return ds.batchDelete(keys.map((k) => applyPrefix(ctx, k)));
|
||||
},
|
||||
|
||||
"datastore.batchGet": (
|
||||
ctx,
|
||||
keys: KvKey[],
|
||||
): Promise<(any | undefined)[]> => {
|
||||
return ds.batchGet(keys.map((k) => applyPrefix(ctx, k)));
|
||||
},
|
||||
|
||||
"datastore.get": (ctx, key: KvKey): Promise<any | null> => {
|
||||
return ds.get(applyPrefix(ctx, key));
|
||||
},
|
||||
|
||||
"datastore.query": async (
|
||||
ctx,
|
||||
query: KvQuery,
|
||||
): Promise<KV[]> => {
|
||||
return (await ds.query({
|
||||
...query,
|
||||
prefix: applyPrefix(ctx, query.prefix),
|
||||
})).map((kv) => ({
|
||||
key: stripPrefix(kv.key),
|
||||
value: kv.value,
|
||||
}));
|
||||
},
|
||||
|
||||
"datastore.queryDelete": (
|
||||
ctx,
|
||||
query: KvQuery,
|
||||
): Promise<void> => {
|
||||
return ds.queryDelete({
|
||||
...query,
|
||||
prefix: applyPrefix(ctx, query.prefix),
|
||||
});
|
||||
},
|
||||
};
|
||||
|
||||
function applyPrefix(ctx: SyscallContext, key?: KvKey): KvKey {
|
||||
return [...prefix, ctx.plug.name!, ...(key ? key : [])];
|
||||
}
|
||||
|
||||
function stripPrefix(key: KvKey): KvKey {
|
||||
return key.slice(prefix.length + 1);
|
||||
}
|
||||
}
|
@ -1,54 +0,0 @@
|
||||
import { SysCallMapping } from "../system.ts";
|
||||
import { KV, KVStore } from "../lib/kv_store.ts";
|
||||
|
||||
export function storeSyscalls(
|
||||
db: KVStore,
|
||||
): SysCallMapping {
|
||||
return {
|
||||
"store.delete": (_ctx, key: string) => {
|
||||
return db.del(key);
|
||||
},
|
||||
|
||||
"store.deletePrefix": (_ctx, prefix: string) => {
|
||||
return db.deletePrefix(prefix);
|
||||
},
|
||||
|
||||
"store.deleteAll": () => {
|
||||
return db.deleteAll();
|
||||
},
|
||||
|
||||
"store.set": (_ctx, key: string, value: any) => {
|
||||
return db.set(key, value);
|
||||
},
|
||||
|
||||
"store.batchSet": (_ctx, kvs: KV[]) => {
|
||||
return db.batchSet(kvs);
|
||||
},
|
||||
|
||||
"store.batchDelete": (_ctx, keys: string[]) => {
|
||||
return db.batchDelete(keys);
|
||||
},
|
||||
|
||||
"store.batchGet": (
|
||||
_ctx,
|
||||
keys: string[],
|
||||
): Promise<(any | undefined)[]> => {
|
||||
return db.batchGet(keys);
|
||||
},
|
||||
|
||||
"store.get": (_ctx, key: string): Promise<any | null> => {
|
||||
return db.get(key);
|
||||
},
|
||||
|
||||
"store.has": (_ctx, key: string): Promise<boolean> => {
|
||||
return db.has(key);
|
||||
},
|
||||
|
||||
"store.queryPrefix": (
|
||||
_ctx,
|
||||
keyPrefix: string,
|
||||
): Promise<{ key: string; value: any }[]> => {
|
||||
return db.queryPrefix(keyPrefix);
|
||||
},
|
||||
};
|
||||
}
|
@ -86,8 +86,7 @@ export class System<HookT> extends EventEmitter<SystemEvents<HookT>> {
|
||||
args: any[],
|
||||
): Promise<any> {
|
||||
return this.syscallWithContext(
|
||||
// Mock the plug
|
||||
{ plug: { name: contextPlugName } as any },
|
||||
{ plug: this.plugs.get(contextPlugName)! },
|
||||
syscallName,
|
||||
args,
|
||||
);
|
||||
|
@ -75,7 +75,12 @@ export function setupMessageListener(
|
||||
result: result,
|
||||
} as ControllerMessage);
|
||||
} catch (e: any) {
|
||||
console.error(e);
|
||||
console.error(
|
||||
"An exception was thrown as a result of invoking function",
|
||||
data.name,
|
||||
"error:",
|
||||
e,
|
||||
);
|
||||
workerPostMessage({
|
||||
type: "invr",
|
||||
id: data.id!,
|
||||
|
@ -7,6 +7,7 @@ export const builtinPlugNames = [
|
||||
"plug-manager",
|
||||
"directive",
|
||||
"emoji",
|
||||
"query",
|
||||
"markdown",
|
||||
"share",
|
||||
"tasks",
|
||||
|
@ -1,5 +1,8 @@
|
||||
import { editor, markdown, mq, space, sync } from "$sb/syscalls.ts";
|
||||
import {
|
||||
addParentPointers,
|
||||
findParentMatching,
|
||||
nodeAtPos,
|
||||
ParseTree,
|
||||
removeParentPointers,
|
||||
renderToText,
|
||||
@ -7,9 +10,8 @@ import {
|
||||
} from "$sb/lib/tree.ts";
|
||||
import { renderDirectives } from "./directives.ts";
|
||||
import { extractFrontmatter } from "$sb/lib/frontmatter.ts";
|
||||
import type { PageMeta } from "../../web/types.ts";
|
||||
import { isFederationPath } from "$sb/lib/resolve.ts";
|
||||
import { MQMessage } from "$sb/types.ts";
|
||||
import { MQMessage, PageMeta } from "$sb/types.ts";
|
||||
import { sleep } from "$sb/lib/async.ts";
|
||||
|
||||
const directiveUpdateQueueName = "directiveUpdateQueue";
|
||||
@ -200,3 +202,30 @@ export async function updateDirectives(
|
||||
}
|
||||
return text;
|
||||
}
|
||||
|
||||
export async function convertToLiveQuery() {
|
||||
const text = await editor.getText();
|
||||
const pos = await editor.getCursor();
|
||||
const tree = await markdown.parseMarkdown(text);
|
||||
addParentPointers(tree);
|
||||
const currentNode = nodeAtPos(tree, pos);
|
||||
const directive = findParentMatching(
|
||||
currentNode!,
|
||||
(node) => node.type === "Directive",
|
||||
);
|
||||
if (!directive) {
|
||||
await editor.flashNotification(
|
||||
"No directive found at cursor position",
|
||||
"error",
|
||||
);
|
||||
return;
|
||||
}
|
||||
const queryText = renderToText(directive!.children![0].children![1]);
|
||||
await editor.dispatch({
|
||||
changes: {
|
||||
from: directive.from,
|
||||
to: directive.to,
|
||||
insert: "```query\n" + queryText + "\n```",
|
||||
},
|
||||
});
|
||||
}
|
||||
|
@ -1,11 +1,11 @@
|
||||
import { events } from "$sb/syscalls.ts";
|
||||
import { CompleteEvent } from "$sb/app_event.ts";
|
||||
import { buildHandebarOptions } from "./util.ts";
|
||||
import type { PageMeta } from "../../web/types.ts";
|
||||
import type {
|
||||
AttributeCompleteEvent,
|
||||
AttributeCompletion,
|
||||
} from "../index/attributes.ts";
|
||||
import { PageMeta } from "$sb/types.ts";
|
||||
|
||||
export async function queryComplete(completeEvent: CompleteEvent) {
|
||||
const querySourceMatch = /#query\s+([\w\-_]*)$/.exec(
|
||||
@ -14,18 +14,31 @@ export async function queryComplete(completeEvent: CompleteEvent) {
|
||||
if (querySourceMatch) {
|
||||
const allEvents = await events.listEvents();
|
||||
|
||||
return {
|
||||
from: completeEvent.pos - querySourceMatch[1].length,
|
||||
options: allEvents
|
||||
.filter((eventName) => eventName.startsWith("query:"))
|
||||
const completionOptions = allEvents
|
||||
.filter((eventName) =>
|
||||
eventName.startsWith("query:") && !eventName.includes("*")
|
||||
)
|
||||
.map((source) => ({
|
||||
label: source.substring("query:".length),
|
||||
})),
|
||||
}));
|
||||
|
||||
const allObjectTypes: string[] = (await events.dispatchEvent("query_", {}))
|
||||
.flat();
|
||||
|
||||
for (const type of allObjectTypes) {
|
||||
completionOptions.push({
|
||||
label: type,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
from: completeEvent.pos - querySourceMatch[1].length,
|
||||
options: completionOptions,
|
||||
};
|
||||
}
|
||||
|
||||
if (completeEvent.parentNodes.includes("DirectiveStart")) {
|
||||
const querySourceMatch = /#query\s+([\w\-_]+)/.exec(
|
||||
const querySourceMatch = /#query\s+([\w\-_\/]+)/.exec(
|
||||
completeEvent.linePrefix,
|
||||
);
|
||||
const whereMatch =
|
||||
@ -69,9 +82,9 @@ export async function templateVariableComplete(completeEvent: CompleteEvent) {
|
||||
);
|
||||
|
||||
const completions = (await events.dispatchEvent(
|
||||
`attribute:complete:*`,
|
||||
`attribute:complete:_`,
|
||||
{
|
||||
source: "*",
|
||||
source: "",
|
||||
prefix: match[1],
|
||||
} as AttributeCompleteEvent,
|
||||
)).flat() as AttributeCompletion[];
|
||||
@ -92,7 +105,7 @@ export function attributeCompletionsToCMCompletion(
|
||||
return completions.map(
|
||||
(completion) => ({
|
||||
label: completion.name,
|
||||
detail: `${completion.type} (${completion.source})`,
|
||||
detail: `${completion.attributeType} (${completion.source})`,
|
||||
type: "attribute",
|
||||
}),
|
||||
);
|
||||
|
@ -20,14 +20,6 @@ functions:
|
||||
mqSubscriptions:
|
||||
- queue: directiveUpdateQueue
|
||||
batchSize: 3
|
||||
indexData:
|
||||
path: ./data.ts:indexData
|
||||
events:
|
||||
- page:index
|
||||
dataQueryProvider:
|
||||
path: ./data.ts:queryProvider
|
||||
events:
|
||||
- query:data
|
||||
queryComplete:
|
||||
path: ./complete.ts:queryComplete
|
||||
events:
|
||||
@ -37,43 +29,13 @@ functions:
|
||||
events:
|
||||
- editor:complete
|
||||
|
||||
# Conversion
|
||||
convertToLiveQuery:
|
||||
path: command.ts:convertToLiveQuery
|
||||
command:
|
||||
name: "Directive: Convert Query to Live Query"
|
||||
|
||||
# Templates
|
||||
insertQuery:
|
||||
redirect: template.insertTemplateText
|
||||
slashCommand:
|
||||
name: query
|
||||
description: Insert a query
|
||||
value: |
|
||||
<!-- #query |^| -->
|
||||
|
||||
<!-- /query -->
|
||||
insertInclude:
|
||||
redirect: template.insertTemplateText
|
||||
slashCommand:
|
||||
name: include
|
||||
description: Include another page
|
||||
value: |
|
||||
<!-- #include [[|^|]] -->
|
||||
|
||||
<!-- /include -->
|
||||
insertUseTemplate:
|
||||
redirect: template.insertTemplateText
|
||||
slashCommand:
|
||||
name: use
|
||||
description: Use a template
|
||||
value: |
|
||||
<!-- #use [[|^|]] {} -->
|
||||
|
||||
<!-- /use -->
|
||||
insertUseVerboseTemplate:
|
||||
redirect: template.insertTemplateText
|
||||
slashCommand:
|
||||
name: use-verbose
|
||||
description: Use a template (verbose mode)
|
||||
value: |
|
||||
<!-- #use-verbose [[|^|]] {} -->
|
||||
|
||||
<!-- /use-verbose -->
|
||||
insertEvalTemplate:
|
||||
redirect: template.insertTemplateText
|
||||
slashCommand:
|
||||
|
@ -1,5 +1,11 @@
|
||||
import { ParseTree, renderToText } from "$sb/lib/tree.ts";
|
||||
import { PageMeta } from "../../web/types.ts";
|
||||
import {
|
||||
addParentPointers,
|
||||
findParentMatching,
|
||||
ParseTree,
|
||||
renderToText,
|
||||
} from "$sb/lib/tree.ts";
|
||||
import { PageMeta } from "$sb/types.ts";
|
||||
import { editor, markdown } from "$sb/syscalls.ts";
|
||||
|
||||
import { evalDirectiveRenderer } from "./eval_directive.ts";
|
||||
import { queryDirectiveRenderer } from "./query_directive.ts";
|
||||
@ -53,8 +59,29 @@ export async function directiveDispatcher(
|
||||
const directiveStartText = renderToText(directiveStart).trim();
|
||||
const directiveEndText = renderToText(directiveEnd).trim();
|
||||
|
||||
if (directiveStart.children!.length === 1) {
|
||||
// Everything not #query
|
||||
const firstPart = directiveStart.children![0].text!;
|
||||
if (firstPart?.includes("#query")) {
|
||||
// #query
|
||||
const newBody = await directiveRenderers["query"](
|
||||
"query",
|
||||
pageMeta,
|
||||
directiveStart.children![1].children![0], // The query ParseTree
|
||||
);
|
||||
const result =
|
||||
`${directiveStartText}\n${newBody.trim()}\n${directiveEndText}`;
|
||||
return result;
|
||||
} else if (firstPart?.includes("#eval")) {
|
||||
console.log("Eval stuff", directiveStart.children![1].children![0]);
|
||||
const newBody = await directiveRenderers["eval"](
|
||||
"eval",
|
||||
pageMeta,
|
||||
directiveStart.children![1].children![0],
|
||||
);
|
||||
const result =
|
||||
`${directiveStartText}\n${newBody.trim()}\n${directiveEndText}`;
|
||||
return result;
|
||||
} else {
|
||||
// Everything not #query and #eval
|
||||
const match = directiveStartRegex.exec(directiveStart.children![0].text!);
|
||||
if (!match) {
|
||||
throw Error("No match");
|
||||
@ -70,16 +97,6 @@ export async function directiveDispatcher(
|
||||
} catch (e: any) {
|
||||
return `${directiveStartText}\n**ERROR:** ${e.message}\n${directiveEndText}`;
|
||||
}
|
||||
} else {
|
||||
// #query
|
||||
const newBody = await directiveRenderers["query"](
|
||||
"query",
|
||||
pageMeta,
|
||||
directiveStart.children![1], // The query ParseTree
|
||||
);
|
||||
const result =
|
||||
`${directiveStartText}\n${newBody.trim()}\n${directiveEndText}`;
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1,23 +1,9 @@
|
||||
// This is some shocking stuff. My profession would kill me for this.
|
||||
|
||||
import { YAML } from "$sb/syscalls.ts";
|
||||
import { ParseTree } from "$sb/lib/tree.ts";
|
||||
import { jsonToMDTable, renderTemplate } from "./util.ts";
|
||||
import type { PageMeta } from "../../web/types.ts";
|
||||
import { ParseTree, parseTreeToAST } from "$sb/lib/tree.ts";
|
||||
import { replaceTemplateVars } from "../template/template.ts";
|
||||
|
||||
// Enables plugName.functionName(arg1, arg2) syntax in JS expressions
|
||||
function translateJs(js: string): string {
|
||||
return js.replaceAll(
|
||||
/(\w+\.\w+)\s*\(/g,
|
||||
'await invokeFunction("$1", ',
|
||||
);
|
||||
}
|
||||
|
||||
// Syntaxes to support:
|
||||
// - random JS expression
|
||||
// - random JS expression render [[some/template]]
|
||||
const expressionRegex = /(.+?)(\s+render\s+\[\[([^\]]+)\]\])?$/;
|
||||
import { PageMeta } from "$sb/types.ts";
|
||||
import { expressionToKvQueryExpression } from "$sb/lib/parse-query.ts";
|
||||
import { evalQueryExpression } from "$sb/lib/query.ts";
|
||||
import { builtinFunctions } from "$sb/lib/builtin_query_functions.ts";
|
||||
|
||||
// This is rather scary and fragile stuff, but it works.
|
||||
export async function evalDirectiveRenderer(
|
||||
@ -25,42 +11,19 @@ export async function evalDirectiveRenderer(
|
||||
pageMeta: PageMeta,
|
||||
expression: string | ParseTree,
|
||||
): Promise<string> {
|
||||
if (typeof expression !== "string") {
|
||||
throw new Error("Expected a string");
|
||||
}
|
||||
// console.log("Got JS expression", expression);
|
||||
const match = expressionRegex.exec(expression);
|
||||
if (!match) {
|
||||
throw new Error(`Invalid eval directive: ${expression}`);
|
||||
}
|
||||
let template = "";
|
||||
if (match[3]) {
|
||||
// This is the template reference
|
||||
expression = match[1];
|
||||
template = match[3];
|
||||
}
|
||||
try {
|
||||
// Why the weird "eval" call? https://esbuild.github.io/content-types/#direct-eval
|
||||
const result = await (0, eval)(
|
||||
`(async () => {
|
||||
function invokeFunction(name, ...args) {
|
||||
return syscall("system.invokeFunction", name, ...args);
|
||||
}
|
||||
return ${replaceTemplateVars(translateJs(expression), pageMeta)};
|
||||
})()`,
|
||||
const result = evalQueryExpression(
|
||||
expressionToKvQueryExpression(parseTreeToAST(
|
||||
JSON.parse(
|
||||
await replaceTemplateVars(JSON.stringify(expression), pageMeta),
|
||||
),
|
||||
)),
|
||||
{},
|
||||
builtinFunctions,
|
||||
);
|
||||
if (template) {
|
||||
return await renderTemplate(pageMeta, template, result);
|
||||
}
|
||||
if (typeof result === "string") {
|
||||
return result;
|
||||
} else if (typeof result === "number") {
|
||||
return "" + result;
|
||||
} else if (Array.isArray(result)) {
|
||||
return jsonToMDTable(result);
|
||||
}
|
||||
return await YAML.stringify(result);
|
||||
|
||||
return Promise.resolve("" + result);
|
||||
} catch (e: any) {
|
||||
return `**ERROR:** ${e.message}`;
|
||||
return Promise.resolve(`**ERROR:** ${e.message}`);
|
||||
}
|
||||
}
|
||||
|
@ -1,122 +0,0 @@
|
||||
import {
|
||||
collectNodesOfType,
|
||||
findNodeOfType,
|
||||
ParseTree,
|
||||
replaceNodesMatching,
|
||||
} from "$sb/lib/tree.ts";
|
||||
|
||||
// @ts-ignore auto generated
|
||||
import { ParsedQuery, QueryFilter } from "$sb/lib/query.ts";
|
||||
|
||||
// Converts a parsed query Program node (from the markdown query grammar)
// into a structured ParsedQuery: table, filters, ordering, limit, select
// and render clauses.
export function parseQuery(queryTree: ParseTree): ParsedQuery {
  // const n = lezerToParseTree(query, parser.parse(query).topNode);
  // Clean the tree a bit: trim anonymous text nodes, dropping the
  // whitespace-only ones entirely
  replaceNodesMatching(queryTree, (n) => {
    if (!n.type) {
      const trimmed = n.text!.trim();
      if (!trimmed) {
        return null;
      }
      n.text = trimmed;
    }
  });

  // console.log("Parsed", JSON.stringify(n, null, 2));
  const queryNode = queryTree.children![0];
  const parsedQuery: ParsedQuery = {
    // First child chain holds the query source/table name
    table: queryNode.children![0].children![0].text!,
    filter: [],
    ordering: [],
  };

  // Collect all "order by" clauses; direction defaults to ascending when
  // no OrderDirection node is present
  const orderByNodes = collectNodesOfType(queryNode, "OrderClause");
  for (const orderByNode of orderByNodes) {
    const nameNode = findNodeOfType(orderByNode, "Name");
    const orderBy = nameNode!.children![0].text!;
    const orderNode = findNodeOfType(orderByNode, "OrderDirection");
    const orderDesc = orderNode
      ? orderNode.children![0].text! === "desc"
      : false;
    parsedQuery.ordering.push({ orderBy, orderDesc });
  }
  /**
   * @deprecated due to PR #387
   * We'll take the first ordering and send that as the deprecated
   * fields orderBy and orderDesc. This way it will be backward
   * Plugs using the old ParsedQuery.
   * Remove this block completely when ParsedQuery no longer have
   * those two fields
   */
  if (parsedQuery.ordering.length > 0) {
    parsedQuery.orderBy = parsedQuery.ordering[0].orderBy;
    parsedQuery.orderDesc = parsedQuery.ordering[0].orderDesc;
  }
  /** @end-deprecation due to PR #387 */

  // Optional "limit N" clause
  const limitNode = findNodeOfType(queryNode, "LimitClause");
  if (limitNode) {
    const nameNode = findNodeOfType(limitNode, "Number");
    parsedQuery.limit = valueNodeToVal(nameNode!);
  }

  // Each FilterExpr has the shape: [attribute, operator, value]
  const filterNodes = collectNodesOfType(queryNode, "FilterExpr");
  for (const filterNode of filterNodes) {
    let val: any = undefined;
    const valNode = filterNode.children![2].children![0];
    val = valueNodeToVal(valNode);
    const f: QueryFilter = {
      prop: filterNode.children![0].children![0].text!,
      op: filterNode.children![1].text!,
      value: val,
    };
    parsedQuery.filter.push(f);
  }
  // Optional "select a, b, c" clause: collect the attribute names
  const selectNode = findNodeOfType(queryNode, "SelectClause");
  if (selectNode) {
    parsedQuery.select = [];
    collectNodesOfType(selectNode, "Name").forEach((t) => {
      parsedQuery.select!.push(t.children![0].text!);
    });
  }

  // Optional "render ..." clause: accepts either a [[page ref]] or a
  // quoted "string" template reference
  const renderNode = findNodeOfType(queryNode, "RenderClause");
  if (renderNode) {
    let renderNameNode = findNodeOfType(renderNode, "PageRef");
    if (!renderNameNode) {
      renderNameNode = findNodeOfType(renderNode, "String");
    }
    parsedQuery.render = valueNodeToVal(renderNameNode!);
  }

  return parsedQuery;
}
|
||||
|
||||
export function valueNodeToVal(valNode: ParseTree): any {
|
||||
switch (valNode.type) {
|
||||
case "Number":
|
||||
return +valNode.children![0].text!;
|
||||
case "Bool":
|
||||
return valNode.children![0].text! === "true";
|
||||
case "Null":
|
||||
return null;
|
||||
case "Name":
|
||||
return valNode.children![0].text!;
|
||||
case "Regex": {
|
||||
const val = valNode.children![0].text!;
|
||||
return val.substring(1, val.length - 1);
|
||||
}
|
||||
case "String": {
|
||||
const stringVal = valNode.children![0].text!;
|
||||
return stringVal.substring(1, stringVal.length - 1);
|
||||
}
|
||||
case "PageRef": {
|
||||
const pageRefVal = valNode.children![0].text!;
|
||||
return pageRefVal.substring(2, pageRefVal.length - 2);
|
||||
}
|
||||
case "List": {
|
||||
return collectNodesOfType(valNode, "Value").map((t) =>
|
||||
valueNodeToVal(t.children![0])
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
@ -1,187 +0,0 @@
|
||||
import { assertEquals } from "../../test_deps.ts";
|
||||
import { applyQuery } from "$sb/lib/query.ts";
|
||||
|
||||
import wikiMarkdownLang from "../../common/markdown_parser/parser.ts";
|
||||
import { parse } from "../../common/markdown_parser/parse_tree.ts";
|
||||
import { parseQuery as parseQueryQuery } from "./parser.ts";
|
||||
import { findNodeOfType, renderToText } from "../../plug-api/lib/tree.ts";
|
||||
|
||||
function parseQuery(query: string) {
|
||||
const lang = wikiMarkdownLang([]);
|
||||
const mdTree = parse(
|
||||
lang,
|
||||
`<!-- #query ${query} -->
|
||||
|
||||
<!-- /query -->`,
|
||||
);
|
||||
const programNode = findNodeOfType(mdTree, "Program")!;
|
||||
return parseQueryQuery(programNode);
|
||||
}
|
||||
|
||||
// Exercises the query parser end-to-end: table name, filters, ordering,
// limit, select and render clauses
Deno.test("Test parser", () => {
  // Simplest possible query: just a source/table name
  const parsedBasicQuery = parseQuery(`page`);
  assertEquals(parsedBasicQuery.table, "page");

  // Filters, descending ordering and limit combined in one query
  const parsedQuery1 = parseQuery(
    `task where completed = false and dueDate <= "{{today}}" order by dueDate desc limit 5`,
  );
  assertEquals(parsedQuery1.table, "task");
  assertEquals(parsedQuery1.ordering.length, 1);
  assertEquals(parsedQuery1.ordering[0].orderBy, "dueDate");
  assertEquals(parsedQuery1.ordering[0].orderDesc, true);
  assertEquals(parsedQuery1.limit, 5);
  assertEquals(parsedQuery1.filter.length, 2);
  assertEquals(parsedQuery1.filter[0], {
    op: "=",
    prop: "completed",
    value: false,
  });
  // Template placeholders are kept verbatim as string values
  assertEquals(parsedQuery1.filter[1], {
    op: "<=",
    prop: "dueDate",
    value: "{{today}}",
  });

  // Regex match operator (=~): the pattern is kept as a string with its
  // surrounding slashes stripped
  const parsedQuery2 = parseQuery(`page where name =~ /interview\\/.*/"`);
  assertEquals(parsedQuery2.table, "page");
  assertEquals(parsedQuery2.filter.length, 1);
  assertEquals(parsedQuery2.filter[0], {
    op: "=~",
    prop: "name",
    value: "interview\\/.*",
  });

  // Null comparison with !=
  const parsedQuery3 = parseQuery(`page where something != null`);
  assertEquals(parsedQuery3.table, "page");
  assertEquals(parsedQuery3.filter.length, 1);
  assertEquals(parsedQuery3.filter[0], {
    op: "!=",
    prop: "something",
    value: null,
  });

  // Select clauses with one and multiple attributes
  assertEquals(parseQuery(`page select name`).select, ["name"]);
  assertEquals(parseQuery(`page select name, age`).select, [
    "name",
    "age",
  ]);

  // "in" operator with a list literal value
  assertEquals(
    parseQuery(`gh-events where type in ["PushEvent", "somethingElse"]`),
    {
      table: "gh-events",
      ordering: [],
      filter: [
        {
          op: "in",
          prop: "type",
          value: ["PushEvent", "somethingElse"],
        },
      ],
    },
  );

  // Render clause accepts both [[page ref]] and "string" syntax, yielding
  // the same template reference
  assertEquals(parseQuery(`something render [[template/table]]`), {
    table: "something",
    ordering: [],
    filter: [],
    render: "template/table",
  });

  assertEquals(parseQuery(`something render "template/table"`), {
    table: "something",
    ordering: [],
    filter: [],
    render: "template/table",
  });
});
|
||||
|
||||
Deno.test("Test applyQuery", () => {
|
||||
const data: any[] = [
|
||||
{ name: "interview/My Interview", lastModified: 1 },
|
||||
{ name: "interview/My Interview 2", lastModified: 2 },
|
||||
{ name: "Pete", age: 38 },
|
||||
{ name: "Angie", age: 28 },
|
||||
];
|
||||
|
||||
assertEquals(
|
||||
applyQuery(parseQuery(`page where name =~ /interview\\/.*/`), data),
|
||||
[
|
||||
{ name: "interview/My Interview", lastModified: 1 },
|
||||
{ name: "interview/My Interview 2", lastModified: 2 },
|
||||
],
|
||||
);
|
||||
assertEquals(
|
||||
applyQuery(
|
||||
parseQuery(`page where name =~ /interview\\/.*/ order by lastModified`),
|
||||
data,
|
||||
),
|
||||
[
|
||||
{ name: "interview/My Interview", lastModified: 1 },
|
||||
{ name: "interview/My Interview 2", lastModified: 2 },
|
||||
],
|
||||
);
|
||||
assertEquals(
|
||||
applyQuery(
|
||||
parseQuery(
|
||||
`page where name =~ /interview\\/.*/ order by lastModified desc`,
|
||||
),
|
||||
data,
|
||||
),
|
||||
[
|
||||
{ name: "interview/My Interview 2", lastModified: 2 },
|
||||
{ name: "interview/My Interview", lastModified: 1 },
|
||||
],
|
||||
);
|
||||
assertEquals(applyQuery(parseQuery(`page where age > 30`), data), [
|
||||
{ name: "Pete", age: 38 },
|
||||
]);
|
||||
assertEquals(
|
||||
applyQuery(parseQuery(`page where age > 28 and age < 38`), data),
|
||||
[],
|
||||
);
|
||||
assertEquals(
|
||||
applyQuery(parseQuery(`page where age > 30 select name`), data),
|
||||
[{ name: "Pete" }],
|
||||
);
|
||||
|
||||
assertEquals(
|
||||
applyQuery(parseQuery(`page where name in ["Pete"] select name`), data),
|
||||
[{ name: "Pete" }],
|
||||
);
|
||||
});
|
||||
|
||||
Deno.test("Test applyQuery with multi value", () => {
|
||||
const data: any[] = [
|
||||
{ name: "Pete", children: ["John", "Angie"] },
|
||||
{ name: "Angie", children: ["Angie"] },
|
||||
{ name: "Steve" },
|
||||
];
|
||||
|
||||
assertEquals(
|
||||
applyQuery(parseQuery(`page where children = "Angie"`), data),
|
||||
[
|
||||
{ name: "Pete", children: ["John", "Angie"] },
|
||||
{ name: "Angie", children: ["Angie"] },
|
||||
],
|
||||
);
|
||||
|
||||
assertEquals(
|
||||
applyQuery(parseQuery(`page where children = ["Angie", "John"]`), data),
|
||||
[
|
||||
{ name: "Pete", children: ["John", "Angie"] },
|
||||
{ name: "Angie", children: ["Angie"] },
|
||||
],
|
||||
);
|
||||
});
|
||||
|
||||
const testQuery = `<!-- #query source where a = 1 and b = "2" and c = "3" -->
|
||||
|
||||
<!-- /query -->`;
|
||||
|
||||
Deno.test("Query parsing and serialization", () => {
|
||||
const lang = wikiMarkdownLang([]);
|
||||
const mdTree = parse(lang, testQuery);
|
||||
// console.log(JSON.stringify(mdTree, null, 2));
|
||||
assertEquals(renderToText(mdTree), testQuery);
|
||||
});
|
@ -2,10 +2,10 @@ import { events } from "$sb/syscalls.ts";
|
||||
|
||||
import { replaceTemplateVars } from "../template/template.ts";
|
||||
import { renderTemplate } from "./util.ts";
|
||||
import { parseQuery } from "./parser.ts";
|
||||
import { jsonToMDTable } from "./util.ts";
|
||||
import { ParseTree } from "$sb/lib/tree.ts";
|
||||
import type { PageMeta } from "../../web/types.ts";
|
||||
import { ParseTree, parseTreeToAST } from "$sb/lib/tree.ts";
|
||||
import { astToKvQuery } from "$sb/lib/parse-query.ts";
|
||||
import { PageMeta, Query } from "$sb/types.ts";
|
||||
|
||||
export async function queryDirectiveRenderer(
|
||||
_directive: string,
|
||||
@ -15,11 +15,14 @@ export async function queryDirectiveRenderer(
|
||||
if (typeof query === "string") {
|
||||
throw new Error("Argument must be a ParseTree");
|
||||
}
|
||||
const parsedQuery = parseQuery(
|
||||
JSON.parse(replaceTemplateVars(JSON.stringify(query), pageMeta)),
|
||||
const parsedQuery: Query = astToKvQuery(
|
||||
parseTreeToAST(
|
||||
JSON.parse(await replaceTemplateVars(JSON.stringify(query), pageMeta)),
|
||||
),
|
||||
);
|
||||
// console.log("QUERY", parsedQuery);
|
||||
|
||||
const eventName = `query:${parsedQuery.table}`;
|
||||
const eventName = `query:${parsedQuery.querySource}`;
|
||||
|
||||
// console.log("Parsed query", parsedQuery);
|
||||
// Let's dispatch an event and see what happens
|
||||
@ -30,24 +33,23 @@ export async function queryDirectiveRenderer(
|
||||
);
|
||||
if (results.length === 0) {
|
||||
// This means there was no handler for the event which means it's unsupported
|
||||
return `**Error:** Unsupported query source '${parsedQuery.table}'`;
|
||||
} else if (results.length === 1) {
|
||||
return `**Error:** Unsupported query source '${parsedQuery.querySource}'`;
|
||||
} else {
|
||||
// console.log("Parsed query", parsedQuery);
|
||||
const allResults = results.flat();
|
||||
if (parsedQuery.render) {
|
||||
const rendered = await renderTemplate(
|
||||
pageMeta,
|
||||
parsedQuery.render,
|
||||
results[0],
|
||||
allResults,
|
||||
);
|
||||
return rendered.trim();
|
||||
} else {
|
||||
if (results[0].length === 0) {
|
||||
if (allResults.length === 0) {
|
||||
return "No results";
|
||||
} else {
|
||||
return jsonToMDTable(results[0]);
|
||||
return jsonToMDTable(allResults);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
throw new Error(`Too many query results: ${results.length}`);
|
||||
}
|
||||
}
|
||||
|
@ -1,15 +1,13 @@
|
||||
import { queryRegex } from "$sb/lib/query.ts";
|
||||
import { ParseTree, renderToText } from "$sb/lib/tree.ts";
|
||||
import { markdown, space } from "$sb/syscalls.ts";
|
||||
import Handlebars from "handlebars";
|
||||
import { handlebars, markdown, space } from "$sb/syscalls.ts";
|
||||
|
||||
import { replaceTemplateVars } from "../template/template.ts";
|
||||
import { extractFrontmatter } from "$sb/lib/frontmatter.ts";
|
||||
import { directiveRegex } from "./directives.ts";
|
||||
import { updateDirectives } from "./command.ts";
|
||||
import { buildHandebarOptions } from "./util.ts";
|
||||
import { PageMeta } from "../../web/types.ts";
|
||||
import { resolvePath, rewritePageRefs } from "$sb/lib/resolve.ts";
|
||||
import { PageMeta } from "$sb/types.ts";
|
||||
|
||||
const templateRegex = /\[\[([^\]]+)\]\]\s*(.*)\s*/;
|
||||
|
||||
@ -30,7 +28,7 @@ export async function templateDirectiveRenderer(
|
||||
let parsedArgs = {};
|
||||
if (args) {
|
||||
try {
|
||||
parsedArgs = JSON.parse(replaceTemplateVars(args, pageMeta));
|
||||
parsedArgs = JSON.parse(await replaceTemplateVars(args, pageMeta));
|
||||
} catch {
|
||||
throw new Error(
|
||||
`Failed to parse template instantiation arg: ${
|
||||
@ -65,11 +63,9 @@ export async function templateDirectiveRenderer(
|
||||
|
||||
// if it's a template injection (not a literal "include")
|
||||
if (directive === "use") {
|
||||
const templateFn = Handlebars.compile(
|
||||
newBody,
|
||||
{ noEscape: true },
|
||||
);
|
||||
newBody = templateFn(parsedArgs, buildHandebarOptions(pageMeta));
|
||||
newBody = await handlebars.renderTemplate(newBody, parsedArgs, {
|
||||
page: pageMeta,
|
||||
});
|
||||
|
||||
// Recursively render directives
|
||||
const tree = await markdown.parseMarkdown(newBody);
|
||||
|
@ -1,8 +1,6 @@
|
||||
import Handlebars from "handlebars";
|
||||
|
||||
import { space } from "$sb/syscalls.ts";
|
||||
import type { PageMeta } from "../../web/types.ts";
|
||||
import { handlebarHelpers } from "./handlebar_helpers.ts";
|
||||
import { handlebars, space } from "$sb/syscalls.ts";
|
||||
import { handlebarHelpers } from "../../common/syscalls/handlebar_helpers.ts";
|
||||
import { PageMeta } from "$sb/types.ts";
|
||||
|
||||
const maxWidth = 70;
|
||||
|
||||
@ -10,6 +8,9 @@ export function defaultJsonTransformer(_k: string, v: any) {
|
||||
if (v === undefined) {
|
||||
return "";
|
||||
}
|
||||
if (typeof v === "string") {
|
||||
return v.replaceAll("\n", " ").replaceAll("|", "\\|");
|
||||
}
|
||||
return "" + v;
|
||||
}
|
||||
|
||||
@ -86,13 +87,12 @@ export async function renderTemplate(
|
||||
): Promise<string> {
|
||||
let templateText = await space.readPage(renderTemplate);
|
||||
templateText = `{{#each .}}\n${templateText}\n{{/each}}`;
|
||||
const template = Handlebars.compile(templateText, { noEscape: true });
|
||||
return template(data, buildHandebarOptions(pageMeta));
|
||||
return handlebars.renderTemplate(templateText, data, { page: pageMeta });
|
||||
}
|
||||
|
||||
export function buildHandebarOptions(pageMeta: PageMeta) {
|
||||
return {
|
||||
helpers: handlebarHelpers(pageMeta.name),
|
||||
helpers: handlebarHelpers(),
|
||||
data: { page: pageMeta },
|
||||
};
|
||||
}
|
||||
|
@ -1,6 +1,6 @@
|
||||
import { CompleteEvent } from "$sb/app_event.ts";
|
||||
import { space } from "$sb/syscalls.ts";
|
||||
import { PageMeta } from "../../web/types.ts";
|
||||
import { PageMeta } from "$sb/types.ts";
|
||||
import { cacheFileListing } from "../federation/federation.ts";
|
||||
|
||||
// Completion
|
||||
|
@ -57,7 +57,7 @@ export async function titleUnfurl(url: string): Promise<string> {
|
||||
const response = await fetch(url);
|
||||
if (response.status < 200 || response.status >= 300) {
|
||||
console.error("Unfurl failed", await response.text());
|
||||
throw new Error(`Failed to fetch: ${await response.statusText}`);
|
||||
throw new Error(`Failed to fetch: ${response.statusText}`);
|
||||
}
|
||||
const body = await response.text();
|
||||
const match = titleRegex.exec(body);
|
||||
|
@ -1,9 +1,5 @@
|
||||
import type { CompleteEvent } from "$sb/app_event.ts";
|
||||
import { editor, space } from "$sb/syscalls.ts";
|
||||
|
||||
import { cacheFileListing } from "../federation/federation.ts";
|
||||
import type { PageMeta } from "../../web/types.ts";
|
||||
|
||||
export async function deletePage() {
|
||||
const pageName = await editor.getCurrentPage();
|
||||
if (
|
||||
|
@ -1,11 +1,11 @@
|
||||
import { readCodeBlockPage } from "$sb/lib/yaml_page.ts";
|
||||
import { editor, store } from "$sb/syscalls.ts";
|
||||
import { clientStore, editor } from "$sb/syscalls.ts";
|
||||
|
||||
export async function toggleVimMode() {
|
||||
let vimMode = await store.get("vimMode");
|
||||
let vimMode = await clientStore.get("vimMode");
|
||||
vimMode = !vimMode;
|
||||
await editor.setUiOption("vimMode", vimMode);
|
||||
await store.set("vimMode", vimMode);
|
||||
await clientStore.set("vimMode", vimMode);
|
||||
}
|
||||
|
||||
export async function loadVimRc() {
|
||||
@ -28,7 +28,7 @@ export async function loadVimRc() {
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (e: any) {
|
||||
} catch {
|
||||
// No VIMRC page found
|
||||
}
|
||||
}
|
||||
|
@ -1,7 +1,7 @@
|
||||
import "$sb/lib/fetch.ts";
|
||||
import { federatedPathToUrl } from "$sb/lib/resolve.ts";
|
||||
import { readFederationConfigs } from "./config.ts";
|
||||
import { store } from "$sb/syscalls.ts";
|
||||
import { datastore } from "$sb/syscalls.ts";
|
||||
import type { FileMeta } from "$sb/types.ts";
|
||||
|
||||
async function responseToFileMeta(
|
||||
@ -29,7 +29,7 @@ async function responseToFileMeta(
|
||||
};
|
||||
}
|
||||
|
||||
const fileListingPrefixCacheKey = `federationListCache:`;
|
||||
const fileListingPrefixCacheKey = `federationListCache`;
|
||||
const listingCacheTimeout = 1000 * 30;
|
||||
const listingFetchTimeout = 2000;
|
||||
|
||||
@ -56,8 +56,8 @@ export async function listFiles(): Promise<FileMeta[]> {
|
||||
}
|
||||
|
||||
export async function cacheFileListing(uri: string): Promise<FileMeta[]> {
|
||||
const cachedListing = await store.get(
|
||||
`${fileListingPrefixCacheKey}${uri}`,
|
||||
const cachedListing = await datastore.get(
|
||||
[fileListingPrefixCacheKey, uri],
|
||||
) as FileListingCacheEntry;
|
||||
if (
|
||||
cachedListing &&
|
||||
@ -99,7 +99,7 @@ export async function cacheFileListing(uri: string): Promise<FileMeta[]> {
|
||||
perm: "ro",
|
||||
name: `${rootUri}/${meta.name}`,
|
||||
}));
|
||||
await store.set(`${fileListingPrefixCacheKey}${uri}`, {
|
||||
await datastore.set([fileListingPrefixCacheKey, uri], {
|
||||
items,
|
||||
lastUpdated: Date.now(),
|
||||
} as FileListingCacheEntry);
|
||||
|
@ -1,24 +1,31 @@
|
||||
import { collectNodesOfType } from "$sb/lib/tree.ts";
|
||||
import { index } from "$sb/syscalls.ts";
|
||||
import type { CompleteEvent, IndexTreeEvent } from "$sb/app_event.ts";
|
||||
import { removeQueries } from "$sb/lib/query.ts";
|
||||
import { ObjectValue } from "$sb/types.ts";
|
||||
import { indexObjects, queryObjects } from "./api.ts";
|
||||
|
||||
// Key space
|
||||
// a:pageName:anchorName => pos
|
||||
type AnchorObject = ObjectValue<{
|
||||
name: string;
|
||||
page: string;
|
||||
pos: number;
|
||||
}>;
|
||||
|
||||
export async function indexAnchors({ name: pageName, tree }: IndexTreeEvent) {
|
||||
removeQueries(tree);
|
||||
const anchors: { key: string; value: string }[] = [];
|
||||
const anchors: ObjectValue<AnchorObject>[] = [];
|
||||
|
||||
collectNodesOfType(tree, "NamedAnchor").forEach((n) => {
|
||||
const aName = n.children![0].text!.substring(1);
|
||||
anchors.push({
|
||||
key: `a:${pageName}:${aName}`,
|
||||
value: "" + n.from,
|
||||
ref: `${pageName}@${aName}`,
|
||||
tags: ["anchor"],
|
||||
name: aName,
|
||||
page: pageName,
|
||||
pos: n.from!,
|
||||
});
|
||||
});
|
||||
// console.log("Found", anchors.length, "anchors(s)");
|
||||
await index.batchSet(pageName, anchors);
|
||||
await indexObjects(pageName, anchors);
|
||||
}
|
||||
|
||||
export async function anchorComplete(completeEvent: CompleteEvent) {
|
||||
@ -31,13 +38,13 @@ export async function anchorComplete(completeEvent: CompleteEvent) {
|
||||
if (!pageRef) {
|
||||
pageRef = completeEvent.pageName;
|
||||
}
|
||||
const allAnchors = await index.queryPrefix(
|
||||
`a:${pageRef}:${anchorRef}`,
|
||||
);
|
||||
const allAnchors = await queryObjects<AnchorObject>("anchor", {
|
||||
filter: ["=", ["attr", "page"], ["string", pageRef]],
|
||||
});
|
||||
return {
|
||||
from: completeEvent.pos - anchorRef.length,
|
||||
options: allAnchors.map((a) => ({
|
||||
label: a.key.split(":")[2],
|
||||
label: a.name,
|
||||
type: "anchor",
|
||||
})),
|
||||
};
|
||||
|
156
plugs/index/api.ts
Normal file
156
plugs/index/api.ts
Normal file
@ -0,0 +1,156 @@
|
||||
import { datastore } from "$sb/syscalls.ts";
|
||||
import { KV, KvKey, ObjectQuery, ObjectValue } from "$sb/types.ts";
|
||||
import { QueryProviderEvent } from "$sb/app_event.ts";
|
||||
import { builtins } from "./builtins.ts";
|
||||
import { AttributeObject, determineType } from "./attributes.ts";
|
||||
|
||||
const indexKey = "idx";
|
||||
const pageKey = "ridx";
|
||||
|
||||
/*
|
||||
* Key namespace:
|
||||
* [indexKey, type, ...key, page] -> value
|
||||
* [pageKey, page, ...key] -> true // for fast page clearing
|
||||
* ["type", type] -> true // for fast type listing
|
||||
*/
|
||||
|
||||
export function batchSet(page: string, kvs: KV[]): Promise<void> {
|
||||
const finalBatch: KV[] = [];
|
||||
for (const { key, value } of kvs) {
|
||||
finalBatch.push({
|
||||
key: [indexKey, ...key, page],
|
||||
value,
|
||||
}, {
|
||||
key: [pageKey, page, ...key],
|
||||
value: true,
|
||||
});
|
||||
}
|
||||
return datastore.batchSet(finalBatch);
|
||||
}
|
||||
|
||||
/**
|
||||
* Clears all keys for a given page
|
||||
* @param page
|
||||
*/
|
||||
export async function clearPageIndex(page: string): Promise<void> {
|
||||
const allKeys: KvKey[] = [];
|
||||
for (
|
||||
const { key } of await datastore.query({
|
||||
prefix: [pageKey, page],
|
||||
})
|
||||
) {
|
||||
allKeys.push(key);
|
||||
allKeys.push([indexKey, ...key.slice(2), page]);
|
||||
}
|
||||
await datastore.batchDel(allKeys);
|
||||
}
|
||||
|
||||
/**
|
||||
* Clears the entire datastore for this indexKey plug
|
||||
*/
|
||||
export async function clearIndex(): Promise<void> {
|
||||
const allKeys: KvKey[] = [];
|
||||
for (
|
||||
const { key } of await datastore.query({ prefix: [] })
|
||||
) {
|
||||
allKeys.push(key);
|
||||
}
|
||||
await datastore.batchDel(allKeys);
|
||||
console.log("Deleted", allKeys.length, "keys from the index");
|
||||
}
|
||||
|
||||
// ENTITIES API
|
||||
|
||||
/**
|
||||
* Indexes entities in the data store
|
||||
*/
|
||||
export async function indexObjects<T>(
|
||||
page: string,
|
||||
objects: ObjectValue<T>[],
|
||||
): Promise<void> {
|
||||
const kvs: KV<T>[] = [];
|
||||
const allAttributes = new Map<string, string>(); // tag:name -> attributeType
|
||||
for (const obj of objects) {
|
||||
for (const tag of obj.tags) {
|
||||
kvs.push({
|
||||
key: [tag, cleanKey(obj.ref, page)],
|
||||
value: obj,
|
||||
});
|
||||
// Index attributes
|
||||
if (!builtins[tag]) {
|
||||
// But only for non-builtin tags
|
||||
for (
|
||||
const [attrName, attrValue] of Object.entries(
|
||||
obj as Record<string, any>,
|
||||
)
|
||||
) {
|
||||
if (attrName.startsWith("$")) {
|
||||
continue;
|
||||
}
|
||||
allAttributes.set(`${tag}:${attrName}`, determineType(attrValue));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (allAttributes.size > 0) {
|
||||
await indexObjects<AttributeObject>(
|
||||
page,
|
||||
[...allAttributes].map(([key, value]) => {
|
||||
const [tag, name] = key.split(":");
|
||||
return {
|
||||
ref: key,
|
||||
tags: ["attribute"],
|
||||
tag,
|
||||
name,
|
||||
attributeType: value,
|
||||
page,
|
||||
};
|
||||
}),
|
||||
);
|
||||
}
|
||||
return batchSet(page, kvs);
|
||||
}
|
||||
|
||||
function cleanKey(ref: string, page: string) {
|
||||
if (ref.startsWith(`${page}@`)) {
|
||||
return ref.substring(page.length + 1);
|
||||
} else {
|
||||
return ref;
|
||||
}
|
||||
}
|
||||
|
||||
export async function queryObjects<T>(
|
||||
tag: string,
|
||||
query: ObjectQuery,
|
||||
): Promise<ObjectValue<T>[]> {
|
||||
return (await datastore.query({
|
||||
...query,
|
||||
prefix: [indexKey, tag],
|
||||
})).map(({ value }) => value);
|
||||
}
|
||||
|
||||
export async function getObjectByRef<T>(
|
||||
page: string,
|
||||
tag: string,
|
||||
ref: string,
|
||||
): Promise<ObjectValue<T> | undefined> {
|
||||
console.log("Fetching!!!!!", [indexKey, tag, cleanKey(ref, page), page]);
|
||||
return (await datastore.get([indexKey, tag, cleanKey(ref, page), page]));
|
||||
}
|
||||
|
||||
export async function objectSourceProvider({
|
||||
query,
|
||||
}: QueryProviderEvent): Promise<any[]> {
|
||||
const tag = query.querySource!;
|
||||
const results = await datastore.query({
|
||||
...query,
|
||||
prefix: [indexKey, tag],
|
||||
});
|
||||
return results.map((r) => r.value);
|
||||
}
|
||||
|
||||
export async function discoverSources() {
|
||||
return (await datastore.query({ prefix: [indexKey, "tag"] })).map((
|
||||
{ key },
|
||||
) => key[2]);
|
||||
}
|
17
plugs/index/asset/linked_mentions.js
Normal file
17
plugs/index/asset/linked_mentions.js
Normal file
@ -0,0 +1,17 @@
|
||||
function processClick(e) {
|
||||
const dataEl = e.target.closest("[data-ref]");
|
||||
syscall(
|
||||
"system.invokeFunction",
|
||||
"index.navigateToMention",
|
||||
dataEl.getAttribute("data-ref"),
|
||||
).catch(console.error);
|
||||
}
|
||||
|
||||
document.getElementById("link-ul").addEventListener("click", processClick);
|
||||
document.getElementById("hide-button").addEventListener("click", function () {
|
||||
console.log("HERE")
|
||||
syscall(
|
||||
"system.invokeFunction",
|
||||
"index.toggleMentions",
|
||||
).catch(console.error);
|
||||
});
|
25
plugs/index/asset/style.css
Normal file
25
plugs/index/asset/style.css
Normal file
@ -0,0 +1,25 @@
|
||||
body {
|
||||
font-family: var(--ui-font);
|
||||
background-color: var(--root-background-color);
|
||||
color: var(--root-color);
|
||||
overflow: scroll;
|
||||
}
|
||||
|
||||
.sb-line-h2 {
|
||||
border-top-right-radius: 5px;
|
||||
border-top-left-radius: 5px;
|
||||
margin: 0;
|
||||
padding: 10px !important;
|
||||
background-color: rgba(233, 233, 233, 0.5);
|
||||
}
|
||||
|
||||
#hide-button {
|
||||
position: absolute;
|
||||
right: 15px;
|
||||
top: 15px;
|
||||
}
|
||||
|
||||
li code {
|
||||
font-size: 80%;
|
||||
color: #a5a4a4;
|
||||
}
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user