Fixes #92: implements frontmatter syntax
parent ce6122458f
commit 7d1a04f392
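For orientation, the syntax this commit adds (mirroring the `sample1` fixture in the new common/parser.test.ts below) is a YAML block delimited by `---` lines at the very start of a page:

    ---
    type: page
    tags:
    - hello
    - world
    ---

    # This is a doc

A page that opens the block but never closes it (see `sampleInvalid1` in the test) is not treated as front matter.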
build.ts (125 additions)
@@ -0,0 +1,125 @@
+// -- esbuild --
+// @deno-types="https://deno.land/x/esbuild@v0.14.54/mod.d.ts"
+import * as esbuildWasm from "https://deno.land/x/esbuild@v0.14.54/wasm.js";
+import * as esbuildNative from "https://deno.land/x/esbuild@v0.14.54/mod.js";
+import { denoPlugin } from "https://deno.land/x/esbuild_deno_loader@0.6.0/mod.ts"; //"./esbuild_deno_loader/mod.ts";
+import { copy } from "https://deno.land/std@0.158.0/fs/copy.ts";
+
+import sass from "https://deno.land/x/denosass@1.0.4/mod.ts";
+import { bundleFolder } from "./plugos/asset_bundle/builder.ts";
+import { patchDenoLibJS } from "./plugos/hack.ts";
+import { bundle as plugOsBundle } from "./plugos/bin/plugos-bundle.ts";
+
+import * as flags from "https://deno.land/std@0.158.0/flags/mod.ts";
+
+// @ts-ignore trust me
+const esbuild: typeof esbuildWasm = Deno.run === undefined
+  ? esbuildWasm
+  : esbuildNative;
+
+async function prepareAssets(dist: string) {
+  await copy("web/fonts", `${dist}/web`, { overwrite: true });
+  await copy("web/index.html", `${dist}/web/index.html`, {
+    overwrite: true,
+  });
+  await copy("web/images/favicon.gif", `${dist}/web/favicon.gif`, {
+    overwrite: true,
+  });
+  await copy("web/images/logo.png", `${dist}/web/logo.png`, {
+    overwrite: true,
+  });
+  await copy("web/manifest.json", `${dist}/web/manifest.json`, {
+    overwrite: true,
+  });
+  await copy("server/SETTINGS_template.md", `${dist}/SETTINGS_template.md`, {
+    overwrite: true,
+  });
+  const compiler = sass(
+    Deno.readTextFileSync("web/styles/main.scss"),
+    {
+      load_paths: ["web/styles"],
+    },
+  );
+  await Deno.writeTextFile(
+    `${dist}/web/main.css`,
+    compiler.to_string("expanded") as string,
+  );
+  const globalManifest = await plugOsBundle(
+    new URL(`./plugs/global.plug.yaml`, import.meta.url).pathname,
+  );
+  await Deno.writeTextFile(
+    `${dist}/web/global.plug.json`,
+    JSON.stringify(globalManifest, null, 2),
+  );
+
+  // HACK: Patch the JS by removing an invalid regex
+  let bundleJs = await Deno.readTextFile(`${dist}/web/client.js`);
+  bundleJs = patchDenoLibJS(bundleJs);
+  await Deno.writeTextFile(`${dist}/web/client.js`, bundleJs);
+
+  await bundleFolder(dist, "dist/asset_bundle.json");
+}
+
+async function bundle(watch: boolean): Promise<void> {
+  let building = false;
+  await doBuild();
+  let timer;
+  if (watch) {
+    const watcher = Deno.watchFs(["web", "dist_bundle/_plug"]);
+    for await (const _event of watcher) {
+      if (timer) {
+        clearTimeout(timer);
+      }
+      timer = setTimeout(() => {
+        console.log("Change detected, rebuilding...");
+        doBuild();
+      }, 1000);
+    }
+  }
+
+  async function doBuild() {
+    if (building) {
+      return;
+    }
+    building = true;
+    await Promise.all([
+      esbuild.build({
+        entryPoints: {
+          client: "web/boot.ts",
+          service_worker: "web/service_worker.ts",
+        },
+        outdir: "./dist_bundle/web",
+        absWorkingDir: Deno.cwd(),
+        bundle: true,
+        treeShaking: true,
+        sourcemap: "linked",
+        minify: true,
+        jsxFactory: "h",
+        jsx: "automatic",
+        jsxFragment: "Fragment",
+        jsxImportSource: "https://esm.sh/preact@10.11.1",
+        plugins: [
+          denoPlugin({
+            importMapURL: new URL("./import_map.json", import.meta.url),
+          }),
+        ],
+      }),
+    ]);
+    await prepareAssets("dist_bundle");
+    building = false;
+    console.log("Built!");
+  }
+}
+
+const args = flags.parse(Deno.args, {
+  boolean: ["watch"],
+  alias: { w: "watch" },
+  default: {
+    watch: false,
+  },
+});
+
+await bundle(args.watch);
+if (!args.watch) {
+  esbuild.stop();
+}
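Not part of the commit, but as a usage sketch: the script above is self-contained and parses its own `--watch` flag, so (assuming default Deno permission handling; the repo's task wiring is not shown in this diff) it would be run along the lines of:

    deno run -A build.ts           # one-off build into dist_bundle/
    deno run -A build.ts --watch   # watch web/ and dist_bundle/_plug, rebuilding on changes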
@@ -27,6 +27,7 @@ export type {
   BlockContext,
   LeafBlock,
   LeafBlockParser,
+  Line,
   MarkdownConfig,
   MarkdownExtension,
 } from "@lezer/markdown";
@@ -2,6 +2,7 @@ import { ParseTree } from "$sb/lib/tree.ts";
+
 import type { SyntaxNode } from "./deps.ts";
 import type { Language } from "./deps.ts";

 export function lezerToParseTree(
   text: string,
   n: SyntaxNode,
@@ -24,10 +25,10 @@ export function lezerToParseTree(
       },
     ];
   } else {
-    let newChildren: ParseTree[] = [];
+    const newChildren: ParseTree[] = [];
     let index = n.from;
-    for (let child of children) {
-      let s = text.substring(index, child.from);
+    for (const child of children) {
+      const s = text.substring(index, child.from);
       if (s) {
         newChildren.push({
           from: index + offset,
@@ -38,14 +39,14 @@ export function lezerToParseTree(
       newChildren.push(child);
       index = child.to!;
     }
-    let s = text.substring(index, n.to);
+    const s = text.substring(index, n.to);
     if (s) {
       newChildren.push({ from: index + offset, to: n.to + offset, text: s });
     }
     children = newChildren;
   }

-  let result: ParseTree = {
+  const result: ParseTree = {
     type: n.name,
     from: n.from + offset,
     to: n.to + offset,
@@ -60,7 +61,7 @@ export function lezerToParseTree(
 }

 export function parse(language: Language, text: string): ParseTree {
-  let tree = lezerToParseTree(text, language.parser.parse(text).topNode);
+  const tree = lezerToParseTree(text, language.parser.parse(text).topNode);
   // replaceNodesMatching(tree, (n): MarkdownTree | undefined | null => {
   //   if (n.type === "FencedCode") {
   //     let infoN = findNodeMatching(n, (n) => n.type === "CodeInfo");
common/parser.test.ts (new file, 38 additions)
@@ -0,0 +1,38 @@
+import { parse } from "./parse_tree.ts";
+import buildMarkdown from "./parser.ts";
+import { findNodeOfType, renderToText } from "../plug-api/lib/tree.ts";
+import { assertEquals, assertNotEquals } from "../test_deps.ts";
+
+const sample1 = `---
+type: page
+tags:
+- hello
+- world
+
+---
+# This is a doc
+
+Supper`;
+
+const sampleInvalid1 = `---
+name: Zef
+# This is a doc
+
+Supper`;
+
+Deno.test("Test parser", () => {
+  const lang = buildMarkdown([]);
+  let tree = parse(
+    lang,
+    sample1,
+  );
+  // Check if rendering back to text works
+  assertEquals(renderToText(tree), sample1);
+  // console.log("tree", JSON.stringify(tree, null, 2));
+  let node = findNodeOfType(tree, "FrontMatter");
+  assertNotEquals(node, undefined);
+  tree = parse(lang, sampleInvalid1);
+  node = findNodeOfType(tree, "FrontMatter");
+  // console.log("Invalid node", node);
+  assertEquals(node, undefined);
+});
@@ -1,17 +1,17 @@
 import {
   BlockContext,
   Language,
-  LanguageDescription,
-  LanguageSupport,
   LeafBlock,
   LeafBlockParser,
+  Line,
   markdown,
   MarkdownConfig,
-  parseCode,
+  StreamLanguage,
   styleTags,
   Table,
   tags as t,
   TaskList,
+  yamlLanguage,
 } from "./deps.ts";
 import * as ct from "./customtags.ts";
 import {
@@ -92,7 +92,7 @@ export const Comment: MarkdownConfig = {
   parseBlock: [
     {
       name: "Comment",
-      leaf(cx, leaf) {
+      leaf(_cx, leaf) {
         return /^%%\s/.test(leaf.content) ? new CommentParser() : null;
       },
       after: "SetextHeading",
@@ -100,34 +100,80 @@ export const Comment: MarkdownConfig = {
   ],
 };

+// FrontMatter parser
+
+const lang = StreamLanguage.define(yamlLanguage);
+
+export const FrontMatter: MarkdownConfig = {
+  defineNodes: [
+    { name: "FrontMatter", block: true },
+    { name: "FrontMatterMarker" },
+    { name: "FrontMatterCode" },
+  ],
+  parseBlock: [{
+    name: "FrontMatter",
+    parse: (cx, line: Line) => {
+      if (cx.parsedPos !== 0) {
+        return false;
+      }
+      if (line.text !== "---") {
+        return false;
+      }
+      const frontStart = cx.parsedPos;
+      const elts = [
+        cx.elt(
+          "FrontMatterMarker",
+          cx.parsedPos,
+          cx.parsedPos + line.text.length + 1,
+        ),
+      ];
+      cx.nextLine();
+      const startPos = cx.parsedPos;
+      let endPos = startPos;
+      let text = "";
+      let lastPos = cx.parsedPos;
+      do {
+        text += line.text + "\n";
+        endPos += line.text.length + 1;
+        cx.nextLine();
+        if (cx.parsedPos === lastPos) {
+          // End of file, no progress made, there may be a better way to do this but :shrug:
+          return false;
+        }
+        lastPos = cx.parsedPos;
+      } while (line.text !== "---");
+      const yamlTree = lang.parser.parse(text);
+
+      elts.push(
+        cx.elt("FrontMatterCode", startPos, endPos, [
+          cx.elt(yamlTree, startPos),
+        ]),
+      );
+      endPos = cx.parsedPos + line.text.length;
+      elts.push(cx.elt(
+        "FrontMatterMarker",
+        cx.parsedPos,
+        cx.parsedPos + line.text.length,
+      ));
+      cx.nextLine();
+      cx.addElement(cx.elt("FrontMatter", frontStart, endPos, elts));
+      return true;
+    },
+    before: "HorizontalRule",
+  }],
+};
+
 export default function buildMarkdown(mdExtensions: MDExt[]): Language {
   return markdown({
     extensions: [
       WikiLink,
+      FrontMatter,
       TaskList,
       Comment,
       Strikethrough,
       Table,
       ...mdExtensions.map(mdExtensionSyntaxConfig),
-      // parseCode({
-      //   codeParser: getCodeParser([
-      //     LanguageDescription.of({
-      //       name: "yaml",
-      //       alias: ["meta", "data"],
-      //       support: new LanguageSupport(StreamLanguage.define(yaml)),
-      //     }),
-      //     LanguageDescription.of({
-      //       name: "javascript",
-      //       alias: ["js"],
-      //       support: new LanguageSupport(javascriptLanguage),
-      //     }),
-      //     LanguageDescription.of({
-      //       name: "typescript",
-      //       alias: ["ts"],
-      //       support: new LanguageSupport(typescriptLanguage),
-      //     }),
-      //   ]),
-      // }),
       {
         props: [
           styleTags({
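A rough sketch of the node shape the FrontMatter block parser above produces (inferred from the `elts` construction, not actual parser output):

    FrontMatter
      FrontMatterMarker    (the opening "---" line)
      FrontMatterCode      (the YAML body, with the yamlLanguage tree mounted via cx.elt(yamlTree, startPos))
      FrontMatterMarker    (the closing "---" line)

This is the node that common/parser.test.ts above locates with findNodeOfType(tree, "FrontMatter"), and that extractMeta further down reads through t.children![1].children![0].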
@@ -172,12 +172,12 @@ functions:
   insertPageMeta:
     path: "./template.ts:insertTemplateText"
     slashCommand:
-      name: meta
-      description: Insert a page metadata block
+      name: front-matter
+      description: Insert page front matter
       value: |
-        ```meta
+        ---
         |^|
-        ```
+        ---
   insertTask:
     path: "./template.ts:insertTemplateText"
     slashCommand:
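Illustration of the manifest change above (the cursor-placeholder meaning of `|^|` is assumed from the template convention, not spelled out in this diff): where the old /meta slash command inserted a fenced ```meta block, the new /front-matter command inserts

    ---
    |^|
    ---

with the cursor left between the two `---` markers.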
@@ -8,10 +8,12 @@ import {
   collectNodesOfType,
   findNodeOfType,
   ParseTree,
+  renderToText,
   replaceNodesMatching,
 } from "$sb/lib/tree.ts";
 import { applyQuery, removeQueries } from "$sb/lib/query.ts";
 import * as YAML from "yaml";
+import { text } from "https://esm.sh/v96/@fortawesome/fontawesome-svg-core@1.3.0/X-ZS9AZm9ydGF3ZXNvbWUvZm9udGF3ZXNvbWUtY29tbW9uLXR5cGVz/index.d.ts";

 export async function indexData({ name, tree }: IndexTreeEvent) {
   const dataObjects: { key: string; value: any }[] = [];
@@ -61,11 +63,13 @@ export function extractMeta(
 ): any {
   let data: any = {};
   addParentPointers(parseTree);

   replaceNodesMatching(parseTree, (t) => {
+    // Find top-level hash tags
     if (t.type === "Hashtag") {
       // Check if if nested directly into a Paragraph
       if (t.parent && t.parent.type === "Paragraph") {
-        const tagname = t.children![0].text;
+        const tagname = t.children![0].text!.substring(1);
         if (!data.tags) {
           data.tags = [];
         }
@@ -75,7 +79,32 @@ export function extractMeta(
       }
       return;
     }
-    // Find a fenced code block
+    // Find FrontMatter and parse it
+    if (t.type === "FrontMatter") {
+      const yamlText = renderToText(t.children![1].children![0]);
+      const parsedData: any = YAML.parse(yamlText);
+      const newData = { ...parsedData };
+      data = { ...data, ...parsedData };
+      if (removeKeys.length > 0) {
+        let removedOne = false;
+
+        for (const key of removeKeys) {
+          if (key in newData) {
+            delete newData[key];
+            removedOne = true;
+          }
+        }
+        if (removedOne) {
+          t.children![0].text = YAML.stringify(newData);
+        }
+      }
+      // If nothing is left, let's just delete this whole block
+      if (Object.keys(newData).length === 0) {
+        return null;
+      }
+    }
+
+    // Find a fenced code block with `meta` as the language type
     if (t.type !== "FencedCode") {
       return;
     }
@@ -92,18 +121,26 @@ export function extractMeta(
       return;
     }
     const codeText = codeTextNode.children![0].text!;
-    data = YAML.parse(codeText);
+    const parsedData: any = YAML.parse(codeText);
+    const newData = { ...parsedData };
+    data = { ...data, ...parsedData };
     if (removeKeys.length > 0) {
-      const newData = { ...data };
+      let removedOne = false;
       for (const key of removeKeys) {
-        delete newData[key];
+        if (key in newData) {
+          delete newData[key];
+          removedOne = true;
+        }
       }
-      codeTextNode.children![0].text = YAML.stringify(newData).trim();
-      // If nothing is left, let's just delete this thing
-      if (Object.keys(newData).length === 0) {
-        return null;
-      }
+      if (removedOne) {
+        codeTextNode.children![0].text = YAML.stringify(newData).trim();
+      }
     }
+    // If nothing is left, let's just delete this whole block
+    if (Object.keys(newData).length === 0) {
+      return null;
+    }
+
     return undefined;
   });

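A rough illustration of the behavior added above, reusing the fixtures from common/parser.test.ts (a sketch combining pieces shown in this diff, not code from the commit itself):

    // hypothetical usage of the functions shown in this diff
    const tree = parse(buildMarkdown([]), sample1);
    const meta = extractMeta(tree, []);
    // meta would be roughly { type: "page", tags: ["hello", "world"] }

Front matter now feeds the same `data` object as top-level #hashtags and ```meta fenced blocks; keys listed in `removeKeys` are stripped from the node in place, and a block left empty is dropped entirely (the `return null` branches).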
@@ -127,6 +127,7 @@ export async function updateMaterializedQueriesOnPage(
   let newText = await updateTemplateInstantiations(text, pageName);
   const tree = await markdown.parseMarkdown(newText);
   const metaData = extractMeta(tree, ["$disableDirectives"]);
+  console.log("Meta data", pageName, metaData);
   if (metaData.$disableDirectives) {
     console.log("Directives disabled, skipping");
     return false;
@@ -441,6 +441,7 @@ export class Editor {
         { selector: "BulletList", class: "sb-line-ul" },
         { selector: "OrderedList", class: "sb-line-ol" },
         { selector: "TableHeader", class: "sb-line-tbl-header" },
+        { selector: "FrontMatter", class: "sb-frontmatter" },
       ]),
       keymap.of([
         ...smartQuoteKeymap,
@@ -17,23 +17,23 @@ interface WrapElement {

 function wrapLines(view: EditorView, wrapElements: WrapElement[]) {
   let widgets: Range<Decoration>[] = [];
-  let elementStack: string[] = [];
+  const elementStack: string[] = [];
   const doc = view.state.doc;
   // Disabling the visible ranges for now, because it may be a bit buggy.
   // RISK: this may actually become slow for large documents.
-  for (let { from, to } of view.visibleRanges) {
+  for (const { from, to } of view.visibleRanges) {
     syntaxTree(view.state).iterate({
       from,
       to,
       enter: ({ type, from, to }) => {
-        for (let wrapElement of wrapElements) {
+        for (const wrapElement of wrapElements) {
           if (type.name == wrapElement.selector) {
             if (wrapElement.nesting) {
               elementStack.push(type.name);
             }
             const bodyText = doc.sliceString(from, to);
             let idx = from;
-            for (let line of bodyText.split("\n")) {
+            for (const line of bodyText.split("\n")) {
               let cls = wrapElement.class;
               if (wrapElement.nesting) {
                 cls = `${cls} ${cls}-${elementStack.length}`;
@@ -49,7 +49,7 @@ function wrapLines(view: EditorView, wrapElements: WrapElement[]) {
         }
       },
       leave({ type }) {
-        for (let wrapElement of wrapElements) {
+        for (const wrapElement of wrapElements) {
           if (type.name == wrapElement.selector && wrapElement.nesting) {
             elementStack.pop();
           }
@@ -1,7 +1,12 @@
 import { KeyBinding } from "./deps.ts";
 import { syntaxTree } from "../common/deps.ts";

-const straightQuoteContexts = ["CommentBlock", "FencedCode", "InlineCode"];
+const straightQuoteContexts = [
+  "CommentBlock",
+  "FencedCode",
+  "InlineCode",
+  "FrontMatterCode",
+];

 // TODO: Add support for selection (put quotes around or create blockquote block?)
 function keyBindingForQuote(
@@ -12,8 +17,8 @@ function keyBindingForQuote(
   return {
     key: quote,
     run: (target): boolean => {
-      let cursorPos = target.state.selection.main.from;
-      let chBefore = target.state.sliceDoc(cursorPos - 1, cursorPos);
+      const cursorPos = target.state.selection.main.from;
+      const chBefore = target.state.sliceDoc(cursorPos - 1, cursorPos);

       // Figure out the context, if in some sort of code/comment fragment don't be smart
       let node = syntaxTree(target.state).resolveInner(cursorPos);
@@ -269,6 +269,11 @@
   padding-left: 2ch;
 }

+.sb-frontmatter {
+  background-color: rgba(255, 246, 189, 0.5);
+  color: #676767;
+}
+
 .sb-emphasis {
   font-style: italic;
 }
@@ -3,6 +3,12 @@ release.

 ---

+## 0.1.3
+* Frontmatter support! You can now use front matter in your markdown: start your page with `---` and end the block with `---`. This is now the preferred way to define page metadata (although the old way still works). The old `/meta` slash command has been replaced with `/front-matter`.
+* Tags are now indexed as page meta without the `#` prefix, for compatibility with Obsidian. You can attach tags to a page either by using a `#tag` at the top level of the page, or by adding a `tags` attribute to its front matter.
+
+---
+
 ## 0.1.2

 - Breaking plugs API change: `readPage`, `readAttachment`, `readFile` now return