From 7b7f772d07778084e937d44f777c86fe40a536ba Mon Sep 17 00:00:00 2001
From: chris <1010084+cloverich@users.noreply.github.com>
Date: Sun, 22 Dec 2024 07:33:51 -0800
Subject: [PATCH] persist and maintain front matter (#283)
import and track front matter; misc minor fixes and refactors
- on import, track front matter in staging table if parsed
- work core Chronicles metadata (tags, title, dates) into front matter
- maintain front matter in documents (keep existing, merge Chronicles properties)
- refactor: migrate rest of client.documents to knex
- refactor: parse and stringify frontmatter tags as valid yaml
- add micromark front matter libraries and prefer them when parsing / serializing
- fix: parse and serialize tags as valid yaml array
- fix: When document not found, display error and fix back button (no-op before)
- fix: Correctly set default journal on sync, and add an additional fallback
- fix: CACHE_DIR / DATABASE_URL references
---
package.json | 2 +
src/electron/migrations/20211005142122.sql | 1 +
src/electron/userFilesInit.js | 1 -
src/markdown/index.test.ts | 41 +++
src/markdown/index.ts | 16 +-
src/preload/client/documents.ts | 276 ++++++++++---------
src/preload/client/importer.ts | 42 +--
src/preload/client/importer/frontmatter.ts | 134 +++++----
src/preload/client/importer/importer.test.ts | 4 +-
src/preload/client/index.ts | 4 +
src/preload/client/preferences.ts | 4 +-
src/preload/client/sync.ts | 40 +--
src/preload/client/types.ts | 31 ++-
src/views/create/index.tsx | 17 +-
src/views/documents/SearchStore.ts | 17 +-
src/views/edit/EditableDocument.ts | 110 ++++----
src/views/edit/loading.tsx | 6 +-
src/views/edit/useEditableDocument.ts | 9 +-
src/views/preferences/index.tsx | 2 +-
yarn.lock | 34 +++
20 files changed, 471 insertions(+), 320 deletions(-)
diff --git a/package.json b/package.json
index 218e6ac..61f54f8 100644
--- a/package.json
+++ b/package.json
@@ -58,8 +58,10 @@
"lucide-react": "^0.314.0",
"luxon": "^2.4.0",
"mdast-util-from-markdown": "^2.0.2",
+ "mdast-util-frontmatter": "^2.0.1",
"mdast-util-gfm": "^3.0.0",
"mdast-util-to-markdown": "^2.1.2",
+ "micromark-extension-frontmatter": "^2.0.0",
"micromark-extension-gfm": "^3.0.0",
"mobx": "^5.15.4",
"mobx-react-lite": "^2.0.7",
diff --git a/src/electron/migrations/20211005142122.sql b/src/electron/migrations/20211005142122.sql
index 22fa2a6..b4cf801 100644
--- a/src/electron/migrations/20211005142122.sql
+++ b/src/electron/migrations/20211005142122.sql
@@ -26,6 +26,7 @@ CREATE TABLE IF NOT EXISTS "documents" (
"title" TEXT,
"content" TEXT NOT NULL,
"journal" TEXT NOT NULL,
+ "frontmatter" TEXT NOT NULL,
FOREIGN KEY ("journal") REFERENCES "journals" ("name") ON DELETE CASCADE ON UPDATE CASCADE
);
diff --git a/src/electron/userFilesInit.js b/src/electron/userFilesInit.js
index 38f5bd4..c038cb1 100644
--- a/src/electron/userFilesInit.js
+++ b/src/electron/userFilesInit.js
@@ -13,7 +13,6 @@ const { ensureDir } = require("./ensureDir");
*/
exports.initUserFilesDir = (userDataDir) => {
initDir("NOTES_DIR", path.join(userDataDir, "/notes"));
- initDir("CACHE_DIR", userDataDir);
initDir("SETTINGS_DIR", userDataDir);
};
diff --git a/src/markdown/index.test.ts b/src/markdown/index.test.ts
index 995a77b..2fbda33 100644
--- a/src/markdown/index.test.ts
+++ b/src/markdown/index.test.ts
@@ -2,6 +2,7 @@ import { expect } from "chai";
import fs from "fs";
import { describe, it } from "mocha";
import path from "path";
+import yaml from "yaml";
import { slateToString, stringToSlate } from "./index.js";
import { dig, parseMarkdown, parseMarkdownForImport } from "./test-utils.js";
@@ -616,3 +617,43 @@ describe("Whacky shit", function () {
****[5 variations of Binary search (A Self Note)](https://leetcode.com/discuss/interview-question/1322500/5-variations-of-Binary-search-(A-Self-Note))****`;
});
+
+describe("front matter parsing", function () {
+ const content = `---
+title: 2024-09-29
+tags: weekly-todo
+createdAt: 2024-09-30T17:50:22.000Z
+updatedAt: 2024-11-04T16:24:11.000Z
+---
+
+#weekly-todo
+
+Last week: [2024-09-22](../work/0193acd4fa3574698c36c4514b907c70.md)
+
+**I am on call this week** [On call week of 2024-09-30](../persona/0193acd4fa45731f81350d4443c1ed16.md)
+
+## Monday
+
+`;
+
+ // A very basic "it works" test
+ // todo: End to end test with a real document, asserting against the database values
+ it("parses front matter as an mdast node, and can be parsed with yaml.parse", function () {
+ const parsed = parseMarkdown(content);
+ expect(parsed.children[0].type).to.equal("yaml");
+ expect(parsed.children[0].value).to.equal(
+ "title: 2024-09-29\n" +
+ "tags: weekly-todo\n" +
+ "createdAt: 2024-09-30T17:50:22.000Z\n" +
+ "updatedAt: 2024-11-04T16:24:11.000Z",
+ );
+
+ const frontMatter = yaml.parse(parsed.children[0].value as string);
+ expect(frontMatter).to.deep.equal({
+ title: "2024-09-29",
+ tags: "weekly-todo",
+ createdAt: "2024-09-30T17:50:22.000Z",
+ updatedAt: "2024-11-04T16:24:11.000Z",
+ });
+ });
+});
diff --git a/src/markdown/index.ts b/src/markdown/index.ts
index a549e43..02a5887 100644
--- a/src/markdown/index.ts
+++ b/src/markdown/index.ts
@@ -5,8 +5,13 @@ import * as mdast from "mdast";
export { slateToMdast } from "./remark-slate-transformer/transformers/slate-to-mdast.js";
import { fromMarkdown } from "mdast-util-from-markdown";
+import {
+ frontmatterFromMarkdown,
+ frontmatterToMarkdown,
+} from "mdast-util-frontmatter";
import { gfmFromMarkdown, gfmToMarkdown } from "mdast-util-gfm";
import { toMarkdown } from "mdast-util-to-markdown";
+import { frontmatter } from "micromark-extension-frontmatter";
import { gfm } from "micromark-extension-gfm";
import { ofmTagFromMarkdown } from "./mdast-util-ofm-tag";
import { ofmWikilinkFromMarkdown } from "./mdast-util-ofm-wikilink";
@@ -53,25 +58,28 @@ function wrapImages(tree: mdast.Root) {
// to Chronicles tags and markdown links. Future versions may support these properly.
export const parseMarkdownForImport = (markdown: string): mdast.Root => {
return fromMarkdown(markdown, {
- extensions: [gfm(), ofmTag(), ofmWikilink()],
+ extensions: [gfm(), ofmTag(), ofmWikilink(), frontmatter(["yaml"])],
mdastExtensions: [
gfmFromMarkdown(),
ofmTagFromMarkdown(),
ofmWikilinkFromMarkdown(),
+ // https://github.com/micromark/micromark-extension-frontmatter?tab=readme-ov-file#preset
+ // todo: support toml (need toml parser)
+ frontmatterFromMarkdown(["yaml"]),
],
});
};
export const parseMarkdown = (markdown: string): mdast.Root => {
return fromMarkdown(markdown, {
- extensions: [gfm()],
- mdastExtensions: [gfmFromMarkdown()],
+ extensions: [gfm(), frontmatter(["yaml"])],
+ mdastExtensions: [gfmFromMarkdown(), frontmatterFromMarkdown(["yaml"])],
});
};
export const mdastToString = (tree: mdast.Nodes) => {
return toMarkdown(tree, {
- extensions: [gfmToMarkdown() as any],
+ extensions: [gfmToMarkdown() as any, frontmatterToMarkdown(["yaml"])],
bullet: "-",
emphasis: "_",
});
diff --git a/src/preload/client/documents.ts b/src/preload/client/documents.ts
index 766521d..769a1b2 100644
--- a/src/preload/client/documents.ts
+++ b/src/preload/client/documents.ts
@@ -1,7 +1,9 @@
import { Database } from "better-sqlite3";
+import fs from "fs";
import { Knex } from "knex";
import path from "path";
import { uuidv7obj } from "uuidv7";
+import yaml from "yaml";
import { mdastToString, parseMarkdown, selectNoteLinks } from "../../markdown";
import { parseNoteLink } from "../../views/edit/editor/features/note-linking/toMdast";
import { Files } from "../files";
@@ -10,13 +12,26 @@ import { parseChroniclesFrontMatter } from "./importer/frontmatter";
import { IPreferencesClient } from "./preferences";
import {
+ CreateRequest,
GetDocumentResponse,
- SaveRequest,
+ IndexRequest,
SearchItem,
SearchRequest,
SearchResponse,
+ UpdateRequest,
} from "./types";
+// document as it appears in the database
+interface DocumentDb {
+ id: string;
+ journal: string;
+ title?: string;
+ content: string;
+ frontMatter: string;
+ createdAt: string;
+ updatedAt: string;
+}
+
// table structure of document_links
interface DocumentLinkDb {
documentId: string;
@@ -36,13 +51,14 @@ export class DocumentsClient {
) {}
findById = async ({ id }: { id: string }): Promise => {
- const document = this.db
- .prepare(`SELECT * FROM documents WHERE id = :id`)
- .get({ id });
- const documentTags = this.db
- .prepare(`SELECT tag FROM document_tags WHERE documentId = :documentId`)
- .all({ documentId: id })
- .map((row) => row.tag);
+ const document = await this.knex("documents")
+ .where({ id })
+ .first();
+
+ // todo: test 404 behavior
+ if (!document) {
+ throw new Error(`Document ${id} not found`);
+ }
const filepath = path.join(
await this.preferences.get("NOTES_DIR"),
@@ -53,12 +69,23 @@ export class DocumentsClient {
// freshly load the document from disk to avoid desync issues
// todo: a real strategy for keeping db in sync w/ filesystem, that allows
// loading from db.
- const { contents } = await this.loadDoc(filepath);
+ const { contents, frontMatter } = await this.loadDoc(filepath);
+
+ // todo: Are the dates ever null at this point?
+ frontMatter.createdAt = frontMatter.createdAt || document.createdAt;
+ frontMatter.updatedAt = frontMatter.updatedAt || document.updatedAt;
+
+ // todo: parseChroniclesFrontMatter _should_ migrate my old tags to the new format...
+ // the old code would splice in documentTags at this point...
+ // const documentTags = await this.knex("document_tags")
+ // .where({ documentId: id })
+ // .select("tag");
+ // frontMatter.tags = frontMatter.tags || documentTags.map((t) => t.tag);
return {
...document,
- contents,
- tags: documentTags,
+ frontMatter,
+ content: contents,
};
};
@@ -68,18 +95,19 @@ export class DocumentsClient {
// const rootDir = await this.preferences.get("NOTES_DIR");
// todo: sha comparison
const contents = await Files.read(path);
- const { frontMatter, body } = parseChroniclesFrontMatter(contents);
+ const stats = await fs.promises.stat(path);
+ const { frontMatter, body } = parseChroniclesFrontMatter(contents, stats);
return { contents: body, frontMatter };
};
del = async (id: string, journal: string) => {
await this.files.deleteDocument(id, journal);
- this.db.prepare("delete from documents where id = :id").run({ id });
+ await this.knex("documents").where({ id }).del();
};
search = async (q?: SearchRequest): Promise => {
- let query = this.knex("documents");
+ let query = this.knex("documents");
// filter by journal
if (q?.journals?.length) {
@@ -133,46 +161,16 @@ export class DocumentsClient {
return { data: [] };
};
- /**
- * Create or update a document and its tags
- *
- * todo: test; for tags: test prefix is removed, spaces are _, lowercased, max length
- * todo: test description max length
- *
- * @returns - The document as it exists after the save
- */
- save = async (args: SaveRequest): Promise => {
- // de-dupe tags -- should happen before getting here.
- args.tags = Array.from(new Set(args.tags));
- let id;
-
- args.title = args.title;
- args.updatedAt = args.updatedAt || new Date().toISOString();
-
- if (args.id) {
- this.updateDocument(args);
- id = args.id;
- } else {
- args.createdAt = new Date().toISOString();
- args.updatedAt = new Date().toISOString();
- [id] = await this.createDocument(args);
- }
-
- return this.findById({ id });
- };
+ // Extend front-matter (if any) with Chronicles standard properties, then
+ // add to serialized document contents.
+ private prependFrontMatter = (
+ contents: string,
+ frontMatter: Record,
+ ) => {
+ // need to re-add ---, and also double-newline the ending frontmatter
+ const fm = ["---", yaml.stringify(frontMatter), "---"].join("\n");
- /**
- * Convert the properties we track to frontmatter
- */
- contentsWithFrontMatter = (document: SaveRequest) => {
- const fm = `---
-title: ${document.title}
-tags: ${document.tags.join(", ")}
-createdAt: ${document.createdAt}
-updatedAt: ${document.updatedAt}
----`;
-
- return `${fm}\n\n${document.content}`;
+ return `${fm}\n\n${contents}`;
};
/**
@@ -181,42 +179,76 @@ updatedAt: ${document.updatedAt}
* @param index - Whether to index the document - set to false when importing (we import, then call `sync` instead)
*/
createDocument = async (
- args: SaveRequest,
+ args: CreateRequest,
index: boolean = true,
): Promise<[string, string]> => {
const id = args.id || uuidv7obj().toHex();
- const content = this.contentsWithFrontMatter(args);
+ args.frontMatter.tags = Array.from(new Set(args.frontMatter.tags));
+ args.frontMatter.createdAt =
+ args.frontMatter.createdAt || new Date().toISOString();
+ args.frontMatter.updatedAt =
+ args.frontMatter.updatedAt || new Date().toISOString();
+
+ const content = this.prependFrontMatter(args.content, args.frontMatter);
const docPath = await this.files.uploadDocument(
{ id, content },
args.journal,
);
if (index) {
- return [await this.createIndex({ id, ...args }), docPath];
+ return [
+ await this.createIndex({
+ id,
+ journal: args.journal,
+ content,
+ frontMatter: args.frontMatter,
+ }),
+ docPath,
+ ];
} else {
return [id, docPath];
}
};
- private updateDocument = async (args: SaveRequest): Promise => {
- const content = this.contentsWithFrontMatter(args);
+ updateDocument = async (args: UpdateRequest): Promise => {
+ if (!args.id) throw new Error("id required to update document");
+
+ args.frontMatter.tags = Array.from(new Set(args.frontMatter.tags));
+ // todo: I think we accept this from the client now and just expect
+ // callers to update updatedAt, to support importers and sync manually configuring
+ // this...
+ args.frontMatter.updatedAt =
+ args.frontMatter.updatedAt || new Date().toISOString();
- const origDoc = await this.findById({ id: args.id! });
- await this.files.uploadDocument({ id: args.id!, content }, args.journal);
+ const content = this.prependFrontMatter(args.content, args.frontMatter);
- // sigh; this is a bit of a mess
+ const origDoc = await this.findById({ id: args.id });
+ await this.files.uploadDocument({ id: args.id, content }, args.journal);
+
+ // sigh; this is a bit of a mess.
if (origDoc.journal !== args.journal) {
+ // delete the original markdown file, in the old journal
// no await, optimistic delete
this.files.deleteDocument(args.id!, origDoc.journal);
+ // update any markdown files which had links pointing to the old journal
+ // only necessary because we use markdown links, i.e. ..//.md
this.updateDependentLinks([args.id!], args.journal);
}
- return await this.updateIndex(args);
+ await this.updateIndex({
+ id: args.id,
+ content,
+ journal: args.journal,
+ frontMatter: args.frontMatter,
+ });
};
// todo: also need to update dependent title, if the title of the original note
// changes...again wikilinks simplify this.
- updateDependentLinks = async (documentIds: string[], journal: string) => {
+ private updateDependentLinks = async (
+ documentIds: string[],
+ journal: string,
+ ) => {
for (const targetId of documentIds) {
const links = await this.knex("document_links").where({
targetId,
@@ -224,7 +256,11 @@ updatedAt: ${document.updatedAt}
for (const link of links) {
const dependentNote = await this.findById({ id: link.documentId });
- console.log("udating links for", dependentNote.title, dependentNote.id);
+    console.log(
+      "updating links for",
+      dependentNote.frontMatter.title,
+      dependentNote.id,
+    );
const mdast = parseMarkdown(dependentNote.content);
const noteLinks = selectNoteLinks(mdast);
@@ -240,7 +276,7 @@ updatedAt: ${document.updatedAt}
}
});
- await this.save({
+ await this.updateDocument({
...dependentNote,
content: mdastToString(mdast),
});
@@ -250,89 +286,71 @@ updatedAt: ${document.updatedAt}
createIndex = async ({
id,
- createdAt,
- updatedAt,
journal,
content,
- title,
- tags,
- }: SaveRequest): Promise => {
+ frontMatter,
+ }: IndexRequest): Promise => {
if (!id) {
throw new Error("id required to create document index");
}
- return this.db.transaction(async () => {
- this.db
- .prepare(
- `INSERT INTO documents (id, journal, content, title, createdAt, updatedAt) VALUES (:id, :journal, :content, :title, :createdAt, :updatedAt)`,
- )
- .run({
- id,
- journal,
- content,
- title,
- // allow passing createdAt to support backfilling prior notes
- createdAt: createdAt || new Date().toISOString(),
- updatedAt: updatedAt || new Date().toISOString(),
- });
+ return this.knex.transaction(async (trx) => {
+ await trx("documents").insert({
+ id,
+ journal,
+ content,
+ title: frontMatter.title,
+ createdAt: frontMatter.createdAt,
+ updatedAt: frontMatter.updatedAt,
+ frontMatter: JSON.stringify(frontMatter || {}),
+ });
- if (tags.length > 0) {
- this.db
- .prepare(
- `INSERT INTO document_tags (documentId, tag) VALUES ${tags.map((tag) => `(:documentId, '${tag}')`).join(", ")}`,
- )
- .run({ documentId: id });
+ if (frontMatter.tags.length > 0) {
+ await trx("document_tags").insert(
+ frontMatter.tags.map((tag: string) => ({ documentId: id, tag })),
+ );
}
- await this.addNoteLinks(id, content);
+ await this.addNoteLinks(trx, id, content);
return id;
- })();
+ });
};
updateIndex = async ({
id,
- createdAt,
- updatedAt,
journal,
content,
- title,
- tags,
- }: SaveRequest): Promise => {
- return this.db.transaction(async () => {
- this.db
- .prepare(
- `UPDATE documents SET journal=:journal, content=:content, title=:title, updatedAt=:updatedAt, createdAt=:createdAt WHERE id=:id`,
- )
- .run({
- id,
+ frontMatter,
+ }: IndexRequest): Promise => {
+ return this.knex.transaction(async (trx) => {
+ await trx("documents")
+ .update({
content,
- title,
+ title: frontMatter.title,
journal,
- updatedAt: updatedAt || new Date().toISOString(),
- createdAt,
- });
-
- // re-create tags to avoid diffing
- this.db
- .prepare(`DELETE FROM document_tags WHERE documentId = :documentId`)
- .run({ documentId: id });
-
- if (tags.length > 0) {
- this.db
- .prepare(
- `INSERT INTO document_tags (documentId, tag) VALUES ${tags.map((tag) => `(:documentId, '${tag}')`).join(", ")}`,
- )
- .run({ documentId: id });
+ updatedAt: frontMatter.updatedAt,
+ frontMatter: JSON.stringify(frontMatter),
+ })
+ .where({ id });
+
+ await trx("document_tags").where({ documentId: id }).del();
+ if (frontMatter.tags.length > 0) {
+ await trx("document_tags").insert(
+ frontMatter.tags.map((tag: string) => ({ documentId: id, tag })),
+ );
}
- // re-create note links to avoid diffing
- await this.knex("document_links").where({ documentId: id }).del();
- await this.addNoteLinks(id!, content);
- })();
+ await trx("document_links").where({ documentId: id }).del();
+ await this.addNoteLinks(trx, id!, content);
+ });
};
- private addNoteLinks = async (documentId: string, content: string) => {
+ private addNoteLinks = async (
+ trx: Knex.Transaction,
+ documentId: string,
+ content: string,
+ ) => {
const mdast = parseMarkdown(content);
const noteLinks = selectNoteLinks(mdast)
.map((link) => parseNoteLink(link.url))
@@ -350,7 +368,7 @@ updatedAt: ${document.updatedAt}
});
if (noteLinks.length > 0) {
- await this.knex("document_links").insert(
+ await trx("document_links").insert(
noteLinksUnique.map((link) => ({
documentId,
targetId: link.noteId,
@@ -363,10 +381,8 @@ updatedAt: ${document.updatedAt}
/**
* When removing a journal, call this to de-index all documents from that journal.
*/
- deindexJournal = (journal: string): void => {
- this.db
- .prepare("DELETE FROM documents WHERE journal = :journal")
- .run({ journal });
+ deindexJournal = (journal: string): Promise => {
+ return this.knex("documents").where({ journal }).del();
};
/**
diff --git a/src/preload/client/importer.ts b/src/preload/client/importer.ts
index b5cf66d..815be7f 100644
--- a/src/preload/client/importer.ts
+++ b/src/preload/client/importer.ts
@@ -11,7 +11,7 @@ import {
} from "./journals";
import { IPreferencesClient } from "./preferences";
import { ISyncClient } from "./sync";
-import { SKIPPABLE_FILES, SKIPPABLE_PREFIXES } from "./types";
+import { FrontMatter, SKIPPABLE_FILES, SKIPPABLE_PREFIXES } from "./types";
import * as mdast from "mdast";
@@ -69,9 +69,8 @@ interface StagedNote {
// may be empty if could not parse body
// correctly
journal: string;
- title: string;
content: string;
- frontMatter: string; // json
+ frontMatter: string; // FrontMatter (json)
// Where this note will end up
chroniclesId: string;
@@ -211,7 +210,7 @@ export class ImporterClient {
try {
// todo: fallback title to filename - uuid
- const { frontMatter, body, title } = parseTitleAndFrontMatter(
+ const { frontMatter, body } = parseTitleAndFrontMatter(
contents,
name,
sourceType,
@@ -222,7 +221,7 @@ export class ImporterClient {
importDir,
journals,
sourceType,
- // See notes in inferOrGenerateJournalName; this is a very specific
+ // See notes in inferOrGenerateJournalName; this is very specific
// to my Notion export.
frontMatter.Category,
);
@@ -234,14 +233,19 @@ export class ImporterClient {
// 2. Whether to use birthtime or mtime
// 3. Which timezone to use
// 4. Whether to use the front-matter date or the file date
- if (!frontMatter.createdAt) {
- frontMatter.createdAt =
- file.stats.birthtime.toISOString() || file.stats.mtime.toISOString();
- }
-
- if (!frontMatter.updatedAt) {
- frontMatter.updatedAt = file.stats.mtime.toISOString();
- }
+ const requiredFm: FrontMatter = {
+ ...frontMatter,
+ tags: frontMatter.tags || [],
+ createdAt:
+ frontMatter.createdAt ||
+ file.stats.birthtime.toISOString() ||
+ file.stats.mtime.toISOString() ||
+ new Date().toISOString(),
+ updatedAt:
+ frontMatter.updatedAt ||
+ file.stats.mtime.toISOString() ||
+ new Date().toISOString(),
+ };
// todo: handle additional kinds of frontMatter; just add a column for them
// and ensure they are not overwritten when editing existing files
@@ -253,10 +257,9 @@ export class ImporterClient {
// hmm... what am I going to do with this? Should it be absolute to NOTES_DIR?
chroniclesPath: `${path.join(journalName, chroniclesId)}.md`,
sourcePath: file.path,
- title,
content: body,
journal: journalName,
- frontMatter: JSON.stringify(frontMatter),
+ frontMatter: JSON.stringify(requiredFm),
status: "pending",
};
@@ -300,7 +303,7 @@ export class ImporterClient {
});
for await (const item of items) {
- const frontMatter = JSON.parse(item.frontMatter);
+ const frontMatter: FrontMatter = JSON.parse(item.frontMatter);
const mdast = stringToMdast(item.content) as any as mdast.Root;
await this.updateNoteLinks(mdast, item, linkMapping, wikiLinkMapping);
@@ -313,7 +316,7 @@ export class ImporterClient {
this.convertWikiLinks(mdast);
- // process tags into front matter
+ // process inline tags into front matter
frontMatter.tags = Array.from(
new Set(this.processAndConvertTags(mdast, frontMatter.tags || [])),
);
@@ -325,10 +328,7 @@ export class ImporterClient {
id: item.chroniclesId,
journal: item.journal, // using name as id
content: mdastToString(mdast),
- title: item.title,
- tags: frontMatter.tags || [],
- createdAt: frontMatter.createdAt,
- updatedAt: frontMatter.updatedAt,
+ frontMatter,
},
false, // don't index; we'll call sync after import
);
diff --git a/src/preload/client/importer/frontmatter.ts b/src/preload/client/importer/frontmatter.ts
index b5afe73..0d8b6ac 100644
--- a/src/preload/client/importer/frontmatter.ts
+++ b/src/preload/client/importer/frontmatter.ts
@@ -1,9 +1,15 @@
+import { Stats } from "fs";
import yaml from "yaml";
+import {
+ mdastToString,
+ parseMarkdown,
+ parseMarkdownForImport,
+} from "../../../markdown";
import { SourceType } from "../importer/SourceType";
+import { FrontMatter } from "../types";
interface ParseTitleAndFrontMatterRes {
- title: string;
- frontMatter: Record;
+ frontMatter: Partial;
body: string;
}
@@ -23,15 +29,76 @@ export const parseTitleAndFrontMatter = (
if (sourceType === "notion") {
return parseTitleAndFrontMatterNotion(contents);
} else {
- // Otherwise for other import types, for now, make no attempt at finding
- // or parsing front matter.
+ return parseTitleAndFrontMatterMarkdown(contents, filename);
+ }
+};
+
+function parseTitleAndFrontMatterMarkdown(
+ contents: string,
+ filename: string,
+): ParseTitleAndFrontMatterRes {
+ const { frontMatter, body } = extractFronMatter(
+ contents,
+ parseMarkdownForImport,
+ );
+
+ frontMatter.title = frontMatter.title || filename;
+ return {
+ frontMatter,
+ body,
+ };
+}
+
+function extractFronMatter(
+ contents: string,
+ parse = parseMarkdown,
+): {
+ frontMatter: Partial;
+ body: string;
+} {
+ const mdast = parse(contents);
+ if (mdast.children[0].type === "yaml") {
+ const frontMatter = yaml.parse(mdast.children[0].value);
+ mdast.children = mdast.children.slice(1);
+ const contents = mdastToString(mdast);
+ return {
+ frontMatter,
+ body: contents,
+ };
+ } else {
return {
- title: filename,
frontMatter: {},
body: contents,
};
}
-};
+}
+
+// extract well formatted front matter from content, and return the front matter and body
+// stats to set defaults and ensure dates are always present
+export function parseChroniclesFrontMatter(content: string, stats: Stats) {
+ const { frontMatter, body } = extractFronMatter(content);
+
+ frontMatter.tags = frontMatter.tags || [];
+ frontMatter.title = frontMatter.title;
+ frontMatter.createdAt =
+ frontMatter.createdAt || (stats.birthtime || stats.mtime).toISOString();
+ frontMatter.updatedAt = frontMatter.updatedAt || stats.mtime.toISOString();
+
+ // Prior version of Chronicles manually encoded as comma separated tags,
+ // then re-parsed out. Now using proper yaml parsing, this can be removed
+ // once all my personal notes are migrated.
+ if ("tags" in frontMatter && typeof frontMatter.tags === "string") {
+ frontMatter.tags = (frontMatter.tags as string)
+ .split(",")
+ .map((tag: string) => tag.trim())
+ .filter(Boolean);
+ }
+
+ return {
+ frontMatter,
+ body,
+ } as { frontMatter: FrontMatter; body: string };
+}
/**
* Parses a string of contents into a title, front matter, and body; strips title / frontmatter
@@ -44,7 +111,9 @@ function parseTitleAndFrontMatterNotion(
const frontMatter = rawFrontMatter.length
? parseExtractedFrontMatter(rawFrontMatter)
: {};
- return { title, frontMatter, body };
+
+ frontMatter.title = title;
+ return { frontMatter, body };
}
/**
@@ -266,54 +335,3 @@ function preprocessRawFrontMatter(content: string) {
})
);
}
-
-function preprocessChroniclesFrontMatter(content: string) {
- // Regular expression to match key-value pairs in front matter
- return content
- .replace(/^(\w+):\s*$/gm, '$1: ""') // Handle keys with no values
- .replace(/^(\w+):\s*(.+)$/gm, (match, key, value) => {
- // Check if value contains special characters that need quoting
- if (value.match(/[:{}[\],&*#?|\-<>=!%@`]/) || value.includes("\n")) {
- // If the value is not already quoted, wrap it in double quotes
- if (!/^['"].*['"]$/.test(value)) {
- // Escape any existing double quotes in the value
- value = value.replace(/"/g, '\\"');
- return `${key}: "${value}"`;
- }
- }
- return match; // Return unchanged if no special characters
- });
-}
-
-// naive frontmatter parser for files formatted in chronicles style...
-// which just means a regular markdown file + yaml front matter
-// ... todo: use remark ecosystem parser
-export function parseChroniclesFrontMatter(content: string) {
- // Regular expression to match front matter (--- at the beginning and end)
- const frontMatterRegex = /^---\n([\s\S]*?)\n---\n*/;
-
- // Match the front matter
- const match = content.match(frontMatterRegex);
- if (!match) {
- return {
- frontMatter: {}, // No front matter found
- body: content, // Original content without changes
- };
- }
-
- // Extract front matter and body
- const frontMatterContent = preprocessChroniclesFrontMatter(match[1]);
- const body = content.slice(match[0].length); // Content without front matter
-
- // Parse the front matter using yaml
- const frontMatter = yaml.parse(frontMatterContent);
- frontMatter.tags = frontMatter.tags
- .split(",")
- .map((tag: string) => tag.trim())
- .filter(Boolean);
-
- return {
- frontMatter,
- body,
- };
-}
diff --git a/src/preload/client/importer/importer.test.ts b/src/preload/client/importer/importer.test.ts
index 2f9a6bb..452553a 100644
--- a/src/preload/client/importer/importer.test.ts
+++ b/src/preload/client/importer/importer.test.ts
@@ -25,11 +25,11 @@ function runFrontmatterTests(importer: ImporterClient) {
console.error(testCase.input);
break;
} else {
- if (result.title !== testCase.expected.title) {
+ if (result.frontMatter.title !== testCase.expected.title) {
console.error("FAILED:", testCase.expected.title);
console.error("FAILED title");
console.error("We should have:", testCase.expected.title);
- console.error("We got:", result.title);
+ console.error("We got:", result.frontMatter.title);
console.error();
break;
}
diff --git a/src/preload/client/index.ts b/src/preload/client/index.ts
index 0aabc77..0221604 100644
--- a/src/preload/client/index.ts
+++ b/src/preload/client/index.ts
@@ -27,6 +27,10 @@ const knex = Knex({
connection: {
filename: settings.get("DATABASE_URL") as string,
},
+ // https://knexjs.org/guide/query-builder.html#insert
+ // don't replace undefined with "DEFAULT" in insert statements; replace
+ // it with NULL instead (SQLite raises otherwise)
+ useNullAsDefault: true,
});
export { GetDocumentResponse } from "./types";
diff --git a/src/preload/client/preferences.ts b/src/preload/client/preferences.ts
index ca80a39..cc2f65e 100644
--- a/src/preload/client/preferences.ts
+++ b/src/preload/client/preferences.ts
@@ -2,7 +2,7 @@ import { ipcRenderer } from "electron";
import Store from "electron-store";
export interface Preferences {
- CACHE_DIR: string;
+ DATABASE_URL: string;
DEFAULT_JOURNAL: string | null;
ARCHIVED_JOURNALS: Record;
NOTES_DIR: string;
@@ -10,7 +10,7 @@ export interface Preferences {
}
const defaults = (): Preferences => ({
- CACHE_DIR: "",
+ DATABASE_URL: "",
DEFAULT_JOURNAL: null,
ARCHIVED_JOURNALS: {},
NOTES_DIR: "",
diff --git a/src/preload/client/sync.ts b/src/preload/client/sync.ts
index d50dff9..f0301c8 100644
--- a/src/preload/client/sync.ts
+++ b/src/preload/client/sync.ts
@@ -8,11 +8,7 @@ import { IDocumentsClient } from "./documents";
import { IFilesClient } from "./files";
import { IJournalsClient } from "./journals";
import { IPreferencesClient } from "./preferences";
-import {
- GetDocumentResponse,
- SKIPPABLE_FILES,
- SKIPPABLE_PREFIXES,
-} from "./types";
+import { SKIPPABLE_FILES, SKIPPABLE_PREFIXES } from "./types";
export type ISyncClient = SyncClient;
@@ -44,20 +40,6 @@ export class SyncClient {
private preferences: IPreferencesClient,
) {}
- /**
- * Convert the properties we track to frontmatter
- */
- contentsWithFrontMatter = (document: GetDocumentResponse) => {
- const fm = `---
-title: ${document.title}
-tags: ${document.tags.join(", ")}
-createdAt: ${document.createdAt}
-updatedAt: ${document.updatedAt}
----`;
-
- return `${fm}\n\n${document.content}`;
- };
-
/**
* Sync the notes directory with the database
*/
@@ -132,19 +114,12 @@ updatedAt: ${document.updatedAt}
const { contents, frontMatter } = await this.documents.loadDoc(file.path);
- // todo: handle additional kinds of frontMatter; just add a column for them
- // and ensure they are not overwritten when editing existing files
- // https://github.com/cloverich/chronicles/issues/127
-
try {
await this.documents.createIndex({
id: documentId,
journal: dirname, // using name as id
content: contents,
- title: frontMatter.title,
- tags: frontMatter.tags || [],
- createdAt: frontMatter.createdAt,
- updatedAt: frontMatter.updatedAt,
+ frontMatter,
});
syncedCount++;
} catch (e) {
@@ -161,14 +136,19 @@ updatedAt: ${document.updatedAt}
}
}
- // Ensure default journal exists; attempt to declare one otherwise
+ // Ensure default journal exists; attempt to declare one. Otherwise,
+ // new documents will default to a journal that does not exist, and fail
+ // to create.
const defaultJournal = await this.preferences.get("DEFAULT_JOURNAL");
if (!defaultJournal || !(defaultJournal in journals)) {
console.log("updating default journal", defaultJournal, journals);
- if (journals.length) {
- await this.preferences.set("DEFAULT_JOURNAL", journals[0]);
+ if (Object.keys(journals).length) {
+ await this.preferences.set("DEFAULT_JOURNAL", Object.keys(journals)[0]);
+ } else {
+ await this.journals.create({ name: "default_journal" });
+ await this.preferences.set("DEFAULT_JOURNAL", "default_journal");
}
}
diff --git a/src/preload/client/types.ts b/src/preload/client/types.ts
index bc3f15e..4d1c13d 100644
--- a/src/preload/client/types.ts
+++ b/src/preload/client/types.ts
@@ -34,12 +34,9 @@ export type JournalResponse = {
export interface GetDocumentResponse {
id: string;
- createdAt: string;
- updatedAt: string;
- title?: string;
content: string;
journal: string;
- tags: string[];
+ frontMatter: FrontMatter;
}
/**
@@ -113,19 +110,33 @@ export interface SaveMdastRequest {
date: string;
mdast: any;
}
-// export type SaveRequest = SaveRawRequest | SaveMdastRequest;
-export interface SaveRequest {
+export interface CreateRequest {
id?: string;
journal: string;
content: string;
+ frontMatter: FrontMatter;
+}
+
+export interface UpdateRequest extends CreateRequest {
+ id: string;
+}
+
+// Arbitrary front matter is allowed, but a subset of properties
+// is tracked as first-class citizens by the application
+export interface FrontMatter {
title?: string;
tags: string[];
+ createdAt: string;
+ updatedAt: string;
+ [key: string]: any;
+}
- // these included for override, originally,
- // to support the import process
- createdAt?: string;
- updatedAt?: string;
+export interface IndexRequest {
+ id: string;
+ journal: string;
+ content: string;
+ frontMatter: FrontMatter;
}
// Nobody would put node_modules in their note directory... right?
diff --git a/src/views/create/index.tsx b/src/views/create/index.tsx
index 983d711..0cb318f 100644
--- a/src/views/create/index.tsx
+++ b/src/views/create/index.tsx
@@ -66,18 +66,25 @@ function useCreateDocument() {
}
try {
- const document = await client.documents.save({
+ const document = {
content: "",
journal: journal,
- tags: searchStore.selectedTags,
- });
+ frontMatter: {
+ title: undefined,
+ tags: searchStore.selectedTags,
+ createdAt: new Date().toISOString(),
+ updatedAt: new Date().toISOString(),
+ },
+ };
+
+ const [id, _] = await client.documents.createDocument(document);
if (!isMounted) return;
// Ensure the document is added to the search, so its available when user hits
// back (even when that doesn't make sense!)
- searchStore.updateSearch(document, "create");
- navigate(`/documents/edit/${document.id}`, { replace: true });
+ searchStore.updateSearch({ ...document, id }, "create");
+ navigate(`/documents/edit/${id}`, { replace: true });
} catch (err) {
console.error("Error creating document", err);
if (!isMounted) return;
diff --git a/src/views/documents/SearchStore.ts b/src/views/documents/SearchStore.ts
index 463b9fa..222b25d 100644
--- a/src/views/documents/SearchStore.ts
+++ b/src/views/documents/SearchStore.ts
@@ -7,21 +7,30 @@ import { SearchToken } from "./search/tokens";
export interface SearchItem {
id: string;
+ journal: string;
createdAt: string;
title?: string;
+}
+
+interface DocumentBase {
+ id: string;
journal: string;
+ frontMatter: {
+ createdAt: string;
+ title?: string;
+ };
}
// Accepts any document satisfying the SearchItem interface, and copies properties
// into an actual SearchItem; i.e. I dont want to stuff an EditableDocument or other smart
// object into search results b/c may get weird.
-function toSearchItem(doc: SearchItem): SearchItem | null {
+function toSearchItem(doc: DocumentBase): SearchItem | null {
if (!doc.id) return null;
return {
id: doc.id,
- createdAt: doc.createdAt,
- title: doc.title,
+ createdAt: doc.frontMatter.createdAt,
+ title: doc.frontMatter.title,
journal: doc.journal,
};
}
@@ -137,7 +146,7 @@ export class SearchStore {
* in the search results.
*/
updateSearch = (
- document: SearchItem,
+ document: DocumentBase,
operation: "edit" | "create" | "del" = "edit",
) => {
const idx = this.docs.findIndex((d) => d.id === document.id);
diff --git a/src/views/edit/EditableDocument.ts b/src/views/edit/EditableDocument.ts
index 3bdc5a4..cdd2849 100644
--- a/src/views/edit/EditableDocument.ts
+++ b/src/views/edit/EditableDocument.ts
@@ -1,9 +1,9 @@
import { toaster } from "evergreen-ui";
-import { debounce, pick } from "lodash";
+import { debounce } from "lodash";
import { IReactionDisposer, computed, observable, reaction, toJS } from "mobx";
import { IClient } from "../../hooks/useClient";
import * as SlateCustom from "../../markdown/remark-slate-transformer/transformers/mdast-to-slate";
-import { GetDocumentResponse } from "../../preload/client/types";
+import { FrontMatter, GetDocumentResponse } from "../../preload/client/types";
import { SlateTransformer } from "./SlateTransformer";
function isExistingDocument(
@@ -48,12 +48,18 @@ export class EditableDocument {
@observable id: string;
@observable createdAt: string;
@observable updatedAt: string; // read-only outside this class
- @observable tags: string[] = [];
+ @observable tags: string[];
+ @observable frontMatter: FrontMatter;
// editor properties
slateContent: SlateCustom.SlateNode[];
@observable private changeCount = 0;
+  // todo: introduce a save queue. We currently save too often, but this is needed until
+  // exiting a note while a save is in progress is supported; track and report saveCount
+  // to discover whether this is a major issue in practice.
+ saveCount = 0;
+
// reaction clean-up when component unmounts; see constructor
teardown?: IReactionDisposer;
@@ -61,13 +67,14 @@ export class EditableDocument {
private client: IClient,
doc: GetDocumentResponse,
) {
- this.title = doc.title;
+ this.title = doc.frontMatter.title;
this.journal = doc.journal;
this.content = doc.content;
this.id = doc.id;
- this.createdAt = doc.createdAt;
- this.updatedAt = doc.updatedAt;
- this.tags = doc.tags;
+ this.createdAt = doc.frontMatter.createdAt;
+ this.updatedAt = doc.frontMatter.updatedAt;
+ this.tags = doc.frontMatter.tags;
+ this.frontMatter = doc.frontMatter;
const content = doc.content;
const slateNodes = SlateTransformer.nodify(content);
this.slateContent = slateNodes;
@@ -107,48 +114,53 @@ export class EditableDocument {
}
};
- save = debounce(async () => {
- if (this.saving || !this.dirty) return;
- this.saving = true;
-
- // note: Immediately reset dirty so if edits happen while (auto) saving,
- // it can call save again on completion
- // Error case is kind of hacky but unlikely an issue in practice
- this.dirty = false;
-
- this.content = SlateTransformer.stringify(toJS(this.slateContent));
- let wasError = false;
-
- try {
- // note: I was passing documentId instead of id, and because id is optional in save it wasn't complaining.
- // Maybe 'save' and optional, unvalidated params is a bad idea :|
- const res = await this.client.documents.save(
- pick(
- toJS(this),
- "title",
- "content",
- "journal",
- "id",
- "createdAt",
- "tags",
- ),
- );
- this.id = res.id;
- this.createdAt = res.createdAt;
- this.updatedAt = res.updatedAt;
- } catch (err) {
- this.saving = false;
- this.dirty = true;
- wasError = true;
- toaster.danger(JSON.stringify(err));
- } finally {
- this.saving = false;
-
- // if edits made after last save attempt, re-run
- // Check error to avoid infinite save loop
- if (this.dirty && !wasError) this.save();
- }
- }, 1000);
+ save = debounce(
+ async () => {
+ if (this.saving || !this.dirty) return;
+ this.saving = true;
+
+ // note: Immediately reset dirty so if edits happen while (auto) saving,
+ // it can call save again on completion
+ // Error case is kind of hacky but unlikely an issue in practice
+ this.dirty = false;
+
+ this.content = SlateTransformer.stringify(toJS(this.slateContent));
+ let wasError = false;
+
+ try {
+ this.updatedAt = this.frontMatter.updatedAt = new Date().toISOString();
+ this.frontMatter.title = this.title;
+ this.frontMatter.tags = this.tags;
+
+        // todo: is toJS necessary here, i.e. does copying this.journal into a plain object lose the mobx Proxy or not?
+ // todo: use mobx viewmodel over GetDocumentResponse; track frontMatter properties directly rather
+ // than copying back and forth
+ await this.client.documents.updateDocument(
+ toJS({
+ journal: this.journal,
+ content: this.content,
+ id: this.id,
+ frontMatter: toJS(this.frontMatter),
+ }),
+ );
+ this.saveCount++;
+ } catch (err) {
+ this.saving = false;
+ this.dirty = true;
+ wasError = true;
+ console.error("Error saving document", err);
+ toaster.danger(JSON.stringify(err));
+ } finally {
+ this.saving = false;
+
+ // if edits made after last save attempt, re-run
+ // Check error to avoid infinite save loop
+ if (this.dirty && !wasError) this.save();
+ }
+ },
+ 3000,
+ { leading: true },
+ );
del = async () => {
// overload saving for deleting
diff --git a/src/views/edit/loading.tsx b/src/views/edit/loading.tsx
index 85299c2..20103dd 100644
--- a/src/views/edit/loading.tsx
+++ b/src/views/edit/loading.tsx
@@ -23,7 +23,7 @@ export const EditLoadingComponent = observer((props: LoadingComponentProps) => {
border="none"
icon={ChevronLeftIcon}
className="drag-none"
- onClick={() => {}}
+ onClick={() => navigate(-1)}
marginRight={8}
>
Back to documents
@@ -32,7 +32,9 @@ export const EditLoadingComponent = observer((props: LoadingComponentProps) => {
-
+
+ {props.error && props.error?.message}
+
>
diff --git a/src/views/edit/useEditableDocument.ts b/src/views/edit/useEditableDocument.ts
index 6c42481..ca65941 100644
--- a/src/views/edit/useEditableDocument.ts
+++ b/src/views/edit/useEditableDocument.ts
@@ -66,7 +66,14 @@ export function useEditableDocument(documentId: string) {
load();
return () => {
- if (state.document?.teardown) state.document.teardown();
+ if (state.document?.teardown) {
+ console.log(
+ `save count for ${state.document.id}: ${state.document.saveCount}`,
+ );
+ state.document.teardown();
+ }
+ if (state.document?.saveCount)
+ console.log("saved", state.document.saveCount, "times");
};
}, [documentId]);
diff --git a/src/views/preferences/index.tsx b/src/views/preferences/index.tsx
index a4a4763..a58aff0 100644
--- a/src/views/preferences/index.tsx
+++ b/src/views/preferences/index.tsx
@@ -194,7 +194,7 @@ const Preferences = observer(() => {
The current Chronicles cache is located at{" "}
- {store.preferences.CACHE_DIR}
+ {store.preferences.DATABASE_URL}