Add more tests and readme docs
This commit is contained in:
parent bc5ddf3c23
commit d964f1149e
README.md (80 changed lines)

@@ -1,3 +1,83 @@
# Hammerstone

Load and manipulate Obsidian vault data

## API

### `new Vault(vaultRootPath [, options])`

Create a new `Vault` object. Searches the given directory for markdown files, then loads each document and parses its frontmatter.

#### Options

| Option           | Description                                                | Default |
| ---------------- | ---------------------------------------------------------- | ------- |
| `ignorePatterns` | An optional array of globs to ignore when discovering docs | `[]`    |
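
For example, a minimal sketch (assuming `Vault` is the package's default export; the vault path and ignore glob are placeholders):

```ts
import Vault from "@endeavorance/hammerstone";

// Discover, load, and parse every markdown doc in the vault,
// skipping anything under Drafts/
const vault = new Vault("./my-vault", {
  ignorePatterns: ["**/Drafts/**"],
});
```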

### `vault.process(fn)`

Process all documents in the vault. Each document is passed to the provided function.

Returns a reference to the vault to allow for chaining of document processing flows.
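
A sketch of a processing pass (the `reviewed` frontmatter key is illustrative):

```ts
vault
  .process((doc) => {
    // Stamp every document's frontmatter
    doc.setFrontmatter({ ...doc.frontmatter, reviewed: true });
  })
  // `process` returns the vault, so calls can be chained
  .write();
```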

### `vault.scope(fn)`

Returns a `VaultView` containing only the documents for which the provided function returns a truthy value (similar to `Array.filter()`).

A `VaultView` has the same `.process()` function as a `Vault`. You can also call `.unscope()` on a `VaultView`, which returns a reference to the original vault, allowing you to chain processing flows that dive in and out of different filtered scopes.
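
For example (the tag names and frontmatter keys here are illustrative):

```ts
vault
  .scope((doc) => doc.hasTag("recipe"))
  .process((doc) => doc.setFrontmatter({ ...doc.frontmatter, type: "recipe" }))
  .unscope() // back to the full vault
  .scope((doc) => doc.hasTag("draft"))
  .process((doc) => doc.setFrontmatter({ ...doc.frontmatter, draft: true }))
  .unscope();
```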

### `vault.write()`

Write all vault documents back to disk.

### `vault.documents`

(Property) An array of all documents in this vault.

### `vault.index`

(Property) A map from document slugs to the documents themselves.
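
For example, documents can be iterated in bulk or looked up by slug (the slug value is illustrative):

```ts
for (const doc of vault.documents) {
  console.log(doc.frontmatter);
}

const note = vault.index["my-note"]; // may be undefined if the slug is unknown
```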

### `new MarkdownDocument(filePath, vault)`

A `MarkdownDocument` object represents a single document in the `Vault`. Generally, the `Vault` itself handles creating these objects, which can be accessed via the `.documents` or `.index` properties on the `Vault`.

### `markdownDocument.setMarkdown(markdownContent)`

Set the markdown content of this document (separately from the YAML frontmatter).

### `markdownDocument.setFrontmatter(frontmatterShape)`

Set the frontmatter content of this document (separately from the markdown).

### `markdownDocument.setContent(newContent)`

Set the full text content of this document (raw YAML frontmatter and markdown).
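
Each setter keeps the document's full content in sync; for example (field names and text are illustrative):

```ts
doc.setMarkdown("New body text.");
doc.setFrontmatter({ ...doc.frontmatter, edited: true });

// Or replace frontmatter and markdown in a single call:
doc.setContent(`---
edited: true
---

New body text.
`);
```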

### `markdownDocument.hasTag(tag)`

Check if this document is tagged with the given tag.

### `markdownDocument.hasTaxonomy(dirs)`

Check if this document exists under the given directory structure, expressed as an array of directory names.
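
For instance (tag and directory names are illustrative):

```ts
doc.hasTag("recipe"); // true if tagged #recipe in the markdown or frontmatter
doc.hasTaxonomy(["Projects", "Active"]); // true if the doc lives under Projects/Active/
```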

### `markdownDocument.revert()`

Revert any changes to this document, restoring its originally loaded content.

### `markdownDocument.write()`

Write this file back to disk.

### `markdownDocument.markdown`

(Property) The markdown contents of this document.

### `markdownDocument.frontmatter`

(Property) The frontmatter contents of this document.

### `markdownDocument.content`

(Property) The full content of this document.
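
Putting the document methods together (a sketch; the markdown strings are placeholders):

```ts
doc.setMarkdown("An experimental rewrite");
doc.revert(); // back to the content originally loaded from disk

doc.setMarkdown("A change worth keeping");
doc.write(); // persist this document to its file on disk
```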

package-lock.json (generated; 6 changed lines)

@@ -1,13 +1,13 @@
 {
   "name": "@endeavorance/hammerstone",
-  "version": "0.0.1",
+  "version": "0.0.4",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@endeavorance/hammerstone",
-      "version": "0.0.1",
-      "license": "ISC",
+      "version": "0.0.4",
+      "license": "CC BY-NC-SA 4.0",
       "dependencies": {
         "glob": "^10.3.12",
         "lodash-es": "^4.17.21",

package.json

@@ -1,6 +1,6 @@
 {
   "name": "@endeavorance/hammerstone",
-  "version": "0.0.4",
+  "version": "0.1.0",
   "description": "Load and manipulate Obsidian vault data",
   "type": "module",
   "exports": "./bin/hammerstone.js",

src/_test/document.test.ts (new file; 93 lines)

@@ -0,0 +1,93 @@
import test, { describe } from "node:test";
import assert from "node:assert";
import MarkdownDocument from "../document.js";
import Vault from "../vault.js";

// Load the shared test vault and return the known test document
function getTestDocument(): MarkdownDocument {
  const vault = new Vault("./bin/_test/test-vault");

  if (!vault.slugs.includes("testdoc")) {
    assert.fail("Could not find test document");
  }

  return vault.index["testdoc"]!;
}

describe("MarkdownDocument", () => {
  test("markdown and frontmatter are parsed out", () => {
    const doc = getTestDocument();
    assert.deepStrictEqual(doc.frontmatter, {
      property: "value",
      cssclasses: [],
      tags: [],
      aliases: [],
    });

    assert.equal(
      doc.markdown.trim(),
      "This is a test doc for use with testing docs.",
    );
  });

  test("changing the frontmatter updates the full document contents", () => {
    const doc = getTestDocument();
    const originalContents = doc.content;
    doc.setFrontmatter({
      ...doc.frontmatter,
      example: "hello",
    });

    assert.notEqual(doc.content, originalContents);
  });

  test("changing the markdown updates the full document contents", () => {
    const doc = getTestDocument();
    const originalContents = doc.content;
    doc.setMarkdown("Hello, world.");

    assert.notEqual(doc.content, originalContents);
  });

  test("setting the full content updates markdown and frontmatter", () => {
    const doc = getTestDocument();
    doc.setContent(`---
hello: world
---

Testing!
`);

    assert.deepStrictEqual(doc.frontmatter, {
      hello: "world",
      tags: [],
      aliases: [],
      cssclasses: [],
    });
    assert.equal(doc.markdown.trim(), "Testing!");
  });

  test("reverting works", () => {
    const doc = getTestDocument();
    const originalContents = doc.content;
    doc.setFrontmatter({
      ...doc.frontmatter,
      example: "hello",
    });
    doc.revert();

    assert.equal(doc.content, originalContents);
  });

  test("checking for tags", () => {
    const doc = getTestDocument();
    const taggedDoc = doc.vault.index["directory-subdirectory-i-have-tags"]!;
    assert.equal(taggedDoc.hasTag("tags"), true);
    assert.equal(doc.hasTag("tags"), false);
  });

  test("checking for taxonomy", () => {
    const doc = getTestDocument();
    const taggedDoc = doc.vault.index["directory-subdirectory-i-have-tags"]!;
    assert.equal(taggedDoc.hasTaxonomy(["Directory", "Subdirectory"]), true);
    assert.equal(doc.hasTaxonomy(["Directory", "Subdirectory"]), false);
  });
});

src/_test/main.test.ts (deleted; 29 lines)

@@ -1,29 +0,0 @@
import test, { describe } from "node:test";
import assert from "node:assert";
import Vault from "../vault.js";

describe("Vault", () => {
  test("load a vault", () => {
    const vault = new Vault("./bin/_test/test-vault");
    assert.equal(vault.size, 4, "Unexpected number of documents in vault");
  });

  test("ignore paths", () => {
    const vault = new Vault("./bin/_test/test-vault", {
      ignorePatterns: ["**/Ignoreme/**"],
    });
    assert.equal(vault.size, 3, "Unexpected number of documents in vault");
  });

  test("reading tags", () => {
    const vault = new Vault("./bin/_test/test-vault");
    const taggedFile = vault.index["directory-subdirectory-i-have-tags"];

    if (taggedFile === undefined) {
      assert.fail("Expected file with tags");
    }

    const expectedTags = ["propertytag", "tags", "hashtags"];
    assert.deepStrictEqual(taggedFile.tags, expectedTags);
  });
});

@@ -4,7 +4,10 @@ import process from "node:process";
 import path from "node:path";
 
 run({
-  files: [path.resolve("./bin/_test/main.test.js")],
+  files: [
+    path.resolve("./bin/_test/vault.test.js"),
+    path.resolve("./bin/_test/document.test.js"),
+  ],
 })
   .on("test:fail", () => {
     process.exitCode = 1;

src/_test/test-vault/TestDoc.md (new file; 5 lines)

@@ -0,0 +1,5 @@
---
property: value
---

This is a test doc for use with testing docs.

src/_test/vault.test.ts (new file; 65 lines)

@@ -0,0 +1,65 @@
import test, { describe } from "node:test";
import assert from "node:assert";
import Vault from "../vault.js";
import path from "node:path";

const DOCS_IN_TEST_VAULT = 5;
const EXCLUDED_DOCS = 1;

describe("Vault", () => {
  test("load a vault", () => {
    const vault = new Vault("./bin/_test/test-vault");
    assert.equal(
      vault.size,
      DOCS_IN_TEST_VAULT,
      "Unexpected number of documents in vault",
    );
  });

  test("vault paths are made absolute", () => {
    const vault = new Vault("./bin/_test/test-vault");
    assert.equal(
      path.isAbsolute(vault.vaultPath),
      true,
      "Vault path was not formatted to be absolute",
    );
  });

  test("ignored paths are not loaded into the vault", () => {
    const vault = new Vault("./bin/_test/test-vault", {
      ignorePatterns: ["**/Ignoreme/**"],
    });
    assert.equal(
      vault.size,
      DOCS_IN_TEST_VAULT - EXCLUDED_DOCS,
      "Unexpected number of documents in vault",
    );
  });

  test("document tags are properly parsed", () => {
    const vault = new Vault("./bin/_test/test-vault");
    const taggedFile = vault.index["directory-subdirectory-i-have-tags"];

    if (taggedFile === undefined) {
      assert.fail("Expected file with tags");
    }

    const expectedTags = ["propertytag", "tags", "hashtags"];
    assert.deepStrictEqual(taggedFile.tags, expectedTags);
  });

  test("views can generate filtered versions of a vault", () => {
    const vault = new Vault("./bin/_test/test-vault");
    const view = vault.scope((doc) => doc.hasTag("tags"));
    assert.equal(view.size, 1);
  });

  test("processing documents", () => {
    const vault = new Vault("./bin/_test/test-vault");
    vault.process((doc) => doc.setMarkdown(""));

    for (const doc of vault.documents) {
      assert.equal(doc.markdown, "");
    }
  });
});

src/document.ts

@@ -26,6 +26,9 @@ export default class MarkdownDocument
   /** The name of this document's file, without the file extension */
   readonly filename: string;
 
+  /** An array of tags found in this document from both the markdown and frontmatter */
+  tags: string[];
+
   /** A url-safe version of this file's name */
   readonly slug: string;
 

@@ -33,7 +36,7 @@
   private _content: string;
 
   /** The history of this files contents after each modification */
-  readonly contentHistory: string[] = [];
+  readonly contentHistory: string[];
 
   /** The parsed frontmatter of the file */
   private _frontmatter: FrontmatterShape;

@@ -44,8 +47,6 @@
   /** The markdown portion of the file (without frontmatter) */
   private _markdown: string;
 
-  tags: string[];
-
   constructor(filePath: string, vault: Vault) {
     if (!existsSync(filePath)) {
       throw new Error(`File not found: ${filePath}`);

@@ -67,12 +68,14 @@
     this._markdown = extractMarkdown(rawFileContent);
     this.vault = vault;
 
+    // Identify tags from content and frontmatter
+    const frontmatterTags = frontmatter.tags;
+    const contentTags = extractTags(this._markdown);
+    this.tags = frontmatterTags.concat(contentTags);
   }
 
   /**
    * @returns A serializable shape for this object
    */
   toJSON() {
     return {
       path: this.path,

@@ -85,47 +88,51 @@
     };
   }
 
-  set markdown(newValue: string) {
+  /**
+   * Update the markdown content for the document
+   * @param newValue The new markdown content for the document
+   */
+  setMarkdown(newValue: string) {
     this._markdown = newValue;
     this._content = combineMarkdownAndFrontmatter(newValue, this.frontmatter);
     this.contentHistory.push(this._content);
   }
 
-  /** The markdown portion of the file (without frontmatter) */
-  get markdown() {
-    return this._markdown;
-  }
-
-  set frontmatter(newValue: FrontmatterShape) {
+  /**
+   * Update the frontmatter of the document
+   * @param newValue The new frontmatter shape for this document
+   */
+  setFrontmatter(newValue: FrontmatterShape) {
     this._frontmatter = newValue;
-    this._content = combineMarkdownAndFrontmatter(
-      this._markdown,
-      this._frontmatter,
-    );
+    this._content = combineMarkdownAndFrontmatter(this._markdown, newValue);
     this.contentHistory.push(this._content);
   }
 
-  /** The parsed frontmatter of the file */
-  get frontmatter() {
-    return this._frontmatter;
-  }
-
-  /** The raw content of the file */
-  get content() {
-    return this._content;
-  }
-
-  set content(newValue: string) {
+  /**
+   * Update the full content of the doc with a combination of markdown and yaml frontmatter
+   * @param newValue The full content of the document to set
+   */
+  setContent(newValue: string) {
     this._content = newValue;
     this._frontmatter = extractFrontmatter(newValue);
     this._markdown = extractMarkdown(newValue);
     this.contentHistory.push(this._content);
   }
 
   /**
    * Check if this document has a specific tag
    * @param tag The tag to look for
    * @returns `true` If this document has the given tag
    */
   hasTag(tag: string) {
     return this.tags.includes(tag);
   }
 
   /**
    * Given an array of directory names, return `true` if this document has the taxonomy
    * @param dirs Directories to check for in the taxonomy
    * @returns `true` If this document shares the given taxonomy
    */
   hasTaxonomy(dirs: string[]) {
     if (dirs.length < this.taxonomy.length) {
       return false;

@@ -140,6 +147,22 @@
     return true;
   }
 
+  /**
+   * Revert this document to its original loaded content
+   */
+  revert() {
+    const originalCopy = this.contentHistory[0];
+
+    if (originalCopy === undefined) {
+      throw new Error("Document has no history to revert to.");
+    }
+
+    this.setContent(originalCopy);
+  }
+
+  /**
+   * Write this document back to disk
+   */
   write() {
     writeFileSync(
       this.path,

@@ -147,4 +170,19 @@
       "utf-8",
     );
   }
+
+  /** The markdown portion of the file (without frontmatter) */
+  get markdown() {
+    return this._markdown;
+  }
+
+  /** The parsed frontmatter of the file */
+  get frontmatter() {
+    return this._frontmatter;
+  }
+
+  /** The raw content of the file */
+  get content() {
+    return this._content;
+  }
 }

@@ -14,6 +14,7 @@ const EMPTY_FRONTMATTER: FrontmatterShape = {
 };
 
 const FRONTMATTER_REGEX = /^---[\s\S]*?---/gm;
+const TAG_REGEX = /#[A-Za-z0-9/\-_]+/g;
 
 /**
  * Attempt to parse YAML frontmatter from a mixed yaml/md doc

@@ -49,20 +50,44 @@ export function extractMarkdown(content: string): string {
   return content.replace(FRONTMATTER_REGEX, "").trim();
 }
 
+/**
+ * Combine yaml frontmatter and markdown into a single string
+ * @param markdown The markdown to combine with frontmatter
+ * @param frontmatter The frontmatter shape to combine with markdown
+ * @returns A combined document with yaml frontmatter and markdown below
+ */
 export function combineMarkdownAndFrontmatter(
   markdown: string,
   frontmatter: FrontmatterShape,
 ) {
+  const frontmatterToWrite: Partial<FrontmatterShape> =
+    structuredClone(frontmatter);
+
+  if (frontmatterToWrite.aliases?.length === 0) {
+    delete frontmatterToWrite.aliases;
+  }
+
+  if (frontmatterToWrite.cssclasses?.length === 0) {
+    delete frontmatterToWrite.cssclasses;
+  }
+
+  if (frontmatterToWrite.tags?.length === 0) {
+    delete frontmatterToWrite.tags;
+  }
+
   return `---
-${YAML.stringify(frontmatter)}
+${YAML.stringify(frontmatterToWrite)}
 ---
 
 ${markdown.trim()}
 `;
 }
 
-const TAG_REGEX = /#[A-Za-z0-9/\-_]+/g;
-
+/**
+ * Find hashtags in text content
+ * @param content The content to search for tags in
+ * @returns An array of discovered tags (hash excluded)
+ */
 export function extractTags(content: string): string[] {
   const results = content.match(TAG_REGEX);
 

src/vault.ts (30 changed lines)

@@ -48,7 +48,7 @@ function buildVaultIndex(
   return index;
 }
 
-export class VaultView {
+class VaultView {
   /** An array of all discovered Markdown documents */
   documents: MarkdownDocument[] = [];
 

@@ -72,10 +72,23 @@ export class VaultView {
     this.size = documents.length;
   }
 
-  map(fn: (document: MarkdownDocument) => MarkdownDocument) {
-    this.documents = this.documents.map(fn);
+  /**
+   * Map over each document in this view, modifying it as needed
+   * @param fn A function to map over every document in this view
+   * @returns A reference to this view to chain additional calls
+   */
+  process(fn: (document: MarkdownDocument) => void) {
+    this.documents.forEach(fn);
     return this;
   }
 
+  /**
+   * Utility function to chain function calls back to the original vault
+   * @returns A reference to the vault that produced this view
+   */
+  unscope(): Vault {
+    return this.vault;
+  }
 }
 
 interface VaultOptions {

@@ -114,13 +127,18 @@ export default class Vault {
     this.index = buildVaultIndex(allFiles);
   }
 
-  view(fn: (document: MarkdownDocument) => boolean) {
+  scope(fn: (document: MarkdownDocument) => boolean) {
     const matchingDocs = this.documents.filter(fn);
     return new VaultView(matchingDocs, this);
   }
 
-  map(fn: (document: MarkdownDocument) => MarkdownDocument) {
-    this.documents = this.documents.map(fn);
+  /**
+   * Map over each document in this vault, modifying it as needed
+   * @param fn A function to map over every document in this vault
+   * @returns A reference to this vault to chain additional calls
+   */
+  process(fn: (document: MarkdownDocument) => void) {
+    this.documents.forEach(fn);
     return this;
   }
 