Fix circular reference in json
parent d745577d6a
commit e089616deb
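
Context for the change: the patch below gives each document a back-reference to its vault while the vault already holds its documents, so vault → documents → vault forms a cycle, and JSON.stringify throws on cyclic structures unless serialization is customized. A minimal reproduction of the failure mode, using hypothetical names (Parent and Child are not from this repository):

interface Parent {
  children: Child[];
}

interface Child {
  parent: Parent;
}

// Build a two-object cycle: the parent lists the child,
// and the child points back at the parent.
const parent: Parent = { children: [] };
parent.children.push({ parent });

// Throws: TypeError: Converting circular structure to JSON
JSON.stringify(parent);

The toJSON() method added in document.ts breaks exactly this kind of cycle by serializing every document field except the vault reference.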
package.json

@@ -1,6 +1,6 @@
 {
   "name": "@endeavorance/hammerstone",
-  "version": "0.0.1",
+  "version": "0.0.4",
   "description": "Load and manipulate Obsidian vault data",
   "type": "module",
   "exports": "./bin/hammerstone.js",
@@ -26,6 +26,9 @@
     "shx": "^0.3.4",
     "typescript": "^5.4.2"
   },
+  "files": [
+    "bin"
+  ],
   "prettier": {
     "bracketSpacing": true,
     "trailingComma": "all"
document.ts

@@ -2,6 +2,7 @@ import { existsSync, readFileSync, writeFileSync } from "fs";
 import path from "node:path";
 import slug from "slug";
 import YAML from "yaml";
+import type Vault from "./vault.js";

 export type FrontmatterShape = Record<string, string | string[]>;

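A note on the new import, as I read it (the commit message does not spell this out): vault.ts already imports document.js, so a plain value import of Vault here would create a runtime import cycle between the two modules. import type is erased from the emitted JavaScript, so only the type checker sees the dependency:

// Erased at compile time: document.js never loads vault.js at runtime,
// even though the two files now reference each other.
import type Vault from "./vault.js";

// A value import, by contrast, would complete the runtime cycle:
// import Vault from "./vault.js";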
@@ -112,6 +113,9 @@ ${markdown.trim()}
 }

 export default class MarkdownDocument {
+  /** A reference to the vault containing this document */
+  readonly vault: Vault;
+
   /** The original file path to this document */
   readonly path: string;

@@ -144,13 +148,14 @@ export default class MarkdownDocument {
   /** The markdown portion of the file (without frontmatter) */
   private _markdown: string;

-  constructor(filePath: string, vaultPath: string) {
+  constructor(filePath: string, vault: Vault) {
     if (!existsSync(filePath)) {
       throw new Error(`File not found: ${filePath}`);
     }

     const rawFileContent = readFileSync(filePath, "utf-8");
     const fileDirname = path.dirname(filePath);
+    const vaultPath = vault.vaultPath;

     this.path = filePath;
     this.dirname = path.relative(vaultPath, fileDirname);
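Call sites now hand the document its owning Vault instead of a bare path, and the constructor derives the path it needs from vault.vaultPath. An illustrative call (the file path is made up):

// The document receives the Vault instance itself:
const doc = new MarkdownDocument("notes/ideas.md", vault);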
@@ -162,6 +167,7 @@ export default class MarkdownDocument {
     this.contentHistory = [rawFileContent];
     this._frontmatter = processDocumentFrontmatter(rawFileContent);
     this._markdown = getDocumentMarkdown(rawFileContent);
+    this.vault = vault;
   }

   containsBlock(blockName: string): boolean {
@@ -176,6 +182,18 @@ export default class MarkdownDocument {
     return writeDocumentBlock(this, blockName, newContent);
   }

+  toJSON() {
+    return {
+      path: this.path,
+      dirname: this.dirname,
+      taxonomy: this.taxonomy,
+      filename: this.filename,
+      slug: this.slug,
+      frontmatter: this._frontmatter,
+      markdown: this._markdown,
+    };
+  }
+
   set markdown(newValue: string) {
     this._markdown = newValue;
     this._content = combineMarkdownAndFrontmatter(newValue, this.frontmatter);
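JSON.stringify consults a toJSON() method automatically whenever one is present, so the method above is the whole fix and no call sites change. A standalone sketch of that contract (not from the patch):

const doc = {
  slug: "welcome",
  vault: "(back-reference, omitted from output)",
  toJSON() {
    return { slug: this.slug }; // only what toJSON returns is serialized
  },
};

console.log(JSON.stringify(doc)); // {"slug":"welcome"}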
vault.ts

@@ -8,17 +8,17 @@ import MarkdownDocument from "./document.js";
  * @returns A promise which resolves as an array of loaded Documents
  */
 function loadVaultDocuments(
-  vaultPath: string,
+  vault: Vault,
   ignorePatterns: string[] = [],
 ): MarkdownDocument[] {
-  const discoveredMarkdownDocuments = globSync(`${vaultPath}/**/*.md`, {
+  const discoveredMarkdownDocuments = globSync(`${vault.vaultPath}/**/*.md`, {
     ignore: ignorePatterns,
   });

   const markdownDocuments: MarkdownDocument[] = [];

   for (const filePath of discoveredMarkdownDocuments) {
-    const file = new MarkdownDocument(filePath, vaultPath);
+    const file = new MarkdownDocument(filePath, vault);

     if (markdownDocuments.some((check) => check.slug === file.slug)) {
       throw new Error("Duplicate slug: " + file.slug);
@@ -65,7 +65,7 @@ export default class Vault {
     this.vaultPath = vaultRootPath;
     this.ignorePatterns = options?.ignorePatterns ?? [];

-    const allFiles = loadVaultDocuments(this.vaultPath, this.ignorePatterns);
+    const allFiles = loadVaultDocuments(this, this.ignorePatterns);
     const allSlugs = allFiles.map((doc) => doc.slug);

     this.documents = allFiles;
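Taken together: the vault owns its documents and each document points back at its vault, but serializing the whole graph now works because JSON.stringify routes every document through toJSON(), which omits the back-reference. A quick end-to-end check, assuming a vault directory at ./my-vault (the path and ignore pattern are illustrative):

import Vault from "./vault.js";

const vault = new Vault("./my-vault", { ignorePatterns: ["**/drafts/**"] });

// Before this commit: TypeError: Converting circular structure to JSON.
// After: each document serializes via toJSON(), so the output is acyclic.
console.log(JSON.stringify(vault, null, 2));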