import { default as Node } from "./node";
import { Transformer } from "./transformer";
import { unified } from "unified";
import markdown from "remark-parse";
import { toString } from "mdast-util-to-string";
import path from "path";
import fs from "fs";

const dirTree = require("directory-tree");

class Util {
  _counter;
  _cachedSlugMap;
  _directoryData;

  constructor() {
    this._counter = 0;
    this._cachedSlugMap = this.getSlugHashMap();
  }

  /**
   * @returns {string | null}
   * */
  // eslint-disable-next-line @typescript-eslint/explicit-function-return-type
  getContent(slug) {
    const currentFilePath = this.toFilePath(slug);
    if (currentFilePath === undefined || currentFilePath == null) return null;
    return Node.readFileSync(currentFilePath);
  }
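
  // getShortSummary returns roughly the first 40 space-separated words of a
  // note's plain text (markdown stripped via remark-parse + mdast-util-to-string).
  // Illustrative sketch only; the slug and note content are hypothetical:
  //   util.getShortSummary("_🌎+Home")
  //   // => "Welcome to my digital garden ..." (truncated to ~40 words)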

  getShortSummary(slug) {
    const content = this.getContent(slug);
    if (content === undefined || content === null) {
      return;
    }

    const tree = unified().use(markdown).parse(content);
    const plainText = toString(tree);
    return plainText.split(" ").splice(0, 40).join(" ");
  }

  getAllMarkdownFiles() {
    return Node.getFiles(Node.getMarkdownFolder());
  }
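
  // getSinglePost resolves a slug to its markdown file and returns the rendered
  // HTML keyed by that slug. Shape sketch (values are hypothetical):
  //   util.getSinglePost("_🌎+Home")
  //   // => { id: "_🌎+Home", data: "<h1>...</h1>" }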

  getSinglePost(slug) {
    // List of filenames that will provide existing links to wikilink
    const currentFilePath = this.toFilePath(slug);
    // console.log("currentFilePath: ", currentFilePath)

    const fileContent = Node.readFileSync(currentFilePath);

    // const currentFileFrontMatter = Transformer.getFrontMatterData(fileContent)
    // console.log("===============\n\nFile is scanning: ", slug)
    const [htmlContent] = Transformer.getHtmlContent(fileContent);
    // console.log("==================================")
    // console.log("htmlContents and backlinks")
    return {
      id: slug,
      // ...currentFileFrontMatter,
      data: htmlContent,
    };
  }

  toFilePath(slug) {
    return this._cachedSlugMap[slug];
  }
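
  // The cached slug map is a plain lookup from slug to absolute markdown path,
  // plus two special keys, "index" and "/", that both point at the home note.
  // Example shape (the folder path is hypothetical):
  //   {
  //     "_🌎+Home": "<markdown-folder>/🌎 Home.md",
  //     "index": "<markdown-folder>/🌎 Home.md",
  //     "/": "<markdown-folder>/🌎 Home.md",
  //   }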

  getSlugHashMap() {
    // This solves the problem of converting between slugs and file paths:
    // converting a slug back to a file path did not always resolve to the
    // correct file, because the conversion is neither bi-directional nor
    // conflict-free. Hashing file names was considered as an alternative,
    // but that is not SEO-friendly and makes URLs ugly, so this map is used.

    // Entries are read via plain property access in toFilePath(), so a plain
    // object is used rather than a Map.
    const slugMap = {};
    this.getAllMarkdownFiles().forEach((aFile) => {
      const aSlug = this.toSlug(aFile);
      // if (slugMap.has(aSlug)) {
      //   slugMap[aSlug].push(aFile)
      // } else {
      //   slugMap[aSlug] = [aFile]
      // }
      // Note: [Future improvement] Resolve conflicts between files that map to the same slug
      slugMap[aSlug] = aFile;
    });

    const indexFile = "/🌎 Home.md";
    slugMap.index = Node.getMarkdownFolder() + indexFile;
    slugMap["/"] = Node.getMarkdownFolder() + indexFile;

    return slugMap;
  }
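
  // toSlug turns an absolute markdown path into a URL-safe slug: the markdown
  // folder prefix is dropped, "/" becomes "_", spaces become "+", "&" becomes
  // "-", and the ".md" extension is removed. Example (path is hypothetical):
  //   util.toSlug("<markdown-folder>/Projects/Digital Garden.md")
  //   // => "_Projects_Digital+Garden"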

  toSlug(filePath) {
    const markdownFolder = Node.getMarkdownFolder();
    const isFile = Node.isFile(filePath);
    const isMarkdownFolder = filePath.includes(markdownFolder);

    if (isFile && Boolean(isMarkdownFolder)) {
      return filePath
        .replace(markdownFolder, "")
        .replaceAll("/", "_")
        .replaceAll(" ", "+")
        .replaceAll("&", "-")
        .replace(".md", "");
    } else {
      // TODO handle this properly
      return "/";
    }
  }
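
  // constructGraphData builds (and caches in graph-data.json at the project
  // root) a flat list of note nodes and directed edges between them.
  // Sketch of the cached file, with hypothetical notes:
  //   {
  //     "nodes": [{ "title": "Home", "slug": "_🌎+Home", "shortSummary": "..." }],
  //     "edges": [{ "source": "_🌎+Home", "target": "_Projects_Digital+Garden" }]
  //   }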

  constructGraphData() {
    const filepath = path.join(process.cwd(), "graph-data.json");
    if (Node.isFile(filepath)) {
      const data = fs.readFileSync(filepath);
      return JSON.parse(String(data));
    } else {
      const filePaths = this.getAllMarkdownFiles();
      const edges = [];
      const nodes = [];
      filePaths.forEach((aFilePath) => {
        // const {currentFilePath} = getFileNames(filename)
        const aNode = {
          title: Transformer.parseFileNameFromPath(aFilePath),
          slug: this.toSlug(aFilePath),
          shortSummary: this.getShortSummary(this.toSlug(aFilePath)),
        };
        nodes.push(aNode);

        // console.log("Constructing graph for node: " + aFilePath )
        const internalLinks = Transformer.getInternalLinks(aFilePath);
        internalLinks.forEach((aLink) => {
          if (aLink.slug === null || aLink.slug.length === 0) return;

          const anEdge = {
            source: this.toSlug(aFilePath),
            target: aLink.slug,
          };
          edges.push(anEdge);
          // console.log("Source: " + anEdge.source)
          // console.log("Target: " + anEdge.target)
        });
        // console.log("==============Constructing graph" )
      });
      const data = { nodes, edges };
      fs.writeFileSync(filepath, JSON.stringify(data), "utf-8");
      return data;
    }
  }
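
  // getLocalGraphData reshapes the graph into element objects of the form
  // { data: { ... } } (the shape used by graph libraries such as Cytoscape)
  // and, when currentNodeId matches an existing node, keeps only that node and
  // its direct neighbours. Sketch of one returned node and edge (ids hypothetical):
  //   { data: { id: "_🌎+Home", label: "🌎 Home" } }
  //   { data: { source: "_🌎+Home", target: "_Projects_Digital+Garden" } }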

  getLocalGraphData(currentNodeId) {
    const { nodes, edges } = this.constructGraphData();

    const newNodes = nodes.map((aNode) => ({
      data: {
        id: aNode.slug.toString(),
        label: Transformer.parseFileNameFromPath(this.toFilePath(aNode.slug)),
      },
    }));

    const newEdges = edges.map((anEdge) => ({
      data: {
        source: anEdge.source,
        target: anEdge.target,
      },
    }));

    const existingNodeIDs = newNodes.map((aNode) => aNode.data.id);
    currentNodeId = currentNodeId === "index" ? "__index" : currentNodeId;
    if (
      currentNodeId != null &&
      Boolean(existingNodeIDs.includes(currentNodeId))
    ) {
      const outGoingNodeIds = newEdges
        .filter((anEdge) => anEdge.data.source === currentNodeId)
        .map((anEdge) => anEdge.data.target);

      const incomingNodeIds = newEdges
        .filter((anEdge) => anEdge.data.target === currentNodeId)
        .map((anEdge) => anEdge.data.source);

      outGoingNodeIds.push(currentNodeId);

      // Union of incoming and outgoing neighbour ids, deduplicated
      const localNodeIds = incomingNodeIds.concat(
        outGoingNodeIds.filter((item) => incomingNodeIds.indexOf(item) < 0),
      );
      if (localNodeIds.indexOf(currentNodeId) < 0) {
        localNodeIds.push(currentNodeId);
      }

      const localNodes = newNodes.filter((aNode) =>
        localNodeIds.includes(aNode.data.id),
      );
      let localEdges = newEdges
        .filter((edge) => localNodeIds.includes(edge.data.source))
        .filter((edge) => localNodeIds.includes(edge.data.target));

      // Filter out self-referencing edges
      localEdges = localEdges.filter(
        (edge) => edge.data.source !== edge.data.target,
      );

      // TODO: Find out why target === '/' in some cases
      localEdges = localEdges.filter((edge) => edge.data.target !== "/");
      return {
        nodes: localNodes,
        edges: localEdges,
      };
    } else {
      const filteredEdges = newEdges
        .filter((edge) => existingNodeIDs.includes(edge.data.source))
        .filter((edge) => existingNodeIDs.includes(edge.data.target));

      return {
        nodes: newNodes,
        edges: filteredEdges,
      };
    }
  }
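
  // getAllSlugs returns a slug for every markdown file, skipping paths that end
  // in "index" or "sidebar". Illustrative result (the file set is hypothetical):
  //   ["_🌎+Home", "_Projects_Digital+Garden", ...]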

  getAllSlugs() {
    // console.log("\n\nAll Posts are scanning")
    // Get file names under /posts
    const markdownFolder = Node.getMarkdownFolder();
    const markdownFiles = Node.getFiles(markdownFolder);
    const filePaths = markdownFiles.filter(
      (file) =>
        !(Boolean(file.endsWith("index")) || Boolean(file.endsWith("sidebar"))),
    );
    return filePaths.map((f) => this.toSlug(f));
  }
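
  // directory-tree returns a nested tree of the markdown folder (directories
  // carry a children array); convertObject() below maps each node to the shape
  // used by the note tree UI. Rough sketch, extra fields omitted, names hypothetical:
  //   { name: "markdown", children: [{ name: "🌎 Home.md", ... }] }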

  /** Gets all directories; if already cached, returns the cached data */
  getDirectoryData() {
    if (this._directoryData) return this._directoryData;
    const filteredDirectory = dirTree(Node.getMarkdownFolder(), {
      extensions: /\.md/,
      exclude: [/\.git/, /\.obsidian/],
    });
    const convertedDirectoryData = this.convertObject(filteredDirectory);
    this._directoryData = convertedDirectoryData;
    return this._directoryData;
  }
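
  // convertObject recursively rebuilds the directory tree; each node gets a
  // string id from an incrementing counter and, when a matching slug exists,
  // a routePath under /notes/. Output sketch for one node (values hypothetical):
  //   { name: "Digital Garden.md", children: [], id: "3", routePath: "/notes/_Projects_Digital+Garden" }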

  convertObject(thisObject) {
    const children = [];
    const slugs = this.getAllSlugs();

    // True when the given slug resolves to the same (normalized) file name as this tree node
    function findFunc(_this, slug) {
      const fileName = Transformer.parseFileNameFromPath(
        _this.toFilePath(slug),
      );
      return (
        Transformer.normalizeFileName(fileName) ===
        Transformer.normalizeFileName(thisObject.name)
      );
    }

    const foundSlug = slugs.find((slug) => findFunc(this, slug));
    const blacklist = [null, undefined];

    let routerPath = !blacklist.includes(foundSlug) ? foundSlug : null;

    routerPath = !blacklist.includes(routerPath)
      ? "/notes/" + routerPath
      : null;

    const newObject = {
      name: thisObject.name,
      children,
      id: (this._counter++).toString(),
      routePath: !blacklist.includes(routerPath) ? routerPath : null,
    };

    if (thisObject.children != null && thisObject.children.length > 0) {
      thisObject.children.forEach((aChild) => {
        children.push(this.convertObject(aChild));
      });
    }
    return newObject;
  }
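
  // flat() depth-first flattens a nested { children: [...] } tree into a single
  // array containing every node. Hypothetical sketch:
  //   util.flat([{ name: "a", children: [{ name: "b" }] }])
  //   // => [{ name: "a", children: [...] }, { name: "b" }]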

  flat = (array) => {
    let result = [];
    const outerThis = this;
    // eslint-disable-next-line @typescript-eslint/space-before-function-paren
    array.forEach(function (a) {
      result.push(a);
      if (Array.isArray(a.children)) {
        result = result.concat(outerThis.flat(a.children));
      }
    });
    return result;
  };

  getFlattenArray(thisObject) {
    return this.flat(thisObject.children);
  }
}

const util = new Util();
export default util;