fix a few utils bugs
- Import `unified` as a named import instead of a default import. - Introduce a variable for the index file so the real entry note ("🌎 Home.md") is used. - Add local variables to make the code easier to read. - Clean up the file/directory slug replacement characters (the old "++++" for spaces was ridiculous). - Assorted formatting changes.
This commit is contained in:
parent
44c5ca07b2
commit
c31428d8e4
384
lib/utils.js
384
lib/utils.js
@ -1,8 +1,8 @@
|
|||||||
import {Node} from "./node"
|
import { Node } from "./node"
|
||||||
import {Transformer} from "./transformer";
|
import { Transformer } from "./transformer";
|
||||||
import unified from "unified";
|
import { unified } from "unified";
|
||||||
import markdown from "remark-parse";
|
import markdown from "remark-parse";
|
||||||
import {toString} from 'mdast-util-to-string'
|
import { toString } from 'mdast-util-to-string'
|
||||||
import path from "path";
|
import path from "path";
|
||||||
import fs from "fs";
|
import fs from "fs";
|
||||||
|
|
||||||
@ -10,94 +10,98 @@ const dirTree = require("directory-tree");
|
|||||||
|
|
||||||
|
|
||||||
/**
 * Read the raw markdown content for the note identified by `slug`.
 * @param {string} slug - URL slug produced by `toSlug`.
 * @returns {string|null} File contents, or null when the slug is unknown.
 */
export function getContent(slug) {
    const currentFilePath = toFilePath(slug)
    // `== null` matches both null and undefined in one check.
    if (currentFilePath == null) return null
    return Node.readFileSync(currentFilePath)
}
||||||
|
|
||||||
/**
 * Build a short (~40 word) plain-text preview of a note, e.g. for
 * graph-node tooltips.
 * @param {string} slug - URL slug of the note.
 * @returns {string|undefined} Preview text, or undefined when the note
 *   has no content.
 */
export function getShortSummary(slug) {
    const content = getContent(slug)
    if (content == null) {
        return
    }

    // Parse the markdown and strip all formatting down to plain text.
    const tree = unified().use(markdown).parse(content)
    const plainText = toString(tree)
    // slice, not splice: same first-40-words result without mutating the array.
    return plainText.split(" ").slice(0, 40).join(" ")
}
|
||||||
|
|
||||||
|
|
||||||
/**
 * Collect every markdown file path under the content folder.
 * @returns {string[]} Absolute file paths.
 */
export function getAllMarkdownFiles() {
    const markdownFolder = Node.getMarkdownFolder()
    return Node.getFiles(markdownFolder)
}
|
||||||
|
|
||||||
/**
 * Load a single note by slug and transform its markdown into HTML.
 * @param {string} slug - URL slug of the note.
 * @returns {{id: string, data: string}} Note id plus rendered HTML content.
 */
export function getSinglePost(slug) {
    // List of filenames that will provide existing links to wikilink
    const currentFilePath = toFilePath(slug)

    const fileContent = Node.readFileSync(currentFilePath)

    // Front matter is parsed but currently unused — kept for the
    // commented-out spread in the return value below.
    const currentFileFrontMatter = Transformer.getFrontMatterData(fileContent)
    const [htmlContent] = Transformer.getHtmlContent(fileContent)

    return {
        id: slug,
        // ...currentFileFrontMatter,
        data: htmlContent,
    }
}
|
||||||
|
|
||||||
// Slug -> absolute file path lookup, built once at module load.
const cachedSlugMap = getSlugHashMap()

/**
 * Resolve a slug back to the markdown file it was generated from.
 * @param {string} slug - URL slug.
 * @returns {string|undefined} Absolute file path, if known.
 */
export function toFilePath(slug) {
    return cachedSlugMap[slug]
}
|
||||||
|
|
||||||
/**
 * Build the slug -> file path lookup table.
 *
 * This solves the problem of converting between slug and file path:
 * previously, converting a slug to a file path did not always resolve to
 * the correct file — the converting function is neither bi-directional
 * nor conflict-free. Hashing file names was considered but rejected as
 * not SEO-friendly (ugly URLs), so an explicit map is used instead.
 *
 * @returns {Object<string, string>} Map from slug to absolute file path.
 */
export function getSlugHashMap() {
    // A plain object: entries are only ever read/written via property
    // access (the original `new Map()` was never used through the Map API).
    const slugMap = {}

    // Note: [Future improvement] Resolve conflicts when two files
    // produce the same slug — currently last one wins.
    getAllMarkdownFiles().forEach(aFile => {
        slugMap[toSlug(aFile)] = aFile
    })

    // The site entry point: both 'index' and '/' resolve to the home note.
    const indexFile = "/🌎 Home.md"
    slugMap['index'] = Node.getMarkdownFolder() + indexFile
    slugMap['/'] = Node.getMarkdownFolder() + indexFile

    return slugMap
}
|
||||||
|
|
||||||
|
|
||||||
/**
 * Convert an absolute markdown file path into a URL slug,
 * e.g. "<folder>/topic/My Note.md" -> "_topic_My%Note".
 * @param {string} filePath - Absolute path of a markdown file.
 * @returns {string} The slug, or '/' for paths outside the content folder.
 */
export function toSlug(filePath) {
    const markdownFolder = Node.getMarkdownFolder()
    const isFile = Node.isFile(filePath)
    const isInMarkdownFolder = filePath.includes(markdownFolder)

    if (isFile && isInMarkdownFolder) {
        return filePath.replace(markdownFolder, '')
            .replaceAll('/', '_')
            .replaceAll(' ', '%')
            .replaceAll('&', '+')
            // Anchored to the end so only the extension is stripped,
            // not a ".md" occurring earlier in the file name.
            .replace(/\.md$/, '')
    } else {
        //TODO handle non-markdown / out-of-folder paths properly
        return '/'
    }
}
|
||||||
|
|
||||||
@ -105,164 +109,166 @@ export function toSlug(filePath) {
|
|||||||
|
|
||||||
/**
 * Build the full note graph — one node per markdown file, one edge per
 * resolvable internal wikilink — caching the result in graph-data.json.
 * @returns {{nodes: Array, edges: Array}} The graph data.
 */
export function constructGraphData() {
    const filepath = path.join(process.cwd(), "graph-data.json");
    if (Node.isFile(filepath)) {
        // Serve the cached graph when the file already exists.
        const data = fs.readFileSync(filepath);
        return JSON.parse(String(data))
    }

    const edges = []
    const nodes = []
    getAllMarkdownFiles().forEach(aFilePath => {
        // Hoisted: toSlug was previously recomputed three times per file.
        const aSlug = toSlug(aFilePath)
        nodes.push({
            title: Transformer.parseFileNameFromPath(aFilePath),
            slug: aSlug,
            shortSummary: getShortSummary(aSlug)
        })

        Transformer.getInternalLinks(aFilePath).forEach(aLink => {
            // Skip wikilinks that could not be resolved to a slug.
            if (aLink.slug === null || aLink.slug.length === 0) return
            edges.push({
                source: aSlug,
                target: aLink.slug,
            })
        })
    })

    const data = { nodes, edges };
    fs.writeFileSync(filepath, JSON.stringify(data), "utf-8");
    return data;
}
|
||||||
|
|
||||||
|
|
||||||
/**
 * Produce the subgraph around `currentNodeId` for the local graph view:
 * the node itself plus its direct in/out neighbours. When the node is
 * unknown (or null), the whole graph is returned with dangling edges
 * removed.
 * @param {string|null} currentNodeId - Slug of the focused node.
 * @returns {{nodes: Array, edges: Array}} Cytoscape-style graph elements.
 */
export function getLocalGraphData(currentNodeId) {
    const { nodes, edges } = constructGraphData()

    // Reshape into the { data: ... } element form the graph component expects.
    const newNodes = nodes.map(aNode => ({
        data: {
            id: aNode.slug.toString(),
            label: Transformer.parseFileNameFromPath(toFilePath(aNode.slug)),
        }
    }))

    const newEdges = edges.map(anEdge => ({
        data: {
            source: anEdge.source,
            target: anEdge.target,
        }
    }))

    const existingNodeIDs = newNodes.map(aNode => aNode.data.id)
    // NOTE(review): '__index' matches the slug produced by the old
    // '/' -> '__' replacement; toSlug now emits a single '_', so this
    // remapping looks stale — confirm against the current slug map.
    currentNodeId = currentNodeId === 'index' ? '__index' : currentNodeId

    if (currentNodeId != null && existingNodeIDs.includes(currentNodeId)) {
        const outGoingNodeIds = newEdges
            .filter(anEdge => anEdge.data.source === currentNodeId)
            .map(anEdge => anEdge.data.target)

        const incomingNodeIds = newEdges
            .filter(anEdge => anEdge.data.target === currentNodeId)
            .map(anEdge => anEdge.data.source)

        outGoingNodeIds.push(currentNodeId)

        // Union of neighbours, preserving incoming-first order.
        const localNodeIds = incomingNodeIds.concat(
            outGoingNodeIds.filter(item => !incomingNodeIds.includes(item))
        )
        if (!localNodeIds.includes(currentNodeId)) {
            localNodeIds.push(currentNodeId)
        }

        const localNodes = newNodes.filter(aNode => localNodeIds.includes(aNode.data.id))
        let localEdges = newEdges
            .filter(edge => localNodeIds.includes(edge.data.source))
            .filter(edge => localNodeIds.includes(edge.data.target));

        // Filter self-reference edges
        localEdges = localEdges.filter(edge => edge.data.source !== edge.data.target)

        // TODO: Find out why target === '/' in some case
        localEdges = localEdges.filter(edge => edge.data.target !== '/')

        return {
            nodes: localNodes,
            edges: localEdges
        }
    } else {
        // Unknown/absent node: show the whole graph, keeping only edges
        // whose both endpoints exist.
        const filteredEdges = newEdges
            .filter(edge => existingNodeIDs.includes(edge.data.source))
            .filter(edge => existingNodeIDs.includes(edge.data.target))

        return {
            nodes: newNodes,
            edges: filteredEdges
        }
    }
}
|
||||||
|
|
||||||
/**
 * All slugs for renderable notes; index and sidebar files are excluded.
 * @returns {string[]} Slugs for every content markdown file.
 */
export function getAllSlugs() {
    // Get file names under /posts
    const markdownFiles = Node.getFiles(Node.getMarkdownFolder())
    return markdownFiles
        .filter(file => !(file.endsWith("index") || file.endsWith("sidebar")))
        .map(file => toSlug(file))
}
||||||
|
|
||||||
/**
 * Build the sidebar directory tree of markdown files, skipping VCS and
 * Obsidian metadata folders.
 * @returns {Object} Tree of { name, children, id, routePath } nodes.
 */
export function getDirectoryData() {
    const filteredDirectory = dirTree(Node.getMarkdownFolder(), {
        // Anchored so only real ".md" files match (not e.g. ".mdx").
        extensions: /\.md$/,
        exclude: [/\.git/, /\.obsidian/],
    });
    return convertObject(filteredDirectory)
}
||||||
|
|
||||||
// Monotonic id source for directory-tree nodes.
let _counter = 0;

/**
 * Recursively convert a directory-tree entry into the shape the sidebar
 * component consumes.
 * @param {Object} thisObject - A node from `directory-tree` ({ name, children? }).
 * @returns {{name: string, children: Array, id: string, routePath: string|null}}
 */
export function convertObject(thisObject) {
    const children = []

    // Find the slug whose file name matches this entry, if any.
    // NOTE(review): getAllSlugs() re-scans the folder for every node —
    // consider computing it once per tree conversion.
    const matchedSlug = getAllSlugs().find(slug => {
        const fileName = Transformer.parseFileNameFromPath(toFilePath(slug))
        return Transformer.normalizeFileName(fileName) === Transformer.normalizeFileName(thisObject.name)
    }) || null
    const routerPath = matchedSlug ? '/note/' + matchedSlug : null

    const newObject = {
        name: thisObject.name,
        children: children,
        id: (_counter++).toString(),
        routePath: routerPath
    };

    // Recurse into subdirectories; leaves simply keep an empty children list.
    if (thisObject.children != null && thisObject.children.length > 0) {
        thisObject.children.forEach(aChild => {
            children.push(convertObject(aChild))
        })
    }
    return newObject;
}
||||||
|
|
||||||
/**
 * Depth-first (pre-order) flatten of a tree list: each item, then its
 * descendants via the `children` property.
 * @param {Array<Object>} array - Nodes, each optionally with a `children` array.
 * @returns {Array<Object>} All nodes in a single flat list.
 */
function flat(array) {
    return array.flatMap((item) =>
        Array.isArray(item.children) ? [item, ...flat(item.children)] : [item]
    );
}
||||||
|
|
||||||
/**
 * Flatten a directory tree (excluding the root itself) into one list.
 * @param {Object} thisObject - Root node with a `children` array.
 * @returns {Array<Object>} All descendant nodes, depth-first.
 */
export function getFlattenArray(thisObject) {
    const { children } = thisObject
    return flat(children)
}
|
Loading…
Reference in New Issue
Block a user