(debugging) nested links not working properly
This commit is contained in:
parent dc0ec48db4
commit b1f55fd898
@@ -1,34 +1,33 @@
-import * as React from 'react';
-import { TreeView, TreeItem } from '@mui/x-tree-view';
-import { useRouter } from 'next/router'
-import { styled } from '@mui/material/styles';
+import * as React from "react";
+import { TreeView, TreeItem } from "@mui/x-tree-view";
+import { useRouter } from "next/router";
+import { styled } from "@mui/material/styles";

 const TCTreeItem = styled(TreeItem)(({ theme }) => ({
-  '& .MuiTreeItem-content': {
-    borderRadius: '10px',
-    '&:hover': {
-      backgroundColor: 'rgba(25, 118, 210, 0.59)'
+  "& .MuiTreeItem-content": {
+    borderRadius: "10px",
+    "&:hover": {
+      backgroundColor: "rgba(25, 118, 210, 0.59)",
     },
-    '& .MuiTreeItem-label': {
-      fontSize: '1rem',
-      paddingLeft: '6px',
-      fontFamily: '-apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica, Arial, sans-serif,',
+    "& .MuiTreeItem-label": {
+      fontSize: "1rem",
+      paddingLeft: "6px",
+      fontFamily:
+        '-apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica, Arial, sans-serif,',
       lineHeight: 2.0,

     },
   },
-}))
-
+}));

 export default function FolderTree(props) {
-  const router = useRouter()
+  const router = useRouter();

-  const memoTree = React.useMemo(() => renderTree(props.tree), [props.tree])
-  const expandedNodes = [props.tree.id]
+  const memoTree = React.useMemo(() => renderTree(props.tree), [props.tree]);
+  const expandedNodes = [props.tree.id];

   return (
     <>
-      <a href="http://localhost:3000" className="">{"<-"} Back To Portfolio</a>
+      <a href="http://localhost:3000">{"<-"} Back To Portfolio</a>

       <TreeView
         aria-label="rich object"
@@ -36,14 +35,16 @@ export default function FolderTree(props) {
         defaultExpanded={expandedNodes}
         defaultExpandIcon={<ChevronRightIcon />}
         onNodeSelect={(event, nodIds) => {
-          const currentNode = props.flattenNodes.find(aNode => {
-            return aNode.id === nodIds
-          })
+          // TODO: already sorted, impliment binary search
+          const currentNode = props.flattenNodes.find((aNode) => {
+            return aNode.id === nodIds;
+          });
+          console.log({ currentNode });
           if (currentNode != null && currentNode.routePath != null) {
-            router.push(currentNode.routePath)
+            router.push(currentNode.routePath);
           }
         }}
-        sx={{ overflowY: 'scroll' }}
+        sx={{ overflowY: "scroll" }}
       >
         {memoTree}
       </TreeView>
@@ -53,26 +54,55 @@ export default function FolderTree(props) {

 function ChevronRightIcon() {
   return (
-    <svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" strokeWidth={1.5} stroke="currentColor" className="w-6 h-6">
-      <path strokeLinecap="round" strokeLinejoin="round" d="m8.25 4.5 7.5 7.5-7.5 7.5" />
+    <svg
+      xmlns="http://www.w3.org/2000/svg"
+      fill="none"
+      viewBox="0 0 24 24"
+      strokeWidth={1.5}
+      stroke="currentColor"
+      height={10}
+      width={10}
+    >
+      <path
+        strokeLinecap="round"
+        strokeLinejoin="round"
+        d="m8.25 4.5 7.5 7.5-7.5 7.5"
+      />
     </svg>
-  )
+  );
 }

 function ChevronDownIcon() {
   return (
-    <svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" strokeWidth={1.5} stroke="currentColor" className="w-6 h-6">
-      <path strokeLinecap="round" strokeLinejoin="round" d="m19.5 8.25-7.5 7.5-7.5-7.5" />
+    <svg
+      xmlns="http://www.w3.org/2000/svg"
+      fill="none"
+      viewBox="0 0 24 24"
+      strokeWidth={1.5}
+      stroke="currentColor"
+      height={10}
+      width={10}
+    >
+      <path
+        strokeLinecap="round"
+        strokeLinejoin="round"
+        d="m19.5 8.25-7.5 7.5-7.5-7.5"
+      />
     </svg>
-  )
+  );
 }

-function renderTree(nodes, layer) {
+function renderTree(nodes, layer = 0) {
   return (
-    <TCTreeItem key={nodes.id} nodeId={nodes.id} label={nodes.name} sx={{ marginLeft: 1 * layer }}>
+    <TCTreeItem
+      key={nodes.id}
+      nodeId={nodes.id}
+      label={nodes.name}
+      sx={{ marginLeft: 1 * layer }}
+    >
       {Array.isArray(nodes.children)
         ? nodes.children.map((node) => renderTree(node, layer + 1))
         : null}
     </TCTreeItem>
-  )
+  );
 }
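The TODO added in onNodeSelect above says props.flattenNodes is already sorted, so the linear find could become a binary search. A minimal sketch under that assumption; the helper name findNodeById is hypothetical and not part of the commit, and the ids are the numeric strings assigned by convertObject in lib/utils.js:

// Hypothetical helper, not part of this commit. Assumes flattenNodes is ordered by
// its numeric string ids ("0", "1", "2", ...) as assigned by convertObject's counter.
function findNodeById(sortedNodes, targetId) {
  const target = Number(targetId);
  let lo = 0;
  let hi = sortedNodes.length - 1;
  while (lo <= hi) {
    const mid = (lo + hi) >> 1;
    const id = Number(sortedNodes[mid].id);
    if (id === target) return sortedNodes[mid];
    if (id < target) lo = mid + 1;
    else hi = mid - 1;
  }
  return undefined;
}

// Inside onNodeSelect it would replace the linear lookup:
// const currentNode = findNodeById(props.flattenNodes, nodIds);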
lib/utils.js (292 changes)
@@ -1,21 +1,21 @@
-import { default as Node } from './node'
-import { Transformer } from './transformer'
-import { unified } from 'unified'
-import markdown from 'remark-parse'
-import { toString } from 'mdast-util-to-string'
-import path from 'path'
-import fs from 'fs'
+import { default as Node } from "./node";
+import { Transformer } from "./transformer";
+import { unified } from "unified";
+import markdown from "remark-parse";
+import { toString } from "mdast-util-to-string";
+import path from "path";
+import fs from "fs";

 const dirTree = require("directory-tree");

 class Util {
-  _counter
-  _cachedSlugMap
-  _directoryData
+  _counter;
+  _cachedSlugMap;
+  _directoryData;

   constructor() {
-    this._counter = 0
-    this._cachedSlugMap = this.getSlugHashMap()
+    this._counter = 0;
+    this._cachedSlugMap = this.getSlugHashMap();
   }

   /**
@@ -23,48 +23,47 @@ class Util {
    * */
   // eslint-disable-next-line @typescript-eslint/explicit-function-return-type
   getContent(slug) {
-    const currentFilePath = this.toFilePath(slug)
-    if (currentFilePath === undefined || currentFilePath == null) return null
-    return Node.readFileSync(currentFilePath)
+    const currentFilePath = this.toFilePath(slug);
+    if (currentFilePath === undefined || currentFilePath == null) return null;
+    return Node.readFileSync(currentFilePath);
   }

   getShortSummary(slug) {
-    const content = this.getContent(slug)
+    const content = this.getContent(slug);
     if (content === undefined || content === null) {
-      return
+      return;
     }

-    const tree = unified().use(markdown)
-      .parse(content)
-    const plainText = toString(tree)
-    return plainText.split(' ').splice(0, 40).join(' ')
+    const tree = unified().use(markdown).parse(content);
+    const plainText = toString(tree);
+    return plainText.split(" ").splice(0, 40).join(" ");
   }

   getAllMarkdownFiles() {
-    return Node.getFiles(Node.getMarkdownFolder())
+    return Node.getFiles(Node.getMarkdownFolder());
   }

   getSinglePost(slug) {
     // List of filenames that will provide existing links to wikilink
-    const currentFilePath = this.toFilePath(slug)
+    const currentFilePath = this.toFilePath(slug);
     // console.log("currentFilePath: ", currentFilePath)

-    const fileContent = Node.readFileSync(currentFilePath)
+    const fileContent = Node.readFileSync(currentFilePath);

     // const currentFileFrontMatter = Transformer.getFrontMatterData(fileContent)
     // console.log("===============\n\nFile is scanning: ", slug)
-    const [htmlContent] = Transformer.getHtmlContent(fileContent)
+    const [htmlContent] = Transformer.getHtmlContent(fileContent);
     // console.log("==================================")
     // console.log("hrmlcontents and backlinks")
     return {
       id: slug,
       // ...currentFileFrontMatter,
-      data: htmlContent
-    }
+      data: htmlContent,
+    };
   }

   toFilePath(slug) {
-    return this._cachedSlugMap[slug]
+    return this._cachedSlugMap[slug];
   }

   getSlugHashMap() {
@@ -74,211 +73,234 @@ class Util {
     // and not conflict-free, other solution was considered (hash file name into a hash, but this
     // is not SEO-friendly and make url look ugly ==> I chose this

-    const slugMap = new Map()
-    this.getAllMarkdownFiles().forEach(aFile => {
-      const aSlug = this.toSlug(aFile)
+    const slugMap = new Map();
+    this.getAllMarkdownFiles().forEach((aFile) => {
+      const aSlug = this.toSlug(aFile);
       // if (slugMap.has(aSlug)) {
       //   slugMap[aSlug].push(aFile)
       // } else {
       //   slugMap[aSlug] = [aFile]
       // }
       // Note: [Future improvement] Resolve conflict
-      slugMap[aSlug] = aFile
-    })
+      slugMap[aSlug] = aFile;
+    });

-    const indexFile = '/🌎 Home.md'
-    slugMap.index = Node.getMarkdownFolder() + indexFile
-    slugMap['/'] = Node.getMarkdownFolder() + indexFile
+    const indexFile = "/🌎 Home.md";
+    slugMap.index = Node.getMarkdownFolder() + indexFile;
+    slugMap["/"] = Node.getMarkdownFolder() + indexFile;

-    return slugMap
+    return slugMap;
   }

   toSlug(filePath) {
-    const markdownFolder = Node.getMarkdownFolder()
-    const isFile = Node.isFile(filePath)
-    const isMarkdownFolder = filePath.includes(markdownFolder)
+    const markdownFolder = Node.getMarkdownFolder();
+    const isFile = Node.isFile(filePath);
+    const isMarkdownFolder = filePath.includes(markdownFolder);

     if (isFile && Boolean(isMarkdownFolder)) {
-      return filePath.replace(markdownFolder, '')
-        .replaceAll('/', '_')
-        .replaceAll(' ', '+')
-        .replaceAll('&', '-')
-        .replace('.md', '')
+      return filePath
+        .replace(markdownFolder, "")
+        .replaceAll("/", "_")
+        .replaceAll(" ", "+")
+        .replaceAll("&", "-")
+        .replace(".md", "");
     } else {
       // TODO handle this properly
-      return '/'
+      return "/";
     }
   }

   constructGraphData() {
-    const filepath = path.join(process.cwd(), 'graph-data.json')
+    const filepath = path.join(process.cwd(), "graph-data.json");
     if (Node.isFile(filepath)) {
-      const data = fs.readFileSync(filepath)
-      return JSON.parse(String(data))
+      const data = fs.readFileSync(filepath);
+      return JSON.parse(String(data));
     } else {
-      const filePaths = this.getAllMarkdownFiles()
-      const edges = []
-      const nodes = []
-      filePaths
-        .forEach(aFilePath => {
+      const filePaths = this.getAllMarkdownFiles();
+      const edges = [];
+      const nodes = [];
+      filePaths.forEach((aFilePath) => {
         // const {currentFilePath} = getFileNames(filename)
         const aNode = {
           title: Transformer.parseFileNameFromPath(aFilePath),
           slug: this.toSlug(aFilePath),
-          shortSummary: this.getShortSummary(this.toSlug(aFilePath))
-        }
-        nodes.push(aNode)
+          shortSummary: this.getShortSummary(this.toSlug(aFilePath)),
+        };
+        nodes.push(aNode);

         // console.log("Constructing graph for node: " + aFilePath )
-        const internalLinks = Transformer.getInternalLinks(aFilePath)
-        internalLinks.forEach(aLink => {
-          if (aLink.slug === null || aLink.slug.length === 0) return
+        const internalLinks = Transformer.getInternalLinks(aFilePath);
+        internalLinks.forEach((aLink) => {
+          if (aLink.slug === null || aLink.slug.length === 0) return;

           const anEdge = {
             source: this.toSlug(aFilePath),
-            target: aLink.slug
-          }
-          edges.push(anEdge)
+            target: aLink.slug,
+          };
+          edges.push(anEdge);
           // console.log("Source: " + anEdge.source)
           // console.log("Target: " + anEdge.target)
-        })
+        });
         // console.log("==============Constructing graph" )
-      }
-      )
-      const data = { nodes, edges }
-      fs.writeFileSync(filepath, JSON.stringify(data), 'utf-8')
-      return data
+      });
+      const data = { nodes, edges };
+      fs.writeFileSync(filepath, JSON.stringify(data), "utf-8");
+      return data;
     }
   }

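constructGraphData above builds one node per markdown file and one edge per internal link, then caches the result to graph-data.json in the project root and reads that file back on later calls. A sketch of the cached shape, with illustrative values only (real titles, slugs, and summaries come from the markdown folder):

{
  "nodes": [
    { "title": "🌎 Home", "slug": "_🌎+Home", "shortSummary": "First forty words of the note..." }
  ],
  "edges": [
    { "source": "_🌎+Home", "target": "_Projects_Portfolio" }
  ]
}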
   getLocalGraphData(currentNodeId) {
-    const { nodes, edges } = constructGraphData()
+    const { nodes, edges } = constructGraphData();

-    const newNodes = nodes.map(aNode => (
-      {
+    const newNodes = nodes.map((aNode) => ({
       data: {
         id: aNode.slug.toString(),
-        label: Transformer.parseFileNameFromPath(this.toFilePath(aNode.slug))
-      }
-    }
-    ))
+        label: Transformer.parseFileNameFromPath(this.toFilePath(aNode.slug)),
+      },
+    }));

-    const newEdges = edges.map(anEdge => ({
+    const newEdges = edges.map((anEdge) => ({
       data: {
         source: anEdge.source,
-        target: anEdge.target
-      }
-    }))
+        target: anEdge.target,
+      },
+    }));

-    const existingNodeIDs = newNodes.map(aNode => aNode.data.id)
-    currentNodeId = currentNodeId === 'index' ? '__index' : currentNodeId
-    if (currentNodeId != null && Boolean(existingNodeIDs.includes(currentNodeId))) {
+    const existingNodeIDs = newNodes.map((aNode) => aNode.data.id);
+    currentNodeId = currentNodeId === "index" ? "__index" : currentNodeId;
+    if (
+      currentNodeId != null &&
+      Boolean(existingNodeIDs.includes(currentNodeId))
+    ) {
       const outGoingNodeIds = newEdges
-        .filter(anEdge => anEdge.data.source === currentNodeId)
-        .map(anEdge => anEdge.data.target)
+        .filter((anEdge) => anEdge.data.source === currentNodeId)
+        .map((anEdge) => anEdge.data.target);

       const incomingNodeIds = newEdges
-        .filter(anEdge => anEdge.data.target === currentNodeId)
-        .map(anEdge => anEdge.data.source)
+        .filter((anEdge) => anEdge.data.target === currentNodeId)
+        .map((anEdge) => anEdge.data.source);

-      outGoingNodeIds.push(currentNodeId)
+      outGoingNodeIds.push(currentNodeId);

-      const localNodeIds = incomingNodeIds.concat(outGoingNodeIds.filter(item => incomingNodeIds.indexOf(item) < 0))
+      const localNodeIds = incomingNodeIds.concat(
+        outGoingNodeIds.filter((item) => incomingNodeIds.indexOf(item) < 0),
+      );
       if (localNodeIds.indexOf(currentNodeId) < 0) {
-        localNodeIds.push(currentNodeId)
+        localNodeIds.push(currentNodeId);
       }

-      const localNodes = newNodes.filter(aNode => localNodeIds.includes(aNode.data.id))
-      let localEdges = newEdges.filter(edge => localNodeIds.includes(edge.data.source)).filter(edge => localNodeIds.includes(edge.data.target))
+      const localNodes = newNodes.filter((aNode) =>
+        localNodeIds.includes(aNode.data.id),
+      );
+      let localEdges = newEdges
+        .filter((edge) => localNodeIds.includes(edge.data.source))
+        .filter((edge) => localNodeIds.includes(edge.data.target));

       // Filter self-reference edges
-      localEdges = localEdges.filter(edge => edge.data.source !== edge.data.target)
+      localEdges = localEdges.filter(
+        (edge) => edge.data.source !== edge.data.target,
+      );

       // TODO: Find out why target ==='/' in some case
-      localEdges = localEdges.filter(edge => edge.data.target !== '/')
+      localEdges = localEdges.filter((edge) => edge.data.target !== "/");
       return {
         nodes: localNodes,
-        edges: localEdges
-      }
+        edges: localEdges,
+      };
     } else {
       const filteredEdges = newEdges
-        .filter(edge => existingNodeIDs.includes(edge.data.source))
-        .filter(edge => existingNodeIDs.includes(edge.data.target))
+        .filter((edge) => existingNodeIDs.includes(edge.data.source))
+        .filter((edge) => existingNodeIDs.includes(edge.data.target));

       return {
         nodes: newNodes,
-        edges: filteredEdges
-      }
+        edges: filteredEdges,
+      };
     }
   }

   getAllSlugs() {
     // console.log("\n\nAll Posts are scanning")
     // Get file names under /posts
-    const markdownFolder = Node.getMarkdownFolder()
-    const markdownFiles = Node.getFiles(markdownFolder)
-    const filePaths = markdownFiles.filter(file => !(Boolean(file.endsWith('index')) || Boolean(file.endsWith('sidebar'))))
-    return filePaths.map(f => this.toSlug(f))
+    const markdownFolder = Node.getMarkdownFolder();
+    const markdownFiles = Node.getFiles(markdownFolder);
+    const filePaths = markdownFiles.filter(
+      (file) =>
+        !(Boolean(file.endsWith("index")) || Boolean(file.endsWith("sidebar"))),
+    );
+    return filePaths.map((f) => this.toSlug(f));
   }

   /** Gets all directories - if cached already, gets cached */
   getDirectoryData() {
-    if (this._directoryData) return this._directoryData
-    const filteredDirectory = dirTree(Node.getMarkdownFolder(), { extensions: /\.md/, exclude: [/\.git/, /\.obsidian/] })
-    const convertedDirectoryData = this.convertObject(filteredDirectory)
-    this._directoryData = convertedDirectoryData
-    return this._directoryData
+    if (this._directoryData) return this._directoryData;
+    const filteredDirectory = dirTree(Node.getMarkdownFolder(), {
+      extensions: /\.md/,
+      exclude: [/\.git/, /\.obsidian/],
+    });
+    const convertedDirectoryData = this.convertObject(filteredDirectory);
+    this._directoryData = convertedDirectoryData;
+    return this._directoryData;
   }

   convertObject(thisObject) {
-    const children = []
-    const slugs = this.getAllSlugs()
+    const children = [];
+    const slugs = this.getAllSlugs();

     function findFunc(_this, slug) {
-      const fileName = Transformer.parseFileNameFromPath(_this.toFilePath(slug))
-      return Transformer.normalizeFileName(fileName) === Transformer.normalizeFileName(thisObject.name)
+      const fileName = Transformer.parseFileNameFromPath(
+        _this.toFilePath(slug),
+      );
+      return (
+        Transformer.normalizeFileName(fileName) ===
+        Transformer.normalizeFileName(thisObject.name)
+      );
     }

-    const foundSlugs = slugs.find(slug => findFunc(this, slug))
+    const foundSlugs = slugs.find((slug) => findFunc(this, slug));
+    const blacklist = [null, undefined];

-    let routerPath = foundSlugs !== (null | undefined) ? foundSlugs : null
+    let routerPath = !blacklist.includes(foundSlugs) ? foundSlugs : null;

+    routerPath = !blacklist.includes(routerPath)
+      ? "/notes/" + routerPath
+      : null;

-    routerPath = routerPath !== (null | undefined) ? '/notes/' + routerPath : null
     const newObject = {
       name: thisObject.name,
       children,
       id: (this._counter++).toString(),
-      routePath: routerPath !== (null | undefined) ? routerPath : null
-    }
+      routePath: !blacklist.includes(routerPath) ? routerPath : null,
+    };

     if (thisObject.children != null && thisObject.children.length > 0) {
-      thisObject.children.forEach(aChild => {
-        const newChild = this.convertObject(aChild)
-        children.push(newChild)
-      })
-      return newObject
+      thisObject.children.forEach((aChild) => {
+        const newChild = this.convertObject(aChild);
+        children.push(newChild);
+      });
+      return newObject;
     } else {
-      return newObject
+      return newObject;
     }
   }

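The routerPath rewrite in convertObject above is the substance of this debugging commit. In JavaScript, (null | undefined) is a bitwise OR and evaluates to 0, so the old comparisons tested against 0 and passed even when slugs.find returned undefined; a folder node with no matching markdown file would then get a routePath such as "/notes/undefined" instead of null. The new blacklist check catches both null and undefined. A small illustration, not part of the commit:

// Old check: (null | undefined) is 0, so a missing slug still passed the test.
null | undefined;                       // 0
undefined !== (null | undefined);       // true, so routerPath became "/notes/" + undefined

// New check: the blacklist catches the missing slug and keeps routePath null.
[null, undefined].includes(undefined);  // true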
   flat = (array) => {
-    let result = []
-    const outerThis = this
+    let result = [];
+    const outerThis = this;
     // eslint-disable-next-line @typescript-eslint/space-before-function-paren
-    array.forEach(function(a) {
-      result.push(a)
+    array.forEach(function (a) {
+      result.push(a);
       if (Array.isArray(a.children)) {
-        result = result.concat(outerThis.flat(a.children))
-      }
-    })
-    return result
+        result = result.concat(outerThis.flat(a.children));
+      }
+    });
+    return result;
+  };

   getFlattenArray(thisObject) {
-    return this.flat(thisObject.children)
+    return this.flat(thisObject.children);
   }
 }

-const util = new Util()
-export default util
+const util = new Util();
+export default util;

@@ -28,25 +28,25 @@ export function getStaticProps() {
   const tree = Util.convertObject(Util.getDirectoryData())
   const contentData = Util.getSinglePost('index')
   const flattenNodes = Util.getFlattenArray(tree)
-  const listOfEdges = edges
-    .filter((anEdge) => (
-      anEdge as { target: string }).target === 'index'
-    )
-  const internalLinks = listOfEdges.map(
-    anEdge => nodes
-      .find(
-        aNode => (
-          aNode as { slug: string }).slug === (anEdge as { source: string }).source))
-    .filter(
-      element => element !== undefined)
-  const backLinks = [...new Set(internalLinks)]
+  // const listOfEdges = edges
+  //   .filter((anEdge) => (
+  //     anEdge as { target: string }).target === 'index'
+  //   )
+  // const internalLinks = listOfEdges.map(
+  //   anEdge => nodes
+  //     .find(
+  //       aNode => (
+  //         aNode as { slug: string }).slug === (anEdge as { source: string }).source))
+  //   .filter(
+  //     element => element !== undefined)
+  //const backLinks = [...new Set(internalLinks)]

   return {
     props: {
       content: contentData.data,
       tree,
       flattenNodes,
-      backLinks,
+      // backLinks,
       body_class_name: 'm-0 p-0'
     }
   }
