From e71bbc741e9857d65f1658fa791c0815beaa6cc9 Mon Sep 17 00:00:00 2001 From: Can <> Date: Mon, 30 Nov 2020 14:29:34 +0300 Subject: [PATCH] ... --- components/graph.js | 172 +++++++++ components/layout.js | 2 +- lib/node.js | 3 +- lib/post.js | 198 +++++++---- lib/remark.js | 58 --- lib/transformer.js | 143 ++++++++ package.json | 10 +- pages/_app.js | 3 +- pages/_document.js | 55 +-- pages/index.js | 37 +- pages/note/[id].js | 65 +++- posts/.obsidian/workspace | 8 +- posts/Code/CSS/Frost-Effect.md | 2 + posts/Code/Codesheet.md | 5 +- posts/Code/HTML/https-force-meta-tag.md | 1 + posts/Code/Svelte/External Script Loader.md | 2 +- posts/articles-eng/Collaborative Filtering.md | 1 - posts/articles-eng/Confluence Installation.md | 2 + posts/articles-tr/En İyi Blog Siteleri.md | 16 +- posts/articles-tr/Zettelkasten Metodu.md | 6 +- posts/sidebar.md | 6 +- settings.json | 3 + styles/webflow-layout.css | 79 ++++- yarn.lock | 329 +++++++++++++++++- 24 files changed, 1000 insertions(+), 206 deletions(-) create mode 100644 components/graph.js delete mode 100644 lib/remark.js create mode 100644 lib/transformer.js create mode 100644 posts/Code/HTML/https-force-meta-tag.md create mode 100644 settings.json diff --git a/components/graph.js b/components/graph.js new file mode 100644 index 0000000..249ec00 --- /dev/null +++ b/components/graph.js @@ -0,0 +1,172 @@ +import Cytoscape from "cytoscape"; +var nodeHtmlLabel = require('cytoscape-node-html-label'); + + +const Graph = ({ el, graphdata, current }) => { + nodeHtmlLabel( Cytoscape ); + + var cy = Cytoscape({ + container:el, + elements:graphdata, + style:[{ + selector: "node", + style:{ + "background-color" : el => el.data("id") === current ? '#5221c4' : "#666", + "font-size": "10px", + "width": "20px", + "height": "20px" + //"label": el => el.data("id") === current ? "" : el.data('title') ? el.data("title").slice(0,16) : el.data("id") + } + },{ + selector: "label", + style: {"font-size": "12px"}, + }, + { + selector: 'edge', + style: { + 'width': 2, + "height":200, + 'line-color': '#5221c4', + 'target-arrow-color': '#ccc', + 'target-arrow-shape': 'triangle', + 'curve-style': 'bezier' + } + }], + layout: { + name: 'circle', + + fit: true, // whether to fit the viewport to the graph + padding: 32, // the padding on fit + boundingBox: undefined, // constrain layout bounds; { x1, y1, x2, y2 } or { x1, y1, w, h } + avoidOverlap: true, // prevents node overlap, may overflow boundingBox and radius if not enough space + nodeDimensionsIncludeLabels: false, // Excludes the label when calculating node bounding boxes for the layout algorithm + spacingFactor: 0.9, // Applies a multiplicative factor (>0) to expand or compress the overall area that the nodes take up + radius: 180, // the radius of the circle + startAngle: -2 / 4 * Math.PI, // where nodes start in radians + //sweep: undefined, // how many radians should be between the first and last node (defaults to full circle) + clockwise: true, // whether the layout should go clockwise (true) or counterclockwise/anticlockwise (false) + sort: undefined, // a sorting function to order the nodes; e.g. function(a, b){ return a.data('weight') - b.data('weight') } + animate: false, // whether to transition the node positions + animationDuration: 500, // duration of animation in ms if enabled + animationEasing: undefined, // easing of animation if enabled + //animateFilter: function ( node, i ){ return true; }, // a function that determines whether the node should be animated. 
All nodes animated by default on animate enabled. Non-animated nodes are positioned immediately when the layout starts + ready: undefined, // callback on layoutready + stop: undefined, // callback on layoutstop + transform: function (node, position ){ return position; } // transform a given node position. Useful for changing flow direction in discrete layouts + + + + }, + zoom: 10, + hideEdgesOnViewport:false, + wheelSensitivity:0.2, + }) + + cy.nodeHtmlLabel( [{ + query: "node", + halign:"top", + valign:"center", + cssClass: 'label', + tpl: el => { + //el.data("id") === current ? "" : el.data('title') ? el.data("title").slice(0,16) : el.data("id") + const label = el.id === current ? "" : el.title ? el.title : el.id + return `
${label}
` + }}], + { + enablePointerEvents: true + } + ) + return cy +} + + +export const Network = ({ el, graphdata, current, router, allNodes }) => { + var jsnx = require('jsnetworkx'); + + + + var currentnode = graphdata.filter(g => g.data.id === current)[0] + currentnode = [currentnode.data.id, { href:current==="index" ? "/" : `/note/${currentnode.data.id}` }]; + + var othernodes, edges; + if (allNodes){ + othernodes = graphdata.filter(g => g.data.id !== current) + othernodes = othernodes.map(on => [on.data.id ,{ + title:on.data.title ? on.data.title : on.data.id, + href: current === "index" ? "/" : `/note/${on.data.id}` + } + ]) + edges = graphdata.filter(g => g.data.source) + edges = edges.map(e => [e.data.source, e.data.target]) + } + else { + var indexnode = graphdata.filter(g => g.data.id === "index")[0] + indexnode = ["Home", { + width:30, + height:30, + weight:10, + href:`/`, + title: "Home", + fill:"blueviolet", + + }] + + var currentRawEdges = graphdata.filter(g => g.data.source === current) + edges = currentRawEdges.map(ce => [ce.data.source, ce.data.target, {weight:5 } ]) + + var currentTargetNames = currentRawEdges.map(ie => ie.data.target) + var currentTargets = graphdata.filter(g => currentTargetNames.includes(g.data.id)) + othernodes = currentTargets.map(ct => [ct.data.id, {size:6, href:`/note/${ct.data.id}`}]) + othernodes = [indexnode, ...othernodes] + } + + + + var G = new jsnx.completeGraph(); + G.addNodesFrom( + [ + currentnode, + ...othernodes, + ], + {color: 'black', width:60, height:60} + ); + G.addEdgesFrom(edges); + + jsnx.draw(G, { + element: el, + withLabels: true, + labelStyle:{ + color:"#ffffff" + }, + labelAttr:{ + class: "node-label", + y:16, + click:function(l){ + this.addEventListener("click", function(){ + router.push(l.data.href) + }) + } + }, + layoutAttr:{ + linkDistance:260, + }, + nodeStyle: { + fill:"black" + }, + nodeAttr:{ + + click:function(l){ + //console.log("lll",this, "\n", l); + this.addEventListener("click", function(){ + router.push(l.data.href) + }) + } + }, + edgeStyle:{ + height:120 + } + }, true); + return G +} + +export default Graph; \ No newline at end of file diff --git a/components/layout.js b/components/layout.js index 1adf12f..400f1db 100644 --- a/components/layout.js +++ b/components/layout.js @@ -9,7 +9,7 @@ export default function Layout({ children, home }) { const [isOpen, setIsOpen] = useState(null) const toggle = () => setIsOpen(!isOpen) - const sidebarposition = isOpen ? "0px" : "-250px" + const sidebarposition = isOpen ? 
"0px" : "-350px" //console.log("effect: ", isOpen, sidebarposition) useEffect(()=>{ diff --git a/lib/node.js b/lib/node.js index dcf6b28..d951cdb 100644 --- a/lib/node.js +++ b/lib/node.js @@ -27,5 +27,6 @@ export const Node = { }, readFileSync:function(fullPath){ return fs.readFileSync(fullPath, "utf8") - } + }, + } \ No newline at end of file diff --git a/lib/post.js b/lib/post.js index d9cc200..c523a51 100644 --- a/lib/post.js +++ b/lib/post.js @@ -2,7 +2,7 @@ import path from 'path' import matter, { test } from 'gray-matter' import fs from "fs" import { Node } from "./node" -import { Remark } from "./remark"; +import { Transformer } from "./transformer"; import BiMap from "bimap"; @@ -10,110 +10,170 @@ import BiMap from "bimap"; const postsDirectory = path.join(process.cwd(), 'posts') -export function getSinglePost(filename, permalink) { +export function getSinglePost(filename) { + console.log("\n\nFile is scanning: ", filename) // Check if sidebar or not var filePaths = Node.getFiles(postsDirectory).filter(fn => fn.endsWith(".md")) - console.log("permalink", filename, filePaths) + + // IF filename is not sidebar.md THEN Exclude sidebar.md from file list filePaths = filename === "sidebar.md" ? filePaths : filePaths.filter(f => !f.endsWith("sidebar.md")) - const fileNames = filePaths.map(f => f.split("/")[f.split("/").length - 1].replace(".md", "")) - const filesFrontMatterData = filePaths.map(fp => Remark.getFrontMatterData(fp)) + // List of filenames that will provide existing links to wikilink + const fileNames = filePaths.map(f => Transformer.parseFileNameFromPath(f)) + + //console.log("\tDirectory is scanning to find corresponding filename") + const currentFilePath = Transformer.pairCurrentFile(filename, filePaths) + //console.log("\tScan is finished. 
Founded filepath", currentFilePath, "\n") + - const currentFile = filePaths.filter(f => { - var testFileName = f.split("/")[f.split("/").length - 1].replace(".md", "") - //testFileName = testFileName.replace("Ç","c").replace("ç","c").replace("ı","i").replace("ş","s") - const testFileNameAlternative = testFileName.toLowerCase().split(" ").join("-") - return (filename.replace(".md", "") === testFileName || filename.replace(".md", "") === testFileNameAlternative) - })[0] - console.log("currenFile: ", currentFile) - //const currentFileFrontMatter = filesFrontMatterData.filter(f => f.permalink === permalink)[0] - //console.log("Current File By Name: ", currentFile) - //const currentFileFrontMatter = Remark.getFrontMatterData(currentFile) - //console.log("Current File By FrontMatter: ", currentFileFrontMatter) + var fileContent = Node.readFileSync(currentFilePath) - const fileContent = fs.readFileSync(currentFile, 'utf8') + //console.log("\tSearching any front matter data") + const currentFileFrontMatter = Transformer.getFrontMatterData(fileContent) + //console.log("\tFounded front matter data: ", currentFileFrontMatter, "\n") - const [htmlContent, backlinks] = Remark.getHtmlContent(fileContent, { + //fileContent = fileContent.split("---").join("") + //console.log("filecontent end") + + const [htmlContent, backlinks] = Transformer.getHtmlContent(fileContent, { fileNames:fileNames, }) - + //console.log("hrmlcontents and backlinks") return { id:filename, - //...currentFileFrontMatter, + ...currentFileFrontMatter, data:htmlContent } } export function getAllBacklinks(){ + //console.log("\n\nBacklinks are scanning") //var bimap = new BiMap - var backlinkList = [] - //bimap.push("key", "value"); - //bimap.key("key"); // => "value" - //bimap.val("value"); // => "key" - //bimap.push("France", ["Paris", "Lyon", "Marseille"]); + var internalLinks = [] // Get file names under /posts const filePaths = Node.getFiles(postsDirectory).filter(fn => fn.endsWith(".md")).filter(f => !f.endsWith("sidebar.md")) - const fileNames = filePaths.map(f => f.split("/")[f.split("/").length - 1].replace(".md", "")) - //console.log("filePaths", fileNames) + const fileNames = filePaths.map(f => Transformer.parseFileNameFromPath(f)) + //console.log("\tFounded filePaths: ", fileNames) var allBacklinkData = filePaths.map(fileName => { - //console.log("filename", fileNames) // Remove ".md" from file name to get id - const slug = fileName.replace(/\.md$/, '').split("/")[fileName.split("/").length - 1] + const slug = Transformer.parseFileNameFromPath(fileName) + + //console.log("filename", fileNames) + const fileData = { + id:slug + } + //console.log("AllBacklinks slug", slug) // Read markdown file as string - const fileContent = fs.readFileSync(fileName, 'utf8') - - const [htmlContent, backlinks] = Remark.getHtmlContent(fileContent, { + var fileContent = Node.readFileSync(fileName, 'utf8') + + const frontmatterData = Transformer.getFrontMatterData(fileContent) + const requiredParameters = ["title", "description"] + requiredParameters.forEach(param => { + if (frontmatterData[param]) + fileData[param] = frontmatterData[param] + }) + + //fileContent = fileContent.split("---").join("") + const [htmlContent, backlinks] = Transformer.getHtmlContent(fileContent, { fileNames:fileNames, - }) - // Check if scanned slug post has any internal links - if (backlinks.length > 0){ - //console.log("backlinks",[ slug, [backlinks]] ) - //bimap.push(slug, backlinks) + }) + // Check if scanned slug post has any internal links + const 
existingInternalLink = backlinks.filter(bl => fileNames.includes(bl)) + fileData.to = existingInternalLink + fileData.href = slug === "index" ? "/" : `/note/${slug}` + //console.log("\n\nbacklinks",[ slug, [backlinks]] ) + //bimap.push(slug, backlinks) + + // Check if internal link exists + //const internalLinks = backlinks.filter(bl => fileNames.includes(bl)) + internalLinks.push(fileData) + //console.log("bimap: ", bimap.key(slug)) - // Check if internal link exists - const internalLinks = backlinks.filter(bl => fileNames.includes(bl)) - backlinkList.push([slug, internalLinks]) - //console.log("bimap: ", bimap.key(slug)) - } + // Combine the data with the slug + //return backlinkList.length > 0 ? JSON.stringify(backlinkList) : null + }) - // Combine the data with the slug - return backlinkList.length > 0 ? JSON.stringify(backlinkList) : null - }) - - return [allBacklinkData.filter(bl => bl !== null), JSON.stringify(fileNames)] + //console.log("founded internal links for ", internalLinks) + //console.log("\n\ninternal list: ", internalLinks) + return internalLinks + //return [allBacklinkData.filter(bl => bl !== null), JSON.stringify(fileNames)] +} + +export function getGraphData(){ + const backlinkData = getAllBacklinks() + + const elements = [] + + // First create Nodes + backlinkData.forEach(el => { + const node = {data: {id: el.id}}; + + if(el.title){ + node.data.title = el.title + } + if (el.description){ + node.data.description = el.description + } + elements.push(node) + } + ) + + + // Second create Edges + backlinkData.forEach(el => { + // check if has any internal link + if (el.to.length > 0){ + // create edge from element to its links + el.to.forEach(linkElement => { + const edge = { + data: { + id: `${el.id}-${linkElement}`, + source: el.id, + target: linkElement + } + } + elements.push(edge) + }) + } + }) + + return elements } export function getPostListData() { - // Get file names under /posts - const filePaths = Node.getFiles(postsDirectory).filter(fn => fn.endsWith(".md")) - const fileNames = filePaths.map(f => f.split("/")[f.split("/").length - 1].replace(".md", "")) - //console.log("filePaths", filePaths) + //console.log("\n\nAll Posts are scanning") + // Get file names under /posts + const filePaths = Node.getFiles(postsDirectory).filter(fn => fn.endsWith(".md")) + const fileNames = filePaths.map(f => Transformer.parseFileNameFromPath(f)) + //console.log("filePaths", filePaths) - var allPostsData = filePaths.map(fileName => { - //console.log("filename", fileNames) - // Remove ".md" from file name to get id - const slug = fileName.replace(/\.md$/, '').split("/")[fileName.split("/").length - 1] - //console.log("slug", slug) + var allPostsData = filePaths.map(filePath => { + //console.log("filePath", filePaths) + // Remove ".md" from file name to get id + const slug = Transformer.parseFileNameFromPath(filePath) + //console.log("slug", slug) - // Read markdown file as string - const fileContent = fs.readFileSync(fileName, 'utf8') - - // Use gray-matter to parse the post metadata section - const matterResult = Remark.getFrontMatterData(fileContent)// matter(fileContent).data - const permalink = matterResult.permalink - const content = fileContent.split("---\n")[fileContent.split("---").length -1 ] + // Read markdown file as string + var fileContent = Transformer.preprocessThreeDashes(Node.readFileSync(filePath)) + //console.log("all backlinks fn") + // Use gray-matter to parse the post metadata section + const matterResult = Transformer.getFrontMatterData(fileContent) || [] // 
matter(fileContent).data + //console.log("all post fn....") - // Combine the data with the slug - return { - id:slug.toLowerCase().split(" ").join("-"), - ...matterResult, - } - }) + //const permalink = matterResult.permalink + //const content = fileContent.split("---\n")[fileContent.split("---").length -1 ] + + // Combine the data with the slug + return { + id:slug.toLowerCase().split(" ").join("-"), + ...matterResult, + } + }) return allPostsData } \ No newline at end of file diff --git a/lib/remark.js b/lib/remark.js deleted file mode 100644 index 082b46e..0000000 --- a/lib/remark.js +++ /dev/null @@ -1,58 +0,0 @@ -import matter from 'gray-matter' -var remark = require('remark') -import path from 'path' -import fs from "fs" -import remark2react from 'remark-react' - -const unified = require('unified') -const markdown = require('remark-parse') -const { wikiLinkPlugin } = require('remark-wiki-link'); - -var guide = require('remark-preset-lint-markdown-style-guide') -var html = require('remark-html') -var report = require('vfile-reporter') -var frontmatter = require('remark-frontmatter') - - - -const postsDirectory = path.join(process.cwd(), 'posts') -const isFile = fileName => { - return fs.lstatSync(fileName).isFile() -} - -export const Remark = { - getFrontMatterData:function(filecontent){return matter(filecontent).data}, - - getHtmlContent:function(content, {fileNames}){ - let htmlContent = [] - let backlinks = [] - unified() - .use(markdown, { gfm: true }) - .use(frontmatter, ['yaml', 'toml']) - .use(wikiLinkPlugin, { - permalinks:fileNames, - pageResolver: function(pageName){ - const name = [pageName.replace(/ /g, "-").toLowerCase()] - backlinks.push(name[0]); - //console.log("backlinks", backlinks); - return name - }, - hrefTemplate: function(permalink){ - //console.log("wiki pemalink", permalink); - permalink = permalink.replace("ç","c").replace("ı","i").replace("ş","s") - return `/note/${permalink}` - } - }).use(html) - .process(content, - function (err, file) { - //console.log("asd", String(file).slice(0,50)) - //console.error("remark: ", report(err || file)) - htmlContent.push(String(file).replace("\n", "")) - } - ) - htmlContent = htmlContent.join("") - htmlContent = htmlContent.split("---") - //console.log("ffffff ", htmlContent) - return [htmlContent, backlinks] - } -} diff --git a/lib/transformer.js b/lib/transformer.js new file mode 100644 index 0000000..d4b77f0 --- /dev/null +++ b/lib/transformer.js @@ -0,0 +1,143 @@ +import matter from 'gray-matter' +import path from 'path' +import fs from "fs" +var remark = require('remark') +//import remark2react from 'remark-react' + +var remark2react = require("remark-react"); +const unified = require('unified') +const markdown = require('remark-parse') +const { wikiLinkPlugin } = require('remark-wiki-link'); + +var guide = require('remark-preset-lint-markdown-style-guide') +var html = require('remark-html') +var report = require('vfile-reporter') +var vfile = require('to-vfile') +var frontmatter = require('remark-frontmatter') +var stringify = require('remark-stringify') +var externalLinks = require('remark-external-links') +const highlight = require('remark-highlight.js') + + +const postsDirectory = path.join(process.cwd(), 'posts') +const isFile = fileName => { + return fs.lstatSync(fileName).isFile() +} + +export const Transformer = { + haveFrontMatter:function(content){ + //console.log("\t Front matter data content", content) + if (!content) return false + var indexOfFirst = content.indexOf("---") + //console.log("\t Front matter 
data firstIndex ", indexOfFirst) + //console.log("index first", indexOfFirst) + if (indexOfFirst === -1){ + return false + } + var indexOfSecond = content.indexOf("---", (indexOfFirst + 1)) + if (indexOfSecond !== -1) { + return true + } + return false + }, + getFrontMatterData:function(filecontent){ + if (Transformer.haveFrontMatter(filecontent)){ + return matter(filecontent).data + } + return {} + }, + + + getHtmlContent:function(content, {fileNames}){ + let htmlContent = [] + let internalLinks = [] + const sanitizedContent = Transformer.preprocessThreeDashes(content) + unified() + .use(markdown, { gfm: true }) + .use(highlight) + .use(externalLinks, {target: "_blank", rel: ['nofollow']}) + .use(frontmatter, ['yaml', 'toml']) + .use(wikiLinkPlugin, { + permalinks:fileNames, + pageResolver: function(pageName){ + const name = [Transformer.parseFileNameFromPath(pageName)] + //console.log("\n\nwiki internal links", Transformer.parseFileNameFromPath(name[0])); + internalLinks.push(Transformer.parseFileNameFromPath(name[0])); + return name + }, + hrefTemplate: function(permalink){ + permalink = Transformer.normalizeFileName(permalink) + permalink = permalink.replace("ç","c").replace("ı","i").replace("ş","s") + //console.log("wiki pemalink", permalink); + return `/note/${permalink}` + } + }).use(html) + .process(sanitizedContent, + function (err, file) { + //console.log("asd", String(file).slice(0,50)) + //console.error("remark: ", report(err || file)) + htmlContent.push(String(file).replace("\n", "")) + } + ) + htmlContent = htmlContent.join("") + htmlContent = htmlContent.split("---") + //console.log("ffffff ", htmlContent) + return [htmlContent, internalLinks] + }, + + /* SANITIZE MARKDOWN FOR --- */ + preprocessThreeDashes:function(content){ + var indexOfFirst = content.indexOf("---") + if (indexOfFirst === -1){ + return content + } + var indexOfSecond = content.indexOf("---", (indexOfFirst + 1)) + const frontPart = content.slice(0, indexOfSecond); + const contentPart = content.slice(indexOfSecond); + const processedContent = contentPart.split("---").join("") + //console.log("preprocess", indexOfFirst, indexOfSecond) + //return frontPart.concat(processedContent) + return processedContent + }, + + /* Normalize File Names */ + normalizeFileName:function(filename){ + var processedFileName = filename.replace(".md", "") + processedFileName = processedFileName.split(" ").join("-") + processedFileName = processedFileName.toLowerCase() + const conversionLetters = [["ç", "c"], ["ş","s"], ["ı", "i"], ["ü","u"], ["ö","o"], ["ğ","g"]]; + conversionLetters.forEach(letterPair => { + processedFileName = processedFileName.replace(letterPair[0], letterPair[1]) + } + ) + //console.log("filename", processedFileName) + return processedFileName + }, + /* Parse file name from path then sanitize it */ + parseFileNameFromPath:function(filepath){ + const parsedFileFromPath = filepath.split("/")[filepath.split("/").length - 1] + const parsedFileName = parsedFileFromPath.replace(".md", "") + return Transformer.normalizeFileName(parsedFileName) + }, + + /* Pair provided and existing Filenames*/ + pairCurrentFile: function(provided, ListOfFilePaths){ + //console.log(provided, ListOfFilePaths) + const providedSanitizedFileName = Transformer.normalizeFileName(provided); + + // Map file paths and return true if it pairs with provided + const possibleFilePath = ListOfFilePaths.filter(possibleFilePath => { + const possibleFileName = Transformer.parseFileNameFromPath(possibleFilePath); + const possibleSanitizedFileName = 
Transformer.normalizeFileName(possibleFileName) + //console.log("----", providedSanitizedFileName, possibleSanitizedFileName) + + //console.log("---", possibleSanitizedFileName, providedSanitizedFileName) + if (providedSanitizedFileName === possibleSanitizedFileName){ + return true + } + return false + }) + //console.log("p---", possibleFilePath) + return possibleFilePath[0] + } +} diff --git a/package.json b/package.json index 3fce826..70b3767 100644 --- a/package.json +++ b/package.json @@ -8,6 +8,9 @@ "start": "next start" }, "dependencies": { + "cytoscape-d3-force": "^1.1.4", + "cytoscape-node-html-label": "^1.2.1", + "d3": "^6.2.0", "fs": "^0.0.1-security", "gray-matter": "^4.0.2", "jsnetworkx": "^0.3.4", @@ -16,16 +19,21 @@ "react": "16.13.1", "react-dom": "16.13.1", "remark": "^13.0.0", + "remark-external-links": "^8.0.0", + "remark-highlight.js": "^6.0.0", "remark-html": "^13.0.1", "remark-parse": "^9.0.0", "remark-preset-lint-markdown-style-guide": "^4.0.0", "remark-wiki-link": "^1.0.0", + "to-vfile": "^6.1.0", "unified": "^9.2.0", "vfile-reporter": "^6.0.1" }, "devDependencies": { "bimap": "^0.0.15", + "cytoscape": "^3.17.0", "remark-frontmatter": "^3.0.0", - "remark-react": "^8.0.0" + "remark-react": "^8.0.0", + "remark-stringify": "^9.0.0" } } diff --git a/pages/_app.js b/pages/_app.js index 17b1fe5..ef05811 100644 --- a/pages/_app.js +++ b/pages/_app.js @@ -3,5 +3,6 @@ import '../styles/webflow-layout.css' export default function App({ Component, pageProps }) { - return + + return } diff --git a/pages/_document.js b/pages/_document.js index b61eb25..ce31d56 100644 --- a/pages/_document.js +++ b/pages/_document.js @@ -1,42 +1,46 @@ import Document, { Html, Head, Main, NextScript } from 'next/document' +import { useRouter } from 'next/router' +import { useMemo } from "react"; import { getSinglePost } from "../lib/post"; -import Link from 'next/link' +import Link from 'next/link'; class MyDocument extends Document { + static async getInitialProps(ctx) { const initialProps = await Document.getInitialProps(ctx) + //console.log("doc", initialProps) const sidebar = getSinglePost("sidebar.md") - - //console.log("document: ", sidebar) return { sidebar, ...initialProps } } - + render(props) { + //console.log("document: ", this.props) return ( - - - + - + - {/* NAVBAR */} -