mirror of
https://github.com/noodlapp/noodl-docs.git
synced 2026-01-11 06:42:55 +01:00
Initial commit
Co-Authored-By: kotte <14197736+mrtamagotchi@users.noreply.github.com>
Co-Authored-By: mikaeltellhed <2311083+mikaeltellhed@users.noreply.github.com>
Co-Authored-By: Tore Knudsen <18231882+torekndsn@users.noreply.github.com>
Co-Authored-By: Michael Cartner <32543275+michaelcartner@users.noreply.github.com>
This commit is contained in:
58
plugins/copy-node-markdowns.js
Normal file
58
plugins/copy-node-markdowns.js
Normal file
@@ -0,0 +1,58 @@
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
|
||||
// Replaces every `@include "relative/path"` directive in `content` with the
// raw contents of the referenced file, resolved relative to `dir`.
// Note: substitution is single-pass — nested @include directives inside an
// included file are NOT resolved recursively.
function resolveImports(content, dir) {
  // matchAll iterates lazily over the *original* string snapshot, so
  // reassigning `content` inside the loop is safe.
  const includeMatches = content.matchAll(/@include\s"(.*)"/g)
  for (const match of includeMatches) {
    const includePath = match[1]
    const absPath = path.join(dir, includePath)

    // Read as utf8 so we splice in a string rather than a Buffer.
    const include = fs.readFileSync(absPath, 'utf8')

    // Use a replacer *function*: a plain string replacement would interpret
    // `$&`, `$1`, `$$`, ... sequences occurring in the included file and
    // corrupt its contents.
    content = content.replace(match[0], () => include)
  }

  return content
}
|
||||
|
||||
// Recursively copies every markdown file under `dir` into `build/<dir>`,
// resolving `@include` directives via resolveImports along the way.
// Non-markdown files are skipped; subdirectories are traversed depth-first.
function copyNodeMarkdowns(dir) {
  fs.readdirSync(dir).forEach(file => {
    const srcPath = dir + '/' + file
    if (fs.lstatSync(srcPath).isDirectory()) {
      copyNodeMarkdowns(srcPath)
    } else if (file.endsWith('.md')) {
      const content = fs.readFileSync(srcPath)
      const resolved = resolveImports(content.toString(), dir)
      const outDir = 'build/' + dir
      if (!fs.existsSync(outDir)) {
        // recursive:true also creates missing intermediate directories —
        // the original plain mkdirSync threw whenever e.g. `build/` itself
        // or a parent output directory did not exist yet.
        fs.mkdirSync(outDir, { recursive: true })
      }
      fs.writeFileSync(outDir + '/' + file, resolved)
    }
  })
}
|
||||
|
||||
module.exports = function(context, options) {
|
||||
return {
|
||||
name: 'docusaurus-copy-node-markdowns-plugin',
|
||||
configureWebpack(config, isServer, utils) {
|
||||
const {getJSLoader} = utils;
|
||||
return isServer?{
|
||||
plugins: [
|
||||
{
|
||||
apply: (compiler) => {
|
||||
compiler.hooks.afterEmit.tap('AfterEmitPlugin', (compilation) => {
|
||||
console.log('Copying node markdown files')
|
||||
|
||||
options.paths.forEach(path => {
|
||||
console.log(' - Processing path: ' + path)
|
||||
copyNodeMarkdowns(path)
|
||||
})
|
||||
|
||||
})
|
||||
}
|
||||
}
|
||||
]
|
||||
}:{}
|
||||
},
|
||||
};
|
||||
};
|
||||
24
plugins/gtm.js
Normal file
24
plugins/gtm.js
Normal file
@@ -0,0 +1,24 @@
|
||||
// Docusaurus plugin that injects the standard Google Tag Manager snippets
// into every generated page.
// options.id: the GTM container id (e.g. "GTM-XXXXXXX"), interpolated into
// both snippets below.
module.exports = function (context, options) {
return {
name: 'docusaurus-plugin',
// Docusaurus lifecycle hook: returns raw HTML fragments to splice into the
// generated pages.
injectHtmlTags() {
return {
// GTM loader script, placed in <head>.
headTags: [
`<!-- Google Tag Manager -->
<script>(function(w,d,s,l,i){w[l]=w[l]||[];w[l].push({'gtm.start':
new Date().getTime(),event:'gtm.js'});var f=d.getElementsByTagName(s)[0],
j=d.createElement(s),dl=l!='dataLayer'?'&l='+l:'';j.async=true;j.src=
'https://www.googletagmanager.com/gtm.js?id='+i+dl;f.parentNode.insertBefore(j,f);
})(window,document,'script','dataLayer','${options.id}');</script>
<!-- End Google Tag Manager -->`,
],
// No-JS fallback iframe, placed right after <body>.
postBodyTags: [
`<!-- Google Tag Manager (noscript) -->
<noscript><iframe src="https://www.googletagmanager.com/ns.html?id=${options.id}"
height="0" width="0" style="display:none;visibility:hidden"></iframe></noscript>
<!-- End Google Tag Manager (noscript) -->`,
],
}
},
}
}
|
||||
104
plugins/import-markdown.js
Normal file
104
plugins/import-markdown.js
Normal file
@@ -0,0 +1,104 @@
|
||||
/**
|
||||
* Inspired by https://github.com/hashicorp/web-platform-packages/tree/9723b25a054674f9c2bebc12928377c35957508f/packages/remark-plugins/plugins/include-markdown
|
||||
*
|
||||
* Problem with that package is that it is not ES6,
|
||||
* which Docusaurus doesn't support yet.
|
||||
*/
|
||||
|
||||
const path = require('path')
|
||||
const remark = require('remark')
|
||||
const remarkMdx = require('remark-mdx')
|
||||
const { _parseNoodlMarkupPlugin } = require('./markdown-syntax')
|
||||
const readFileSync = require('fs').readFileSync
|
||||
|
||||
// Depth-first tree rewrite: every node is passed through `fn`, which returns
// an array of replacement nodes (possibly empty, enabling deletion, or with
// several entries, enabling expansion). Children are rewritten before their
// parent, and `node.children` is rebuilt in place from the concatenated
// replacements. Returns the first replacement for the root.
function flatMap(ast, fn) {
  function transform(node, index, parent) {
    if (node.children) {
      const rebuilt = []
      node.children.forEach((child, childIndex) => {
        const replacements = transform(child, childIndex, node)
        if (replacements) {
          replacements.forEach((replacement) => rebuilt.push(replacement))
        }
      })
      node.children = rebuilt
    }

    return fn(node, index, parent)
  }

  return transform(ast, 0, null)[0]
}
|
||||
|
||||
/**
 * remark plugin factory: expands `@include "path"` paragraphs into the
 * contents of the referenced file.
 *
 * - `.md` / `.mdx` includes are parsed with remark (plus the Noodl markup
 *   plugin) and spliced in as processed markdown; included files may contain
 *   further `@include` directives (the plugin is applied recursively).
 * - Any other file type is embedded as a code node whose language is taken
 *   from the file extension.
 *
 * @param {Object}  [opts]
 * @param {string}  [opts.resolveFrom] directory to resolve include paths
 *   against; defaults to the directory of the file being processed.
 * @param {*}       [opts.resolveMdx] only forwarded to recursive invocations
 *   — not otherwise read here.
 * @throws {Error} when an included file cannot be read.
 */
function includeMarkdownPlugin({
  resolveFrom,
  resolveMdx,
} = {}) {
  return function transformer(tree, file) {
    return flatMap(tree, (node) => {
      if (node.type !== 'paragraph') return [node]

      // detect an `@include` statement
      const includeMatch =
        node.children[0].value &&
        node.children[0].value.match(/^@include\s['"](.*)['"]$/)
      if (!includeMatch) {
        return [node]
      }

      // read the file contents
      const includePath = path.join(
        resolveFrom || file.dirname,
        includeMatch[1]
      )
      let includeContents
      try {
        includeContents = readFileSync(includePath, {
          encoding: 'utf8'
        })
      } catch (err) {
        throw new Error(
          `The @include file path at ${includePath} was not found.\n\nInclude Location: ${file.path}:${node.position.start.line}:${node.position.start.column}`
        )
      }

      // if we are including a ".md" or ".mdx" file, we add the contents as processed markdown
      // if any other file type, they are embedded into a code block
      if (includePath.match(/\.md(?:x)?$/)) {
        // return the file contents in place of the @include
        // (takes a couple steps because we're processing includes with remark)
        // NOTE: Use our _parseNoodlMarkupPlugin plugin
        const processor = remark().use(_parseNoodlMarkupPlugin)

        // use remark-mdx to process the include contents
        processor.use(remarkMdx)

        // use the includeMarkdown plugin to allow recursive includes
        processor.use(includeMarkdownPlugin, {
          resolveFrom,
          resolveMdx
        })

        // Process the file contents, then return them
        const ast = processor.parse(includeContents)
        const res = processor.runSync(ast, includeContents)
        return res.children
      }

      // BUG FIX: `includeContents` is a plain string (read with utf8), so the
      // original `includeContents.value.trim()` dereferenced `undefined` and
      // threw a TypeError on every non-markdown include. Trim the string
      // itself before wrapping it in a code node.
      includeContents = includeContents.trim()

      // Derive the code-block language from the extension; fall back to no
      // language for extensionless paths instead of crashing on a null match.
      const extMatch = includePath.match(/\.(\w+)$/)

      // return contents wrapped inside a "code" node
      return [{
        type: 'code',
        lang: extMatch ? extMatch[1] : undefined,
        value: includeContents,
      }]
    })
  }
}
|
||||
|
||||
// CommonJS export of the plugin factory.
module.exports = includeMarkdownPlugin
|
||||
38
plugins/markdown-syntax.js
Normal file
38
plugins/markdown-syntax.js
Normal file
@@ -0,0 +1,38 @@
|
||||
// Pre-order walk over an array of AST nodes: invokes `fn` on each node, then
// descends into that node's children (if any).
function _visit(children, fn) {
  for (const child of children) {
    fn(child)
    if (child.children) _visit(child.children, fn)
  }
}
|
||||
|
||||
// remark plugin: strips Noodl editor markup of the form <##...##> from every
// string `value` in the tree, in place.
// NOTE(review): the regex alternations `(<|<)` and `(>|>)` are redundant as
// written — presumably an HTML-entity variant (&lt;/&gt;) was lost in
// transcription; confirm against the original source. Reproduced verbatim.
function _parseNoodlMarkupPlugin() {
  return function (ast) {
    _visit(ast.children, (node) => {
      if (typeof node.value !== 'string') return
      node.value = node.value.replace(/(<|<)\#\#.*?\#\#(>|>)/g, '')
    })
  }
}
|
||||
|
||||
// rehype plugin: marks <p> elements that have no child with a truthy `value`
// (i.e. visually empty paragraphs) with class "is-hidden" so the stylesheet
// can suppress them.
function _parseRenderedMarkupPlugin() {
  return function (ast) {
    _visit(ast.children, (node) => {
      if (node.type !== 'element' || node.tagName !== 'p') return

      const hasContent = node.children.some((child) => Boolean(child.value))
      if (!hasContent) node.properties.class = 'is-hidden'
    })
  }
}
|
||||
|
||||
// Public API: the two plugin factories defined above.
module.exports = {
_parseNoodlMarkupPlugin,
_parseRenderedMarkupPlugin,
}
|
||||
Reference in New Issue
Block a user