Skip to content
Snippets Groups Projects
Verified Commit 3f7f5aa1 authored by Isabella Skořepová's avatar Isabella Skořepová
Browse files

Finish markdown parser and add tag collector

parent ccf83953
No related branches found
No related tags found
No related merge requests found
import _ from 'lodash/fp'
// Collects tags from article files and appends paginated tag-listing pages.
// `pagination` = number of articles per tag page; `warnings` toggles
// build-time consistency checks.
const collector = ({ pagination, warnings = {} }) => files => {
  const { onlyOneArticle = false, sameURL = true } = warnings
  const tags = {}      // slug -> { deburr, tag, list }
  const tagList = []   // insertion-ordered view of `tags`
  let warned = false
  // Emit one separating blank line before the first warning only.
  const warn = () => { if(!warned) { console.log(); warned = true } }
  files.forEach(file => {
    if(file.metadata.tags)
      for(const tag of file.metadata.tags) {
        // Normalize the tag to a URL-safe slug: strip diacritics,
        // lowercase, spaces -> dashes, drop remaining symbols.
        // BUG FIX: the original `_.replace(' ', '-')` replaced only the
        // FIRST space, so multi-word tags lost their later word breaks.
        const tagname = _.flow(
          _.deburr,
          _.toLower,
          _.replace(/ /g, '-'),
          _.replace(/[^a-zA-Z-0-9]+/g, ''),
        )(tag)
        if(!tags[tagname]) {
          const obj = { deburr: tagname, tag, list: [] }
          tags[tagname] = obj
          tagList.push(obj)
        }
        tags[tagname].list.push(file)
        // Two differently-spelled tags mapping to the same slug would
        // silently share one tag page; flag that.
        if(sameURL && tags[tagname].tag !== tag) {
          warn()
          console.error(`WARNING: Name of tag "${tag}" is not same as "${tags[tagname].tag}" but they correspond to same url of "${tagname}"`)
          console.error(`  In files ${file.metadata.filename} and ${tags[tagname].list[0].metadata.filename}`)
        }
      }
  })
  // BUG FIX: the original ran this check while still collecting, so every
  // tag was reported right after its first article even when more articles
  // followed. Only meaningful once all files have been seen.
  if(onlyOneArticle) {
    for(const t of tagList) {
      if(t.list.length < 2) {
        warn()
        console.error(`WARNING: Tag "${t.tag}" only has one article. Consider removing it or adding more articles to it.`)
        console.error(`  In file ${t.list[0].metadata.filename}`)
      }
    }
  }
  // Build one page object per chunk of each tag's article list.
  const tagPages = tagList.reduce((list, tag) => {
    const chunks = _.chunk(pagination, tag.list)
    // URLs: /tag/<slug>, /tag/<slug>/2, /tag/<slug>/3, ...
    const paginator = chunks.map((chunk, index) => '/tag/'+tag.deburr+(index?'/'+(index+1):''))
    return list.concat(chunks.map(
      (chunk, index) => ({
        type: 'tag',
        metadata: {
          url: paginator[index],
          name: tag.tag,
          paginator,
        },
        content: chunk
      })
    ))
  }, [])
  // Articles pass through (typed), followed by the generated tag pages.
  return files.map(
    article => ({ ...article, type: 'article' })
  ).concat(tagPages)
}
export default collector
...@@ -4,16 +4,41 @@ import fs from 'fs' ...@@ -4,16 +4,41 @@ import fs from 'fs'
import {readFiles} from './readFiles' import {readFiles} from './readFiles'
import parser from './parser' import parser from './parser'
import markdownToJSON from './markdownToJSON' import markdownToJSON from './markdownToJSON'
import collectTags from './collectTags'
const buildStep = (desc, func) => f => {
const start = new Date();
const msg = `- ${desc}`
process.stdout.write(msg)
const ret = func(f)
process.stdout.write(_.repeat(30-msg.length,' '))
process.stdout.write(`${(new Date()-start)}ms\n`)
return ret
}
const map = (desc, func) => buildStep(desc, f => f.map(func))
let start
readFiles({ readFiles({
filter: file => !/\.git$/.exec(file) filter: file => !/\.git$/.exec(file)
})(change => console.log()) })(change => console.log(change))
.then(files => files.map(f => parser(f))) .then(f => (start = new Date()) && f)
.then(files => files.filter(f => f)) .then(map('Parsing files', f => parser(f)))
.then(files => files.map(f => ({ metadata: f.metadata, content: markdownToJSON(f.content) }))) .then(map('Parsing markdown', f => ({ metadata: f.metadata, content: markdownToJSON(f.content) })))
.then(files => files.map(f => { .then(map('Rewriting URLs', f => ({
console.log(_.repeat(80,'=')) metadata: {
console.log(f.metadata) ...f.metadata,
console.log(f.content) url: '/clanek/' + f.metadata.filename.replace(/\.md$/,'')
})) },
content: f.content
})))
.then(buildStep('Collect tags', collectTags({
pagination: 6,
warnings: {
onlyOneArticle: false,
sameURL: false,
}
})))
//.then(f => console.log(f))
.then(() => console.log(` Total: ${(new Date())-start}ms`))
.catch(e => console.log(e)) .catch(e => console.log(e))
import _ from 'lodash/fp'
/**
 * Concatenates two annotated-text values { text, anotations }.
 * `b`'s text is appended after `a`'s, so every annotation span coming from
 * `b` is shifted right by the length of `a.text`; spans under the same
 * annotation name are merged into one list.
 */
const join = (a, b) => {
  // Re-base a span of `b` onto the concatenated text.
  const shift = span => ({ ...span, from: span.from + a.text.length })
  const merged = { ...a.anotations }
  for(const key of Object.keys(b.anotations)) {
    const shifted = b.anotations[key].map(shift)
    merged[key] = merged[key]
      ? [...a.anotations[key], ...shifted]
      : shifted
  }
  return { text: a.text + b.text, anotations: merged }
}
/**
 * Placeholder: merging two annotation maps over the same text span is not
 * supported yet; fail loudly rather than silently dropping annotations.
 */
const mergeAnotations = (a, b) => { throw new Error('Not implemented'); }
/**
 * Recursively converts a parsed markdown node into annotated-text form
 * { text, anotations }, where `anotations` maps an annotation name to a
 * list of { from, length, opts } spans over `text`.
 *
 * Accepted node shapes: falsy (empty), string (plain text), array of
 * nodes (concatenated via `join`), or object { name, opts, children }.
 *
 * BUG FIX: removed unreachable code — a second `return` after the array
 * branch's return, and a trailing `console.log`/`throw` that no branch
 * could reach (every branch returns).
 */
const toAnotatedText = (par) => {
  // Missing/empty node contributes nothing.
  if(!par) {
    return { text: '', anotations: {} }
  }
  if(typeof par === 'string') {
    return { text: par, anotations: {} }
  }
  if(Array.isArray(par)) {
    // Concatenate children left-to-right; `join` re-bases span offsets.
    // NOTE(review): throws on an empty array (reduce with no seed), same
    // as the original — presumably the parser never emits empty lists.
    return par.map(toAnotatedText).reduce(join)
  }
  // Object node: annotate the whole of its children's text with `name`.
  const { name, opts } = par
  const ant = toAnotatedText(par.children)
  const anotation = { from: 0, length: ant.text.length, opts }
  return {
    text: ant.text,
    anotations: {
      ...ant.anotations,
      [name]: [...(ant.anotations[name] || []), anotation],
    },
  }
}
/**
 * Builds a post-processing pass over annotated text. The `merger` map is
 * meant to hold per-annotation merge handlers, but none exist yet, so the
 * pass is currently the identity.
 *
 * BUG FIX: the original inner function had no return statement, so it
 * always yielded `undefined` and discarded the annotated text produced by
 * the whole pipeline in the default export.
 */
const mkmerger = merger => arg => {
  for(const i in merger) {
    // TODO: apply merge handler `merger[i]` (see mergeAnotations) to
    // the spans in `arg.anotations[i]`.
  }
  return arg
}
/**
 * Flattens a parsed markdown node list into annotated text: drops falsy
 * entries, converts the rest to { text, anotations }, then runs the
 * (currently no-op) merge pass.
 */
export default a => {
  const cleaned = a.filter(o => o)
  const annotated = toAnotatedText(cleaned)
  return mkmerger({})(annotated)
}
...@@ -2,10 +2,12 @@ import _ from 'lodash/fp' ...@@ -2,10 +2,12 @@ import _ from 'lodash/fp'
import {toStr} from './renderer' import {toStr} from './renderer'
import Parser from './parser' import Parser from './parser'
import flatten from './flatten'
import kramed from 'kramed' import kramed from 'kramed'
const mtj = new Parser() const mtj = new Parser()
export default _.flow( export default _.flow(
a => kramed.lexer(a), a => kramed.lexer(a),
a => mtj.parse(a) a => mtj.parse(a),
flatten
) )
...@@ -82,7 +82,7 @@ Renderer.prototype.listitem = simple('li') ...@@ -82,7 +82,7 @@ Renderer.prototype.listitem = simple('li')
Renderer.prototype.paragraph = simple('p') Renderer.prototype.paragraph = simple('p')
Renderer.prototype.tablerow = simple('tr') Renderer.prototype.tablerow = simple('tr')
Renderer.prototype.table = (header, body) => fmt('table', body, {header}) Renderer.prototype.table = (header, body) => fmt('table', [fmt('thead', header), fmt('tbody', body)])
Renderer.prototype.list = function(body, ordered) { Renderer.prototype.list = function(body, ordered) {
var type = ordered ? 'ol' : 'ul'; var type = ordered ? 'ol' : 'ul';
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment