Run formatter and make a correction to improve the English listings
parent ecbf40aa4c
commit 21dd110fe6

index.js (74 changed lines)
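The functional change in this commit is concentrated in getLangFromFilename (the hunk at old line 140 below) and its caller parseDirectory (old line 271): as reconstructed from the diff, the function now returns a { lang, isSubject } pair instead of a bare language code, falls back to "en" rather than "en-US" for files whose suffix is not a known language code, and flags the subjects listing. The last hunk additionally comments out the fpb.log write; everything else is formatter output. A minimal standalone sketch of the corrected behavior follows; the stub languages map and the sample filenames are illustrative assumptions, not the repository's actual data.

// Sketch of the corrected getLangFromFilename, reconstructed from the diff below.
// The real `languages` map is defined elsewhere in the repository; this stub is an assumption.
const languages = { en: "English", fr: "French", "zh-CN": "Chinese" };

function getLangFromFilename(filename) {
  const dash = filename.lastIndexOf("-");
  const dot = filename.lastIndexOf(".");
  let lang = filename.slice(dash + 1, dot).replace(/_/, "-");
  let isSubject = false;
  if (!languages.hasOwnProperty(lang)) {
    // a suffix that looks like a language code but is not in the map is skipped entirely
    if (/^[a-z]{2}$/.test(lang) || /^[a-z]{2}-[A-Z]{2}$/.test(lang)) {
      return "";
    }
    // any other suffix (e.g. "subjects") is treated as an English listing
    if (lang === "subjects") {
      isSubject = true;
    }
    lang = "en";
  }
  return { lang: lang, isSubject: isSubject };
}

console.log(getLangFromFilename("free-programming-books-fr.md")); // { lang: 'fr', isSubject: false }
console.log(getLangFromFilename("free-programming-books-subjects.md")); // { lang: 'en', isSubject: true }

parseDirectory then attaches isSubject to docJson.language, but only for English documents (the if (lang === "en") line in the hunk at old line 271).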
@@ -11,12 +11,7 @@ const optionDefinitions = [
   { name: "output", defaultValue: "./parser/fpb.json" },
 ];
 
-const excludes = [
-  "README.md",
-  "CONTRIBUTING.md",
-  "CODE_OF_CONDUCT.md",
-  "SUMMARY.md",
-];
+const excludes = ["README.md", "CONTRIBUTING.md", "CODE_OF_CONDUCT.md", "SUMMARY.md"];
 
 /**
  * Parses a list item generated from remark-parse into a readable format.
@@ -56,11 +51,7 @@ function parseListItem(listItem) {
         entry.author = i.value.slice(3, parenIndex).trim(); // go from " - " until the first "("
       }
     }
-    if (
-      i.type === "emphasis" &&
-      i.children[0].value.slice(0, 1) === "(" &&
-      i.children[0].value.slice(-1) === ")"
-    ) {
+    if (i.type === "emphasis" && i.children[0].value.slice(0, 1) === "(" && i.children[0].value.slice(-1) === ")") {
       // access notes found (currently assumes exactly one child, so far this is always the case)
       entry.accessNotes = i.children[0].value.slice(1, -1);
     }
@@ -105,9 +96,7 @@ function parseListItem(listItem) {
         s += i.value;
       } else {
         // finally, we have reached the end of the note
-        entry.notes.push(
-          stripParens(s + i.value.slice(0, rightParen + 1))
-        );
+        entry.notes.push(stripParens(s + i.value.slice(0, rightParen + 1)));
         s = "";
         // this is a copypaste of another block of code. probably not a good thing tbh.
         leftParen = i.value.indexOf("(");
@@ -118,9 +107,7 @@ function parseListItem(listItem) {
           s += i.value.slice(leftParen);
           break;
         }
-        entry.notes.push(
-          i.value.slice(leftParen + 1, rightParen)
-        );
+        entry.notes.push(i.value.slice(leftParen + 1, rightParen));
         leftParen = i.value.indexOf("(", rightParen);
       }
     }
@@ -140,13 +127,18 @@ function getLangFromFilename(filename) {
   const dash = filename.lastIndexOf("-");
   const dot = filename.lastIndexOf(".");
   let lang = filename.slice(dash + 1, dot).replace(/_/, "-");
+  let isSubject = false;
   if (!languages.hasOwnProperty(lang)) {
     if (/^[a-z]{2}$/.test(lang) || /^[a-z]{2}-[A-Z]{2}$/.test(lang)) {
       return "";
     }
-    lang = "en-US";
+    // console.log(lang);
+    if (lang === "subjects") {
+      isSubject = true;
     }
-  return lang;
+    lang = "en";
+  }
+  return { lang: lang, isSubject: isSubject };
 }
 
 /**
@@ -157,10 +149,7 @@ function getLangFromFilename(filename) {
 function getFilesFromDir(dir) {
   return fs
     .readdirSync(dir)
-    .filter(
-      (file) =>
-        path.extname(file) === ".md" && excludes.indexOf(file) === -1
-    )
+    .filter((file) => path.extname(file) === ".md" && excludes.indexOf(file) === -1)
     .map((file) => path.join(dir, file));
 }
 
@@ -198,8 +187,7 @@ function parseMarkdown(doc) {
   tree.slice(i).forEach((item) => {
     // Start iterating after Index
     try {
-      if (item.type == "heading" && item.children[0].value == "Index")
-        return;
+      if (item.type == "heading" && item.children[0].value == "Index") return;
 
       if (item.type == "heading") {
         if (item.depth == 3) {
@@ -218,9 +206,7 @@ function parseMarkdown(doc) {
             section: item.children[0].value, // Get the name of the subsection
             entries: [],
           };
-          sections[sections.length - 1].subsections.push(
-            newSubsection
-          ); // Add to subsection array of most recent h3
+          sections[sections.length - 1].subsections.push(newSubsection); // Add to subsection array of most recent h3
         }
       } else if (item.type == "list") {
         item.children.forEach((listItem) => {
@@ -233,12 +219,9 @@ function parseMarkdown(doc) {
             sections[sections.length - 1].entries.push(contentJson); // add the entry to most recent h3
           } else if (currentDepth == 4) {
             let lastSection = sections.length - 1;
-            let lastSubSec =
-              sections[lastSection].subsections.length - 1;
+            let lastSubSec = sections[lastSection].subsections.length - 1;
             let contentJson = parseListItem(content);
-            sections[lastSection].subsections[
-              lastSubSec
-            ].entries.push(contentJson); // add entry to most recent h4
+            sections[lastSection].subsections[lastSubSec].entries.push(contentJson); // add entry to most recent h4
           }
         });
       }
@@ -271,17 +254,18 @@ function parseDirectory(directory) {
   filenames.forEach((filename) => {
     const doc = fs.readFileSync(filename);
     let { sections, errors } = parseMarkdown(doc); // parse the markdown document
-    const langCode = getLangFromFilename(filename);
+    const { lang, isSubject } = getLangFromFilename(filename);
 
     // Entries
     let docJson = {
       language: {
-        code: langCode,
-        name: languages[langCode],
+        code: lang,
+        name: languages[lang],
       },
       index: {},
       sections: sections,
     };
+    if (lang === "en") docJson.language.isSubject = isSubject;
     dirChildren.push(docJson);
 
     // Errors
@@ -343,15 +327,15 @@ function parseAll(directories, output) {
       console.log(err);
     }
   });
-  fs.writeFileSync(
-    "./parser/fpb.log",
-    JSON.stringify(allErrors, null, 3),
-    function (err) {
-      if (err) {
-        console.log(err);
-      }
-    }
-  );
+  // fs.writeFileSync(
+  //   "./parser/fpb.log",
+  //   JSON.stringify(allErrors, null, 3),
+  //   function (err) {
+  //     if (err) {
+  //       console.log(err);
+  //     }
+  //   }
+  // );
 }
 
 let { input, output } = commandLineArgs(optionDefinitions);
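For reference, this is roughly the shape of the per-language document that parseDirectory assembles after this change; the field names come from the hunks above, while the concrete values and the exact contents of a section are illustrative guesses, not real fpb.json output.

// Illustrative shape only: field names are taken from the diff above, values are made up.
const docJson = {
  language: {
    code: "en",
    name: "English", // looked up as languages[lang]
    isSubject: false, // attached only when code === "en"
  },
  index: {},
  sections: [
    {
      section: "Example Section", // from an h3 heading
      entries: [], // list items parsed by parseListItem
      subsections: [{ section: "Example Subsection", entries: [] }], // from h4 headings
    },
  ],
};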