lint: low hanging bin/doc/*.js

John McLear 2021-02-01 09:47:42 +00:00 committed by Richard Hansen
parent 5b701b97c3
commit 9987fab574
4 changed files with 67 additions and 64 deletions
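
The four diffs below repeat the same handful of mechanical lint fixes: dropping unnecessary backslash escapes inside regular expressions, calling hasOwnProperty via Object.prototype instead of directly on an object, turning function declarations into arrow functions assigned to const, and replacing var with const. The sketch below is illustrative only: the identifiers are invented, and the rule names are the usual ESLint rules for these patterns rather than anything named in the commit.

'use strict';
// Illustrative sketch of the recurring fixes; not code from the commit.
const args = ['--format=html', 'doc.md'];

// no-useless-escape: '-' needs no backslash, so /^\-\-/ becomes /^--/.
const flags = args.filter((arg) => /^--/.test(arg));

// func-style + no-prototype-builtins: arrow function assigned to a const,
// hasOwnProperty called via Object.prototype rather than on the object itself.
const seen = {};
const getId = (text) => {
  if (Object.prototype.hasOwnProperty.call(seen, text)) return `${text}_${++seen[text]}`;
  seen[text] = 0;
  return text;
};

console.log(flags, getId('intro'), getId('intro')); // ['--format=html'] 'intro' 'intro_1'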


@@ -1,4 +1,7 @@
#!/usr/bin/env node
+'use strict';
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,7 +23,6 @@
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
const marked = require('marked');
const fs = require('fs');
const path = require('path');
@@ -33,12 +35,12 @@ let template = null;
let inputFile = null;
args.forEach((arg) => {
-if (!arg.match(/^\-\-/)) {
+if (!arg.match(/^--/)) {
inputFile = arg;
-} else if (arg.match(/^\-\-format=/)) {
-format = arg.replace(/^\-\-format=/, '');
-} else if (arg.match(/^\-\-template=/)) {
-template = arg.replace(/^\-\-template=/, '');
+} else if (arg.match(/^--format=/)) {
+format = arg.replace(/^--format=/, '');
+} else if (arg.match(/^--template=/)) {
+template = arg.replace(/^--template=/, '');
}
});
@@ -56,11 +58,11 @@ fs.readFile(inputFile, 'utf8', (er, input) => {
});
-const includeExpr = /^@include\s+([A-Za-z0-9-_\/]+)(?:\.)?([a-zA-Z]*)$/gmi;
+const includeExpr = /^@include\s+([A-Za-z0-9-_/]+)(?:\.)?([a-zA-Z]*)$/gmi;
const includeData = {};
-function processIncludes(inputFile, input, cb) {
+const processIncludes = (inputFile, input, cb) => {
const includes = input.match(includeExpr);
-if (includes === null) return cb(null, input);
+if (includes == null) return cb(null, input);
let errState = null;
console.error(includes);
let incCount = includes.length;
@@ -70,7 +72,7 @@ function processIncludes(inputFile, input, cb) {
let fname = include.replace(/^@include\s+/, '');
if (!fname.match(/\.md$/)) fname += '.md';
-if (includeData.hasOwnProperty(fname)) {
+if (Object.prototype.hasOwnProperty.call(includeData, fname)) {
input = input.split(include).join(includeData[fname]);
incCount--;
if (incCount === 0) {
@@ -94,10 +96,10 @@ function processIncludes(inputFile, input, cb) {
});
});
});
-}
+};
-function next(er, input) {
+const next = (er, input) => {
if (er) throw er;
switch (format) {
case 'json':
@@ -117,4 +119,4 @@ function next(er, input) {
default:
throw new Error(`Invalid format: ${format}`);
}
-}
+};
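
The regex edits in this file are purely cosmetic: outside a character class, '-' has no special meaning, so the escapes flagged by a rule such as no-useless-escape can be removed without changing what the pattern matches. A quick standalone check (not part of the commit; the sample strings are invented):

'use strict';
// The escaped and unescaped forms accept and reject exactly the same strings.
const escaped = /^\-\-format=/;
const plain = /^--format=/;
for (const s of ['--format=json', '--template=x.html', 'format=json', 'doc.md']) {
  console.assert(escaped.test(s) === plain.test(s), s);
}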


@@ -1,3 +1,5 @@
+'use strict';
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -23,17 +25,17 @@ const fs = require('fs');
const marked = require('marked');
const path = require('path');
-module.exports = toHTML;
-function toHTML(input, filename, template, cb) {
+const toHTML = (input, filename, template, cb) => {
const lexed = marked.lexer(input);
fs.readFile(template, 'utf8', (er, template) => {
if (er) return cb(er);
render(lexed, filename, template, cb);
});
-}
+};
+module.exports = toHTML;
-function render(lexed, filename, template, cb) {
+const render = (lexed, filename, template, cb) => {
// get the section
const section = getSection(lexed);
@@ -52,23 +54,23 @@ function render(lexed, filename, template, cb) {
// content has to be the last thing we do with
// the lexed tokens, because it's destructive.
-content = marked.parser(lexed);
+const content = marked.parser(lexed);
template = template.replace(/__CONTENT__/g, content);
cb(null, template);
});
-}
+};
// just update the list item text in-place.
// lists that come right after a heading are what we're after.
-function parseLists(input) {
+const parseLists = (input) => {
let state = null;
let depth = 0;
const output = [];
output.links = input.links;
input.forEach((tok) => {
-if (state === null) {
+if (state == null) {
if (tok.type === 'heading') {
state = 'AFTERHEADING';
}
@@ -112,29 +114,27 @@ function parseLists(input) {
});
return output;
-}
+};
-function parseListItem(text) {
-text = text.replace(/\{([^\}]+)\}/, '<span class="type">$1</span>');
+const parseListItem = (text) => {
+text = text.replace(/\{([^}]+)\}/, '<span class="type">$1</span>');
// XXX maybe put more stuff here?
return text;
-}
+};
// section is just the first heading
-function getSection(lexed) {
-const section = '';
+const getSection = (lexed) => {
for (let i = 0, l = lexed.length; i < l; i++) {
const tok = lexed[i];
if (tok.type === 'heading') return tok.text;
}
return '';
-}
+};
-function buildToc(lexed, filename, cb) {
-const indent = 0;
+const buildToc = (lexed, filename, cb) => {
let toc = [];
let depth = 0;
lexed.forEach((tok) => {
@@ -155,18 +155,18 @@ function buildToc(lexed, filename, cb) {
toc = marked.parse(toc.join('\n'));
cb(null, toc);
-}
+};
const idCounters = {};
-function getId(text) {
+const getId = (text) => {
text = text.toLowerCase();
text = text.replace(/[^a-z0-9]+/g, '_');
text = text.replace(/^_+|_+$/, '');
text = text.replace(/^([^a-z])/, '_$1');
-if (idCounters.hasOwnProperty(text)) {
+if (Object.prototype.hasOwnProperty.call(idCounters, text)) {
text += `_${++idCounters[text]}`;
} else {
idCounters[text] = 0;
}
return text;
-}
+};
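
One change in this file is more than stylistic: once toHTML becomes an arrow function assigned to a const, it is no longer hoisted the way a function declaration is, so the module.exports assignment has to move below the definition. A minimal sketch of the difference (hypothetical module, not the real file):

'use strict';
// With a function declaration, hoisting makes this order legal:
//   module.exports = toHTML;
//   function toHTML(input) { /* ... */ }
// With a const arrow function, the binding is unusable until the declaration
// runs (temporal dead zone), so the export must come after it:
const toHTML = (input) => `<p>${input}</p>`;
module.exports = toHTML;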


@@ -1,3 +1,4 @@
+'use strict';
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -26,7 +27,7 @@ module.exports = doJSON;
const marked = require('marked');
-function doJSON(input, filename, cb) {
+const doJSON = (input, filename, cb) => {
const root = {source: filename};
const stack = [root];
let depth = 0;
@@ -40,7 +41,7 @@ function doJSON(input, filename, cb) {
// <!-- type = module -->
// This is for cases where the markdown semantic structure is lacking.
if (type === 'paragraph' || type === 'html') {
-const metaExpr = /<!--([^=]+)=([^\-]+)-->\n*/g;
+const metaExpr = /<!--([^=]+)=([^-]+)-->\n*/g;
text = text.replace(metaExpr, (_0, k, v) => {
current[k.trim()] = v.trim();
return '';
@@ -146,7 +147,7 @@ function doJSON(input, filename, cb) {
}
return cb(null, root);
-}
+};
// go from something like this:
@@ -191,7 +192,7 @@ function doJSON(input, filename, cb) {
// desc: 'whether or not to send output to parent\'s stdio.',
// default: 'false' } ] } ]
-function processList(section) {
+const processList = (section) => {
const list = section.list;
const values = [];
let current;
@@ -203,13 +204,13 @@ function processList(section) {
if (type === 'space') return;
if (type === 'list_item_start') {
if (!current) {
-var n = {};
+const n = {};
values.push(n);
current = n;
} else {
current.options = current.options || [];
stack.push(current);
-var n = {};
+const n = {};
current.options.push(n);
current = n;
}
@@ -283,11 +284,11 @@ function processList(section) {
// section.listParsed = values;
delete section.list;
-}
+};
// textRaw = "someobject.someMethod(a, [b=100], [c])"
-function parseSignature(text, sig) {
+const parseSignature = (text, sig) => {
let params = text.match(paramExpr);
if (!params) return;
params = params[1];
@@ -322,10 +323,10 @@ function parseSignature(text, sig) {
if (optional) param.optional = true;
if (def !== undefined) param.default = def;
});
-}
+};
-function parseListItem(item) {
+const parseListItem = (item) => {
if (item.options) item.options.forEach(parseListItem);
if (!item.textRaw) return;
@@ -341,7 +342,7 @@ function parseListItem(item) {
item.name = 'return';
text = text.replace(retExpr, '');
} else {
-const nameExpr = /^['`"]?([^'`": \{]+)['`"]?\s*:?\s*/;
+const nameExpr = /^['`"]?([^'`": {]+)['`"]?\s*:?\s*/;
const name = text.match(nameExpr);
if (name) {
item.name = name[1];
@@ -358,7 +359,7 @@
}
text = text.trim();
-const typeExpr = /^\{([^\}]+)\}/;
+const typeExpr = /^\{([^}]+)\}/;
const type = text.match(typeExpr);
if (type) {
item.type = type[1];
@@ -376,10 +377,10 @@
text = text.replace(/^\s*-\s*/, '');
text = text.trim();
if (text) item.desc = text;
-}
+};
-function finishSection(section, parent) {
+const finishSection = (section, parent) => {
if (!section || !parent) {
throw new Error(`Invalid finishSection call\n${
JSON.stringify(section)}\n${
@@ -479,50 +480,50 @@ function finishSection(section, parent) {
parent[plur] = parent[plur] || [];
parent[plur].push(section);
-}
+};
// Not a general purpose deep copy.
// But sufficient for these basic things.
-function deepCopy(src, dest) {
+const deepCopy = (src, dest) => {
Object.keys(src).filter((k) => !dest.hasOwnProperty(k)).forEach((k) => {
dest[k] = deepCopy_(src[k]);
});
-}
+};
-function deepCopy_(src) {
+const deepCopy_ = (src) => {
if (!src) return src;
if (Array.isArray(src)) {
-var c = new Array(src.length);
+const c = new Array(src.length);
src.forEach((v, i) => {
c[i] = deepCopy_(v);
});
return c;
}
if (typeof src === 'object') {
-var c = {};
+const c = {};
Object.keys(src).forEach((k) => {
c[k] = deepCopy_(src[k]);
});
return c;
}
return src;
-}
+};
// these parse out the contents of an H# tag
const eventExpr = /^Event(?::|\s)+['"]?([^"']+).*$/i;
const classExpr = /^Class:\s*([^ ]+).*?$/i;
-const propExpr = /^(?:property:?\s*)?[^\.]+\.([^ \.\(\)]+)\s*?$/i;
-const braceExpr = /^(?:property:?\s*)?[^\.\[]+(\[[^\]]+\])\s*?$/i;
+const propExpr = /^(?:property:?\s*)?[^.]+\.([^ .()]+)\s*?$/i;
+const braceExpr = /^(?:property:?\s*)?[^.[]+(\[[^\]]+\])\s*?$/i;
const classMethExpr =
-/^class\s*method\s*:?[^\.]+\.([^ \.\(\)]+)\([^\)]*\)\s*?$/i;
+/^class\s*method\s*:?[^.]+\.([^ .()]+)\([^)]*\)\s*?$/i;
const methExpr =
-/^(?:method:?\s*)?(?:[^\.]+\.)?([^ \.\(\)]+)\([^\)]*\)\s*?$/i;
-const newExpr = /^new ([A-Z][a-z]+)\([^\)]*\)\s*?$/;
-var paramExpr = /\((.*)\);?$/;
+/^(?:method:?\s*)?(?:[^.]+\.)?([^ .()]+)\([^)]*\)\s*?$/i;
+const newExpr = /^new ([A-Z][a-z]+)\([^)]*\)\s*?$/;
+const paramExpr = /\((.*)\);?$/;
-function newSection(tok) {
+const newSection = (tok) => {
const section = {};
// infer the type from the text.
const text = section.textRaw = tok.text;
@@ -551,4 +552,4 @@ function newSection(tok) {
section.name = text;
}
return section;
-}
+};
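
The var-to-const changes in processList and deepCopy_ also fix a scoping wrinkle: var is function-scoped, so declaring var n (or var c) in both branches of an if/else redeclares one shared binding, which lint flags (typically via no-redeclare), whereas const is block-scoped and gives each branch its own binding. A standalone illustration with invented names:

'use strict';
// var: both branches would share one function-scoped n; the second var n is a redeclaration.
// const: each block gets an independent n, so the duplicated declaration is fine.
const pick = (isValue) => {
  if (isValue) {
    const n = {kind: 'value'};
    return n;
  } else {
    const n = {kind: 'option'};
    return n;
  }
};
console.log(pick(true), pick(false)); // {kind: 'value'} {kind: 'option'}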


@@ -4,7 +4,7 @@
"description": "Internal tool for generating Node.js API docs",
"version": "0.0.0",
"engines": {
"node": ">=0.6.10"
"node": ">=10.17.0"
},
"dependencies": {
"marked": "0.8.2"