Code formatting (#280)
This commit is contained in:
@@ -1,193 +1,205 @@
|
||||
//Imports
|
||||
import * as utils from "./utils.mjs"
|
||||
import ejs from "ejs"
|
||||
import util from "util"
|
||||
import SVGO from "svgo"
|
||||
import xmlformat from "xml-formatter"
|
||||
import ejs from "ejs"
|
||||
import SVGO from "svgo"
|
||||
import util from "util"
|
||||
import xmlformat from "xml-formatter"
|
||||
import * as utils from "./utils.mjs"
|
||||
|
||||
//Setup
|
||||
export default async function metrics({login, q}, {graphql, rest, plugins, conf, die = false, verify = false, convert = null}, {Plugins, Templates}) {
|
||||
//Compute rendering
|
||||
try {
|
||||
export default async function metrics({login, q}, {graphql, rest, plugins, conf, die = false, verify = false, convert = null}, {Plugins, Templates}) {
|
||||
//Compute rendering
|
||||
try {
|
||||
//Debug
|
||||
login = login.replace(/[\n\r]/g, "")
|
||||
console.debug(`metrics/compute/${login} > start`)
|
||||
console.debug(util.inspect(q, {depth:Infinity, maxStringLength:256}))
|
||||
|
||||
//Debug
|
||||
login = login.replace(/[\n\r]/g, "")
|
||||
console.debug(`metrics/compute/${login} > start`)
|
||||
console.debug(util.inspect(q, {depth:Infinity, maxStringLength:256}))
|
||||
//Load template
|
||||
const template = q.template || conf.settings.templates.default
|
||||
if ((!(template in Templates)) || (!(template in conf.templates)) || ((conf.settings.templates.enabled.length) && (!conf.settings.templates.enabled.includes(template))))
|
||||
throw new Error("unsupported template")
|
||||
const {image, style, fonts, views, partials} = conf.templates[template]
|
||||
const computer = Templates[template].default || Templates[template]
|
||||
convert = convert ?? conf.metadata.templates[template].formats[0] ?? null
|
||||
console.debug(`metrics/compute/${login} > output format set to ${convert}`)
|
||||
|
||||
//Load template
|
||||
const template = q.template || conf.settings.templates.default
|
||||
if ((!(template in Templates))||(!(template in conf.templates))||((conf.settings.templates.enabled.length)&&(!conf.settings.templates.enabled.includes(template))))
|
||||
throw new Error("unsupported template")
|
||||
const {image, style, fonts, views, partials} = conf.templates[template]
|
||||
const computer = Templates[template].default || Templates[template]
|
||||
convert = convert ?? conf.metadata.templates[template].formats[0] ?? null
|
||||
console.debug(`metrics/compute/${login} > output format set to ${convert}`)
|
||||
|
||||
//Initialization
|
||||
const pending = []
|
||||
const {queries} = conf
|
||||
const data = {animated:true, base:{}, config:{}, errors:[], plugins:{}, computed:{}}
|
||||
const imports = {plugins:Plugins, templates:Templates, metadata:conf.metadata, ...utils, ...(/markdown/.test(convert) ? {imgb64(url, options) {
|
||||
//Initialization
|
||||
const pending = []
|
||||
const {queries} = conf
|
||||
const data = {animated:true, base:{}, config:{}, errors:[], plugins:{}, computed:{}}
|
||||
const imports = {
|
||||
plugins:Plugins,
|
||||
templates:Templates,
|
||||
metadata:conf.metadata,
|
||||
...utils,
|
||||
...(/markdown/.test(convert)
|
||||
? {
|
||||
imgb64(url, options) {
|
||||
return options?.force ? utils.imgb64(...arguments) : url
|
||||
}} : null)}
|
||||
const experimental = new Set(decodeURIComponent(q["experimental.features"] ?? "").split(" ").map(x => x.trim().toLocaleLowerCase()).filter(x => x))
|
||||
if (conf.settings["debug.headless"])
|
||||
imports.puppeteer.headless = false
|
||||
},
|
||||
}
|
||||
: null),
|
||||
}
|
||||
const experimental = new Set(decodeURIComponent(q["experimental.features"] ?? "").split(" ").map(x => x.trim().toLocaleLowerCase()).filter(x => x))
|
||||
if (conf.settings["debug.headless"])
|
||||
imports.puppeteer.headless = false
|
||||
|
||||
//Partial parts
|
||||
{
|
||||
data.partials = new Set([
|
||||
...decodeURIComponent(q["config.order"] ?? "").split(",").map(x => x.trim().toLocaleLowerCase()).filter(partial => partials.includes(partial)),
|
||||
...partials,
|
||||
])
|
||||
console.debug(`metrics/compute/${login} > content order : ${[...data.partials]}`)
|
||||
}
|
||||
//Partial parts
|
||||
{
|
||||
data.partials = new Set([
|
||||
...decodeURIComponent(q["config.order"] ?? "").split(",").map(x => x.trim().toLocaleLowerCase()).filter(partial => partials.includes(partial)),
|
||||
...partials,
|
||||
])
|
||||
console.debug(`metrics/compute/${login} > content order : ${[...data.partials]}`)
|
||||
}
|
||||
|
||||
//Executing base plugin and compute metrics
|
||||
console.debug(`metrics/compute/${login} > compute`)
|
||||
await Plugins.base({login, q, data, rest, graphql, plugins, queries, pending, imports}, conf)
|
||||
await computer({login, q}, {conf, data, rest, graphql, plugins, queries, account:data.account, convert, template}, {pending, imports})
|
||||
const promised = await Promise.all(pending)
|
||||
//Executing base plugin and compute metrics
|
||||
console.debug(`metrics/compute/${login} > compute`)
|
||||
await Plugins.base({login, q, data, rest, graphql, plugins, queries, pending, imports}, conf)
|
||||
await computer({login, q}, {conf, data, rest, graphql, plugins, queries, account:data.account, convert, template}, {pending, imports})
|
||||
const promised = await Promise.all(pending)
|
||||
|
||||
//Check plugins errors
|
||||
const errors = [...promised.filter(({result = null}) => result?.error), ...data.errors]
|
||||
if (errors.length) {
|
||||
console.warn(`metrics/compute/${login} > ${errors.length} errors !`)
|
||||
if (die)
|
||||
throw new Error("An error occured during rendering, dying")
|
||||
else
|
||||
console.warn(util.inspect(errors, {depth:Infinity, maxStringLength:256}))
|
||||
}
|
||||
//Check plugins errors
|
||||
const errors = [...promised.filter(({result = null}) => result?.error), ...data.errors]
|
||||
if (errors.length) {
|
||||
console.warn(`metrics/compute/${login} > ${errors.length} errors !`)
|
||||
if (die)
|
||||
throw new Error("An error occured during rendering, dying")
|
||||
else
|
||||
console.warn(util.inspect(errors, {depth:Infinity, maxStringLength:256}))
|
||||
}
|
||||
|
||||
//JSON output
|
||||
if (convert === "json") {
|
||||
console.debug(`metrics/compute/${login} > json output`)
|
||||
return {rendered:data, mime:"application/json"}
|
||||
}
|
||||
//JSON output
|
||||
if (convert === "json") {
|
||||
console.debug(`metrics/compute/${login} > json output`)
|
||||
return {rendered:data, mime:"application/json"}
|
||||
}
|
||||
|
||||
//Markdown output
|
||||
if (/markdown/.test(convert)) {
|
||||
//Retrieving template source
|
||||
console.debug(`metrics/compute/${login} > markdown render`)
|
||||
let source = image
|
||||
try {
|
||||
let template = `${q.markdown}`.replace(/\n/g, "")
|
||||
if (!/^https:/.test(template)) {
|
||||
const {data:{default_branch:branch, full_name:repo}} = await rest.repos.get({owner:login, repo:q.repo||login})
|
||||
console.debug(`metrics/compute/${login} > on ${repo} with default branch ${branch}`)
|
||||
template = `https://raw.githubusercontent.com/${repo}/${branch}/${template}`
|
||||
}
|
||||
console.debug(`metrics/compute/${login} > fetching ${template}`)
|
||||
;({data:source} = await imports.axios.get(template, {headers:{Accept:"text/plain"}}))
|
||||
}
|
||||
catch (error) {
|
||||
console.debug(error)
|
||||
}
|
||||
//Embed method
|
||||
const embed = async(name, q = {}) => {
|
||||
//Check arguments
|
||||
if ((!name)||(typeof q !== "object")||(q === null)) {
|
||||
if (die)
|
||||
throw new Error("An error occured during embed rendering, dying")
|
||||
return "<p>⚠️ Failed to execute embed function: invalid arguments</p>"
|
||||
}
|
||||
//Translate action syntax to web syntax
|
||||
let parts = []
|
||||
if (q.base === true)
|
||||
({parts} = conf.settings.plugins.base)
|
||||
if (typeof q.base === "string")
|
||||
parts = q.base.split(",").map(x => x.trim())
|
||||
if (Array.isArray(q.base))
|
||||
parts = q.base
|
||||
for (const part of conf.settings.plugins.base.parts)
|
||||
q[`base.${part}`] = q[`base.${part}`] ?? parts.includes(part)
|
||||
if (convert === "markdown-pdf") {
|
||||
q["config.animations"] = false
|
||||
q.config_animations = false
|
||||
}
|
||||
q = Object.fromEntries([...Object.entries(q).map(([key, value]) => [key.replace(/^plugin_/, "").replace(/_/g, "."), value]), ["base", false]])
|
||||
//Enable required plugins
|
||||
const plugins = Object.fromEntries(Object.entries(arguments[1].plugins).map(([key, value]) => [key, {...value, enabled:true}]))
|
||||
//Compute rendering
|
||||
const {rendered} = await metrics({login, q}, {...arguments[1], plugins, convert:null}, arguments[2])
|
||||
return `<img class="metrics-cachable" data-name="${name}" src="data:image/svg+xml;base64,${Buffer.from(rendered).toString("base64")}">`
|
||||
}
|
||||
//Rendering template source
|
||||
let rendered = source.replace(/\{\{ (?<content>[\s\S]*?) \}\}/g, "{%= $<content> %}")
|
||||
console.debug(rendered)
|
||||
for (const delimiters of [{openDelimiter:"<", closeDelimiter:">"}, {openDelimiter:"{", closeDelimiter:"}"}])
|
||||
rendered = await ejs.render(rendered, {...data, s:imports.s, f:imports.format, embed}, {views, async:true, ...delimiters})
|
||||
console.debug(`metrics/compute/${login} > success`)
|
||||
//Output
|
||||
if (convert === "markdown-pdf") {
|
||||
return imports.svg.pdf(rendered, {
|
||||
paddings:q["config.padding"] || conf.settings.padding,
|
||||
style:(conf.settings.extras?.css ?? conf.settings.extras?.default ? q["extras.css"] ?? "" : ""),
|
||||
twemojis:q["config.twemoji"],
|
||||
gemojis:q["config.gemoji"],
|
||||
rest,
|
||||
})
|
||||
}
|
||||
return {rendered, mime:"text/html"}
|
||||
}
|
||||
|
||||
//Rendering
|
||||
console.debug(`metrics/compute/${login} > render`)
|
||||
let rendered = await ejs.render(image, {...data, s:imports.s, f:imports.format, style:style+(conf.settings.extras?.css ?? conf.settings.extras?.default ? q["extras.css"] ?? "" : ""), fonts}, {views, async:true})
|
||||
|
||||
//Additional transformations
|
||||
if (q["config.twemoji"])
|
||||
rendered = await imports.svg.twemojis(rendered)
|
||||
if (q["config.gemoji"])
|
||||
rendered = await imports.svg.gemojis(rendered, {rest})
|
||||
//Optimize rendering
|
||||
if (!q.raw)
|
||||
rendered = xmlformat(rendered, {lineSeparator:"\n", collapseContent:true})
|
||||
if ((conf.settings?.optimize)&&(!q.raw)) {
|
||||
console.debug(`metrics/compute/${login} > optimize`)
|
||||
if (experimental.has("--optimize")) {
|
||||
const {error, data:optimized} = await SVGO.optimize(rendered, {multipass:true, plugins:SVGO.extendDefaultPlugins([
|
||||
//Additional cleanup
|
||||
{name:"cleanupListOfValues"},
|
||||
{name:"removeRasterImages"},
|
||||
{name:"removeScriptElement"},
|
||||
//Force CSS style consistency
|
||||
{name:"inlineStyles", active:false},
|
||||
{name:"removeViewBox", active:false},
|
||||
])})
|
||||
if (error)
|
||||
throw new Error(`Could not optimize SVG: \n${error}`)
|
||||
rendered = optimized
|
||||
console.debug(`metrics/compute/${login} > optimize > success`)
|
||||
}
|
||||
else
|
||||
console.debug(`metrics/compute/${login} > optimize > this feature is currently disabled due to display issues (use --optimize flag in experimental features to force enable it)`)
|
||||
}
|
||||
//Verify svg
|
||||
if (verify) {
|
||||
console.debug(`metrics/compute/${login} > verify SVG`)
|
||||
const libxmljs = (await import("libxmljs2")).default
|
||||
const parsed = libxmljs.parseXml(rendered)
|
||||
if (parsed.errors.length)
|
||||
throw new Error(`Malformed SVG : \n${parsed.errors.join("\n")}`)
|
||||
console.debug(`metrics/compute/${login} > verified SVG, no parsing errors found`)
|
||||
}
|
||||
//Resizing
|
||||
const {resized, mime} = await imports.svg.resize(rendered, {paddings:q["config.padding"] || conf.settings.padding, convert:convert === "svg" ? null : convert})
|
||||
rendered = resized
|
||||
|
||||
//Result
|
||||
console.debug(`metrics/compute/${login} > success`)
|
||||
return {rendered, mime}
|
||||
//Markdown output
|
||||
if (/markdown/.test(convert)) {
|
||||
//Retrieving template source
|
||||
console.debug(`metrics/compute/${login} > markdown render`)
|
||||
let source = image
|
||||
try {
|
||||
let template = `${q.markdown}`.replace(/\n/g, "")
|
||||
if (!/^https:/.test(template)) {
|
||||
const {data:{default_branch:branch, full_name:repo}} = await rest.repos.get({owner:login, repo:q.repo || login})
|
||||
console.debug(`metrics/compute/${login} > on ${repo} with default branch ${branch}`)
|
||||
template = `https://raw.githubusercontent.com/${repo}/${branch}/${template}`
|
||||
}
|
||||
console.debug(`metrics/compute/${login} > fetching ${template}`)
|
||||
;({data:source} = await imports.axios.get(template, {headers:{Accept:"text/plain"}}))
|
||||
}
|
||||
//Internal error
|
||||
catch (error) {
|
||||
//User not found
|
||||
if (((Array.isArray(error.errors))&&(error.errors[0].type === "NOT_FOUND")))
|
||||
throw new Error("user not found")
|
||||
//Generic error
|
||||
throw error
|
||||
console.debug(error)
|
||||
}
|
||||
}
|
||||
//Embed method
|
||||
const embed = async (name, q = {}) => {
|
||||
//Check arguments
|
||||
if ((!name) || (typeof q !== "object") || (q === null)) {
|
||||
if (die)
|
||||
throw new Error("An error occured during embed rendering, dying")
|
||||
return "<p>⚠️ Failed to execute embed function: invalid arguments</p>"
|
||||
}
|
||||
//Translate action syntax to web syntax
|
||||
let parts = []
|
||||
if (q.base === true);
|
||||
({parts} = conf.settings.plugins.base)
|
||||
if (typeof q.base === "string")
|
||||
parts = q.base.split(",").map(x => x.trim())
|
||||
if (Array.isArray(q.base))
|
||||
parts = q.base
|
||||
for (const part of conf.settings.plugins.base.parts)
|
||||
q[`base.${part}`] = q[`base.${part}`] ?? parts.includes(part)
|
||||
if (convert === "markdown-pdf") {
|
||||
q["config.animations"] = false
|
||||
q.config_animations = false
|
||||
}
|
||||
q = Object.fromEntries([...Object.entries(q).map(([key, value]) => [key.replace(/^plugin_/, "").replace(/_/g, "."), value]), ["base", false]])
|
||||
//Enable required plugins
|
||||
const plugins = Object.fromEntries(Object.entries(arguments[1].plugins).map(([key, value]) => [key, {...value, enabled:true}]))
|
||||
//Compute rendering
|
||||
const {rendered} = await metrics({login, q}, {...arguments[1], plugins, convert:null}, arguments[2])
|
||||
return `<img class="metrics-cachable" data-name="${name}" src="data:image/svg+xml;base64,${Buffer.from(rendered).toString("base64")}">`
|
||||
}
|
||||
//Rendering template source
|
||||
let rendered = source.replace(/\{\{ (?<content>[\s\S]*?) \}\}/g, "{%= $<content> %}")
|
||||
console.debug(rendered)
|
||||
for (const delimiters of [{openDelimiter:"<", closeDelimiter:">"}, {openDelimiter:"{", closeDelimiter:"}"}])
|
||||
rendered = await ejs.render(rendered, {...data, s:imports.s, f:imports.format, embed}, {views, async:true, ...delimiters})
|
||||
console.debug(`metrics/compute/${login} > success`)
|
||||
//Output
|
||||
if (convert === "markdown-pdf") {
|
||||
return imports.svg.pdf(rendered, {
|
||||
paddings:q["config.padding"] || conf.settings.padding,
|
||||
style:(conf.settings.extras?.css ?? conf.settings.extras?.default ? q["extras.css"] ?? "" : ""),
|
||||
twemojis:q["config.twemoji"],
|
||||
gemojis:q["config.gemoji"],
|
||||
rest,
|
||||
})
|
||||
}
|
||||
return {rendered, mime:"text/html"}
|
||||
}
|
||||
|
||||
//Rendering
|
||||
console.debug(`metrics/compute/${login} > render`)
|
||||
let rendered = await ejs.render(image, {...data, s:imports.s, f:imports.format, style:style + (conf.settings.extras?.css ?? conf.settings.extras?.default ? q["extras.css"] ?? "" : ""), fonts}, {views, async:true})
|
||||
|
||||
//Additional transformations
|
||||
if (q["config.twemoji"])
|
||||
rendered = await imports.svg.twemojis(rendered)
|
||||
if (q["config.gemoji"])
|
||||
rendered = await imports.svg.gemojis(rendered, {rest})
|
||||
//Optimize rendering
|
||||
if (!q.raw)
|
||||
rendered = xmlformat(rendered, {lineSeparator:"\n", collapseContent:true})
|
||||
if ((conf.settings?.optimize) && (!q.raw)) {
|
||||
console.debug(`metrics/compute/${login} > optimize`)
|
||||
if (experimental.has("--optimize")) {
|
||||
const {error, data:optimized} = await SVGO.optimize(rendered, {
|
||||
multipass:true,
|
||||
plugins:SVGO.extendDefaultPlugins([
|
||||
//Additional cleanup
|
||||
{name:"cleanupListOfValues"},
|
||||
{name:"removeRasterImages"},
|
||||
{name:"removeScriptElement"},
|
||||
//Force CSS style consistency
|
||||
{name:"inlineStyles", active:false},
|
||||
{name:"removeViewBox", active:false},
|
||||
]),
|
||||
})
|
||||
if (error)
|
||||
throw new Error(`Could not optimize SVG: \n${error}`)
|
||||
rendered = optimized
|
||||
console.debug(`metrics/compute/${login} > optimize > success`)
|
||||
}
|
||||
else
|
||||
console.debug(`metrics/compute/${login} > optimize > this feature is currently disabled due to display issues (use --optimize flag in experimental features to force enable it)`)
|
||||
|
||||
}
|
||||
//Verify svg
|
||||
if (verify) {
|
||||
console.debug(`metrics/compute/${login} > verify SVG`)
|
||||
const libxmljs = (await import("libxmljs2")).default
|
||||
const parsed = libxmljs.parseXml(rendered)
|
||||
if (parsed.errors.length)
|
||||
throw new Error(`Malformed SVG : \n${parsed.errors.join("\n")}`)
|
||||
console.debug(`metrics/compute/${login} > verified SVG, no parsing errors found`)
|
||||
}
|
||||
//Resizing
|
||||
const {resized, mime} = await imports.svg.resize(rendered, {paddings:q["config.padding"] || conf.settings.padding, convert:convert === "svg" ? null : convert})
|
||||
rendered = resized
|
||||
|
||||
//Result
|
||||
console.debug(`metrics/compute/${login} > success`)
|
||||
return {rendered, mime}
|
||||
}
|
||||
//Internal error
|
||||
catch (error) {
|
||||
//User not found
|
||||
if (((Array.isArray(error.errors)) && (error.errors[0].type === "NOT_FOUND")))
|
||||
throw new Error("user not found")
|
||||
//Generic error
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,307 +1,313 @@
|
||||
//Imports
|
||||
import fs from "fs"
|
||||
import path from "path"
|
||||
import url from "url"
|
||||
import yaml from "js-yaml"
|
||||
import fs from "fs"
|
||||
import yaml from "js-yaml"
|
||||
import path from "path"
|
||||
import url from "url"
|
||||
|
||||
//Defined categories
|
||||
const categories = ["core", "github", "social", "other"]
|
||||
const categories = ["core", "github", "social", "other"]
|
||||
|
||||
/**Metadata descriptor parser */
|
||||
export default async function metadata({log = true} = {}) {
|
||||
//Paths
|
||||
const __metrics = path.join(path.dirname(url.fileURLToPath(import.meta.url)), "../../..")
|
||||
const __templates = path.join(__metrics, "source/templates")
|
||||
const __plugins = path.join(__metrics, "source/plugins")
|
||||
const __package = path.join(__metrics, "package.json")
|
||||
export default async function metadata({log = true} = {}) {
|
||||
//Paths
|
||||
const __metrics = path.join(path.dirname(url.fileURLToPath(import.meta.url)), "../../..")
|
||||
const __templates = path.join(__metrics, "source/templates")
|
||||
const __plugins = path.join(__metrics, "source/plugins")
|
||||
const __package = path.join(__metrics, "package.json")
|
||||
|
||||
//Init
|
||||
const logger = log ? console.debug : () => null
|
||||
//Init
|
||||
const logger = log ? console.debug : () => null
|
||||
|
||||
//Load plugins metadata
|
||||
let Plugins = {}
|
||||
logger("metrics/metadata > loading plugins metadata")
|
||||
for (const name of await fs.promises.readdir(__plugins)) {
|
||||
if (!(await fs.promises.lstat(path.join(__plugins, name))).isDirectory())
|
||||
continue
|
||||
logger(`metrics/metadata > loading plugin metadata [${name}]`)
|
||||
Plugins[name] = await metadata.plugin({__plugins, name, logger})
|
||||
}
|
||||
//Reorder keys
|
||||
const {base, core, ...plugins} = Plugins //eslint-disable-line no-unused-vars
|
||||
Plugins = Object.fromEntries(Object.entries(Plugins).sort(([_an, a], [_bn, b]) => a.categorie === b.categorie ? (a.index ?? Infinity) - (b.index ?? Infinity) : categories.indexOf(a.categorie) - categories.indexOf(b.categorie)))
|
||||
logger(`metrics/metadata > loaded [${Object.keys(Plugins).join(", ")}]`)
|
||||
//Load templates metadata
|
||||
let Templates = {}
|
||||
logger("metrics/metadata > loading templates metadata")
|
||||
for (const name of await fs.promises.readdir(__templates)) {
|
||||
if (!(await fs.promises.lstat(path.join(__templates, name))).isDirectory())
|
||||
continue
|
||||
if (/^@/.test(name))
|
||||
continue
|
||||
logger(`metrics/metadata > loading template metadata [${name}]`)
|
||||
Templates[name] = await metadata.template({__templates, name, plugins, logger})
|
||||
}
|
||||
//Reorder keys
|
||||
const {community, ...templates} = Templates
|
||||
Templates = {...Object.fromEntries(Object.entries(templates).sort(([_an, a], [_bn, b]) => (a.index ?? Infinity) - (b.index ?? Infinity))), community}
|
||||
|
||||
//Packaged metadata
|
||||
const packaged = JSON.parse(`${await fs.promises.readFile(__package)}`)
|
||||
|
||||
//Metadata
|
||||
return {plugins:Plugins, templates:Templates, packaged}
|
||||
//Load plugins metadata
|
||||
let Plugins = {}
|
||||
logger("metrics/metadata > loading plugins metadata")
|
||||
for (const name of await fs.promises.readdir(__plugins)) {
|
||||
if (!(await fs.promises.lstat(path.join(__plugins, name))).isDirectory())
|
||||
continue
|
||||
logger(`metrics/metadata > loading plugin metadata [${name}]`)
|
||||
Plugins[name] = await metadata.plugin({__plugins, name, logger})
|
||||
}
|
||||
//Reorder keys
|
||||
const {base, core, ...plugins} = Plugins //eslint-disable-line no-unused-vars
|
||||
Plugins = Object.fromEntries(Object.entries(Plugins).sort(([_an, a], [_bn, b]) => a.categorie === b.categorie ? (a.index ?? Infinity) - (b.index ?? Infinity) : categories.indexOf(a.categorie) - categories.indexOf(b.categorie)))
|
||||
logger(`metrics/metadata > loaded [${Object.keys(Plugins).join(", ")}]`)
|
||||
//Load templates metadata
|
||||
let Templates = {}
|
||||
logger("metrics/metadata > loading templates metadata")
|
||||
for (const name of await fs.promises.readdir(__templates)) {
|
||||
if (!(await fs.promises.lstat(path.join(__templates, name))).isDirectory())
|
||||
continue
|
||||
if (/^@/.test(name))
|
||||
continue
|
||||
logger(`metrics/metadata > loading template metadata [${name}]`)
|
||||
Templates[name] = await metadata.template({__templates, name, plugins, logger})
|
||||
}
|
||||
//Reorder keys
|
||||
const {community, ...templates} = Templates
|
||||
Templates = {...Object.fromEntries(Object.entries(templates).sort(([_an, a], [_bn, b]) => (a.index ?? Infinity) - (b.index ?? Infinity))), community}
|
||||
|
||||
//Packaged metadata
|
||||
const packaged = JSON.parse(`${await fs.promises.readFile(__package)}`)
|
||||
|
||||
//Metadata
|
||||
return {plugins:Plugins, templates:Templates, packaged}
|
||||
}
|
||||
|
||||
/**Metadata extractor for templates */
|
||||
metadata.plugin = async function({__plugins, name, logger}) {
|
||||
try {
|
||||
//Load meta descriptor
|
||||
const raw = `${await fs.promises.readFile(path.join(__plugins, name, "metadata.yml"), "utf-8")}`
|
||||
const {inputs, ...meta} = yaml.load(raw)
|
||||
metadata.plugin = async function({__plugins, name, logger}) {
|
||||
try {
|
||||
//Load meta descriptor
|
||||
const raw = `${await fs.promises.readFile(path.join(__plugins, name, "metadata.yml"), "utf-8")}`
|
||||
const {inputs, ...meta} = yaml.load(raw)
|
||||
|
||||
//Categorie
|
||||
if (!categories.includes(meta.categorie))
|
||||
meta.categorie = "other"
|
||||
//Categorie
|
||||
if (!categories.includes(meta.categorie))
|
||||
meta.categorie = "other"
|
||||
|
||||
//Inputs parser
|
||||
{
|
||||
meta.inputs = function({data:{user = null} = {}, q, account}, defaults = {}) {
|
||||
//Support check
|
||||
if (!account)
|
||||
logger(`metrics/inputs > account type not set for plugin ${name}!`)
|
||||
if (account !== "bypass") {
|
||||
const context = q.repo ? "repository" : account
|
||||
if (!meta.supports?.includes(context))
|
||||
throw {error:{message:`Not supported for: ${context}`, instance:new Error()}}
|
||||
}
|
||||
//Inputs checks
|
||||
const result = Object.fromEntries(Object.entries(inputs).map(([key, {type, format, default:defaulted, min, max, values}]) => [
|
||||
//Format key
|
||||
metadata.to.query(key, {name}),
|
||||
//Format value
|
||||
(defaulted => {
|
||||
//Default value
|
||||
let value = q[metadata.to.query(key)] ?? q[key] ?? defaulted
|
||||
//Apply type conversion
|
||||
switch (type) {
|
||||
//Booleans
|
||||
case "boolean":{
|
||||
if (/^(?:[Tt]rue|[Oo]n|[Yy]es|1)$/.test(value))
|
||||
return true
|
||||
if (/^(?:[Ff]alse|[Oo]ff|[Nn]o|0)$/.test(value))
|
||||
return false
|
||||
return defaulted
|
||||
}
|
||||
//Numbers
|
||||
case "number":{
|
||||
value = Number(value)
|
||||
if (!Number.isFinite(value))
|
||||
value = defaulted
|
||||
if (Number.isFinite(min))
|
||||
value = Math.max(min, value)
|
||||
if (Number.isFinite(max))
|
||||
value = Math.min(value, max)
|
||||
return value
|
||||
}
|
||||
//Array
|
||||
case "array":{
|
||||
try {
|
||||
value = decodeURIComponent(value)
|
||||
}
|
||||
catch {
|
||||
logger(`metrics/inputs > failed to decode uri : ${value}`)
|
||||
value = defaulted
|
||||
}
|
||||
const separators = {"comma-separated":",", "space-separated":" "}
|
||||
const separator = separators[[format].flat().filter(s => s in separators)[0]] ?? ","
|
||||
return value.split(separator).map(v => v.trim().toLocaleLowerCase()).filter(v => Array.isArray(values) ? values.includes(v) : true).filter(v => v)
|
||||
}
|
||||
//String
|
||||
case "string":{
|
||||
value = `${value}`.trim()
|
||||
if (user) {
|
||||
if (value === ".user.login")
|
||||
return user.login
|
||||
if (value === ".user.twitter")
|
||||
return user.twitterUsername
|
||||
if (value === ".user.website")
|
||||
return user.websiteUrl
|
||||
}
|
||||
if ((Array.isArray(values))&&(!values.includes(value)))
|
||||
return defaulted
|
||||
return value
|
||||
}
|
||||
//JSON
|
||||
case "json":{
|
||||
try {
|
||||
value = JSON.parse(value)
|
||||
}
|
||||
catch {
|
||||
logger(`metrics/inputs > failed to parse json : ${value}`)
|
||||
value = JSON.parse(defaulted)
|
||||
}
|
||||
return value
|
||||
}
|
||||
//Token
|
||||
case "token":{
|
||||
return value
|
||||
}
|
||||
//Default
|
||||
default:{
|
||||
return value
|
||||
}
|
||||
}
|
||||
})(defaults[key] ?? defaulted),
|
||||
]))
|
||||
logger(`metrics/inputs > ${name} > ${JSON.stringify(result)}`)
|
||||
return result
|
||||
}
|
||||
Object.assign(meta.inputs, inputs, Object.fromEntries(Object.entries(inputs).map(([key, value]) => [metadata.to.query(key, {name}), value])))
|
||||
//Inputs parser
|
||||
{
|
||||
meta.inputs = function({data:{user = null} = {}, q, account}, defaults = {}) {
|
||||
//Support check
|
||||
if (!account)
|
||||
logger(`metrics/inputs > account type not set for plugin ${name}!`)
|
||||
if (account !== "bypass") {
|
||||
const context = q.repo ? "repository" : account
|
||||
if (!meta.supports?.includes(context))
|
||||
throw {error:{message:`Not supported for: ${context}`, instance:new Error()}}
|
||||
}
|
||||
|
||||
//Action metadata
|
||||
{
|
||||
//Extract comments
|
||||
const comments = {}
|
||||
raw.split(/(?:\r?\n){2,}/m)
|
||||
.map(x => x.trim()).filter(x => x)
|
||||
.map(x => x.split("\n").map(y => y.trim()).join("\n"))
|
||||
.map(x => {
|
||||
const input = x.match(new RegExp(`^\\s*(?<input>${Object.keys(inputs).join("|")}):`, "m"))?.groups?.input ?? null
|
||||
if (input)
|
||||
comments[input] = x.match(new RegExp(`(?<comment>[\\s\\S]*?)(?=(?:${Object.keys(inputs).sort((a, b) => b.length - a.length).join("|")}):)`))?.groups?.comment
|
||||
})
|
||||
|
||||
//Action descriptor
|
||||
meta.action = Object.fromEntries(Object.entries(inputs).map(([key, value]) => [
|
||||
key,
|
||||
{
|
||||
comment:comments[key] ?? "",
|
||||
descriptor:yaml.dump({[key]:Object.fromEntries(Object.entries(value).filter(([key]) => ["description", "default", "required"].includes(key)))}, {quotingType:'"', noCompatMode:true}),
|
||||
},
|
||||
]))
|
||||
|
||||
//Action inputs
|
||||
meta.inputs.action = function({core}) {
|
||||
//Build query object from inputs
|
||||
const q = {}
|
||||
for (const key of Object.keys(inputs)) {
|
||||
const value = `${core.getInput(key)}`.trim()
|
||||
//Inputs checks
|
||||
const result = Object.fromEntries(
|
||||
Object.entries(inputs).map(([key, {type, format, default:defaulted, min, max, values}]) => [
|
||||
//Format key
|
||||
metadata.to.query(key, {name}),
|
||||
//Format value
|
||||
(defaulted => {
|
||||
//Default value
|
||||
let value = q[metadata.to.query(key)] ?? q[key] ?? defaulted
|
||||
//Apply type conversion
|
||||
switch (type) {
|
||||
//Booleans
|
||||
case "boolean": {
|
||||
if (/^(?:[Tt]rue|[Oo]n|[Yy]es|1)$/.test(value))
|
||||
return true
|
||||
if (/^(?:[Ff]alse|[Oo]ff|[Nn]o|0)$/.test(value))
|
||||
return false
|
||||
return defaulted
|
||||
}
|
||||
//Numbers
|
||||
case "number": {
|
||||
value = Number(value)
|
||||
if (!Number.isFinite(value))
|
||||
value = defaulted
|
||||
if (Number.isFinite(min))
|
||||
value = Math.max(min, value)
|
||||
if (Number.isFinite(max))
|
||||
value = Math.min(value, max)
|
||||
return value
|
||||
}
|
||||
//Array
|
||||
case "array": {
|
||||
try {
|
||||
q[key] = decodeURIComponent(value)
|
||||
value = decodeURIComponent(value)
|
||||
}
|
||||
catch {
|
||||
logger(`metrics/inputs > failed to decode uri : ${value}`)
|
||||
q[key] = value
|
||||
value = defaulted
|
||||
}
|
||||
const separators = {"comma-separated":",", "space-separated":" "}
|
||||
const separator = separators[[format].flat().filter(s => s in separators)[0]] ?? ","
|
||||
return value.split(separator).map(v => v.trim().toLocaleLowerCase()).filter(v => Array.isArray(values) ? values.includes(v) : true).filter(v => v)
|
||||
}
|
||||
return meta.inputs({q, account:"bypass"})
|
||||
}
|
||||
}
|
||||
|
||||
//Web metadata
|
||||
{
|
||||
meta.web = Object.fromEntries(Object.entries(inputs).map(([key, {type, description:text, example, default:defaulted, min = 0, max = 9999, values}]) => [
|
||||
//Format key
|
||||
metadata.to.query(key),
|
||||
//Value descriptor
|
||||
(() => {
|
||||
switch (type) {
|
||||
case "boolean":
|
||||
return {text, type:"boolean", defaulted:/^(?:[Tt]rue|[Oo]n|[Yy]es|1)$/.test(defaulted) ? true : /^(?:[Ff]alse|[Oo]ff|[Nn]o|0)$/.test(defaulted) ? false : defaulted}
|
||||
case "number":
|
||||
return {text, type:"number", min, max, defaulted}
|
||||
case "array":
|
||||
return {text, type:"text", placeholder:example ?? defaulted, defaulted}
|
||||
case "string":{
|
||||
if (Array.isArray(values))
|
||||
return {text, type:"select", values, defaulted}
|
||||
return {text, type:"text", placeholder:example ?? defaulted, defaulted}
|
||||
//String
|
||||
case "string": {
|
||||
value = `${value}`.trim()
|
||||
if (user) {
|
||||
if (value === ".user.login")
|
||||
return user.login
|
||||
if (value === ".user.twitter")
|
||||
return user.twitterUsername
|
||||
if (value === ".user.website")
|
||||
return user.websiteUrl
|
||||
}
|
||||
case "json":
|
||||
return {text, type:"text", placeholder:example ?? defaulted, defaulted}
|
||||
default:
|
||||
return null
|
||||
if ((Array.isArray(values)) && (!values.includes(value)))
|
||||
return defaulted
|
||||
return value
|
||||
}
|
||||
})(),
|
||||
]).filter(([key, value]) => (value)&&(key !== name)))
|
||||
}
|
||||
|
||||
//Readme metadata
|
||||
{
|
||||
//Extract demos
|
||||
const raw = `${await fs.promises.readFile(path.join(__plugins, name, "README.md"), "utf-8")}`
|
||||
const demo = raw.match(/(?<demo><table>[\s\S]*?<[/]table>)/)?.groups?.demo?.replace(/<[/]?(?:table|tr)>/g, "")?.trim() ?? "<td></td>"
|
||||
|
||||
//Readme descriptor
|
||||
meta.readme = {demo}
|
||||
}
|
||||
|
||||
//Icon
|
||||
meta.icon = meta.name.split(" ")[0] ?? null
|
||||
|
||||
//Result
|
||||
return meta
|
||||
//JSON
|
||||
case "json": {
|
||||
try {
|
||||
value = JSON.parse(value)
|
||||
}
|
||||
catch {
|
||||
logger(`metrics/inputs > failed to parse json : ${value}`)
|
||||
value = JSON.parse(defaulted)
|
||||
}
|
||||
return value
|
||||
}
|
||||
//Token
|
||||
case "token": {
|
||||
return value
|
||||
}
|
||||
//Default
|
||||
default: {
|
||||
return value
|
||||
}
|
||||
}
|
||||
})(defaults[key] ?? defaulted),
|
||||
]),
|
||||
)
|
||||
logger(`metrics/inputs > ${name} > ${JSON.stringify(result)}`)
|
||||
return result
|
||||
}
|
||||
Object.assign(meta.inputs, inputs, Object.fromEntries(Object.entries(inputs).map(([key, value]) => [metadata.to.query(key, {name}), value])))
|
||||
}
|
||||
catch (error) {
|
||||
logger(`metrics/metadata > failed to load plugin ${name}: ${error}`)
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
/**Metadata extractor for templates */
|
||||
metadata.template = async function({__templates, name, plugins, logger}) {
|
||||
try {
|
||||
//Load meta descriptor
|
||||
const raw = fs.existsSync(path.join(__templates, name, "metadata.yml")) ? `${await fs.promises.readFile(path.join(__templates, name, "metadata.yml"), "utf-8")}` : ""
|
||||
const readme = `${await fs.promises.readFile(path.join(__templates, name, "README.md"), "utf-8")}`
|
||||
const meta = yaml.load(raw) ?? {}
|
||||
//Action metadata
|
||||
{
|
||||
//Extract comments
|
||||
const comments = {}
|
||||
raw.split(/(?:\r?\n){2,}/m)
|
||||
.map(x => x.trim()).filter(x => x)
|
||||
.map(x => x.split("\n").map(y => y.trim()).join("\n"))
|
||||
.map(x => {
|
||||
const input = x.match(new RegExp(`^\\s*(?<input>${Object.keys(inputs).join("|")}):`, "m"))?.groups?.input ?? null
|
||||
if (input)
|
||||
comments[input] = x.match(new RegExp(`(?<comment>[\\s\\S]*?)(?=(?:${Object.keys(inputs).sort((a, b) => b.length - a.length).join("|")}):)`))?.groups?.comment
|
||||
})
|
||||
|
||||
//Compatibility
|
||||
const partials = path.join(__templates, name, "partials")
|
||||
const compatibility = Object.fromEntries(Object.entries(plugins).map(([key]) => [key, false]))
|
||||
if ((fs.existsSync(partials))&&((await fs.promises.lstat(partials)).isDirectory())) {
|
||||
for (let plugin of await fs.promises.readdir(partials)) {
|
||||
plugin = plugin.match(/(?<plugin>^[\s\S]+(?=[.]ejs$))/)?.groups?.plugin ?? null
|
||||
if (plugin in compatibility)
|
||||
compatibility[plugin] = true
|
||||
//Action descriptor
|
||||
meta.action = Object.fromEntries(
|
||||
Object.entries(inputs).map(([key, value]) => [
|
||||
key,
|
||||
{
|
||||
comment:comments[key] ?? "",
|
||||
descriptor:yaml.dump({[key]:Object.fromEntries(Object.entries(value).filter(([key]) => ["description", "default", "required"].includes(key)))}, {quotingType:'"', noCompatMode:true}),
|
||||
},
|
||||
]),
|
||||
)
|
||||
|
||||
//Action inputs
|
||||
meta.inputs.action = function({core}) {
|
||||
//Build query object from inputs
|
||||
const q = {}
|
||||
for (const key of Object.keys(inputs)) {
|
||||
const value = `${core.getInput(key)}`.trim()
|
||||
try {
|
||||
q[key] = decodeURIComponent(value)
|
||||
}
|
||||
catch {
|
||||
logger(`metrics/inputs > failed to decode uri : ${value}`)
|
||||
q[key] = value
|
||||
}
|
||||
}
|
||||
|
||||
//Result
|
||||
return {
|
||||
name:meta.name ?? readme.match(/^### (?<name>[\s\S]+?)\n/)?.groups?.name?.trim(),
|
||||
index:meta.index ?? null,
|
||||
formats:meta.formats ?? null,
|
||||
supports:meta.supports ?? null,
|
||||
readme:{
|
||||
demo:readme.match(/(?<demo><table>[\s\S]*?<[/]table>)/)?.groups?.demo?.replace(/<[/]?(?:table|tr)>/g, "")?.trim() ?? (name === "community" ? '<td align="center" colspan="2">See <a href="/source/templates/community/README.md">documentation</a> 🌍</td>' : "<td></td>"),
|
||||
compatibility:{...compatibility, base:true},
|
||||
},
|
||||
check({q, account = "bypass", format = null}) {
|
||||
//Support check
|
||||
if (account !== "bypass") {
|
||||
const context = q.repo ? "repository" : account
|
||||
if ((Array.isArray(this.supports))&&(!this.supports.includes(context)))
|
||||
throw new Error(`not supported for: ${context}`)
|
||||
}
|
||||
//Format check
|
||||
if ((format)&&(Array.isArray(this.formats))&&(!this.formats.includes(format)))
|
||||
throw new Error(`not supported for: ${format}`)
|
||||
},
|
||||
}
|
||||
return meta.inputs({q, account:"bypass"})
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
logger(`metrics/metadata > failed to load template ${name}: ${error}`)
|
||||
return null
|
||||
|
||||
//Web metadata
|
||||
{
|
||||
meta.web = Object.fromEntries(
|
||||
Object.entries(inputs).map(([key, {type, description:text, example, default:defaulted, min = 0, max = 9999, values}]) => [
|
||||
//Format key
|
||||
metadata.to.query(key),
|
||||
//Value descriptor
|
||||
(() => {
|
||||
switch (type) {
|
||||
case "boolean":
|
||||
return {text, type:"boolean", defaulted:/^(?:[Tt]rue|[Oo]n|[Yy]es|1)$/.test(defaulted) ? true : /^(?:[Ff]alse|[Oo]ff|[Nn]o|0)$/.test(defaulted) ? false : defaulted}
|
||||
case "number":
|
||||
return {text, type:"number", min, max, defaulted}
|
||||
case "array":
|
||||
return {text, type:"text", placeholder:example ?? defaulted, defaulted}
|
||||
case "string": {
|
||||
if (Array.isArray(values))
|
||||
return {text, type:"select", values, defaulted}
|
||||
return {text, type:"text", placeholder:example ?? defaulted, defaulted}
|
||||
}
|
||||
case "json":
|
||||
return {text, type:"text", placeholder:example ?? defaulted, defaulted}
|
||||
default:
|
||||
return null
|
||||
}
|
||||
})(),
|
||||
]).filter(([key, value]) => (value) && (key !== name)),
|
||||
)
|
||||
}
|
||||
|
||||
//Readme metadata
|
||||
{
|
||||
//Extract demos
|
||||
const raw = `${await fs.promises.readFile(path.join(__plugins, name, "README.md"), "utf-8")}`
|
||||
const demo = raw.match(/(?<demo><table>[\s\S]*?<[/]table>)/)?.groups?.demo?.replace(/<[/]?(?:table|tr)>/g, "")?.trim() ?? "<td></td>"
|
||||
|
||||
//Readme descriptor
|
||||
meta.readme = {demo}
|
||||
}
|
||||
|
||||
//Icon
|
||||
meta.icon = meta.name.split(" ")[0] ?? null
|
||||
|
||||
//Result
|
||||
return meta
|
||||
}
|
||||
catch (error) {
|
||||
logger(`metrics/metadata > failed to load plugin ${name}: ${error}`)
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
/**Metadata extractor for templates */
|
||||
metadata.template = async function({__templates, name, plugins, logger}) {
|
||||
try {
|
||||
//Load meta descriptor
|
||||
const raw = fs.existsSync(path.join(__templates, name, "metadata.yml")) ? `${await fs.promises.readFile(path.join(__templates, name, "metadata.yml"), "utf-8")}` : ""
|
||||
const readme = `${await fs.promises.readFile(path.join(__templates, name, "README.md"), "utf-8")}`
|
||||
const meta = yaml.load(raw) ?? {}
|
||||
|
||||
//Compatibility
|
||||
const partials = path.join(__templates, name, "partials")
|
||||
const compatibility = Object.fromEntries(Object.entries(plugins).map(([key]) => [key, false]))
|
||||
if ((fs.existsSync(partials)) && ((await fs.promises.lstat(partials)).isDirectory())) {
|
||||
for (let plugin of await fs.promises.readdir(partials)) {
|
||||
plugin = plugin.match(/(?<plugin>^[\s\S]+(?=[.]ejs$))/)?.groups?.plugin ?? null
|
||||
if (plugin in compatibility)
|
||||
compatibility[plugin] = true
|
||||
}
|
||||
}
|
||||
|
||||
//Result
|
||||
return {
|
||||
name:meta.name ?? readme.match(/^### (?<name>[\s\S]+?)\n/)?.groups?.name?.trim(),
|
||||
index:meta.index ?? null,
|
||||
formats:meta.formats ?? null,
|
||||
supports:meta.supports ?? null,
|
||||
readme:{
|
||||
demo:readme.match(/(?<demo><table>[\s\S]*?<[/]table>)/)?.groups?.demo?.replace(/<[/]?(?:table|tr)>/g, "")?.trim() ?? (name === "community" ? '<td align="center" colspan="2">See <a href="/source/templates/community/README.md">documentation</a> 🌍</td>' : "<td></td>"),
|
||||
compatibility:{...compatibility, base:true},
|
||||
},
|
||||
check({q, account = "bypass", format = null}) {
|
||||
//Support check
|
||||
if (account !== "bypass") {
|
||||
const context = q.repo ? "repository" : account
|
||||
if ((Array.isArray(this.supports)) && (!this.supports.includes(context)))
|
||||
throw new Error(`not supported for: ${context}`)
|
||||
}
|
||||
//Format check
|
||||
if ((format) && (Array.isArray(this.formats)) && (!this.formats.includes(format)))
|
||||
throw new Error(`not supported for: ${format}`)
|
||||
},
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
logger(`metrics/metadata > failed to load template ${name}: ${error}`)
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
/**Metadata converters */
|
||||
metadata.to = {
|
||||
query(key, {name = null} = {}) {
|
||||
key = key.replace(/^plugin_/, "").replace(/_/g, ".")
|
||||
return name ? key.replace(new RegExp(`^(${name}.)`, "g"), "") : key
|
||||
},
|
||||
}
|
||||
metadata.to = {
|
||||
query(key, {name = null} = {}) {
|
||||
key = key.replace(/^plugin_/, "").replace(/_/g, ".")
|
||||
return name ? key.replace(new RegExp(`^(${name}.)`, "g"), "") : key
|
||||
},
|
||||
}
|
||||
|
||||
@@ -1,231 +1,238 @@
|
||||
//Imports
|
||||
import fs from "fs"
|
||||
import metadata from "./metadata.mjs"
|
||||
import path from "path"
|
||||
import processes from "child_process"
|
||||
import util from "util"
|
||||
import url from "url"
|
||||
import yaml from "js-yaml"
|
||||
import OctokitRest from "@octokit/rest"
|
||||
import OctokitRest from "@octokit/rest"
|
||||
import processes from "child_process"
|
||||
import fs from "fs"
|
||||
import yaml from "js-yaml"
|
||||
import path from "path"
|
||||
import url from "url"
|
||||
import util from "util"
|
||||
import metadata from "./metadata.mjs"
|
||||
|
||||
//Templates and plugins
|
||||
const Templates = {}
|
||||
const Plugins = {}
|
||||
const Templates = {}
|
||||
const Plugins = {}
|
||||
|
||||
/**Setup */
|
||||
export default async function({log = true, nosettings = false, community = {}} = {}) {
|
||||
|
||||
//Paths
|
||||
const __metrics = path.join(path.dirname(url.fileURLToPath(import.meta.url)), "../../..")
|
||||
const __statics = path.join(__metrics, "source/app/web/statics")
|
||||
const __templates = path.join(__metrics, "source/templates")
|
||||
const __plugins = path.join(__metrics, "source/plugins")
|
||||
const __package = path.join(__metrics, "package.json")
|
||||
const __settings = path.join(__metrics, "settings.json")
|
||||
const __modules = path.join(__metrics, "node_modules")
|
||||
|
||||
//Init
|
||||
const logger = log ? console.debug : () => null
|
||||
logger("metrics/setup > setup")
|
||||
const conf = {
|
||||
authenticated:null,
|
||||
templates:{},
|
||||
queries:{},
|
||||
settings:{},
|
||||
metadata:{},
|
||||
paths:{
|
||||
statics:__statics,
|
||||
templates:__templates,
|
||||
node_modules:__modules,
|
||||
},
|
||||
}
|
||||
|
||||
//Load settings
|
||||
logger("metrics/setup > load settings.json")
|
||||
if (fs.existsSync(__settings)) {
|
||||
if (nosettings)
|
||||
logger("metrics/setup > load settings.json > skipped because no settings is enabled")
|
||||
else {
|
||||
conf.settings = JSON.parse(`${await fs.promises.readFile(__settings)}`)
|
||||
logger("metrics/setup > load settings.json > success")
|
||||
}
|
||||
}
|
||||
else
|
||||
logger("metrics/setup > load settings.json > (missing)")
|
||||
if (!conf.settings.templates)
|
||||
conf.settings.templates = {default:"classic", enabled:[]}
|
||||
if (!conf.settings.plugins)
|
||||
conf.settings.plugins = {}
|
||||
conf.settings.community = {...conf.settings.community, ...community}
|
||||
conf.settings.plugins.base = {parts:["header", "activity", "community", "repositories", "metadata"]}
|
||||
if (conf.settings.debug)
|
||||
logger(util.inspect(conf.settings, {depth:Infinity, maxStringLength:256}))
|
||||
|
||||
//Load package settings
|
||||
logger("metrics/setup > load package.json")
|
||||
conf.package = JSON.parse(`${await fs.promises.readFile(__package)}`)
|
||||
logger("metrics/setup > load package.json > success")
|
||||
|
||||
//Load community templates
|
||||
if ((typeof conf.settings.community.templates === "string")&&(conf.settings.community.templates.length)) {
|
||||
logger("metrics/setup > parsing community templates list")
|
||||
conf.settings.community.templates = [...new Set([...decodeURIComponent(conf.settings.community.templates).split(",").map(v => v.trim().toLocaleLowerCase()).filter(v => v)])]
|
||||
}
|
||||
if ((Array.isArray(conf.settings.community.templates))&&(conf.settings.community.templates.length)) {
|
||||
//Clean remote repository
|
||||
logger(`metrics/setup > ${conf.settings.community.templates.length} community templates to install`)
|
||||
await fs.promises.rmdir(path.join(__templates, ".community"), {recursive:true})
|
||||
//Download community templates
|
||||
for (const template of conf.settings.community.templates) {
|
||||
try {
|
||||
//Parse community template
|
||||
logger(`metrics/setup > load community template ${template}`)
|
||||
const {repo, branch, name, trust = false} = template.match(/^(?<repo>[\s\S]+?)@(?<branch>[\s\S]+?):(?<name>[\s\S]+?)(?<trust>[+]trust)?$/)?.groups ?? null
|
||||
const command = `git clone --single-branch --branch ${branch} https://github.com/${repo}.git ${path.join(__templates, ".community")}`
|
||||
logger(`metrics/setup > run ${command}`)
|
||||
//Clone remote repository
|
||||
processes.execSync(command, {stdio:"ignore"})
|
||||
//Extract template
|
||||
logger(`metrics/setup > extract ${name} from ${repo}@${branch}`)
|
||||
await fs.promises.rmdir(path.join(__templates, `@${name}`), {recursive:true})
|
||||
await fs.promises.rename(path.join(__templates, ".community/source/templates", name), path.join(__templates, `@${name}`))
|
||||
//JavaScript file
|
||||
if (trust)
|
||||
logger(`metrics/setup > keeping @${name}/template.mjs (unsafe mode is enabled)`)
|
||||
else if (fs.existsSync(path.join(__templates, `@${name}`, "template.mjs"))) {
|
||||
logger(`metrics/setup > removing @${name}/template.mjs`)
|
||||
await fs.promises.unlink(path.join(__templates, `@${name}`, "template.mjs"))
|
||||
const inherit = yaml.load(`${fs.promises.readFile(path.join(__templates, `@${name}`, "metadata.yml"))}`).extends ?? null
|
||||
if (inherit) {
|
||||
logger(`metrics/setup > @${name} extends from ${inherit}`)
|
||||
if (fs.existsSync(path.join(__templates, inherit, "template.mjs"))) {
|
||||
logger(`metrics/setup > @${name} extended from ${inherit}`)
|
||||
await fs.promises.copyFile(path.join(__templates, inherit, "template.mjs"), path.join(__templates, `@${name}`, "template.mjs"))
|
||||
}
|
||||
else
|
||||
logger(`metrics/setup > @${name} could not extends ${inherit} as it does not exist`)
|
||||
}
|
||||
}
|
||||
else
|
||||
logger(`metrics/setup > @${name}/template.mjs does not exist`)
|
||||
//Clean remote repository
|
||||
logger(`metrics/setup > clean ${repo}@${branch}`)
|
||||
await fs.promises.rmdir(path.join(__templates, ".community"), {recursive:true})
|
||||
logger(`metrics/setup > loaded community template ${name}`)
|
||||
}
|
||||
catch (error) {
|
||||
logger(`metrics/setup > failed to load community template ${template}`)
|
||||
logger(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
logger("metrics/setup > no community templates to install")
|
||||
|
||||
//Load templates
|
||||
for (const name of await fs.promises.readdir(__templates)) {
|
||||
//Search for templates
|
||||
const directory = path.join(__templates, name)
|
||||
if ((!(await fs.promises.lstat(directory)).isDirectory())||(!fs.existsSync(path.join(directory, "partials/_.json"))))
|
||||
continue
|
||||
logger(`metrics/setup > load template [${name}]`)
|
||||
//Cache templates files
|
||||
const files = ["image.svg", "style.css", "fonts.css"].map(file => path.join(__templates, (fs.existsSync(path.join(directory, file)) ? name : "classic"), file))
|
||||
const [image, style, fonts] = await Promise.all(files.map(async file => `${await fs.promises.readFile(file)}`))
|
||||
const partials = JSON.parse(`${await fs.promises.readFile(path.join(directory, "partials/_.json"))}`)
|
||||
conf.templates[name] = {image, style, fonts, partials, views:[directory]}
|
||||
|
||||
//Cache templates scripts
|
||||
Templates[name] = await (async() => {
|
||||
const template = path.join(directory, "template.mjs")
|
||||
const fallback = path.join(__templates, "classic", "template.mjs")
|
||||
return (await import(url.pathToFileURL(fs.existsSync(template) ? template : fallback).href)).default
|
||||
})()
|
||||
logger(`metrics/setup > load template [${name}] > success`)
|
||||
//Debug
|
||||
if (conf.settings.debug) {
|
||||
Object.defineProperty(conf.templates, name, {
|
||||
get() {
|
||||
logger(`metrics/setup > reload template [${name}]`)
|
||||
const [image, style, fonts] = files.map(file => `${fs.readFileSync(file)}`)
|
||||
const partials = JSON.parse(`${fs.readFileSync(path.join(directory, "partials/_.json"))}`)
|
||||
logger(`metrics/setup > reload template [${name}] > success`)
|
||||
return {image, style, fonts, partials, views:[directory]}
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
//Load plugins
|
||||
for (const name of await fs.promises.readdir(__plugins)) {
|
||||
//Search for plugins
|
||||
const directory = path.join(__plugins, name)
|
||||
if (!(await fs.promises.lstat(directory)).isDirectory())
|
||||
continue
|
||||
//Cache plugins scripts
|
||||
logger(`metrics/setup > load plugin [${name}]`)
|
||||
Plugins[name] = (await import(url.pathToFileURL(path.join(directory, "index.mjs")).href)).default
|
||||
logger(`metrics/setup > load plugin [${name}] > success`)
|
||||
//Register queries
|
||||
const __queries = path.join(directory, "queries")
|
||||
if (fs.existsSync(__queries)) {
|
||||
//Alias for default query
|
||||
const queries = function() {
|
||||
if (!queries[name])
|
||||
throw new ReferenceError(`Default query for ${name} undefined`)
|
||||
return queries[name](...arguments)
|
||||
}
|
||||
conf.queries[name] = queries
|
||||
//Load queries
|
||||
for (const file of await fs.promises.readdir(__queries)) {
|
||||
//Cache queries
|
||||
const query = file.replace(/[.]graphql$/, "")
|
||||
logger(`metrics/setup > load query [${name}/${query}]`)
|
||||
queries[`_${query}`] = `${await fs.promises.readFile(path.join(__queries, file))}`
|
||||
logger(`metrics/setup > load query [${name}/${query}] > success`)
|
||||
//Debug
|
||||
if (conf.settings.debug) {
|
||||
Object.defineProperty(queries, `_${query}`, {
|
||||
get() {
|
||||
logger(`metrics/setup > reload query [${name}/${query}]`)
|
||||
const raw = `${fs.readFileSync(path.join(__queries, file))}`
|
||||
logger(`metrics/setup > reload query [${name}/${query}] > success`)
|
||||
return raw
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
//Create queries formatters
|
||||
Object.keys(queries).map(query => queries[query.substring(1)] = (vars = {}) => {
|
||||
let queried = queries[query]
|
||||
for (const [key, value] of Object.entries(vars))
|
||||
queried = queried.replace(new RegExp(`[$]${key}`, "g"), value)
|
||||
return queried
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
//Load metadata
|
||||
conf.metadata = await metadata({log})
|
||||
|
||||
//Store authenticated user
|
||||
if (conf.settings.token) {
|
||||
try {
|
||||
conf.authenticated = (await (new OctokitRest.Octokit({auth:conf.settings.token})).users.getAuthenticated()).data.login
|
||||
logger(`metrics/setup > setup > authenticated as ${conf.authenticated}`)
|
||||
}
|
||||
catch (error) {
|
||||
logger(`metrics/setup > setup > could not verify authentication : ${error}`)
|
||||
}
|
||||
}
|
||||
|
||||
//Set no token property
|
||||
Object.defineProperty(conf.settings, "notoken", {get() {
|
||||
return conf.settings.token === "NOT_NEEDED"
|
||||
}})
|
||||
|
||||
//Conf
|
||||
logger("metrics/setup > setup > success")
|
||||
return {Templates, Plugins, conf}
|
||||
export default async function({log = true, nosettings = false, community = {}} = {}) {
|
||||
//Paths
|
||||
const __metrics = path.join(path.dirname(url.fileURLToPath(import.meta.url)), "../../..")
|
||||
const __statics = path.join(__metrics, "source/app/web/statics")
|
||||
const __templates = path.join(__metrics, "source/templates")
|
||||
const __plugins = path.join(__metrics, "source/plugins")
|
||||
const __package = path.join(__metrics, "package.json")
|
||||
const __settings = path.join(__metrics, "settings.json")
|
||||
const __modules = path.join(__metrics, "node_modules")
|
||||
|
||||
//Init
|
||||
const logger = log ? console.debug : () => null
|
||||
logger("metrics/setup > setup")
|
||||
const conf = {
|
||||
authenticated:null,
|
||||
templates:{},
|
||||
queries:{},
|
||||
settings:{},
|
||||
metadata:{},
|
||||
paths:{
|
||||
statics:__statics,
|
||||
templates:__templates,
|
||||
node_modules:__modules,
|
||||
},
|
||||
}
|
||||
|
||||
//Load settings
|
||||
logger("metrics/setup > load settings.json")
|
||||
if (fs.existsSync(__settings)) {
|
||||
if (nosettings)
|
||||
logger("metrics/setup > load settings.json > skipped because no settings is enabled")
|
||||
else {
|
||||
conf.settings = JSON.parse(`${await fs.promises.readFile(__settings)}`)
|
      logger("metrics/setup > load settings.json > success")
    }
  }
  else
    logger("metrics/setup > load settings.json > (missing)")


  if (!conf.settings.templates)
    conf.settings.templates = {default:"classic", enabled:[]}
  if (!conf.settings.plugins)
    conf.settings.plugins = {}
  conf.settings.community = {...conf.settings.community, ...community}
  conf.settings.plugins.base = {parts:["header", "activity", "community", "repositories", "metadata"]}
  if (conf.settings.debug)
    logger(util.inspect(conf.settings, {depth:Infinity, maxStringLength:256}))

  //Load package settings
  logger("metrics/setup > load package.json")
  conf.package = JSON.parse(`${await fs.promises.readFile(__package)}`)
  logger("metrics/setup > load package.json > success")

  //Load community templates
  if ((typeof conf.settings.community.templates === "string") && (conf.settings.community.templates.length)) {
    logger("metrics/setup > parsing community templates list")
    conf.settings.community.templates = [...new Set([...decodeURIComponent(conf.settings.community.templates).split(",").map(v => v.trim().toLocaleLowerCase()).filter(v => v)])]
  }
  if ((Array.isArray(conf.settings.community.templates)) && (conf.settings.community.templates.length)) {
    //Clean remote repository
    logger(`metrics/setup > ${conf.settings.community.templates.length} community templates to install`)
    await fs.promises.rmdir(path.join(__templates, ".community"), {recursive:true})
    //Download community templates
    for (const template of conf.settings.community.templates) {
      try {
        //Parse community template
        logger(`metrics/setup > load community template ${template}`)
        const {repo, branch, name, trust = false} = template.match(/^(?<repo>[\s\S]+?)@(?<branch>[\s\S]+?):(?<name>[\s\S]+?)(?<trust>[+]trust)?$/)?.groups ?? null
        const command = `git clone --single-branch --branch ${branch} https://github.com/${repo}.git ${path.join(__templates, ".community")}`
        logger(`metrics/setup > run ${command}`)
        //Clone remote repository
        processes.execSync(command, {stdio:"ignore"})
        //Extract template
        logger(`metrics/setup > extract ${name} from ${repo}@${branch}`)
        await fs.promises.rmdir(path.join(__templates, `@${name}`), {recursive:true})
        await fs.promises.rename(path.join(__templates, ".community/source/templates", name), path.join(__templates, `@${name}`))
        //JavaScript file
        if (trust)
          logger(`metrics/setup > keeping @${name}/template.mjs (unsafe mode is enabled)`)
        else if (fs.existsSync(path.join(__templates, `@${name}`, "template.mjs"))) {
          logger(`metrics/setup > removing @${name}/template.mjs`)
          await fs.promises.unlink(path.join(__templates, `@${name}`, "template.mjs"))
          const inherit = yaml.load(`${fs.promises.readFile(path.join(__templates, `@${name}`, "metadata.yml"))}`).extends ?? null
          if (inherit) {
            logger(`metrics/setup > @${name} extends from ${inherit}`)
            if (fs.existsSync(path.join(__templates, inherit, "template.mjs"))) {
              logger(`metrics/setup > @${name} extended from ${inherit}`)
              await fs.promises.copyFile(path.join(__templates, inherit, "template.mjs"), path.join(__templates, `@${name}`, "template.mjs"))
            }
            else
              logger(`metrics/setup > @${name} could not extends ${inherit} as it does not exist`)

          }
        }
        else
          logger(`metrics/setup > @${name}/template.mjs does not exist`)


        //Clean remote repository
        logger(`metrics/setup > clean ${repo}@${branch}`)
        await fs.promises.rmdir(path.join(__templates, ".community"), {recursive:true})
        logger(`metrics/setup > loaded community template ${name}`)
      }
      catch (error) {
        logger(`metrics/setup > failed to load community template ${template}`)
        logger(error)
      }
    }
  }
  else
    logger("metrics/setup > no community templates to install")


  //Load templates
  for (const name of await fs.promises.readdir(__templates)) {
    //Search for templates
    const directory = path.join(__templates, name)
    if ((!(await fs.promises.lstat(directory)).isDirectory()) || (!fs.existsSync(path.join(directory, "partials/_.json"))))
      continue
    logger(`metrics/setup > load template [${name}]`)
    //Cache templates files
    const files = ["image.svg", "style.css", "fonts.css"].map(file => path.join(__templates, (fs.existsSync(path.join(directory, file)) ? name : "classic"), file))
    const [image, style, fonts] = await Promise.all(files.map(async file => `${await fs.promises.readFile(file)}`))
    const partials = JSON.parse(`${await fs.promises.readFile(path.join(directory, "partials/_.json"))}`)
    conf.templates[name] = {image, style, fonts, partials, views:[directory]}

    //Cache templates scripts
    Templates[name] = await (async () => {
      const template = path.join(directory, "template.mjs")
      const fallback = path.join(__templates, "classic", "template.mjs")
      return (await import(url.pathToFileURL(fs.existsSync(template) ? template : fallback).href)).default
    })()
    logger(`metrics/setup > load template [${name}] > success`)
    //Debug
    if (conf.settings.debug) {
      Object.defineProperty(conf.templates, name, {
        get() {
          logger(`metrics/setup > reload template [${name}]`)
          const [image, style, fonts] = files.map(file => `${fs.readFileSync(file)}`)
          const partials = JSON.parse(`${fs.readFileSync(path.join(directory, "partials/_.json"))}`)
          logger(`metrics/setup > reload template [${name}] > success`)
          return {image, style, fonts, partials, views:[directory]}
        },
      })
    }
  }

  //Load plugins
  for (const name of await fs.promises.readdir(__plugins)) {
    //Search for plugins
    const directory = path.join(__plugins, name)
    if (!(await fs.promises.lstat(directory)).isDirectory())
      continue
    //Cache plugins scripts
    logger(`metrics/setup > load plugin [${name}]`)
    Plugins[name] = (await import(url.pathToFileURL(path.join(directory, "index.mjs")).href)).default
    logger(`metrics/setup > load plugin [${name}] > success`)
    //Register queries
    const __queries = path.join(directory, "queries")
    if (fs.existsSync(__queries)) {
      //Alias for default query
      const queries = function() {
        if (!queries[name])
          throw new ReferenceError(`Default query for ${name} undefined`)
        return queries[name](...arguments)
      }
      conf.queries[name] = queries
      //Load queries
      for (const file of await fs.promises.readdir(__queries)) {
        //Cache queries
        const query = file.replace(/[.]graphql$/, "")
        logger(`metrics/setup > load query [${name}/${query}]`)
        queries[`_${query}`] = `${await fs.promises.readFile(path.join(__queries, file))}`
        logger(`metrics/setup > load query [${name}/${query}] > success`)
        //Debug
        if (conf.settings.debug) {
          Object.defineProperty(queries, `_${query}`, {
            get() {
              logger(`metrics/setup > reload query [${name}/${query}]`)
              const raw = `${fs.readFileSync(path.join(__queries, file))}`
              logger(`metrics/setup > reload query [${name}/${query}] > success`)
              return raw
            },
          })
        }
      }
      //Create queries formatters
      Object.keys(queries).map(query => queries[query.substring(1)] = (vars = {}) => {
        let queried = queries[query]
        for (const [key, value] of Object.entries(vars))
          queried = queried.replace(new RegExp(`[$]${key}`, "g"), value)
        return queried
      }
      )
    }
  }

  //Load metadata
  conf.metadata = await metadata({log})

  //Store authenticated user
  if (conf.settings.token) {
    try {
      conf.authenticated = (await (new OctokitRest.Octokit({auth:conf.settings.token})).users.getAuthenticated()).data.login
      logger(`metrics/setup > setup > authenticated as ${conf.authenticated}`)
    }
    catch (error) {
      logger(`metrics/setup > setup > could not verify authentication : ${error}`)
    }
  }

  //Set no token property
  Object.defineProperty(conf.settings, "notoken", {
    get() {
      return conf.settings.token === "NOT_NEEDED"
    },
  })

  //Conf
  logger("metrics/setup > setup > success")
  return {Templates, Plugins, conf}
}
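For context, a minimal usage sketch of the value returned above, not part of this commit; the import path and option values are illustrative assumptions.

import setup from "./setup.mjs"

const {Templates, Plugins, conf} = await setup({log:true})
console.debug(`loaded templates: ${Object.keys(Templates).join(", ")}`)
console.debug(`loaded plugins: ${Object.keys(Plugins).join(", ")}`)
console.debug(`default template: ${conf.settings.templates.default}`)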

@@ -1,422 +1,429 @@
//Imports
import fs from "fs/promises"
import fss from "fs"
import os from "os"
import paths from "path"
import url from "url"
import util from "util"
import processes from "child_process"
import axios from "axios"
import _puppeteer from "puppeteer"
import git from "simple-git"
import twemojis from "twemoji-parser"
import jimp from "jimp"
import opengraph from "open-graph-scraper"
import rss from "rss-parser"
import nodechartist from "node-chartist"
import GIFEncoder from "gifencoder"
import PNG from "png-js"
import marked from "marked"
import htmlsanitize from "sanitize-html"
import prism from "prismjs"
import prism_lang from "prismjs/components/index.js"
prism_lang()
import fs from "fs/promises"
import prism_lang from "prismjs/components/index.js"
import axios from "axios"
import processes from "child_process"
import fss from "fs"
import GIFEncoder from "gifencoder"
import jimp from "jimp"
import marked from "marked"
import nodechartist from "node-chartist"
import opengraph from "open-graph-scraper"
import os from "os"
import paths from "path"
import PNG from "png-js"
import prism from "prismjs"
import _puppeteer from "puppeteer"
import rss from "rss-parser"
import htmlsanitize from "sanitize-html"
import git from "simple-git"
import twemojis from "twemoji-parser"
import url from "url"
import util from "util"
prism_lang()

//Exports
export {fs, os, paths, url, util, processes, axios, git, opengraph, jimp, rss}
export {axios, fs, git, jimp, opengraph, os, paths, processes, rss, url, util}

/**Returns module __dirname */
export function __module(module) {
  return paths.join(paths.dirname(url.fileURLToPath(module)))
}
export function __module(module) {
  return paths.join(paths.dirname(url.fileURLToPath(module)))
}

/**Puppeteer instantier */
export const puppeteer = {
  async launch() {
    return _puppeteer.launch({
      headless:this.headless,
      executablePath:process.env.PUPPETEER_BROWSER_PATH,
      args:this.headless ? ["--no-sandbox", "--disable-extensions", "--disable-setuid-sandbox", "--disable-dev-shm-usage"] : [],
      ignoreDefaultArgs:["--disable-extensions"],
    })
  },
  headless:true,
}
export const puppeteer = {
  async launch() {
    return _puppeteer.launch({
      headless:this.headless,
      executablePath:process.env.PUPPETEER_BROWSER_PATH,
      args:this.headless ? ["--no-sandbox", "--disable-extensions", "--disable-setuid-sandbox", "--disable-dev-shm-usage"] : [],
      ignoreDefaultArgs:["--disable-extensions"],
    })
  },
  headless:true,
}

/**Plural formatter */
export function s(value, end = "") {
  return value !== 1 ? {y:"ies", "":"s"}[end] : end
}
export function s(value, end = "") {
  return value !== 1 ? {y:"ies", "":"s"}[end] : end
}

/**Formatter */
export function format(n, {sign = false, unit = true, fixed} = {}) {
  if (unit) {
    for (const {u, v} of [{u:"b", v:10**9}, {u:"m", v:10**6}, {u:"k", v:10**3}]) {
      if (n/v >= 1)
        return `${(sign)&&(n > 0) ? "+" : ""}${(n/v).toFixed(fixed ?? 2).substr(0, 4).replace(/[.]0*$/, "")}${u}`
    }
export function format(n, {sign = false, unit = true, fixed} = {}) {
  if (unit) {
    for (const {u, v} of [{u:"b", v:10 ** 9}, {u:"m", v:10 ** 6}, {u:"k", v:10 ** 3}]) {
      if (n / v >= 1)
        return `${(sign) && (n > 0) ? "+" : ""}${(n / v).toFixed(fixed ?? 2).substr(0, 4).replace(/[.]0*$/, "")}${u}`
    }
  return `${(sign)&&(n > 0) ? "+" : ""}${fixed ? n.toFixed(fixed) : n}`
}
  return `${(sign) && (n > 0) ? "+" : ""}${fixed ? n.toFixed(fixed) : n}`
}

/**Bytes formatter */
export function bytes(n) {
  for (const {u, v} of [{u:"E", v:10**18}, {u:"P", v:10**15}, {u:"T", v:10**12}, {u:"G", v:10**9}, {u:"M", v:10**6}, {u:"k", v:10**3}]) {
    if (n/v >= 1)
      return `${(n/v).toFixed(2).substr(0, 4).replace(/[.]0*$/, "")} ${u}B`
  }
  return `${n} byte${n > 1 ? "s" : ""}`
export function bytes(n) {
  for (const {u, v} of [{u:"E", v:10 ** 18}, {u:"P", v:10 ** 15}, {u:"T", v:10 ** 12}, {u:"G", v:10 ** 9}, {u:"M", v:10 ** 6}, {u:"k", v:10 ** 3}]) {
    if (n / v >= 1)
      return `${(n / v).toFixed(2).substr(0, 4).replace(/[.]0*$/, "")} ${u}B`
  }
format.bytes = bytes
  return `${n} byte${n > 1 ? "s" : ""}`
}
format.bytes = bytes
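Illustrative calls against the formatters above, not part of the diff; the outputs in the comments are assumptions derived from the logic shown.

console.log(format(1234))              //"1.23k"
console.log(format(0.5, {sign:true}))  //"+0.5"
console.log(bytes(2048))               //"2.05 kB"
console.log(bytes(1))                  //"1 byte"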

/**Percentage formatter */
export function percentage(n, {rescale = true} = {}) {
  return `${(n*(rescale ? 100 : 1)).toFixed(2)
export function percentage(n, {rescale = true} = {}) {
  return `${
    (n * (rescale ? 100 : 1)).toFixed(2)
      .replace(/(?<=[.])(?<decimal>[1-9]*)0+$/, "$<decimal>")
      .replace(/[.]$/, "")}%`
}
format.percentage = percentage
    .replace(/[.]$/, "")
  }%`
}
format.percentage = percentage

/**Text ellipsis formatter */
export function ellipsis(text, {length = 20} = {}) {
  text = `${text}`
  if (text.length < length)
    return text
  return `${text.substring(0, length)}…`
}
format.ellipsis = ellipsis
export function ellipsis(text, {length = 20} = {}) {
  text = `${text}`
  if (text.length < length)
    return text
  return `${text.substring(0, length)}…`
}
format.ellipsis = ellipsis

/**Date formatter */
export function date(string, options) {
  return new Intl.DateTimeFormat("en-GB", options).format(new Date(string))
}
format.date = date
export function date(string, options) {
  return new Intl.DateTimeFormat("en-GB", options).format(new Date(string))
}
format.date = date
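Illustrative calls, not part of the diff; expected outputs in the comments are assumptions (en-GB locale for date).

console.log(percentage(0.4267))                        //"42.67%"
console.log(ellipsis("A fairly long string of text"))  //"A fairly long string…"
console.log(date("2021-01-01", {dateStyle:"medium"}))  //"1 Jan 2021"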

/**Array shuffler */
export function shuffle(array) {
  for (let i = array.length-1; i > 0; i--) {
    const j = Math.floor(Math.random()*(i+1))
    ;[array[i], array[j]] = [array[j], array[i]]
  }
  return array
export function shuffle(array) {
  for (let i = array.length - 1; i > 0; i--) {
    const j = Math.floor(Math.random() * (i + 1))
    ;[array[i], array[j]] = [array[j], array[i]]
  }
  return array
}

/**Escape html */
export function htmlescape(string, u = {"&":true, "<":true, ">":true, '"':true, "'":true}) {
  return string
    .replace(/&(?!(?:amp|lt|gt|quot|apos);)/g, u["&"] ? "&amp;" : "&")
    .replace(/</g, u["<"] ? "&lt;" : "<")
    .replace(/>/g, u[">"] ? "&gt;" : ">")
    .replace(/"/g, u['"'] ? "&quot;" : '"')
    .replace(/'/g, u["'"] ? "&apos;" : "'")
}
export function htmlescape(string, u = {"&":true, "<":true, ">":true, '"':true, "'":true}) {
  return string
    .replace(/&(?!(?:amp|lt|gt|quot|apos);)/g, u["&"] ? "&amp;" : "&")
    .replace(/</g, u["<"] ? "&lt;" : "<")
    .replace(/>/g, u[">"] ? "&gt;" : ">")
    .replace(/"/g, u['"'] ? "&quot;" : '"')
    .replace(/'/g, u["'"] ? "&apos;" : "'")
}

/**Unescape html */
export function htmlunescape(string, u = {"&":true, "<":true, ">":true, '"':true, "'":true}) {
  return string
    .replace(/&lt;/g, u["<"] ? "<" : "&lt;")
    .replace(/&gt;/g, u[">"] ? ">" : "&gt;")
    .replace(/&quot;/g, u['"'] ? '"' : "&quot;")
    .replace(/&(?:apos|#39);/g, u["'"] ? "'" : "&apos;")
    .replace(/&amp;/g, u["&"] ? "&" : "&amp;")
}
export function htmlunescape(string, u = {"&":true, "<":true, ">":true, '"':true, "'":true}) {
  return string
    .replace(/&lt;/g, u["<"] ? "<" : "&lt;")
    .replace(/&gt;/g, u[">"] ? ">" : "&gt;")
    .replace(/&quot;/g, u['"'] ? '"' : "&quot;")
    .replace(/&(?:apos|#39);/g, u["'"] ? "'" : "&apos;")
    .replace(/&amp;/g, u["&"] ? "&" : "&amp;")
}
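A quick round-trip showing how the two helpers above mirror each other; illustrative only, the outputs in the comments are assumptions.

const escaped = htmlescape("<b>1 & 2</b>")             //"&lt;b&gt;1 &amp; 2&lt;/b&gt;"
console.log(htmlunescape(escaped) === "<b>1 & 2</b>")  //true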

/**Chartist */
export async function chartist() {
  const css = `<style>${await fs.readFile(paths.join(__module(import.meta.url), "../../../node_modules", "node-chartist/dist/main.css")).catch(_ => "")}</style>`
  return (await nodechartist(...arguments))
    .replace(/class="ct-chart-line">/, `class="ct-chart-line">${css}`)
}
export async function chartist() {
  const css = `<style>${await fs.readFile(paths.join(__module(import.meta.url), "../../../node_modules", "node-chartist/dist/main.css")).catch(_ => "")}</style>`
  return (await nodechartist(...arguments))
    .replace(/class="ct-chart-line">/, `class="ct-chart-line">${css}`)
}

/**Run command */
export async function run(command, options, {prefixed = true} = {}) {
  const prefix = {win32:"wsl"}[process.platform] ?? ""
  command = `${prefixed ? prefix : ""} ${command}`.trim()
  return new Promise((solve, reject) => {
    console.debug(`metrics/command > ${command}`)
    const child = processes.exec(command, options)
    let [stdout, stderr] = ["", ""]
    child.stdout.on("data", data => stdout += data)
    child.stderr.on("data", data => stderr += data)
    child.on("close", code => {
      console.debug(`metrics/command > ${command} > exited with code ${code}`)
      console.debug(stdout)
      console.debug(stderr)
      return code === 0 ? solve(stdout) : reject(stderr)
    })
export async function run(command, options, {prefixed = true} = {}) {
  const prefix = {win32:"wsl"}[process.platform] ?? ""
  command = `${prefixed ? prefix : ""} ${command}`.trim()
  return new Promise((solve, reject) => {
    console.debug(`metrics/command > ${command}`)
    const child = processes.exec(command, options)
    let [stdout, stderr] = ["", ""]
    child.stdout.on("data", data => stdout += data)
    child.stderr.on("data", data => stderr += data)
    child.on("close", code => {
      console.debug(`metrics/command > ${command} > exited with code ${code}`)
      console.debug(stdout)
      console.debug(stderr)
      return code === 0 ? solve(stdout) : reject(stderr)
    })
  }
  })
}

/**Check command existance */
export async function which(command) {
  try {
    console.debug(`metrics/command > checking existence of ${command}`)
    await run(`which ${command}`)
    return true
  }
  catch {
    console.debug(`metrics/command > checking existence of ${command} > failed`)
  }
  return false
export async function which(command) {
  try {
    console.debug(`metrics/command > checking existence of ${command}`)
    await run(`which ${command}`)
    return true
  }
  catch {
    console.debug(`metrics/command > checking existence of ${command} > failed`)
  }
  return false
}
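Hypothetical usage of run/which, not part of the commit; run resolves with stdout on a zero exit code and rejects with stderr otherwise, as implemented above.

if (await which("git"))
  console.log(await run("git --version"))  //e.g. "git version 2.x.y"
else
  console.debug("git is not available")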

/**Markdown-html sanitizer-interpreter */
export async function markdown(text, {mode = "inline", codelines = Infinity} = {}) {
  //Sanitize user input once to prevent injections and parse into markdown
  let rendered = await marked(htmlunescape(htmlsanitize(text)), {
    highlight(code, lang) {
      return lang in prism.languages ? prism.highlight(code, prism.languages[lang]) : code
export async function markdown(text, {mode = "inline", codelines = Infinity} = {}) {
  //Sanitize user input once to prevent injections and parse into markdown
  let rendered = await marked(htmlunescape(htmlsanitize(text)), {
    highlight(code, lang) {
      return lang in prism.languages ? prism.highlight(code, prism.languages[lang]) : code
    },
    silent:true,
    xhtml:true,
  })
  //Markdown mode
  switch (mode) {
    case "inline": {
      rendered = htmlsanitize(
        htmlsanitize(rendered, {
          allowedTags:["h1", "h2", "h3", "h4", "h5", "h6", "br", "blockquote", "code", "span"],
          allowedAttributes:{code:["class"], span:["class"]},
        }),
        {
          allowedAttributes:{code:["class"], span:["class"]},
          transformTags:{h1:"b", h2:"b", h3:"b", h4:"b", h5:"b", h6:"b", blockquote:"i"},
        },
    silent:true,
    xhtml:true,
  })
  //Markdown mode
  switch (mode) {
    case "inline":{
      rendered = htmlsanitize(htmlsanitize(rendered, {
        allowedTags:["h1", "h2", "h3", "h4", "h5", "h6", "br", "blockquote", "code", "span"],
        allowedAttributes:{code:["class"], span:["class"]},
      }), {
        allowedAttributes:{code:["class"], span:["class"]},
        transformTags:{h1:"b", h2:"b", h3:"b", h4:"b", h5:"b", h6:"b", blockquote:"i"},
      })
      break
    }
    default:
      break
  }
  //Trim code snippets
  rendered = rendered.replace(/(?<open><code[\s\S]*?>)(?<code>[\s\S]*?)(?<close><\/code>)/g, (m, open, code, close) => { //eslint-disable-line max-params
    const lines = code.trim().split("\n")
    if ((lines.length > 1)&&(!/class="[\s\S]*"/.test(open)))
      open = open.replace(/>/g, ' class="language-multiline">')
    return `${open}${lines.slice(0, codelines).join("\n")}${lines.length > codelines ? `\n<span class="token trimmed">(${lines.length-codelines} more ${lines.length-codelines === 1 ? "line was" : "lines were"} trimmed)</span>` : ""}${close}`
  })
  return rendered
      )
      break
    }
    default:
      break
  }
  //Trim code snippets
  rendered = rendered.replace(/(?<open><code[\s\S]*?>)(?<code>[\s\S]*?)(?<close><\/code>)/g, (m, open, code, close) => { //eslint-disable-line max-params
    const lines = code.trim().split("\n")
    if ((lines.length > 1) && (!/class="[\s\S]*"/.test(open)))
      open = open.replace(/>/g, ' class="language-multiline">')
    return `${open}${lines.slice(0, codelines).join("\n")}${lines.length > codelines ? `\n<span class="token trimmed">(${lines.length - codelines} more ${lines.length - codelines === 1 ? "line was" : "lines were"} trimmed)</span>` : ""}${close}`
  })
  return rendered
}
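An illustrative call to the sanitizer-interpreter above; the assumption is that in inline mode only whitelisted tags survive, so the paragraph wrapper produced by marked is stripped.

const html = await markdown("**Hello** `world`")
console.log(html)  //roughly "Hello <code>world</code>" once disallowed tags are removed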

/**Check GitHub filter against object */
export function ghfilter(text, object) {
  console.debug(`metrics/svg/ghquery > checking ${text} against ${JSON.stringify(object)}`)
  const result = text.split(" ").map(x => x.trim()).filter(x => x).map(criteria => {
    const [key, filters] = criteria.split(":")
    const value = object[key]
    console.debug(`metrics/svg/ghquery > checking ${criteria} against ${value}`)
    return filters.split(",").map(x => x.trim()).filter(x => x).map(filter => {
      switch (true) {
        case /^>\d+$/.test(filter):
          return value > Number(filter.substring(1))
        case /^<\d+$/.test(filter):
          return value < Number(filter.substring(1))
        case /^\d+$/.test(filter):
          return value === Number(filter)
        case /^\d+..\d+$/.test(filter):{
          const [a, b] = filter.split("..").map(Number)
          return (value >= a)&&(value <= b)
        }
        default:
          return false
export function ghfilter(text, object) {
  console.debug(`metrics/svg/ghquery > checking ${text} against ${JSON.stringify(object)}`)
  const result = text.split(" ").map(x => x.trim()).filter(x => x).map(criteria => {
    const [key, filters] = criteria.split(":")
    const value = object[key]
    console.debug(`metrics/svg/ghquery > checking ${criteria} against ${value}`)
    return filters.split(",").map(x => x.trim()).filter(x => x).map(filter => {
      switch (true) {
        case /^>\d+$/.test(filter):
          return value > Number(filter.substring(1))
        case /^<\d+$/.test(filter):
          return value < Number(filter.substring(1))
        case /^\d+$/.test(filter):
          return value === Number(filter)
        case /^\d+..\d+$/.test(filter): {
          const [a, b] = filter.split("..").map(Number)
          return (value >= a) && (value <= b)
        }
    }).reduce((a, b) => a||b, false)
  }).reduce((a, b) => a&&b, true)
  console.debug(`metrics/svg/ghquery > ${result ? "matching" : "not matching"}`)
  return result
}
        default:
          return false
      }
    }).reduce((a, b) => a || b, false)
  }).reduce((a, b) => a && b, true)
  console.debug(`metrics/svg/ghquery > ${result ? "matching" : "not matching"}`)
  return result
}
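Example filters evaluated against a plain object, mirroring GitHub search qualifiers; the boolean results in the comments follow from the switch cases above and are otherwise assumptions.

console.log(ghfilter("stars:>100 forks:10..50", {stars:150, forks:25}))  //true
console.log(ghfilter("stars:<100", {stars:150}))                         //false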

/**Image to base64 */
export async function imgb64(image, {width, height, fallback = true} = {}) {
  //Undefined image
  if (!image)
    return fallback ? "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mOcOnfpfwAGfgLYttYINwAAAABJRU5ErkJggg==" : null
  //Load image
  image = await jimp.read(image)
  //Resize image
  if ((width)&&(height))
    image = image.resize(width, height)
  return image.getBase64Async(jimp.AUTO)
}
export async function imgb64(image, {width, height, fallback = true} = {}) {
  //Undefined image
  if (!image)
    return fallback ? "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mOcOnfpfwAGfgLYttYINwAAAABJRU5ErkJggg==" : null
  //Load image
  image = await jimp.read(image)
  //Resize image
  if ((width) && (height))
    image = image.resize(width, height)
  return image.getBase64Async(jimp.AUTO)
}
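Hypothetical usage, the URL being a placeholder assumption; with no argument the transparent pixel fallback shown above is returned.

console.log(await imgb64())  //the transparent 1x1 pixel data URI fallback
const resized = await imgb64("https://github.com/octocat.png", {width:64, height:64})
console.log(resized.startsWith("data:image/"))  //true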

/**SVG utils */
export const svg = {
  /**Render as pdf */
  async pdf(rendered, {paddings = "", style = "", twemojis = false, gemojis = false, rest = null} = {}) {
    //Instantiate browser if needed
    if (!svg.resize.browser) {
      svg.resize.browser = await puppeteer.launch()
      console.debug(`metrics/svg/pdf > started ${await svg.resize.browser.version()}`)
    }
    //Additional transformations
    if (twemojis)
      rendered = await svg.twemojis(rendered, {custom:false})
    if ((gemojis)&&(rest))
      rendered = await svg.gemojis(rendered, {rest})
    rendered = marked(rendered)
    //Render through browser and print pdf
    console.debug("metrics/svg/pdf > loading svg")
    const page = await svg.resize.browser.newPage()
    page.on("console", ({_text:text}) => console.debug(`metrics/svg/pdf > puppeteer > ${text}`))
    await page.setContent(`<main class="markdown-body">${rendered}</main>`, {waitUntil:["load", "domcontentloaded", "networkidle2"]})
    console.debug("metrics/svg/pdf > loaded svg successfully")
    await page.addStyleTag({content:`
export const svg = {
  /**Render as pdf */
  async pdf(rendered, {paddings = "", style = "", twemojis = false, gemojis = false, rest = null} = {}) {
    //Instantiate browser if needed
    if (!svg.resize.browser) {
      svg.resize.browser = await puppeteer.launch()
      console.debug(`metrics/svg/pdf > started ${await svg.resize.browser.version()}`)
    }
    //Additional transformations
    if (twemojis)
      rendered = await svg.twemojis(rendered, {custom:false})
    if ((gemojis) && (rest))
      rendered = await svg.gemojis(rendered, {rest})
    rendered = marked(rendered)
    //Render through browser and print pdf
    console.debug("metrics/svg/pdf > loading svg")
    const page = await svg.resize.browser.newPage()
    page.on("console", ({_text:text}) => console.debug(`metrics/svg/pdf > puppeteer > ${text}`))
    await page.setContent(`<main class="markdown-body">${rendered}</main>`, {waitUntil:["load", "domcontentloaded", "networkidle2"]})
    console.debug("metrics/svg/pdf > loaded svg successfully")
    await page.addStyleTag({
      content:`
        main { margin: ${(Array.isArray(paddings) ? paddings : paddings.split(",")).join(" ")}; }
        main svg { height: 1em; width: 1em; }
        ${await fs.readFile(paths.join(__module(import.meta.url), "../../../node_modules", "@primer/css/dist/markdown.css")).catch(_ => "")}${style}
      `})
    rendered = await page.pdf()
      `,
    })
    rendered = await page.pdf()
    //Result
    await page.close()
    console.debug("metrics/svg/pdf > rendering complete")
    return {rendered, mime:"application/pdf"}
  },
  /**Render and resize svg */
  async resize(rendered, {paddings, convert}) {
    //Instantiate browser if needed
    if (!svg.resize.browser) {
      svg.resize.browser = await puppeteer.launch()
      console.debug(`metrics/svg/resize > started ${await svg.resize.browser.version()}`)
    }
    //Format padding
    const [pw = 1, ph] = (Array.isArray(paddings) ? paddings : `${paddings}`.split(",").map(x => x.trim())).map(padding => `${padding}`.substring(0, padding.length - 1)).map(value => 1 + Number(value) / 100)
    const padding = {width:pw, height:(ph ?? pw)}
    if (!Number.isFinite(padding.width))
      padding.width = 1
    if (!Number.isFinite(padding.height))
      padding.height = 1
    console.debug(`metrics/svg/resize > padding width*${padding.width}, height*${padding.height}`)
    //Render through browser and resize height
    console.debug("metrics/svg/resize > loading svg")
    const page = await svg.resize.browser.newPage()
    page.on("console", ({_text:text}) => console.debug(`metrics/svg/resize > puppeteer > ${text}`))
    await page.setContent(rendered, {waitUntil:["load", "domcontentloaded", "networkidle2"]})
    console.debug("metrics/svg/resize > loaded svg successfully")
    await page.addStyleTag({content:"body { margin: 0; padding: 0; }"})
    let mime = "image/svg+xml"
    console.debug("metrics/svg/resize > resizing svg")
    let height, resized, width
    try {
      ({resized, width, height} = await page.evaluate(async padding => {
        //Disable animations
        const animated = !document.querySelector("svg").classList.contains("no-animations")
        if (animated)
          document.querySelector("svg").classList.add("no-animations")
        console.debug(`animations are ${animated ? "enabled" : "disabled"}`)
        await new Promise(solve => setTimeout(solve, 2400))
        //Get bounds and resize
        let {y:height, width} = document.querySelector("svg #metrics-end").getBoundingClientRect()
        console.debug(`bounds width=${width}, height=${height}`)
        height = Math.ceil(height * padding.height)
        width = Math.ceil(width * padding.width)
        console.debug(`bounds after applying padding width=${width} (*${padding.width}), height=${height} (*${padding.height})`)
        //Resize svg
        document.querySelector("svg").setAttribute("height", height)
        //Enable animations
        if (animated)
          document.querySelector("svg").classList.remove("no-animations")
    //Result
    await page.close()
    console.debug("metrics/svg/pdf > rendering complete")
    return {rendered, mime:"application/pdf"}
  },
  /**Render and resize svg */
  async resize(rendered, {paddings, convert}) {
    //Instantiate browser if needed
    if (!svg.resize.browser) {
      svg.resize.browser = await puppeteer.launch()
      console.debug(`metrics/svg/resize > started ${await svg.resize.browser.version()}`)
    }
    //Format padding
    const [pw = 1, ph] = (Array.isArray(paddings) ? paddings : `${paddings}`.split(",").map(x => x.trim())).map(padding => `${padding}`.substring(0, padding.length-1)).map(value => 1+Number(value)/100)
    const padding = {width:pw, height:(ph ?? pw)}
    if (!Number.isFinite(padding.width))
      padding.width = 1
    if (!Number.isFinite(padding.height))
      padding.height = 1
    console.debug(`metrics/svg/resize > padding width*${padding.width}, height*${padding.height}`)
    //Render through browser and resize height
    console.debug("metrics/svg/resize > loading svg")
    const page = await svg.resize.browser.newPage()
    page.on("console", ({_text:text}) => console.debug(`metrics/svg/resize > puppeteer > ${text}`))
    await page.setContent(rendered, {waitUntil:["load", "domcontentloaded", "networkidle2"]})
    console.debug("metrics/svg/resize > loaded svg successfully")
    await page.addStyleTag({content:"body { margin: 0; padding: 0; }"})
    let mime = "image/svg+xml"
    console.debug("metrics/svg/resize > resizing svg")
    let height, resized, width
    try {
      ({resized, width, height} = await page.evaluate(async padding => {
        //Disable animations
        const animated = !document.querySelector("svg").classList.contains("no-animations")
        if (animated)
          document.querySelector("svg").classList.add("no-animations")
        console.debug(`animations are ${animated ? "enabled" : "disabled"}`)
        await new Promise(solve => setTimeout(solve, 2400))
        //Get bounds and resize
        let {y:height, width} = document.querySelector("svg #metrics-end").getBoundingClientRect()
        console.debug(`bounds width=${width}, height=${height}`)
        height = Math.ceil(height*padding.height)
        width = Math.ceil(width*padding.width)
        console.debug(`bounds after applying padding width=${width} (*${padding.width}), height=${height} (*${padding.height})`)
        //Resize svg
        document.querySelector("svg").setAttribute("height", height)
        //Enable animations
        if (animated)
          document.querySelector("svg").classList.remove("no-animations")
        //Result
        return {resized:new XMLSerializer().serializeToString(document.querySelector("svg")), height, width}
      }, padding))
    }
    catch (error) {
      console.error(error)
      console.debug(`metrics/svg/resize > an error occured: ${error}`)
      throw error
    }
    //Convert if required
    if (convert) {
      console.debug(`metrics/svg/resize > convert to ${convert}`)
      resized = await page.screenshot({type:convert, clip:{x:0, y:0, width, height}, omitBackground:true})
      mime = `image/${convert}`
    }
    //Result
    await page.close()
    console.debug("metrics/svg/resize > rendering complete")
    return {resized, mime}
  },
  /**Render twemojis */
  async twemojis(rendered, {custom = true} = {}) {
    //Load emojis
    console.debug("metrics/svg/twemojis > rendering twemojis")
    const emojis = new Map()
    for (const {text:emoji, url} of twemojis.parse(rendered)) {
      if (!emojis.has(emoji))
        emojis.set(emoji, (await axios.get(url)).data.replace(/^<svg /, '<svg class="twemoji" '))
    }
    //Apply replacements
    for (const [emoji, twemoji] of emojis) {
      if (custom)
        rendered = rendered.replace(new RegExp(`<metrics[ ]*(?<attributes>[^>]*)>${emoji}</metrics>`, "g"), twemoji.replace(/(<svg class="twemoji" [\s\S]+?)(>)/, "$1 $<attributes> $2"))
      rendered = rendered.replace(new RegExp(emoji, "g"), twemoji)
    }
    return rendered
  },
  /**Render github emojis */
  async gemojis(rendered, {rest}) {
    //Load gemojis
    console.debug("metrics/svg/gemojis > rendering gemojis")
    const emojis = new Map()
    try {
      for (const [emoji, url] of Object.entries((await rest.emojis.get()).data).map(([key, value]) => [`:${key}:`, value])) {
        if (((!emojis.has(emoji)))&&(new RegExp(emoji, "g").test(rendered)))
          emojis.set(emoji, `<img class="gemoji" src="${await imgb64(url)}" height="16" width="16" alt="">`)
      }
    }
    catch (error) {
      console.debug("metrics/svg/gemojis > could not load gemojis")
      console.debug(error)
    }
    //Apply replacements
    for (const [emoji, gemoji] of emojis)
      rendered = rendered.replace(new RegExp(emoji, "g"), gemoji)
    return rendered
  },
}
        //Result
        return {resized:new XMLSerializer().serializeToString(document.querySelector("svg")), height, width}
      }, padding))
    }
    catch (error) {
      console.error(error)
      console.debug(`metrics/svg/resize > an error occured: ${error}`)
      throw error
    }
    //Convert if required
    if (convert) {
      console.debug(`metrics/svg/resize > convert to ${convert}`)
      resized = await page.screenshot({type:convert, clip:{x:0, y:0, width, height}, omitBackground:true})
      mime = `image/${convert}`
    }
    //Result
    await page.close()
    console.debug("metrics/svg/resize > rendering complete")
    return {resized, mime}
  },
  /**Render twemojis */
  async twemojis(rendered, {custom = true} = {}) {
    //Load emojis
    console.debug("metrics/svg/twemojis > rendering twemojis")
    const emojis = new Map()
    for (const {text:emoji, url} of twemojis.parse(rendered)) {
      if (!emojis.has(emoji))
        emojis.set(emoji, (await axios.get(url)).data.replace(/^<svg /, '<svg class="twemoji" '))
    }
    //Apply replacements
    for (const [emoji, twemoji] of emojis) {
      if (custom)
        rendered = rendered.replace(new RegExp(`<metrics[ ]*(?<attributes>[^>]*)>${emoji}</metrics>`, "g"), twemoji.replace(/(<svg class="twemoji" [\s\S]+?)(>)/, "$1 $<attributes> $2"))
      rendered = rendered.replace(new RegExp(emoji, "g"), twemoji)
    }
    return rendered
  },
  /**Render github emojis */
  async gemojis(rendered, {rest}) {
    //Load gemojis
    console.debug("metrics/svg/gemojis > rendering gemojis")
    const emojis = new Map()
    try {
      for (const [emoji, url] of Object.entries((await rest.emojis.get()).data).map(([key, value]) => [`:${key}:`, value])) {
        if (((!emojis.has(emoji))) && (new RegExp(emoji, "g").test(rendered)))
          emojis.set(emoji, `<img class="gemoji" src="${await imgb64(url)}" height="16" width="16" alt="">`)
      }
    }
    catch (error) {
      console.debug("metrics/svg/gemojis > could not load gemojis")
      console.debug(error)
    }
    //Apply replacements
    for (const [emoji, gemoji] of emojis)
      rendered = rendered.replace(new RegExp(emoji, "g"), gemoji)
    return rendered
  },
}
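A minimal sketch of driving the svg helpers above; the inline SVG string is a placeholder assumption and must contain the #metrics-end marker that resize() measures.

const markup = '<svg xmlns="http://www.w3.org/2000/svg" width="480" height="10"><g id="metrics-end"></g></svg>'
const {resized, mime} = await svg.resize(markup, {paddings:"6%", convert:null})
console.log(mime)  //"image/svg+xml"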

/**Wait */
export async function wait(seconds) {
  await new Promise(solve => setTimeout(solve, seconds*1000))
}
export async function wait(seconds) {
  await new Promise(solve => setTimeout(solve, seconds * 1000))
}

/**Create record from puppeteer browser */
export async function record({page, width, height, frames, scale = 1, quality = 80, x = 0, y = 0, delay = 150, background = true}) {
  //Register images frames
  const images = []
  for (let i = 0; i < frames; i++) {
    images.push(await page.screenshot({type:"png", clip:{width, height, x, y}, omitBackground:background}))
    await wait(delay/1000)
    if (i%10 === 0)
      console.debug(`metrics/record > processed ${i}/${frames} frames`)
  }
  console.debug(`metrics/record > processed ${frames}/${frames} frames`)
  //Post-processing
  console.debug("metrics/record > applying post-processing")
  return Promise.all(images.map(async buffer => (await jimp.read(buffer)).scale(scale).quality(quality).getBase64Async("image/png")))
export async function record({page, width, height, frames, scale = 1, quality = 80, x = 0, y = 0, delay = 150, background = true}) {
  //Register images frames
  const images = []
  for (let i = 0; i < frames; i++) {
    images.push(await page.screenshot({type:"png", clip:{width, height, x, y}, omitBackground:background}))
    await wait(delay / 1000)
    if (i % 10 === 0)
      console.debug(`metrics/record > processed ${i}/${frames} frames`)
  }
  console.debug(`metrics/record > processed ${frames}/${frames} frames`)
  //Post-processing
  console.debug("metrics/record > applying post-processing")
  return Promise.all(images.map(async buffer => (await jimp.read(buffer)).scale(scale).quality(quality).getBase64Async("image/png")))
}

/**Create gif from puppeteer browser*/
export async function gif({page, width, height, frames, x = 0, y = 0, repeat = true, delay = 150, quality = 10}) {
  //Create temporary stream
  const path = paths.join(os.tmpdir(), `${Math.round(Math.random()*1000000000)}.gif`)
  console.debug(`metrics/puppeteergif > set write stream to "${path}"`)
  if (fss.existsSync(path))
    await fs.unlink(path)
  //Create encoder
  const encoder = new GIFEncoder(width, height)
  encoder.createWriteStream().pipe(fss.createWriteStream(path))
  encoder.start()
  encoder.setRepeat(repeat ? 0 : -1)
  encoder.setDelay(delay)
  encoder.setQuality(quality)
  //Register frames
  for (let i = 0; i < frames; i++) {
    const buffer = new PNG(await page.screenshot({clip:{width, height, x, y}}))
    encoder.addFrame(await new Promise(solve => buffer.decode(pixels => solve(pixels))))
    if (frames%10 === 0)
      console.debug(`metrics/puppeteergif > processed ${i}/${frames} frames`)
  }
  console.debug(`metrics/puppeteergif > processed ${frames}/${frames} frames`)
  //Close encoder and convert to base64
  encoder.finish()
  const result = await fs.readFile(path, "base64")
  await fs.unlink(path)
  return `data:image/gif;base64,${result}`
}
export async function gif({page, width, height, frames, x = 0, y = 0, repeat = true, delay = 150, quality = 10}) {
  //Create temporary stream
  const path = paths.join(os.tmpdir(), `${Math.round(Math.random() * 1000000000)}.gif`)
  console.debug(`metrics/puppeteergif > set write stream to "${path}"`)
  if (fss.existsSync(path))
    await fs.unlink(path)
  //Create encoder
  const encoder = new GIFEncoder(width, height)
  encoder.createWriteStream().pipe(fss.createWriteStream(path))
  encoder.start()
  encoder.setRepeat(repeat ? 0 : -1)
  encoder.setDelay(delay)
  encoder.setQuality(quality)
  //Register frames
  for (let i = 0; i < frames; i++) {
    const buffer = new PNG(await page.screenshot({clip:{width, height, x, y}}))
    encoder.addFrame(await new Promise(solve => buffer.decode(pixels => solve(pixels))))
    if (frames % 10 === 0)
      console.debug(`metrics/puppeteergif > processed ${i}/${frames} frames`)
  }
  console.debug(`metrics/puppeteergif > processed ${frames}/${frames} frames`)
  //Close encoder and convert to base64
  encoder.finish()
  const result = await fs.readFile(path, "base64")
  await fs.unlink(path)
  return `data:image/gif;base64,${result}`
}
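Hypothetical usage of the two capture helpers above; frame count, geometry and page content are placeholder assumptions.

const browser = await puppeteer.launch()
const page = await browser.newPage()
await page.setContent("<div>animated content</div>")
const frames = await record({page, width:400, height:200, frames:10, delay:100})
const animation = await gif({page, width:400, height:200, frames:10})
console.log(frames.length, animation.startsWith("data:image/gif;base64,"))  //10 true
await browser.close()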