Add linter and minor bug fixes (#107)
This commit is contained in:
158
source/.eslintrc.yml
Normal file
158
source/.eslintrc.yml
Normal file
@@ -0,0 +1,158 @@
|
||||
# Use recommended rules
|
||||
extends: eslint:recommended
|
||||
|
||||
# Environment
|
||||
env:
|
||||
node: yes
|
||||
es2021: yes
|
||||
parserOptions:
|
||||
ecmaVersion: 2021
|
||||
sourceType: module
|
||||
|
||||
# Globally defined variables
|
||||
globals:
|
||||
# Puppeteer variables
|
||||
document: readonly
|
||||
window: readonly
|
||||
XMLSerializer: readonly
|
||||
|
||||
# Rules
|
||||
rules:
|
||||
|
||||
# Avoid useless statements
|
||||
no-unused-vars: [error, {argsIgnorePattern: "^_"}]
|
||||
no-unused-expressions: error
|
||||
no-return-await: error
|
||||
no-empty-function: error
|
||||
no-useless-call: error
|
||||
no-useless-constructor: error
|
||||
no-useless-concat: error
|
||||
no-useless-computed-key: error
|
||||
no-useless-backreference: error
|
||||
no-self-compare: error
|
||||
no-extra-label: error
|
||||
no-undef-init: error
|
||||
|
||||
# Avoid visual pollution
|
||||
semi: [error, never]
|
||||
semi-spacing: error
|
||||
semi-style: [error, first]
|
||||
curly: [error, multi-or-nest]
|
||||
dot-notation: error
|
||||
|
||||
# Avoid confusing code
|
||||
no-label-var: error
|
||||
no-bitwise: error
|
||||
new-cap: error
|
||||
new-parens: error
|
||||
func-name-matching: error
|
||||
no-extend-native: error
|
||||
no-extra-bind: error
|
||||
|
||||
# Avoid deprecated or legacy JavaScript
|
||||
no-var: error
|
||||
no-caller: error
|
||||
no-alert: error
|
||||
no-script-url: error
|
||||
no-eval: error
|
||||
no-implied-eval: error
|
||||
no-implicit-globals: error
|
||||
no-proto: error
|
||||
no-iterator: error
|
||||
no-new-object: error
|
||||
|
||||
# Code integrity
|
||||
no-unsafe-optional-chaining: error
|
||||
no-duplicate-imports: error
|
||||
no-promise-executor-return: error
|
||||
eqeqeq: error
|
||||
|
||||
# Code simplicity
|
||||
max-depth: [error, 10]
|
||||
max-nested-callbacks: error
|
||||
max-params: [error, 3]
|
||||
max-statements-per-line: error
|
||||
newline-per-chained-call: [error, {ignoreChainWithDepth: 6}]
|
||||
object-shorthand: error
|
||||
|
||||
# Code readability
|
||||
default-case-last: error
|
||||
default-param-last: error
|
||||
no-else-return: error
|
||||
no-lonely-if: error
|
||||
no-multiple-empty-lines: error
|
||||
no-multi-str: error
|
||||
no-multi-assign: error
|
||||
no-inline-comments: error
|
||||
yoda: error
|
||||
max-classes-per-file: [error, 1]
|
||||
grouped-accessor-pairs: error
|
||||
|
||||
# Prefer using new syntax and features
|
||||
prefer-regex-literals: error
|
||||
prefer-named-capture-group: error
|
||||
prefer-arrow-callback: error
|
||||
prefer-destructuring: error
|
||||
prefer-numeric-literals: error
|
||||
prefer-exponentiation-operator: error
|
||||
prefer-spread: error
|
||||
prefer-object-spread: error
|
||||
prefer-template: error
|
||||
|
||||
# Allow additional features
|
||||
no-ex-assign: off
|
||||
no-unsafe-finally: off
|
||||
|
||||
# =========================================================================
|
||||
# Rules below are really motivated by
|
||||
|
||||
# Code style (general rules)
|
||||
no-tabs: error
|
||||
no-trailing-spaces: error
|
||||
brace-style: [error, stroustrup]
|
||||
comma-dangle: [error, always-multiline]
|
||||
comma-style: error
|
||||
sort-vars: error
|
||||
|
||||
# Coding style (quoting rules)
|
||||
quote-props: [error, as-needed, {numbers: true}]
|
||||
quotes: [error, double, {avoidEscape: true}]
|
||||
template-curly-spacing: error
|
||||
|
||||
# Coding style (comments rules)
|
||||
line-comment-position: error
|
||||
capitalized-comments: error
|
||||
multiline-comment-style: error
|
||||
|
||||
# Coding style (spacing rules)
|
||||
block-spacing: [error, always]
|
||||
comma-spacing: error
|
||||
func-call-spacing: error
|
||||
arrow-spacing: error
|
||||
generator-star-spacing: error
|
||||
object-curly-spacing: [error, never]
|
||||
rest-spread-spacing: error
|
||||
key-spacing: [error, {afterColon: false}]
|
||||
computed-property-spacing: error
|
||||
switch-colon-spacing: [error, {after: false}]
|
||||
array-bracket-spacing: [error, never]
|
||||
no-whitespace-before-property: error
|
||||
space-before-function-paren: [error, never]
|
||||
space-in-parens: error
|
||||
spaced-comment: [error, never]
|
||||
lines-between-class-members: error
|
||||
keyword-spacing: error
|
||||
array-bracket-newline: [error, consistent]
|
||||
array-element-newline: [error, consistent]
|
||||
|
||||
# Coding style (function rules)
|
||||
no-new: error
|
||||
no-new-func: error
|
||||
no-new-wrappers: error
|
||||
func-names: [error, never]
|
||||
function-call-argument-newline: [error, never]
|
||||
function-paren-newline: [error, never]
|
||||
no-extra-parens: [error, functions]
|
||||
arrow-body-style: [error, as-needed]
|
||||
arrow-parens: [error, as-needed]
|
||||
implicit-arrow-linebreak: error
|
||||
@@ -5,7 +5,7 @@
|
||||
import setup from "../metrics/setup.mjs"
|
||||
import mocks from "../mocks/index.mjs"
|
||||
import metrics from "../metrics/index.mjs"
|
||||
process.on("unhandledRejection", error => { throw error })
|
||||
process.on("unhandledRejection", error => { throw error }) //eslint-disable-line max-statements-per-line, brace-style
|
||||
|
||||
//Debug message buffer
|
||||
let DEBUG = true
|
||||
@@ -28,201 +28,205 @@
|
||||
info.break = () => console.log("─".repeat(88))
|
||||
|
||||
//Runner
|
||||
try {
|
||||
//Initialization
|
||||
info.break()
|
||||
info.section(`Metrics`)
|
||||
|
||||
//Skip process if needed
|
||||
if ((github.context.eventName === "push")&&(github.context.payload?.head_commit)) {
|
||||
if (/\[Skip GitHub Action\]/.test(github.context.payload.head_commit.message)) {
|
||||
console.log(`Skipped because [Skip GitHub Action] is in commit message`)
|
||||
process.exit(0)
|
||||
}
|
||||
}
|
||||
|
||||
//Load configuration
|
||||
const {conf, Plugins, Templates} = await setup({log:false, nosettings:true, community:{templates:core.getInput("setup_community_templates")}})
|
||||
const {metadata} = conf
|
||||
info("Setup", "complete")
|
||||
info("Version", conf.package.version)
|
||||
|
||||
//Core inputs
|
||||
const {
|
||||
user:_user, token,
|
||||
template, query, "setup.community.templates":_templates,
|
||||
filename, optimize, verify,
|
||||
debug, "debug.flags":dflags, "use.mocked.data":mocked, dryrun,
|
||||
"plugins.errors.fatal":die,
|
||||
"committer.token":_token, "committer.branch":_branch,
|
||||
"use.prebuilt.image":_image,
|
||||
...config
|
||||
} = metadata.plugins.core.inputs.action({core})
|
||||
const q = {...query, template}
|
||||
|
||||
//Docker image
|
||||
if (_image)
|
||||
info("Using prebuilt image", _image)
|
||||
|
||||
//Debug mode and flags
|
||||
info("Debug mode", debug)
|
||||
if (!debug) {
|
||||
console.debug = message => debugged.push(message)
|
||||
DEBUG = false
|
||||
}
|
||||
info("Debug flags", dflags)
|
||||
|
||||
//Token for data gathering
|
||||
info("GitHub token", token, {token:true})
|
||||
if (!token)
|
||||
throw new Error("You must provide a valid GitHub token to gather your metrics")
|
||||
conf.settings.token = token
|
||||
const api = {}
|
||||
api.graphql = octokit.graphql.defaults({headers:{authorization: `token ${token}`}})
|
||||
info("Github GraphQL API", "ok")
|
||||
api.rest = github.getOctokit(token)
|
||||
info("Github REST API", "ok")
|
||||
//Apply mocking if needed
|
||||
if (mocked) {
|
||||
Object.assign(api, await mocks(api))
|
||||
info("Use mocked API", true)
|
||||
}
|
||||
//Extract octokits
|
||||
const {graphql, rest} = api
|
||||
|
||||
//GitHub user
|
||||
let authenticated
|
||||
;(async function() {
|
||||
try {
|
||||
authenticated = (await rest.users.getAuthenticated()).data.login
|
||||
}
|
||||
catch {
|
||||
authenticated = github.context.repo.owner
|
||||
}
|
||||
const user = _user || authenticated
|
||||
info("GitHub account", user)
|
||||
//Initialization
|
||||
info.break()
|
||||
info.section("Metrics")
|
||||
|
||||
//Current repository
|
||||
info("Current repository", `${github.context.repo.owner}/${github.context.repo.repo}`)
|
||||
|
||||
//Committer
|
||||
const committer = {}
|
||||
if (!dryrun) {
|
||||
//Compute committer informations
|
||||
committer.commit = true
|
||||
committer.token = _token || token
|
||||
committer.branch = _branch || github.context.ref.replace(/^refs[/]heads[/]/, "")
|
||||
info("Committer token", committer.token, {token:true})
|
||||
if (!committer.token)
|
||||
throw new Error("You must provide a valid GitHub token to commit your metrics")
|
||||
info("Committer branch", committer.branch)
|
||||
//Instantiate API for committer
|
||||
committer.rest = github.getOctokit(committer.token)
|
||||
info("Committer REST API", "ok")
|
||||
try {
|
||||
info("Committer account", (await committer.rest.users.getAuthenticated()).data.login)
|
||||
}
|
||||
catch {
|
||||
info("Committer account", "(github-actions)")
|
||||
}
|
||||
//Retrieve previous render SHA to be able to update file content through API
|
||||
committer.sha = null
|
||||
try {
|
||||
const {repository:{object:{oid}}} = await graphql(`
|
||||
query Sha {
|
||||
repository(owner: "${github.context.repo.owner}", name: "${github.context.repo.repo}") {
|
||||
object(expression: "${committer.branch}:${filename}") { ... on Blob { oid } }
|
||||
}
|
||||
}
|
||||
`,
|
||||
{headers:{authorization:`token ${committer.token}`}}
|
||||
)
|
||||
committer.sha = oid
|
||||
} catch (error) { console.debug(error) }
|
||||
info("Previous render sha", committer.sha ?? "(none)")
|
||||
}
|
||||
else
|
||||
info("Dry-run", true)
|
||||
|
||||
//SVG file
|
||||
conf.optimize = optimize
|
||||
info("SVG output", filename)
|
||||
info("SVG optimization", optimize)
|
||||
info("SVG verification after generation", verify)
|
||||
|
||||
//Template
|
||||
info.break()
|
||||
info.section("Templates")
|
||||
info("Community templates", _templates)
|
||||
info("Template used", template)
|
||||
info("Query additional params", query)
|
||||
|
||||
//Core config
|
||||
info.break()
|
||||
info.group({metadata, name:"core", inputs:config})
|
||||
info("Plugin errors", die ? "(exit with error)" : "(displayed in generated SVG)")
|
||||
Object.assign(q, config)
|
||||
|
||||
//Base content
|
||||
info.break()
|
||||
const {base:parts, ...base} = metadata.plugins.base.inputs.action({core})
|
||||
info.group({metadata, name:"base", inputs:base})
|
||||
info("Base sections", parts)
|
||||
base.base = false
|
||||
for (const part of conf.settings.plugins.base.parts)
|
||||
base[`base.${part}`] = parts.includes(part)
|
||||
Object.assign(q, base)
|
||||
|
||||
//Additional plugins
|
||||
const plugins = {}
|
||||
for (const name of Object.keys(Plugins).filter(key => !["base", "core"].includes(key))) {
|
||||
//Parse inputs
|
||||
const {[name]:enabled, ...inputs} = metadata.plugins[name].inputs.action({core})
|
||||
plugins[name] = {enabled}
|
||||
//Register user inputs
|
||||
if (enabled) {
|
||||
info.break()
|
||||
info.group({metadata, name, inputs})
|
||||
q[name] = true
|
||||
for (const [key, value] of Object.entries(inputs)) {
|
||||
//Store token in plugin configuration
|
||||
if (metadata.plugins[name].inputs[key].type === "token")
|
||||
plugins[name][key] = value
|
||||
//Store value in query
|
||||
else
|
||||
q[`${name}.${key}`] = value
|
||||
//Skip process if needed
|
||||
if ((github.context.eventName === "push")&&(github.context.payload?.head_commit)) {
|
||||
if (/\[Skip GitHub Action\]/.test(github.context.payload.head_commit.message)) {
|
||||
console.log("Skipped because [Skip GitHub Action] is in commit message")
|
||||
process.exit(0)
|
||||
}
|
||||
}
|
||||
|
||||
//Load configuration
|
||||
const {conf, Plugins, Templates} = await setup({log:false, nosettings:true, community:{templates:core.getInput("setup_community_templates")}})
|
||||
const {metadata} = conf
|
||||
info("Setup", "complete")
|
||||
info("Version", conf.package.version)
|
||||
|
||||
//Core inputs
|
||||
const {
|
||||
user:_user, token,
|
||||
template, query, "setup.community.templates":_templates,
|
||||
filename, optimize, verify,
|
||||
debug, "debug.flags":dflags, "use.mocked.data":mocked, dryrun,
|
||||
"plugins.errors.fatal":die,
|
||||
"committer.token":_token, "committer.branch":_branch,
|
||||
"use.prebuilt.image":_image,
|
||||
...config
|
||||
} = metadata.plugins.core.inputs.action({core})
|
||||
const q = {...query, template}
|
||||
|
||||
//Docker image
|
||||
if (_image)
|
||||
info("Using prebuilt image", _image)
|
||||
|
||||
//Debug mode and flags
|
||||
info("Debug mode", debug)
|
||||
if (!debug) {
|
||||
console.debug = message => debugged.push(message)
|
||||
DEBUG = false
|
||||
}
|
||||
info("Debug flags", dflags)
|
||||
|
||||
//Token for data gathering
|
||||
info("GitHub token", token, {token:true})
|
||||
if (!token)
|
||||
throw new Error("You must provide a valid GitHub token to gather your metrics")
|
||||
conf.settings.token = token
|
||||
const api = {}
|
||||
api.graphql = octokit.graphql.defaults({headers:{authorization:`token ${token}`}})
|
||||
info("Github GraphQL API", "ok")
|
||||
api.rest = github.getOctokit(token)
|
||||
info("Github REST API", "ok")
|
||||
//Apply mocking if needed
|
||||
if (mocked) {
|
||||
Object.assign(api, await mocks(api))
|
||||
info("Use mocked API", true)
|
||||
}
|
||||
//Extract octokits
|
||||
const {graphql, rest} = api
|
||||
|
||||
//GitHub user
|
||||
let authenticated
|
||||
try {
|
||||
authenticated = (await rest.users.getAuthenticated()).data.login
|
||||
}
|
||||
catch {
|
||||
authenticated = github.context.repo.owner
|
||||
}
|
||||
const user = _user || authenticated
|
||||
info("GitHub account", user)
|
||||
|
||||
//Current repository
|
||||
info("Current repository", `${github.context.repo.owner}/${github.context.repo.repo}`)
|
||||
|
||||
//Committer
|
||||
const committer = {}
|
||||
if (!dryrun) {
|
||||
//Compute committer informations
|
||||
committer.commit = true
|
||||
committer.token = _token || token
|
||||
committer.branch = _branch || github.context.ref.replace(/^refs[/]heads[/]/, "")
|
||||
info("Committer token", committer.token, {token:true})
|
||||
if (!committer.token)
|
||||
throw new Error("You must provide a valid GitHub token to commit your metrics")
|
||||
info("Committer branch", committer.branch)
|
||||
//Instantiate API for committer
|
||||
committer.rest = github.getOctokit(committer.token)
|
||||
info("Committer REST API", "ok")
|
||||
try {
|
||||
info("Committer account", (await committer.rest.users.getAuthenticated()).data.login)
|
||||
}
|
||||
catch {
|
||||
info("Committer account", "(github-actions)")
|
||||
}
|
||||
//Retrieve previous render SHA to be able to update file content through API
|
||||
committer.sha = null
|
||||
try {
|
||||
const {repository:{object:{oid}}} = await graphql(`
|
||||
query Sha {
|
||||
repository(owner: "${github.context.repo.owner}", name: "${github.context.repo.repo}") {
|
||||
object(expression: "${committer.branch}:${filename}") { ... on Blob { oid } }
|
||||
}
|
||||
}
|
||||
`, {headers:{authorization:`token ${committer.token}`}})
|
||||
committer.sha = oid
|
||||
}
|
||||
catch (error) {
|
||||
console.debug(error)
|
||||
}
|
||||
info("Previous render sha", committer.sha ?? "(none)")
|
||||
}
|
||||
else
|
||||
info("Dry-run", true)
|
||||
|
||||
//SVG file
|
||||
conf.optimize = optimize
|
||||
info("SVG output", filename)
|
||||
info("SVG optimization", optimize)
|
||||
info("SVG verification after generation", verify)
|
||||
|
||||
//Template
|
||||
info.break()
|
||||
info.section("Templates")
|
||||
info("Community templates", _templates)
|
||||
info("Template used", template)
|
||||
info("Query additional params", query)
|
||||
|
||||
//Core config
|
||||
info.break()
|
||||
info.group({metadata, name:"core", inputs:config})
|
||||
info("Plugin errors", die ? "(exit with error)" : "(displayed in generated SVG)")
|
||||
Object.assign(q, config)
|
||||
|
||||
//Base content
|
||||
info.break()
|
||||
const {base:parts, ...base} = metadata.plugins.base.inputs.action({core})
|
||||
info.group({metadata, name:"base", inputs:base})
|
||||
info("Base sections", parts)
|
||||
base.base = false
|
||||
for (const part of conf.settings.plugins.base.parts)
|
||||
base[`base.${part}`] = parts.includes(part)
|
||||
Object.assign(q, base)
|
||||
|
||||
//Additional plugins
|
||||
const plugins = {}
|
||||
for (const name of Object.keys(Plugins).filter(key => !["base", "core"].includes(key))) {
|
||||
//Parse inputs
|
||||
const {[name]:enabled, ...inputs} = metadata.plugins[name].inputs.action({core})
|
||||
plugins[name] = {enabled}
|
||||
//Register user inputs
|
||||
if (enabled) {
|
||||
info.break()
|
||||
info.group({metadata, name, inputs})
|
||||
q[name] = true
|
||||
for (const [key, value] of Object.entries(inputs)) {
|
||||
//Store token in plugin configuration
|
||||
if (metadata.plugins[name].inputs[key].type === "token")
|
||||
plugins[name][key] = value
|
||||
//Store value in query
|
||||
else
|
||||
q[`${name}.${key}`] = value
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//Render metrics
|
||||
info.break()
|
||||
info.section("Rendering")
|
||||
const {rendered} = await metrics({login:user, q, dflags}, {graphql, rest, plugins, conf, die, verify}, {Plugins, Templates})
|
||||
info("Status", "complete")
|
||||
|
||||
//Commit metrics
|
||||
if (committer.commit) {
|
||||
await committer.rest.repos.createOrUpdateFileContents({
|
||||
...github.context.repo, path:filename, message:`Update ${filename} - [Skip GitHub Action]`,
|
||||
content:Buffer.from(rendered).toString("base64"),
|
||||
branch:committer.branch,
|
||||
...(committer.sha ? {sha:committer.sha} : {}),
|
||||
})
|
||||
info("Commit to repository", "success")
|
||||
}
|
||||
|
||||
//Success
|
||||
info.break()
|
||||
console.log("Success, thanks for using metrics!")
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
//Render metrics
|
||||
info.break()
|
||||
info.section("Rendering")
|
||||
const {rendered} = await metrics({login:user, q, dflags}, {graphql, rest, plugins, conf, die, verify}, {Plugins, Templates})
|
||||
info("Status", "complete")
|
||||
|
||||
//Commit metrics
|
||||
if (committer.commit) {
|
||||
await committer.rest.repos.createOrUpdateFileContents({
|
||||
...github.context.repo, path:filename, message:`Update ${filename} - [Skip GitHub Action]`,
|
||||
content:Buffer.from(rendered).toString("base64"),
|
||||
branch:committer.branch,
|
||||
...(committer.sha ? {sha:committer.sha} : {})
|
||||
})
|
||||
info("Commit to repository", "success")
|
||||
//Errors
|
||||
catch (error) {
|
||||
console.error(error)
|
||||
//Print debug buffer if debug was not enabled (if it is, it's already logged on the fly)
|
||||
if (!DEBUG) {
|
||||
for (const log of [info.break(), "An error occured, logging debug message :", ...debugged])
|
||||
console.log(log)
|
||||
}
|
||||
core.setFailed(error.message)
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
//Success
|
||||
info.break()
|
||||
console.log(`Success, thanks for using metrics!`)
|
||||
process.exit(0)
|
||||
}
|
||||
//Errors
|
||||
catch (error) {
|
||||
console.error(error)
|
||||
//Print debug buffer if debug was not enabled (if it is, it's already logged on the fly)
|
||||
if (!DEBUG)
|
||||
for (const log of [info.break(), "An error occured, logging debug message :", ...debugged])
|
||||
console.log(log)
|
||||
core.setFailed(error.message)
|
||||
process.exit(1)
|
||||
}
|
||||
})()
|
||||
@@ -1,8 +1,8 @@
|
||||
//Imports
|
||||
import util from "util"
|
||||
import ejs from "ejs"
|
||||
import SVGO from "svgo"
|
||||
import * as utils from "./utils.mjs"
|
||||
import ejs from "ejs"
|
||||
import util from "util"
|
||||
import SVGO from "svgo"
|
||||
|
||||
//Setup
|
||||
export default async function metrics({login, q, dflags = []}, {graphql, rest, plugins, conf, die = false, verify = false, convert = null}, {Plugins, Templates}) {
|
||||
@@ -23,7 +23,7 @@
|
||||
|
||||
//Initialization
|
||||
const pending = []
|
||||
const queries = conf.queries
|
||||
const {queries} = conf
|
||||
const data = {animated:true, base:{}, config:{}, errors:[], plugins:{}, computed:{}}
|
||||
const imports = {plugins:Plugins, templates:Templates, metadata:conf.metadata, ...utils}
|
||||
|
||||
@@ -47,7 +47,7 @@
|
||||
if (errors.length) {
|
||||
console.warn(`metrics/compute/${login} > ${errors.length} errors !`)
|
||||
if (die)
|
||||
throw new Error(`An error occured during rendering, dying`)
|
||||
throw new Error("An error occured during rendering, dying")
|
||||
else
|
||||
console.warn(util.inspect(errors, {depth:Infinity, maxStringLength:256}))
|
||||
}
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
//Imports
|
||||
import fs from "fs"
|
||||
import path from "path"
|
||||
import yaml from "js-yaml"
|
||||
import url from "url"
|
||||
import yaml from "js-yaml"
|
||||
|
||||
/** Metadata descriptor parser */
|
||||
/**Metadata descriptor parser */
|
||||
export default async function metadata({log = true} = {}) {
|
||||
//Paths
|
||||
const __metrics = path.join(path.dirname(url.fileURLToPath(import.meta.url)), "../../..")
|
||||
@@ -16,7 +16,7 @@
|
||||
|
||||
//Load plugins metadata
|
||||
let Plugins = {}
|
||||
logger(`metrics/metadata > loading plugins metadata`)
|
||||
logger("metrics/metadata > loading plugins metadata")
|
||||
for (const name of await fs.promises.readdir(__plugins)) {
|
||||
if (!(await fs.promises.lstat(path.join(__plugins, name))).isDirectory())
|
||||
continue
|
||||
@@ -29,7 +29,7 @@
|
||||
|
||||
//Load templates metadata
|
||||
let Templates = {}
|
||||
logger(`metrics/metadata > loading templates metadata`)
|
||||
logger("metrics/metadata > loading templates metadata")
|
||||
for (const name of await fs.promises.readdir(__templates)) {
|
||||
if (!(await fs.promises.lstat(path.join(__templates, name))).isDirectory())
|
||||
continue
|
||||
@@ -46,8 +46,8 @@
|
||||
return {plugins:Plugins, templates:Templates}
|
||||
}
|
||||
|
||||
/** Metadata extractor for templates */
|
||||
metadata.plugin = async function ({__plugins, name, logger}) {
|
||||
/**Metadata extractor for templates */
|
||||
metadata.plugin = async function({__plugins, name, logger}) {
|
||||
try {
|
||||
//Load meta descriptor
|
||||
const raw = `${await fs.promises.readFile(path.join(__plugins, name, "metadata.yml"), "utf-8")}`
|
||||
@@ -55,7 +55,7 @@
|
||||
|
||||
//Inputs parser
|
||||
{
|
||||
meta.inputs = function ({data:{user = null} = {}, q, account}, defaults = {}) {
|
||||
meta.inputs = function({data:{user = null} = {}, q, account}, defaults = {}) {
|
||||
//Support check
|
||||
if (!account)
|
||||
logger(`metrics/inputs > account type not set for plugin ${name}!`)
|
||||
@@ -142,7 +142,7 @@
|
||||
return value
|
||||
}
|
||||
}
|
||||
})(defaults[key] ?? defaulted)
|
||||
})(defaults[key] ?? defaulted),
|
||||
]))
|
||||
logger(`metrics/inputs > ${name} > ${JSON.stringify(result)}`)
|
||||
return result
|
||||
@@ -154,7 +154,7 @@
|
||||
{
|
||||
//Extract comments
|
||||
const comments = {}
|
||||
raw.split(/(\r?\n){2,}/m)
|
||||
raw.split(/(?:\r?\n){2,}/m)
|
||||
.map(x => x.trim()).filter(x => x)
|
||||
.map(x => x.split("\n").map(y => y.trim()).join("\n"))
|
||||
.map(x => {
|
||||
@@ -168,12 +168,12 @@
|
||||
key,
|
||||
{
|
||||
comment:comments[key] ?? "",
|
||||
descriptor:yaml.dump({[key]:Object.fromEntries(Object.entries(value).filter(([key]) => ["description", "default", "required"].includes(key)))}, {quotingType:'"', noCompatMode:true})
|
||||
}
|
||||
descriptor:yaml.dump({[key]:Object.fromEntries(Object.entries(value).filter(([key]) => ["description", "default", "required"].includes(key)))}, {quotingType:'"', noCompatMode:true}),
|
||||
},
|
||||
]))
|
||||
|
||||
//Action inputs
|
||||
meta.inputs.action = function ({core}) {
|
||||
meta.inputs.action = function({core}) {
|
||||
//Build query object from inputs
|
||||
const q = {}
|
||||
for (const key of Object.keys(inputs)) {
|
||||
@@ -207,15 +207,14 @@
|
||||
case "string":{
|
||||
if (Array.isArray(values))
|
||||
return {text, type:"select", values}
|
||||
else
|
||||
return {text, type:"text", placeholder:example ?? defaulted, defaulted}
|
||||
return {text, type:"text", placeholder:example ?? defaulted, defaulted}
|
||||
}
|
||||
case "json":
|
||||
return {text, type:"text", placeholder:example ?? defaulted, defaulted}
|
||||
default:
|
||||
return null
|
||||
}
|
||||
})()
|
||||
})(),
|
||||
]).filter(([key, value]) => (value)&&(key !== name)))
|
||||
}
|
||||
|
||||
@@ -241,8 +240,8 @@
|
||||
}
|
||||
}
|
||||
|
||||
/** Metadata extractor for templates */
|
||||
metadata.template = async function ({__templates, name, plugins, logger}) {
|
||||
/**Metadata extractor for templates */
|
||||
metadata.template = async function({__templates, name, plugins, logger}) {
|
||||
try {
|
||||
//Load meta descriptor
|
||||
const raw = `${await fs.promises.readFile(path.join(__templates, name, "README.md"), "utf-8")}`
|
||||
@@ -262,7 +261,7 @@
|
||||
return {
|
||||
name:raw.match(/^### (?<name>[\s\S]+?)\n/)?.groups?.name?.trim(),
|
||||
readme:{
|
||||
demo:raw.match(/(?<demo><table>[\s\S]*?<[/]table>)/)?.groups?.demo?.replace(/<[/]?(?:table|tr)>/g, "")?.trim() ?? (name === "community" ? `<td align="center">See <a href="/source/templates/community/README.md">documentation</a> 🌍</td>` : "<td></td>"),
|
||||
demo:raw.match(/(?<demo><table>[\s\S]*?<[/]table>)/)?.groups?.demo?.replace(/<[/]?(?:table|tr)>/g, "")?.trim() ?? (name === "community" ? "<td align=\"center\">See <a href=\"/source/templates/community/README.md\">documentation</a> 🌍</td>" : "<td></td>"),
|
||||
compatibility:{...compatibility, base:true},
|
||||
},
|
||||
}
|
||||
@@ -273,10 +272,10 @@
|
||||
}
|
||||
}
|
||||
|
||||
/** Metadata converters */
|
||||
/**Metadata converters */
|
||||
metadata.to = {
|
||||
query(key, {name = null} = {}) {
|
||||
key = key.replace(/^plugin_/, "").replace(/_/g, ".")
|
||||
return name ? key.replace(new RegExp(`^(${name}.)`, "g"), "") : key
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
@@ -1,17 +1,17 @@
|
||||
//Imports
|
||||
import fs from "fs"
|
||||
import metadata from "./metadata.mjs"
|
||||
import path from "path"
|
||||
import processes from "child_process"
|
||||
import util from "util"
|
||||
import url from "url"
|
||||
import processes from "child_process"
|
||||
import metadata from "./metadata.mjs"
|
||||
|
||||
//Templates and plugins
|
||||
const Templates = {}
|
||||
const Plugins = {}
|
||||
|
||||
/** Setup */
|
||||
export default async function ({log = true, nosettings = false, community = {}} = {}) {
|
||||
/**Setup */
|
||||
export default async function({log = true, nosettings = false, community = {}} = {}) {
|
||||
|
||||
//Paths
|
||||
const __metrics = path.join(path.dirname(url.fileURLToPath(import.meta.url)), "../../..")
|
||||
@@ -24,7 +24,7 @@
|
||||
|
||||
//Init
|
||||
const logger = log ? console.debug : () => null
|
||||
logger(`metrics/setup > setup`)
|
||||
logger("metrics/setup > setup")
|
||||
const conf = {
|
||||
templates:{},
|
||||
queries:{},
|
||||
@@ -34,21 +34,21 @@
|
||||
statics:__statics,
|
||||
templates:__templates,
|
||||
node_modules:__modules,
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
//Load settings
|
||||
logger(`metrics/setup > load settings.json`)
|
||||
logger("metrics/setup > load settings.json")
|
||||
if (fs.existsSync(__settings)) {
|
||||
if (nosettings)
|
||||
logger(`metrics/setup > load settings.json > skipped because no settings is enabled`)
|
||||
logger("metrics/setup > load settings.json > skipped because no settings is enabled")
|
||||
else {
|
||||
conf.settings = JSON.parse(`${await fs.promises.readFile(__settings)}`)
|
||||
logger(`metrics/setup > load settings.json > success`)
|
||||
logger("metrics/setup > load settings.json > success")
|
||||
}
|
||||
}
|
||||
else
|
||||
logger(`metrics/setup > load settings.json > (missing)`)
|
||||
logger("metrics/setup > load settings.json > (missing)")
|
||||
if (!conf.settings.templates)
|
||||
conf.settings.templates = {default:"classic", enabled:[]}
|
||||
if (!conf.settings.plugins)
|
||||
@@ -59,13 +59,13 @@
|
||||
logger(util.inspect(conf.settings, {depth:Infinity, maxStringLength:256}))
|
||||
|
||||
//Load package settings
|
||||
logger(`metrics/setup > load package.json`)
|
||||
logger("metrics/setup > load package.json")
|
||||
conf.package = JSON.parse(`${await fs.promises.readFile(__package)}`)
|
||||
logger(`metrics/setup > load package.json > success`)
|
||||
logger("metrics/setup > load package.json > success")
|
||||
|
||||
//Load community templates
|
||||
if ((typeof conf.settings.community.templates === "string")&&(conf.settings.community.templates.length)) {
|
||||
logger(`metrics/setup > parsing community templates list`)
|
||||
logger("metrics/setup > parsing community templates list")
|
||||
conf.settings.community.templates = [...new Set([...decodeURIComponent(conf.settings.community.templates).split(",").map(v => v.trim().toLocaleLowerCase()).filter(v => v)])]
|
||||
}
|
||||
if ((Array.isArray(conf.settings.community.templates))&&(conf.settings.community.templates.length)) {
|
||||
@@ -77,7 +77,7 @@
|
||||
try {
|
||||
//Parse community template
|
||||
logger(`metrics/setup > load community template ${template}`)
|
||||
const {repo, branch, name, trust = false} = template.match(/^(?<repo>[\s\S]+?)@(?<branch>[\s\S]+?):(?<name>[\s\S]+?)(?<trust>[+]trust)?$/)?.groups
|
||||
const {repo, branch, name, trust = false} = template.match(/^(?<repo>[\s\S]+?)@(?<branch>[\s\S]+?):(?<name>[\s\S]+?)(?<trust>[+]trust)?$/)?.groups ?? null
|
||||
const command = `git clone --single-branch --branch ${branch} https://github.com/${repo}.git ${path.join(__templates, ".community")}`
|
||||
logger(`metrics/setup > run ${command}`)
|
||||
//Clone remote repository
|
||||
@@ -99,14 +99,15 @@
|
||||
logger(`metrics/setup > clean ${repo}@${branch}`)
|
||||
await fs.promises.rmdir(path.join(__templates, ".community"), {recursive:true})
|
||||
logger(`metrics/setup > loaded community template ${name}`)
|
||||
} catch (error) {
|
||||
}
|
||||
catch (error) {
|
||||
logger(`metrics/setup > failed to load community template ${template}`)
|
||||
logger(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
logger(`metrics/setup > no community templates to install`)
|
||||
logger("metrics/setup > no community templates to install")
|
||||
|
||||
//Load templates
|
||||
for (const name of await fs.promises.readdir(__templates)) {
|
||||
@@ -122,7 +123,7 @@
|
||||
conf.templates[name] = {image, style, fonts, partials, views:[directory]}
|
||||
|
||||
//Cache templates scripts
|
||||
Templates[name] = await (async () => {
|
||||
Templates[name] = await (async() => {
|
||||
const template = path.join(directory, "template.mjs")
|
||||
const fallback = path.join(__templates, "classic", "template.mjs")
|
||||
return (await import(url.pathToFileURL(fs.existsSync(template) ? template : fallback).href)).default
|
||||
@@ -137,7 +138,7 @@
|
||||
const partials = JSON.parse(`${fs.readFileSync(path.join(directory, "partials/_.json"))}`)
|
||||
logger(`metrics/setup > reload template [${name}] > success`)
|
||||
return {image, style, fonts, partials, views:[directory]}
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -156,11 +157,12 @@
|
||||
const __queries = path.join(directory, "queries")
|
||||
if (fs.existsSync(__queries)) {
|
||||
//Alias for default query
|
||||
const queries = conf.queries[name] = function () {
|
||||
const queries = function() {
|
||||
if (!queries[name])
|
||||
throw new ReferenceError(`Default query for ${name} undefined`)
|
||||
return queries[name](...arguments)
|
||||
}
|
||||
conf.queries[name] = queries
|
||||
//Load queries
|
||||
for (const file of await fs.promises.readdir(__queries)) {
|
||||
//Cache queries
|
||||
@@ -176,7 +178,7 @@
|
||||
const raw = `${fs.readFileSync(path.join(__queries, file))}`
|
||||
logger(`metrics/setup > reload query [${name}/${query}] > success`)
|
||||
return raw
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -194,10 +196,12 @@
|
||||
conf.metadata = await metadata({log})
|
||||
|
||||
//Set no token property
|
||||
Object.defineProperty(conf.settings, "notoken", {get() { return conf.settings.token === "NOT_NEEDED" }})
|
||||
Object.defineProperty(conf.settings, "notoken", {get() {
|
||||
return conf.settings.token === "NOT_NEEDED"
|
||||
}})
|
||||
|
||||
//Conf
|
||||
logger(`metrics/setup > setup > success`)
|
||||
logger("metrics/setup > setup > success")
|
||||
return {Templates, Plugins, conf}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,48 +8,50 @@
|
||||
import axios from "axios"
|
||||
import puppeteer from "puppeteer"
|
||||
import imgb64 from "image-to-base64"
|
||||
import dayjs from 'dayjs';
|
||||
import utc from 'dayjs/plugin/utc.js';
|
||||
dayjs.extend(utc);
|
||||
import dayjs from "dayjs"
|
||||
import utc from "dayjs/plugin/utc.js"
|
||||
dayjs.extend(utc)
|
||||
|
||||
export {fs, os, paths, url, util, processes, axios, puppeteer, imgb64, dayjs};
|
||||
export {fs, os, paths, url, util, processes, axios, puppeteer, imgb64, dayjs}
|
||||
|
||||
/** Returns module __dirname */
|
||||
/**Returns module __dirname */
|
||||
export function __module(module) {
|
||||
return paths.join(paths.dirname(url.fileURLToPath(module)))
|
||||
}
|
||||
|
||||
/** Plural formatter */
|
||||
/**Plural formatter */
|
||||
export function s(value, end = "") {
|
||||
return value !== 1 ? {y:"ies", "":"s"}[end] : end
|
||||
}
|
||||
|
||||
/** Formatter */
|
||||
/**Formatter */
|
||||
export function format(n, {sign = false} = {}) {
|
||||
for (const {u, v} of [{u:"b", v:10**9}, {u:"m", v:10**6}, {u:"k", v:10**3}])
|
||||
for (const {u, v} of [{u:"b", v:10**9}, {u:"m", v:10**6}, {u:"k", v:10**3}]) {
|
||||
if (n/v >= 1)
|
||||
return `${(sign)&&(n > 0) ? "+" : ""}${(n/v).toFixed(2).substr(0, 4).replace(/[.]0*$/, "")}${u}`
|
||||
}
|
||||
return `${(sign)&&(n > 0) ? "+" : ""}${n}`
|
||||
}
|
||||
|
||||
/** Bytes formatter */
|
||||
/**Bytes formatter */
|
||||
export function bytes(n) {
|
||||
for (const {u, v} of [{u:"E", v:10**18}, {u:"P", v:10**15}, {u:"T", v:10**12}, {u:"G", v:10**9}, {u:"M", v:10**6}, {u:"k", v:10**3}])
|
||||
for (const {u, v} of [{u:"E", v:10**18}, {u:"P", v:10**15}, {u:"T", v:10**12}, {u:"G", v:10**9}, {u:"M", v:10**6}, {u:"k", v:10**3}]) {
|
||||
if (n/v >= 1)
|
||||
return `${(n/v).toFixed(2).substr(0, 4).replace(/[.]0*$/, "")} ${u}B`
|
||||
}
|
||||
return `${n} byte${n > 1 ? "s" : ""}`
|
||||
}
|
||||
format.bytes = bytes
|
||||
|
||||
/** Percentage formatter */
|
||||
/**Percentage formatter */
|
||||
export function percentage(n, {rescale = true} = {}) {
|
||||
return `${(n*(rescale ? 100 : 1)).toFixed(2)
|
||||
.replace(/(?<=[.])([1-9]*)(0+)$/, (m, a, b) => a)
|
||||
.replace(/(?<=[.])(?<decimal>[1-9]*)0+$/, "$<decimal>")
|
||||
.replace(/[.]$/, "")}%`
|
||||
}
|
||||
format.percentage = percentage
|
||||
|
||||
/** Text ellipsis formatter */
|
||||
/**Text ellipsis formatter */
|
||||
export function ellipsis(text, {length = 20} = {}) {
|
||||
text = `${text}`
|
||||
if (text.length < length)
|
||||
@@ -58,7 +60,7 @@
|
||||
}
|
||||
format.ellipsis = ellipsis
|
||||
|
||||
/** Array shuffler */
|
||||
/**Array shuffler */
|
||||
export function shuffle(array) {
|
||||
for (let i = array.length-1; i > 0; i--) {
|
||||
const j = Math.floor(Math.random()*(i+1))
|
||||
@@ -67,7 +69,7 @@
|
||||
return array
|
||||
}
|
||||
|
||||
/** Escape html */
|
||||
/**Escape html */
|
||||
export function htmlescape(string, u = {"&":true, "<":true, ">":true, '"':true, "'":true}) {
|
||||
return string
|
||||
.replace(/&(?!(?:amp|lt|gt|quot|apos);)/g, u["&"] ? "&" : "&")
|
||||
@@ -77,18 +79,19 @@
|
||||
.replace(/'/g, u["'"] ? "'" : "'")
|
||||
}
|
||||
|
||||
/** Expand url */
|
||||
/**Expand url */
|
||||
export async function urlexpand(url) {
|
||||
try {
|
||||
return (await axios.get(url)).request.res.responseUrl
|
||||
} catch {
|
||||
}
|
||||
catch {
|
||||
return url
|
||||
}
|
||||
}
|
||||
|
||||
/** Run command */
|
||||
/**Run command */
|
||||
export async function run(command, options) {
|
||||
return await new Promise((solve, reject) => {
|
||||
return new Promise((solve, reject) => {
|
||||
console.debug(`metrics/command > ${command}`)
|
||||
const child = processes.exec(command, options)
|
||||
let [stdout, stderr] = ["", ""]
|
||||
@@ -101,7 +104,7 @@
|
||||
})
|
||||
}
|
||||
|
||||
/** Render svg */
|
||||
/**Render svg */
|
||||
export async function svgresize(svg, {paddings = ["6%"], convert} = {}) {
|
||||
//Instantiate browser if needed
|
||||
if (!svgresize.browser) {
|
||||
@@ -144,7 +147,7 @@
|
||||
return {resized, mime}
|
||||
}
|
||||
|
||||
/** Wait */
|
||||
/**Wait */
|
||||
export async function wait(seconds) {
|
||||
await new Promise(solve => setTimeout(solve, seconds*1000))
|
||||
}
|
||||
await new Promise(solve => setTimeout(solve, seconds*1000)) //eslint-disable-line no-promise-executor-return
|
||||
}
|
||||
|
||||
5
source/app/mocks/.eslintrc.yml
Normal file
5
source/app/mocks/.eslintrc.yml
Normal file
@@ -0,0 +1,5 @@
|
||||
# Overrides enforced rules for mocks
|
||||
rules:
|
||||
max-params: off
|
||||
no-unused-vars: off
|
||||
prefer-named-capture-group: off
|
||||
@@ -1,5 +1,5 @@
|
||||
/** Mocked data */
|
||||
export default function ({faker, url, options, login = faker.internet.userName()}) {
|
||||
/**Mocked data */
|
||||
export default function({faker, url, options, login = faker.internet.userName()}) {
|
||||
//Last.fm api
|
||||
if (/^https:..ws.audioscrobbler.com.*$/.test(url)) {
|
||||
//Get recently played tracks
|
||||
@@ -63,4 +63,4 @@
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/** Mocked data */
|
||||
export default function ({faker, url, options, login = faker.internet.userName()}) {
|
||||
/**Mocked data */
|
||||
export default function({faker, url, options, login = faker.internet.userName()}) {
|
||||
//Tested url
|
||||
const tested = url.match(/&url=(?<tested>.*?)(?:&|$)/)?.groups?.tested ?? faker.internet.url()
|
||||
//Pagespeed api
|
||||
@@ -20,17 +20,17 @@
|
||||
"final-screenshot":{
|
||||
id:"final-screenshot",
|
||||
title:"Final Screenshot",
|
||||
score: null,
|
||||
score:null,
|
||||
details:{
|
||||
data:"data:image/jpg;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mOcOnfpfwAGfgLYttYINwAAAABJRU5ErkJggg==",
|
||||
type:"screenshot",
|
||||
timestamp:Date.now()
|
||||
}
|
||||
timestamp:Date.now(),
|
||||
},
|
||||
},
|
||||
metrics:{
|
||||
id:"metrics",
|
||||
title:"Metrics",
|
||||
score: null,
|
||||
score:null,
|
||||
details:{
|
||||
items:[
|
||||
{
|
||||
@@ -68,9 +68,9 @@
|
||||
interactive:faker.random.number(1000),
|
||||
observedNavigationStartTs:faker.time.recent(),
|
||||
observedNavigationStart:faker.random.number(10),
|
||||
observedFirstMeaningfulPaintTs:faker.time.recent()
|
||||
observedFirstMeaningfulPaintTs:faker.time.recent(),
|
||||
},
|
||||
]
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -90,16 +90,16 @@
|
||||
title:"Accessibility",
|
||||
score:faker.random.float({max:1}),
|
||||
},
|
||||
performance: {
|
||||
performance:{
|
||||
id:"performance",
|
||||
title:"Performance",
|
||||
score:faker.random.float({max:1}),
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
analysisUTCTimestamp:`${faker.date.recent()}`,
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/** Mocked data */
|
||||
export default function ({faker, url, options, login = faker.internet.userName()}) {
|
||||
/**Mocked data */
|
||||
export default function({faker, url, options, login = faker.internet.userName()}) {
|
||||
//Spotify api
|
||||
if (/^https:..api.spotify.com.*$/.test(url)) {
|
||||
//Get recently played tracks
|
||||
@@ -19,24 +19,24 @@
|
||||
{
|
||||
name:artist,
|
||||
type:"artist",
|
||||
}
|
||||
},
|
||||
],
|
||||
images:[
|
||||
{
|
||||
height:640,
|
||||
url:faker.image.abstract(),
|
||||
width:640
|
||||
width:640,
|
||||
},
|
||||
{
|
||||
height:300,
|
||||
url:faker.image.abstract(),
|
||||
width:300
|
||||
width:300,
|
||||
},
|
||||
{
|
||||
height:64,
|
||||
url:faker.image.abstract(),
|
||||
width:64
|
||||
}
|
||||
width:64,
|
||||
},
|
||||
],
|
||||
name:track,
|
||||
release_date:`${faker.date.past()}`.substring(0, 10),
|
||||
@@ -46,7 +46,7 @@
|
||||
{
|
||||
name:artist,
|
||||
type:"artist",
|
||||
}
|
||||
},
|
||||
],
|
||||
name:track,
|
||||
preview_url:faker.internet.url(),
|
||||
@@ -55,11 +55,11 @@
|
||||
played_at:`${faker.date.recent()}`,
|
||||
context:{
|
||||
type:"album",
|
||||
}
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/** Mocked data */
|
||||
export default function ({faker, url, options, login = faker.internet.userName()}) {
|
||||
/**Mocked data */
|
||||
export default function({faker, url, options, login = faker.internet.userName()}) {
|
||||
//Twitter api
|
||||
if (/^https:..api.twitter.com.*$/.test(url)) {
|
||||
//Get user profile
|
||||
@@ -16,7 +16,7 @@
|
||||
id:faker.random.number(1000000).toString(),
|
||||
username,
|
||||
},
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
//Get recent tweets
|
||||
@@ -40,7 +40,7 @@
|
||||
id:faker.random.number(100000000000000).toString(),
|
||||
created_at:`${faker.date.recent()}`,
|
||||
text:faker.lorem.paragraph(),
|
||||
}
|
||||
},
|
||||
],
|
||||
includes:{
|
||||
users:[
|
||||
@@ -49,7 +49,7 @@
|
||||
name:"lowlighter",
|
||||
username:"lowlighter",
|
||||
},
|
||||
]
|
||||
],
|
||||
},
|
||||
meta:{
|
||||
newest_id:faker.random.number(100000000000000).toString(),
|
||||
@@ -57,8 +57,8 @@
|
||||
result_count:2,
|
||||
next_token:"MOCKED_CURSOR",
|
||||
},
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,14 +1,16 @@
|
||||
/** Mocked data */
|
||||
export default function ({faker, url, options, login = faker.internet.userName()}) {
|
||||
/**Mocked data */
|
||||
export default function({faker, url, options, login = faker.internet.userName()}) {
|
||||
//Wakatime api
|
||||
if (/^https:..wakatime.com.api.v1.users.current.stats.*$/.test(url)) {
|
||||
//Get user profile
|
||||
if (/api_key=MOCKED_TOKEN/.test(url)) {
|
||||
console.debug(`metrics/compute/mocks > mocking wakatime api result > ${url}`)
|
||||
const stats = (array) => {
|
||||
const stats = array => {
|
||||
const elements = []
|
||||
let results = new Array(4+faker.random.number(2)).fill(null).map(_ => ({
|
||||
get digital() { return `${this.hours}:${this.minutes}` },
|
||||
get digital() {
|
||||
return `${this.hours}:${this.minutes}`
|
||||
},
|
||||
hours:faker.random.number(1000), minutes:faker.random.number(1000),
|
||||
name:array ? faker.random.arrayElement(array) : faker.random.words(2).replace(/ /g, "-").toLocaleLowerCase(),
|
||||
percent:0, total_seconds:faker.random.number(1000000),
|
||||
@@ -43,8 +45,8 @@
|
||||
total_seconds:faker.random.number(1000000000),
|
||||
total_seconds_including_other_language:faker.random.number(1000000000),
|
||||
},
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
/** Mocked data */
|
||||
export default function ({faker, url, body, login = faker.internet.userName()}) {
|
||||
/**Mocked data */
|
||||
export default function({faker, url, body, login = faker.internet.userName()}) {
|
||||
if (/^https:..graphql.anilist.co.*$/.test(url)) {
|
||||
//Initialization and media generator
|
||||
const query = body.query
|
||||
const {query} = body
|
||||
const media = ({type}) => ({
|
||||
title:{romaji:faker.lorem.words(), english:faker.lorem.words(), native:faker.lorem.words()},
|
||||
description:faker.lorem.paragraphs(),
|
||||
@@ -15,11 +15,11 @@
|
||||
countryOfOrigin:"JP",
|
||||
genres:new Array(6).fill(null).map(_ => faker.lorem.word()),
|
||||
coverImage:{medium:null},
|
||||
startDate:{year:faker.date.past(20).getFullYear()}
|
||||
startDate:{year:faker.date.past(20).getFullYear()},
|
||||
})
|
||||
//User statistics query
|
||||
if (/^query Statistics /.test(query)) {
|
||||
console.debug(`metrics/compute/mocks > mocking anilist api result > Statistics`)
|
||||
console.debug("metrics/compute/mocks > mocking anilist api result > Statistics")
|
||||
return ({
|
||||
status:200,
|
||||
data:{
|
||||
@@ -41,15 +41,15 @@
|
||||
volumesRead:faker.random.number(10000),
|
||||
genres:new Array(4).fill(null).map(_ => ({genre:faker.lorem.word()})),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
//Favorites characters
|
||||
if (/^query FavoritesCharacters /.test(query)) {
|
||||
console.debug(`metrics/compute/mocks > mocking anilist api result > Favorites characters`)
|
||||
console.debug("metrics/compute/mocks > mocking anilist api result > Favorites characters")
|
||||
return ({
|
||||
status:200,
|
||||
data:{
|
||||
@@ -59,20 +59,19 @@
|
||||
characters:{
|
||||
nodes:new Array(2+faker.random.number(16)).fill(null).map(_ => ({
|
||||
name:{full:faker.name.findName(), native:faker.name.findName()},
|
||||
image:{medium:null}
|
||||
}),
|
||||
),
|
||||
pageInfo:{currentPage:1, hasNextPage:false}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
image:{medium:null},
|
||||
})),
|
||||
pageInfo:{currentPage:1, hasNextPage:false},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
//Favorites anime/manga query
|
||||
if (/^query Favorites /.test(query)) {
|
||||
console.debug(`metrics/compute/mocks > mocking anilist api result > Favorites`)
|
||||
console.debug("metrics/compute/mocks > mocking anilist api result > Favorites")
|
||||
const type = /anime[(]/.test(query) ? "ANIME" : /manga[(]/.test(query) ? "MANGA" : "OTHER"
|
||||
return ({
|
||||
status:200,
|
||||
@@ -83,17 +82,17 @@
|
||||
[type.toLocaleLowerCase()]:{
|
||||
nodes:new Array(16).fill(null).map(_ => media({type})),
|
||||
pageInfo:{currentPage:1, hasNextPage:false},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
//Medias query
|
||||
if (/^query Medias /.test(query)) {
|
||||
console.debug(`metrics/compute/mocks > mocking anilist api result > Medias`)
|
||||
const type = body.variables.type
|
||||
console.debug("metrics/compute/mocks > mocking anilist api result > Medias")
|
||||
const {type} = body.variables
|
||||
return ({
|
||||
status:200,
|
||||
data:{
|
||||
@@ -106,19 +105,19 @@
|
||||
entries:new Array(16).fill(null).map(_ => ({
|
||||
status:faker.random.arrayElement(["CURRENT", "PLANNING", "COMPLETED", "DROPPED", "PAUSED", "REPEATING"]),
|
||||
progress:faker.random.number(100),
|
||||
progressVolumes: null,
|
||||
progressVolumes:null,
|
||||
score:0,
|
||||
startedAt:{year:null, month:null, day:null},
|
||||
completedAt:{year:null, month:null, day:null},
|
||||
media:media({type})
|
||||
media:media({type}),
|
||||
})),
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
//Imports
|
||||
import urls from "url"
|
||||
|
||||
/** Mocked data */
|
||||
export default function ({faker, url, body, login = faker.internet.userName()}) {
|
||||
/**Mocked data */
|
||||
export default function({faker, url, body, login = faker.internet.userName()}) {
|
||||
if (/^https:..accounts.spotify.com.api.token.*$/.test(url)) {
|
||||
//Access token generator
|
||||
const params = new urls.URLSearchParams(body)
|
||||
@@ -15,8 +15,8 @@
|
||||
token_type:"Bearer",
|
||||
expires_in:3600,
|
||||
scope:"user-read-recently-played user-read-private",
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,19 +1,19 @@
|
||||
/** Mocked data */
|
||||
export default function ({faker, query, login = faker.internet.userName()}) {
|
||||
console.debug(`metrics/compute/mocks > mocking graphql api result > base/repositories`)
|
||||
/**Mocked data */
|
||||
export default function({faker, query, login = faker.internet.userName()}) {
|
||||
console.debug("metrics/compute/mocks > mocking graphql api result > base/repositories")
|
||||
return /after: "MOCKED_CURSOR"/m.test(query) ? ({
|
||||
user:{
|
||||
repositories:{
|
||||
edges:[],
|
||||
nodes:[],
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
}) : ({
|
||||
user:{
|
||||
repositories:{
|
||||
edges:[
|
||||
{
|
||||
cursor:"MOCKED_CURSOR"
|
||||
cursor:"MOCKED_CURSOR",
|
||||
},
|
||||
],
|
||||
nodes:[
|
||||
@@ -31,7 +31,7 @@
|
||||
{size:faker.random.number(100000), node:{color:faker.internet.color(), name:faker.lorem.word()}},
|
||||
{size:faker.random.number(100000), node:{color:faker.internet.color(), name:faker.lorem.word()}},
|
||||
{size:faker.random.number(100000), node:{color:faker.internet.color(), name:faker.lorem.word()}},
|
||||
]
|
||||
],
|
||||
},
|
||||
issues_open:{totalCount:faker.random.number(100)},
|
||||
issues_closed:{totalCount:faker.random.number(100)},
|
||||
@@ -39,10 +39,10 @@
|
||||
pr_merged:{totalCount:faker.random.number(100)},
|
||||
releases:{totalCount:faker.random.number(100)},
|
||||
forkCount:faker.random.number(100),
|
||||
licenseInfo:{spdxId:"MIT"}
|
||||
licenseInfo:{spdxId:"MIT"},
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
/** Mocked data */
|
||||
export default function ({faker, query, login = faker.internet.userName()}) {
|
||||
console.debug(`metrics/compute/mocks > mocking graphql api result > base/repository`)
|
||||
/**Mocked data */
|
||||
export default function({faker, query, login = faker.internet.userName()}) {
|
||||
console.debug("metrics/compute/mocks > mocking graphql api result > base/repository")
|
||||
return ({
|
||||
user:{
|
||||
repository:{
|
||||
@@ -20,7 +20,7 @@
|
||||
{size:faker.random.number(100000), node:{color:faker.internet.color(), name:faker.lorem.word()}},
|
||||
{size:faker.random.number(100000), node:{color:faker.internet.color(), name:faker.lorem.word()}},
|
||||
{size:faker.random.number(100000), node:{color:faker.internet.color(), name:faker.lorem.word()}},
|
||||
]
|
||||
],
|
||||
},
|
||||
issues_open:{totalCount:faker.random.number(100)},
|
||||
issues_closed:{totalCount:faker.random.number(100)},
|
||||
@@ -28,8 +28,8 @@
|
||||
pr_merged:{totalCount:faker.random.number(100)},
|
||||
releases:{totalCount:faker.random.number(100)},
|
||||
forkCount:faker.random.number(100),
|
||||
licenseInfo:{spdxId:"MIT"}
|
||||
licenseInfo:{spdxId:"MIT"},
|
||||
},
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
/** Mocked data */
|
||||
export default function ({faker, query, login = faker.internet.userName()}) {
|
||||
console.debug(`metrics/compute/mocks > mocking graphql api result > base/user`)
|
||||
/**Mocked data */
|
||||
export default function({faker, query, login = faker.internet.userName()}) {
|
||||
console.debug("metrics/compute/mocks > mocking graphql api result > base/user")
|
||||
return ({
|
||||
user: {
|
||||
user:{
|
||||
databaseId:faker.random.number(10000000),
|
||||
name:faker.name.findName(),
|
||||
login,
|
||||
@@ -35,7 +35,7 @@
|
||||
{color:faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"])},
|
||||
{color:faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"])},
|
||||
{color:faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"])},
|
||||
]
|
||||
],
|
||||
},
|
||||
{
|
||||
contributionDays:[
|
||||
@@ -46,23 +46,23 @@
|
||||
{color:faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"])},
|
||||
{color:faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"])},
|
||||
{color:faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"])},
|
||||
]
|
||||
],
|
||||
},
|
||||
{
|
||||
contributionDays:[
|
||||
{color:faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"])},
|
||||
{color:faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"])},
|
||||
{color:faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"])},
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
repositoriesContributedTo:{totalCount:faker.random.number(100)},
|
||||
followers:{totalCount:faker.random.number(1000)},
|
||||
following:{totalCount:faker.random.number(1000)},
|
||||
issueComments:{totalCount:faker.random.number(1000)},
|
||||
organizations:{totalCount:faker.random.number(10)}
|
||||
}
|
||||
organizations:{totalCount:faker.random.number(10)},
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,19 +1,19 @@
|
||||
/** Mocked data */
|
||||
export default function ({faker, query, login = faker.internet.userName()}) {
|
||||
console.debug(`metrics/compute/mocks > mocking graphql api result > gists/default`)
|
||||
/**Mocked data */
|
||||
export default function({faker, query, login = faker.internet.userName()}) {
|
||||
console.debug("metrics/compute/mocks > mocking graphql api result > gists/default")
|
||||
return /after: "MOCKED_CURSOR"/m.test(query) ? ({
|
||||
user:{
|
||||
gists:{
|
||||
edges:[],
|
||||
nodes:[],
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
}) : ({
|
||||
user:{
|
||||
gists:{
|
||||
edges:[
|
||||
{
|
||||
cursor:"MOCKED_CURSOR"
|
||||
cursor:"MOCKED_CURSOR",
|
||||
},
|
||||
],
|
||||
totalCount:faker.random.number(100),
|
||||
@@ -23,17 +23,17 @@
|
||||
isFork:false,
|
||||
forks:{totalCount:faker.random.number(10)},
|
||||
files:[{name:faker.system.fileName()}],
|
||||
comments:{totalCount:faker.random.number(10)}
|
||||
comments:{totalCount:faker.random.number(10)},
|
||||
},
|
||||
{
|
||||
stargazerCount:faker.random.number(10),
|
||||
isFork:false,
|
||||
forks:{totalCount:faker.random.number(10)},
|
||||
files:[{name:faker.system.fileName()}],
|
||||
comments:{totalCount:faker.random.number(10)}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
comments:{totalCount:faker.random.number(10)},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
/** Mocked data */
|
||||
export default function ({faker, query, login = faker.internet.userName()}) {
|
||||
console.debug(`metrics/compute/mocks > mocking graphql api result > isocalendar/calendar`)
|
||||
/**Mocked data */
|
||||
export default function({faker, query, login = faker.internet.userName()}) {
|
||||
console.debug("metrics/compute/mocks > mocking graphql api result > isocalendar/calendar")
|
||||
//Generate calendar
|
||||
const date = new Date(query.match(/from: "(?<date>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z)"/)?.groups?.date)
|
||||
const to = new Date(query.match(/to: "(?<date>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z)"/)?.groups?.date)
|
||||
@@ -17,16 +17,16 @@
|
||||
contributionDays.push({
|
||||
contributionCount,
|
||||
color:["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"][Math.ceil(contributionCount/10/0.25)],
|
||||
date:date.toISOString().substring(0, 10)
|
||||
date:date.toISOString().substring(0, 10),
|
||||
})
|
||||
}
|
||||
return ({
|
||||
user: {
|
||||
user:{
|
||||
calendar:{
|
||||
contributionCalendar:{
|
||||
weeks
|
||||
}
|
||||
}
|
||||
}
|
||||
weeks,
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
/** Mocked data */
|
||||
export default function ({faker, query, login = faker.internet.userName()}) {
|
||||
console.debug(`metrics/compute/mocks > mocking graphql api result > people/default`)
|
||||
/**Mocked data */
|
||||
export default function({faker, query, login = faker.internet.userName()}) {
|
||||
console.debug("metrics/compute/mocks > mocking graphql api result > people/default")
|
||||
const type = query.match(/(?<type>followers|following)[(]/)?.groups?.type ?? "(unknown type)"
|
||||
return /after: "MOCKED_CURSOR"/m.test(query) ? ({
|
||||
user:{
|
||||
[type]:{
|
||||
edges:[],
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
}) : ({
|
||||
user:{
|
||||
[type]:{
|
||||
@@ -16,9 +16,9 @@
|
||||
node:{
|
||||
login,
|
||||
avatarUrl:null,
|
||||
}
|
||||
}))
|
||||
}
|
||||
}
|
||||
},
|
||||
})),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,15 +1,15 @@
/** Mocked data */
export default function ({faker, query, login = faker.internet.userName()}) {
console.debug(`metrics/compute/mocks > mocking graphql api result > People`)
/**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) {
console.debug("metrics/compute/mocks > mocking graphql api result > People")
const type = query.match(/(?<type>stargazers|watchers)[(]/)?.groups?.type ?? "(unknown type)"
return /after: "MOCKED_CURSOR"/m.test(query) ? ({
user:{
repository:{
[type]:{
edges:[],
}
}
}
},
},
},
}) : ({
user:{
repository:{
@@ -19,10 +19,10 @@
node:{
login,
avatarUrl:null,
}
}))
}
}
}
},
})),
},
},
},
})
}
}

@@ -1,14 +1,14 @@
/** Mocked data */
export default function ({faker, query, login = faker.internet.userName()}) {
console.debug(`metrics/compute/mocks > mocking graphql api result > People`)
/**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) {
console.debug("metrics/compute/mocks > mocking graphql api result > People")
const type = query.match(/(?<type>sponsorshipsAsSponsor|sponsorshipsAsMaintainer)[(]/)?.groups?.type ?? "(unknown type)"
return /after: "MOCKED_CURSOR"/m.test(query) ? ({
user:{
login,
[type]:{
edges:[]
}
}
edges:[],
},
},
}) : ({
user:{
login,
@@ -23,10 +23,10 @@
sponsorable:{
login:faker.internet.userName(),
avatarUrl:null,
}
}
}))
}
}
},
},
})),
},
},
})
}
}

@@ -1,6 +1,6 @@
/** Mocked data */
export default function ({faker, query, login = faker.internet.userName()}) {
console.debug(`metrics/compute/mocks > mocking graphql api result > projects/repository`)
/**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) {
console.debug("metrics/compute/mocks > mocking graphql api result > projects/repository")
return ({
user:{
repository:{
@@ -12,10 +12,10 @@
doneCount:faker.random.number(10),
inProgressCount:faker.random.number(10),
todoCount:faker.random.number(10),
enabled:true
}
}
}
}
enabled:true,
},
},
},
},
})
}
}

@@ -1,6 +1,6 @@
/** Mocked data */
export default function ({faker, query, login = faker.internet.userName()}) {
console.debug(`metrics/compute/mocks > mocking graphql api result > projects/user`)
/**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) {
console.debug("metrics/compute/mocks > mocking graphql api result > projects/user")
return ({
user:{
projects:{
@@ -14,11 +14,11 @@
doneCount:faker.random.number(10),
inProgressCount:faker.random.number(10),
todoCount:faker.random.number(10),
enabled:true
}
}
]
}
}
enabled:true,
},
},
],
},
},
})
}
}

@@ -1,20 +1,20 @@
/** Mocked data */
export default function ({faker, query, login = faker.internet.userName()}) {
console.debug(`metrics/compute/mocks > mocking graphql api result > stargazers/default`)
/**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) {
console.debug("metrics/compute/mocks > mocking graphql api result > stargazers/default")
return /after: "MOCKED_CURSOR"/m.test(query) ? ({
repository:{
stargazers:{
edges:[],
}
}
},
},
}) : ({
repository:{
stargazers:{
edges:new Array(faker.random.number({min:50, max:100})).fill(null).map(() => ({
starredAt:`${faker.date.recent(30)}`,
cursor:"MOCKED_CURSOR"
}))
}
}
cursor:"MOCKED_CURSOR",
})),
},
},
})
}
}

@@ -1,6 +1,6 @@
/** Mocked data */
export default function ({faker, query, login = faker.internet.userName()}) {
console.debug(`metrics/compute/mocks > mocking graphql api result > stars/default`)
/**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) {
console.debug("metrics/compute/mocks > mocking graphql api result > stars/default")
return ({
user:{
starredRepositories:{
@@ -22,16 +22,16 @@
stargazerCount:faker.random.number(10000),
licenseInfo:{
nickname:null,
name:"MIT License"
name:"MIT License",
},
primaryLanguage:{
color:"#f1e05a",
name:"JavaScript"
}
}
name:"JavaScript",
},
},
},
]
}
}
],
},
},
})
}
}

@@ -1,6 +1,6 @@
|
||||
/** Mocked data */
|
||||
export default function ({faker}, target, that, [{username:login, page, per_page}]) {
|
||||
console.debug(`metrics/compute/mocks > mocking rest api result > rest.activity.listEventsForAuthenticatedUser`)
|
||||
/**Mocked data */
|
||||
export default function({faker}, target, that, [{username:login, page, per_page}]) {
|
||||
console.debug("metrics/compute/mocks > mocking rest api result > rest.activity.listEventsForAuthenticatedUser")
|
||||
return ({
|
||||
status:200,
|
||||
url:`https://api.github.com/users/${login}/events?per_page=${per_page}&page=${page}`,
|
||||
@@ -27,7 +27,7 @@
|
||||
path:faker.system.fileName(),
|
||||
commit_id:"MOCKED_SHA",
|
||||
body:faker.lorem.sentence(),
|
||||
}
|
||||
},
|
||||
},
|
||||
created_at:faker.date.recent(7),
|
||||
},
|
||||
@@ -55,7 +55,7 @@
|
||||
login:faker.internet.userName(),
|
||||
},
|
||||
body:"",
|
||||
}
|
||||
},
|
||||
},
|
||||
created_at:faker.date.recent(7),
|
||||
},
|
||||
@@ -77,8 +77,8 @@
|
||||
login,
|
||||
},
|
||||
body:faker.lorem.paragraph(),
|
||||
performed_via_github_app:null
|
||||
}
|
||||
performed_via_github_app:null,
|
||||
},
|
||||
},
|
||||
created_at:faker.date.recent(7),
|
||||
},
|
||||
@@ -99,8 +99,8 @@
|
||||
summary:null,
|
||||
action:"created",
|
||||
sha:"MOCKED_SHA",
|
||||
}
|
||||
]
|
||||
},
|
||||
],
|
||||
},
|
||||
created_at:faker.date.recent(7),
|
||||
},
|
||||
@@ -125,14 +125,14 @@
|
||||
{
|
||||
name:"lorem ipsum",
|
||||
color:"d876e3",
|
||||
}
|
||||
},
|
||||
],
|
||||
state:"open",
|
||||
},
|
||||
comment:{
|
||||
body:faker.lorem.paragraph(),
|
||||
performed_via_github_app:null
|
||||
}
|
||||
performed_via_github_app:null,
|
||||
},
|
||||
},
|
||||
created_at:faker.date.recent(7),
|
||||
},
|
||||
@@ -149,7 +149,7 @@
|
||||
forkee:{
|
||||
name:faker.random.word(),
|
||||
full_name:`${faker.random.word()}/${faker.random.word()}`,
|
||||
}
|
||||
},
|
||||
},
|
||||
created_at:faker.date.recent(7),
|
||||
},
|
||||
@@ -178,7 +178,7 @@
|
||||
user:{
|
||||
login:faker.internet.userName(),
|
||||
},
|
||||
}
|
||||
},
|
||||
},
|
||||
created_at:faker.date.recent(7),
|
||||
},
|
||||
@@ -198,7 +198,7 @@
|
||||
name:faker.random.words(4),
|
||||
draft:faker.random.boolean(),
|
||||
prerelease:faker.random.boolean(),
|
||||
}
|
||||
},
|
||||
},
|
||||
created_at:faker.date.recent(7),
|
||||
},
|
||||
@@ -262,8 +262,8 @@
|
||||
sha:"MOCKED_SHA",
|
||||
message:faker.lorem.sentence(),
|
||||
url:"https://api.github.com/repos/lowlighter/metrics/commits/MOCKED_SHA",
|
||||
}
|
||||
]
|
||||
},
|
||||
],
|
||||
},
|
||||
created_at:faker.date.recent(7),
|
||||
},
|
||||
@@ -288,7 +288,7 @@
|
||||
additions:faker.random.number(1000),
|
||||
deletions:faker.random.number(1000),
|
||||
changed_files:faker.random.number(10),
|
||||
}
|
||||
},
|
||||
},
|
||||
created_at:faker.date.recent(7),
|
||||
},
|
||||
@@ -305,7 +305,7 @@
|
||||
member:{
|
||||
login:faker.internet.userName(),
|
||||
},
|
||||
action:"added"
|
||||
action:"added",
|
||||
},
|
||||
created_at:faker.date.recent(7),
|
||||
},
|
||||
@@ -320,7 +320,7 @@
|
||||
},
|
||||
payload:{},
|
||||
created_at:faker.date.recent(7),
|
||||
}
|
||||
]
|
||||
},
|
||||
],
|
||||
})
|
||||
}
|
||||
}

@@ -1,4 +1,4 @@
/** Mocked data */
/**Mocked data */
export default function({faker}, target, that, args) {
return ({
status:200,
@@ -17,7 +17,7 @@
source_import:{limit:100, used:0, remaining:100, reset:0},
code_scanning_upload:{limit:500, used:0, remaining:500, reset:0},
},
rate:{limit:5000, used:0, remaining:"MOCKED", reset:0}
}
rate:{limit:5000, used:0, remaining:"MOCKED", reset:0},
},
})
}
}

@@ -1,6 +1,6 @@
/** Mocked data */
/**Mocked data */
export default function({faker}, target, that, [{owner, repo}]) {
console.debug(`metrics/compute/mocks > mocking rest api result > rest.repos.getContributorsStats`)
console.debug("metrics/compute/mocks > mocking rest api result > rest.repos.getContributorsStats")
return ({
status:200,
url:`https://api.github.com/repos/${owner}/${repo}/stats/contributors`,
@@ -18,10 +18,10 @@
{w:3, a:faker.random.number(10000), d:faker.random.number(10000), c:faker.random.number(10000)},
{w:4, a:faker.random.number(10000), d:faker.random.number(10000), c:faker.random.number(10000)},
],
author: {
author:{
login:owner,
}
}
]
},
},
],
})
}
}

@@ -1,6 +1,6 @@
/** Mocked data */
/**Mocked data */
export default function({faker}, target, that, [{owner, repo}]) {
console.debug(`metrics/compute/mocks > mocking rest api result > rest.repos.getViews`)
console.debug("metrics/compute/mocks > mocking rest api result > rest.repos.getViews")
const count = faker.random.number(10000)*2
const uniques = faker.random.number(count)*2
return ({
@@ -17,7 +17,7 @@
views:[
{timestamp:`${faker.date.recent()}`, count:count/2, uniques:uniques/2},
{timestamp:`${faker.date.recent()}`, count:count/2, uniques:uniques/2},
]
}
],
},
})
}
}

@@ -1,28 +1,26 @@
|
||||
/** Mocked data */
|
||||
/**Mocked data */
|
||||
export default function({faker}, target, that, [{page, per_page, owner, repo}]) {
|
||||
console.debug(`metrics/compute/mocks > mocking rest api result > rest.repos.listCommits`)
|
||||
console.debug("metrics/compute/mocks > mocking rest api result > rest.repos.listCommits")
|
||||
return ({
|
||||
status:200,
|
||||
url:`https://api.github.com/repos/${owner}/${repo}/commits?per_page=${per_page}&page=${page}`,
|
||||
headers: {
|
||||
headers:{
|
||||
server:"GitHub.com",
|
||||
status:"200 OK",
|
||||
"x-oauth-scopes":"repo",
|
||||
},
|
||||
data:page < 2 ? new Array(per_page).fill(null).map(() =>
|
||||
({
|
||||
data:page < 2 ? new Array(per_page).fill(null).map(() => ({
|
||||
sha:"MOCKED_SHA",
|
||||
commit:{
|
||||
author:{
|
||||
name:owner,
|
||||
date:`${faker.date.recent(14)}`
|
||||
date:`${faker.date.recent(14)}`,
|
||||
},
|
||||
committer:{
|
||||
name:owner,
|
||||
date:`${faker.date.recent(14)}`
|
||||
date:`${faker.date.recent(14)}`,
|
||||
},
|
||||
}
|
||||
})
|
||||
) : []
|
||||
},
|
||||
})) : [],
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
/** Mocked data */
|
||||
/**Mocked data */
|
||||
export default function({faker}, target, that, [{owner, repo}]) {
|
||||
console.debug(`metrics/compute/mocks > mocking rest api result > rest.repos.listContributors`)
|
||||
console.debug("metrics/compute/mocks > mocking rest api result > rest.repos.listContributors")
|
||||
return ({
|
||||
status:200,
|
||||
url:`https://api.github.com/repos/${owner}/${repo}/contributors`,
|
||||
headers: {
|
||||
headers:{
|
||||
server:"GitHub.com",
|
||||
status:"200 OK",
|
||||
"x-oauth-scopes":"repo",
|
||||
@@ -13,6 +13,6 @@
|
||||
login:faker.internet.userName(),
|
||||
avatar_url:null,
|
||||
contributions:faker.random.number(1000),
|
||||
}))
|
||||
})),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
/** Mocked data */
|
||||
/**Mocked data */
|
||||
export default function({faker}, target, that, args) {
|
||||
//Arguments
|
||||
const [url] = args
|
||||
//Head request
|
||||
if (/^HEAD .$/.test(url)) {
|
||||
console.debug(`metrics/compute/mocks > mocking rest api result > rest.request HEAD`)
|
||||
console.debug("metrics/compute/mocks > mocking rest api result > rest.request HEAD")
|
||||
return ({
|
||||
status:200,
|
||||
url:"https://api.github.com/",
|
||||
@@ -13,7 +13,7 @@
|
||||
status:"200 OK",
|
||||
"x-oauth-scopes":"repo",
|
||||
},
|
||||
data:undefined
|
||||
data:undefined,
|
||||
})
|
||||
}
|
||||
//Commit content
|
||||
@@ -44,16 +44,16 @@
|
||||
login:faker.internet.userName(),
|
||||
id:faker.random.number(100000000),
|
||||
},
|
||||
files: [
|
||||
files:[
|
||||
{
|
||||
sha:"MOCKED_SHA",
|
||||
filename:faker.system.fileName(),
|
||||
patch:"@@ -0,0 +1,5 @@\n+//Imports\n+ import app from \"./src/app.mjs\"\n+\n+//Start app\n+ await app()\n\\ No newline at end of file"
|
||||
patch:"@@ -0,0 +1,5 @@\n+//Imports\n+ import app from \"./src/app.mjs\"\n+\n+//Start app\n+ await app()\n\\ No newline at end of file",
|
||||
},
|
||||
]
|
||||
}
|
||||
],
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
return target(...args)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
/** Mocked data */
|
||||
/**Mocked data */
|
||||
export default function({faker}, target, that, [{username}]) {
|
||||
console.debug(`metrics/compute/mocks > mocking rest api result > rest.repos.getByUsername`)
|
||||
console.debug("metrics/compute/mocks > mocking rest api result > rest.repos.getByUsername")
|
||||
return ({
|
||||
status:200,
|
||||
url:`'https://api.github.com/users/${username}/`,
|
||||
headers: {
|
||||
headers:{
|
||||
server:"GitHub.com",
|
||||
status:"200 OK",
|
||||
"x-oauth-scopes":"repo",
|
||||
@@ -13,6 +13,6 @@
|
||||
login:faker.internet.userName(),
|
||||
avatar_url:null,
|
||||
contributions:faker.random.number(1000),
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,17 +9,17 @@
|
||||
let mocked = false
|
||||
|
||||
//Mocking
|
||||
export default async function ({graphql, rest}) {
|
||||
export default async function({graphql, rest}) {
|
||||
|
||||
//Check if already mocked
|
||||
if (mocked)
|
||||
return {graphql, rest}
|
||||
mocked = true
|
||||
console.debug(`metrics/compute/mocks > mocking`)
|
||||
console.debug("metrics/compute/mocks > mocking")
|
||||
|
||||
//Load mocks
|
||||
const __mocks = paths.join(paths.dirname(urls.fileURLToPath(import.meta.url)))
|
||||
const mock = async ({directory, mocks}) => {
|
||||
const mock = async({directory, mocks}) => {
|
||||
for (const entry of await fs.readdir(directory)) {
|
||||
if ((await fs.lstat(paths.join(directory, entry))).isDirectory()) {
|
||||
if (!mocks[entry])
|
||||
@@ -36,7 +36,7 @@
|
||||
//GraphQL API mocking
|
||||
{
|
||||
//Unmocked
|
||||
console.debug(`metrics/compute/mocks > mocking graphql api`)
|
||||
console.debug("metrics/compute/mocks > mocking graphql api")
|
||||
const unmocked = graphql
|
||||
//Mocked
|
||||
graphql = new Proxy(unmocked, {
|
||||
@@ -46,20 +46,21 @@
|
||||
const login = query.match(/login: "(?<login>.*?)"/)?.groups?.login ?? faker.internet.userName()
|
||||
|
||||
//Search for mocked query
|
||||
for (const mocked of Object.keys(mocks.github.graphql))
|
||||
for (const mocked of Object.keys(mocks.github.graphql)) {
|
||||
if (new RegExp(`^query ${mocked.replace(/([.]\w)/g, (_, g) => g.toLocaleUpperCase().substring(1)).replace(/^(\w)/g, (_, g) => g.toLocaleUpperCase())} `).test(query))
|
||||
return mocks.github.graphql[mocked]({faker, query, login})
|
||||
}
|
||||
|
||||
//Unmocked call
|
||||
return target(...args)
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
//Rest API mocking
|
||||
{
|
||||
//Unmocked
|
||||
console.debug(`metrics/compute/mocks > mocking rest api`)
|
||||
console.debug("metrics/compute/mocks > mocking rest api")
|
||||
const unmocked = {}
|
||||
//Mocked
|
||||
const mocker = ({path = "rest", mocks, mocked}) => {
|
||||
@@ -79,12 +80,12 @@
|
||||
//Axios mocking
|
||||
{
|
||||
//Unmocked
|
||||
console.debug(`metrics/compute/mocks > mocking axios`)
|
||||
console.debug("metrics/compute/mocks > mocking axios")
|
||||
const unmocked = {get:axios.get, post:axios.post}
|
||||
|
||||
//Mocked post requests
|
||||
axios.post = new Proxy(unmocked.post, {
|
||||
apply:function(target, that, args) {
|
||||
apply(target, that, args) {
|
||||
//Arguments
|
||||
const [url, body] = args
|
||||
|
||||
@@ -97,12 +98,12 @@
|
||||
|
||||
//Unmocked call
|
||||
return target(...args)
|
||||
}
|
||||
},
|
||||
})
|
||||
|
||||
//Mocked get requests
|
||||
axios.get = new Proxy(unmocked.get, {
|
||||
apply:function(target, that, args) {
|
||||
apply(target, that, args) {
|
||||
//Arguments
|
||||
const [url, options] = args
|
||||
|
||||
@@ -115,7 +116,7 @@
|
||||
|
||||
//Unmocked call
|
||||
return target(...args)
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -2,4 +2,6 @@
|
||||
import app from "./instance.mjs"
|
||||
|
||||
//Start app
|
||||
await app({mock:process.env.USE_MOCKED_DATA, nosettings:process.env.NO_SETTINGS})
|
||||
(async function() {
|
||||
await app({mock:process.env.USE_MOCKED_DATA, nosettings:process.env.NO_SETTINGS})
|
||||
})()
|
||||
|
||||
@@ -10,8 +10,8 @@
|
||||
import mocks from "../mocks/index.mjs"
|
||||
import metrics from "../metrics/index.mjs"
|
||||
|
||||
/** App */
|
||||
export default async function ({mock, nosettings} = {}) {
|
||||
/**App */
|
||||
export default async function({mock, nosettings} = {}) {
|
||||
|
||||
//Load configuration settings
|
||||
const {conf, Plugins, Templates} = await setup({nosettings})
|
||||
@@ -39,14 +39,14 @@
|
||||
}
|
||||
}
|
||||
if (((mock)&&(!conf.settings.token))||(mock === "force")) {
|
||||
console.debug(`metrics/app > using mocked token`)
|
||||
console.debug("metrics/app > using mocked token")
|
||||
conf.settings.token = "MOCKED_TOKEN"
|
||||
}
|
||||
if (debug)
|
||||
console.debug(util.inspect(conf.settings, {depth:Infinity, maxStringLength:256}))
|
||||
|
||||
//Load octokits
|
||||
const api = {graphql:octokit.graphql.defaults({headers:{authorization: `token ${token}`}}), rest:new OctokitRest.Octokit({auth:token})}
|
||||
const api = {graphql:octokit.graphql.defaults({headers:{authorization:`token ${token}`}}), rest:new OctokitRest.Octokit({auth:token})}
|
||||
//Apply mocking if needed
|
||||
if (mock)
|
||||
Object.assign(api, await mocks(api))
|
||||
@@ -60,9 +60,11 @@
|
||||
if (ratelimiter) {
|
||||
app.set("trust proxy", 1)
|
||||
middlewares.push(ratelimit({
|
||||
skip(req, res) { return !!cache.get(req.params.login) },
|
||||
skip(req, _res) {
|
||||
return !!cache.get(req.params.login)
|
||||
},
|
||||
message:"Too many requests",
|
||||
...ratelimiter
|
||||
...ratelimiter,
|
||||
}))
|
||||
}
|
||||
//Cache headers middleware
|
||||
@@ -82,7 +84,7 @@
|
||||
let requests = {limit:0, used:0, remaining:0, reset:NaN}
|
||||
if (!conf.settings.notoken) {
|
||||
requests = (await rest.rateLimit.get()).data.rate
|
||||
setInterval(async () => requests = (await rest.rateLimit.get()).data.rate, 30*1000)
|
||||
setInterval(async() => requests = (await rest.rateLimit.get()).data.rate, 30*1000)
|
||||
}
|
||||
//Web
|
||||
app.get("/", limiter, (req, res) => res.sendFile(`${conf.paths.statics}/index.html`))
|
||||
@@ -117,9 +119,9 @@
|
||||
app.get("/.js/prism.markdown.min.js", limiter, (req, res) => res.sendFile(`${conf.paths.node_modules}/prismjs/components/prism-markdown.min.js`))
|
||||
//Meta
|
||||
app.get("/.version", limiter, (req, res) => res.status(200).send(conf.package.version))
|
||||
app.get("/.requests", limiter, async (req, res) => res.status(200).json(requests))
|
||||
app.get("/.requests", limiter, async(req, res) => res.status(200).json(requests))
|
||||
//Cache
|
||||
app.get("/.uncache", limiter, async (req, res) => {
|
||||
app.get("/.uncache", limiter, async(req, res) => {
|
||||
const {token, user} = req.query
|
||||
if (token) {
|
||||
if (actions.flush.has(token)) {
|
||||
@@ -127,10 +129,9 @@
|
||||
cache.del(actions.flush.get(token))
|
||||
return res.sendStatus(200)
|
||||
}
|
||||
else
|
||||
return res.sendStatus(404)
|
||||
return res.sendStatus(404)
|
||||
}
|
||||
else {
|
||||
{
|
||||
const token = `${Math.random().toString(16).replace("0.", "")}${Math.random().toString(16).replace("0.", "")}`
|
||||
actions.flush.set(token, user)
|
||||
return res.json({token})
|
||||
@@ -138,7 +139,7 @@
|
||||
})
|
||||
|
||||
//Metrics
|
||||
app.get("/:login", ...middlewares, async (req, res) => {
|
||||
app.get("/:login", ...middlewares, async(req, res) => {
|
||||
//Request params
|
||||
const login = req.params.login?.replace(/[\n\r]/g, "")
|
||||
if ((restricted.length)&&(!restricted.includes(login))) {
|
||||
@@ -146,13 +147,12 @@
|
||||
return res.sendStatus(403)
|
||||
}
|
||||
//Read cached data if possible
|
||||
//User cached
|
||||
if ((!debug)&&(cached)&&(cache.get(login))) {
|
||||
const {rendered, mime} = cache.get(login)
|
||||
res.header("Content-Type", mime)
|
||||
res.send(rendered)
|
||||
return
|
||||
}
|
||||
if ((!debug)&&(cached)&&(cache.get(login))) {
|
||||
const {rendered, mime} = cache.get(login)
|
||||
res.header("Content-Type", mime)
|
||||
res.send(rendered)
|
||||
return
|
||||
}
|
||||
//Maximum simultaneous users
|
||||
if ((maxusers)&&(cache.size()+1 > maxusers)) {
|
||||
console.debug(`metrics/app/${login} > 503 (maximum users reached)`)
|
||||
@@ -167,8 +167,8 @@
|
||||
const {rendered, mime} = await metrics({login, q}, {
|
||||
graphql, rest, plugins, conf,
|
||||
die:q["plugins.errors.fatal"] ?? false,
|
||||
verify:q["verify"] ?? false,
|
||||
convert:["jpeg", "png"].includes(q["config.output"]) ? q["config.output"] : null
|
||||
verify:q.verify ?? false,
|
||||
convert:["jpeg", "png"].includes(q["config.output"]) ? q["config.output"] : null,
|
||||
}, {Plugins, Templates})
|
||||
//Cache
|
||||
if ((!debug)&&(cached))
|
||||
@@ -206,6 +206,6 @@
|
||||
`Max simultaneous users │ ${maxusers ? `${maxusers} users` : "(unrestricted)"}`,
|
||||
`Plugins enabled │ ${enabled.map(({name}) => name).join(", ")}`,
|
||||
`SVG optimization │ ${conf.settings.optimize ?? false}`,
|
||||
`Server ready !`
|
||||
"Server ready !",
|
||||
].join("\n")))
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
//Setup
|
||||
export default async function ({login, data, rest, q, account, imports}, {enabled = false} = {}) {
|
||||
export default async function({login, data, rest, q, account, imports}, {enabled = false} = {}) {
|
||||
//Plugin execution
|
||||
try {
|
||||
//Check if plugin is enabled and requirements are met
|
||||
|
||||
@@ -34,6 +34,7 @@ inputs:
|
||||
type: array
|
||||
format: comma-separated
|
||||
default: all
|
||||
example: issue, pr, review, wiki, star
|
||||
values:
|
||||
- all # Display all types of events
|
||||
- comment # Display commits, issues and pull requests comments
|
||||
@@ -48,4 +49,4 @@ inputs:
|
||||
- fork # Display forked repositories
|
||||
- star # Display starred repositories
|
||||
- member # Display collaborators additions
|
||||
- public # Display repositories made public
|
||||
- public # Display repositories made public
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
//Setup
|
||||
export default async function ({login, data, queries, imports, q, account}, {enabled = false} = {}) {
|
||||
export default async function({login, data, queries, imports, q, account}, {enabled = false} = {}) {
|
||||
//Plugin execution
|
||||
try {
|
||||
//Check if plugin is enabled and requirements are met
|
||||
@@ -31,7 +31,7 @@
|
||||
//Format and save results
|
||||
for (const {name, entries} of lists) {
|
||||
//Format results
|
||||
const list = await Promise.all(entries.map(async media => await format({media, imports})))
|
||||
const list = await Promise.all(entries.map(media => format({media, imports})))
|
||||
result.lists[type][name.toLocaleLowerCase()] = shuffle ? imports.shuffle(list) : list
|
||||
//Limit results
|
||||
if (limit > 0) {
|
||||
@@ -131,7 +131,7 @@
|
||||
}
|
||||
}
|
||||
|
||||
/** Media formatter */
|
||||
/**Media formatter */
|
||||
async function format({media, imports}) {
|
||||
const {progress, score:userScore, media:{title, description, status, startDate:{year:release}, genres, averageScore, episodes, chapters, type, coverImage:{medium:artwork}}} = media
|
||||
return {
|
||||
@@ -140,6 +140,6 @@
|
||||
description:description.replace(/<br\s*\\?>/g, " "),
|
||||
scores:{user:userScore, community:averageScore},
|
||||
released:type === "ANIME" ? episodes : chapters,
|
||||
artwork:artwork ? await imports.imgb64(artwork) : "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mOcOnfpfwAGfgLYttYINwAAAABJRU5ErkJggg=="
|
||||
artwork:artwork ? await imports.imgb64(artwork) : "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mOcOnfpfwAGfgLYttYINwAAAABJRU5ErkJggg==",
|
||||
}
|
||||
}
|
||||
|
||||
@@ -28,6 +28,7 @@ inputs:
|
||||
type: array
|
||||
format: comma-separated
|
||||
default: favorites
|
||||
example: favorites, watching, characters
|
||||
values:
|
||||
- favorites # Favorites animes and mangas (depending on plugin_anilist_medias values)
|
||||
- watching # Animes in your watching list
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
*/
|
||||
|
||||
//Setup
|
||||
export default async function ({login, graphql, data, q, queries, imports}, conf) {
|
||||
export default async function({login, graphql, data, q, queries, imports}, conf) {
|
||||
//Load inputs
|
||||
console.debug(`metrics/compute/${login}/base > started`)
|
||||
let {repositories, repositories_forks:forks} = imports.metadata.plugins.base.inputs({data, q, account:"bypass"}, {repositories:conf.settings.repositories ?? 100})
|
||||
@@ -16,7 +16,7 @@
|
||||
//Base parts (legacy handling for web instance)
|
||||
const defaulted = ("base" in q) ? legacy.converter(q.base) ?? true : true
|
||||
for (const part of conf.settings.plugins.base.parts)
|
||||
data.base[part] = `base.${part}` in q ? legacy.converter(q[ `base.${part}`]) : defaulted
|
||||
data.base[part] = `base.${part}` in q ? legacy.converter(q[`base.${part}`]) : defaulted
|
||||
|
||||
//Iterate through account types
|
||||
for (const account of ["user", "organization"]) {
|
||||
@@ -46,7 +46,8 @@
|
||||
//Success
|
||||
console.debug(`metrics/compute/${login}/base > graphql query > account ${account} > success`)
|
||||
return {}
|
||||
} catch (error) {
|
||||
}
|
||||
catch (error) {
|
||||
console.debug(`metrics/compute/${login}/base > account ${account} > failed : ${error}`)
|
||||
console.debug(`metrics/compute/${login}/base > checking next account`)
|
||||
}
|
||||
@@ -69,7 +70,7 @@
|
||||
//Organization
|
||||
organization({login, data}) {
|
||||
console.debug(`metrics/compute/${login}/base > applying postprocessing`)
|
||||
data.account = "organization",
|
||||
data.account = "organization"
|
||||
Object.assign(data.user, {
|
||||
isHireable:false,
|
||||
starredRepositories:{totalCount:0},
|
||||
@@ -107,7 +108,7 @@
|
||||
repositories:{totalCount:0, totalDiskUsage:0, nodes:[]},
|
||||
packages:{totalCount:0},
|
||||
})
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
//Legacy functions
|
||||
@@ -119,5 +120,5 @@
|
||||
return false
|
||||
if (Number.isFinite(Number(value)))
|
||||
return !!(Number(value))
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
@@ -4,22 +4,25 @@
|
||||
*/
|
||||
|
||||
//Setup
|
||||
export default async function ({login, q, dflags}, {conf, data, rest, graphql, plugins, queries, account}, {pending, imports}) {
|
||||
export default async function({login, q, dflags}, {conf, data, rest, graphql, plugins, queries, account}, {pending, imports}) {
|
||||
//Load inputs
|
||||
imports.metadata.plugins.core.inputs({data, account, q})
|
||||
|
||||
//Init
|
||||
const computed = data.computed = {commits:0, sponsorships:0, licenses:{favorite:"", used:{}}, token:{}, repositories:{watchers:0, stargazers:0, issues_open:0, issues_closed:0, pr_open:0, pr_merged:0, forks:0, forked:0, releases:0}}
|
||||
const computed = {commits:0, sponsorships:0, licenses:{favorite:"", used:{}}, token:{}, repositories:{watchers:0, stargazers:0, issues_open:0, issues_closed:0, pr_open:0, pr_merged:0, forks:0, forked:0, releases:0}}
|
||||
const avatar = imports.imgb64(data.user.avatarUrl)
|
||||
data.computed = computed
|
||||
console.debug(`metrics/compute/${login} > formatting common metrics`)
|
||||
|
||||
//Timezone config
|
||||
if (q["config.timezone"]) {
|
||||
const timezone = data.config.timezone = {name:q["config.timezone"], offset:0}
|
||||
const timezone = {name:q["config.timezone"], offset:0}
|
||||
data.config.timezone = timezone
|
||||
try {
|
||||
timezone.offset = Number(new Date().toLocaleString("fr", {timeZoneName:"short", timeZone:timezone.name}).match(/UTC[+](?<offset>\d+)/)?.groups?.offset*60*60*1000) || 0
|
||||
console.debug(`metrics/compute/${login} > timezone set to ${timezone.name} (${timezone.offset > 0 ? "+" : ""}${Math.round(timezone.offset/(60*60*1000))} hours)`)
|
||||
} catch {
|
||||
}
|
||||
catch {
|
||||
timezone.error = `Failed to use timezone "${timezone.name}"`
|
||||
console.debug(`metrics/compute/${login} > failed to use timezone "${timezone.name}"`)
|
||||
}
|
||||
@@ -35,7 +38,7 @@
|
||||
for (const name of Object.keys(imports.plugins)) {
|
||||
if (!plugins[name]?.enabled)
|
||||
continue
|
||||
pending.push((async () => {
|
||||
pending.push((async() => {
|
||||
try {
|
||||
console.debug(`metrics/compute/${login}/plugins > ${name} > started`)
|
||||
data.plugins[name] = await imports.plugins[name]({login, q, imports, data, computed, rest, graphql, queries, account}, plugins[name])
|
||||
@@ -71,7 +74,7 @@
|
||||
computed.diskUsage = `${imports.bytes(data.user.repositories.totalDiskUsage*1000)}`
|
||||
|
||||
//Compute licenses stats
|
||||
computed.licenses.favorite = Object.entries(computed.licenses.used).sort(([an, a], [bn, b]) => b - a).slice(0, 1).map(([name, value]) => name) ?? ""
|
||||
computed.licenses.favorite = Object.entries(computed.licenses.used).sort(([_an, a], [_bn, b]) => b - a).slice(0, 1).map(([name, _value]) => name) ?? ""
|
||||
|
||||
//Compute total commits
|
||||
computed.commits += data.user.contributionsCollection.totalCommitContributions + data.user.contributionsCollection.restrictedContributionsCount
|
||||
@@ -117,7 +120,7 @@
|
||||
computed.calendar.map(day => day.color = halloween(day.color))
|
||||
//Update isocalendar colors
|
||||
const waiting = [...pending]
|
||||
pending.push((async () => {
|
||||
pending.push((async() => {
|
||||
await Promise.all(waiting)
|
||||
if (data.plugins.isocalendar?.svg)
|
||||
data.plugins.isocalendar.svg = halloween(data.plugins.isocalendar.svg)
|
||||
@@ -127,4 +130,4 @@
|
||||
|
||||
//Results
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
//Setup
|
||||
export default async function ({data, computed, imports, q, account}, {enabled = false} = {}) {
|
||||
export default async function({data, computed, imports, q, account}, {enabled = false} = {}) {
|
||||
//Plugin execution
|
||||
try {
|
||||
//Check if plugin is enabled and requirements are met
|
||||
@@ -12,15 +12,27 @@
|
||||
//Define getters
|
||||
const followup = {
|
||||
issues:{
|
||||
get count() { return this.open + this.closed },
|
||||
get open() { return computed.repositories.issues_open },
|
||||
get closed() { return computed.repositories.issues_closed },
|
||||
get count() {
|
||||
return this.open + this.closed
|
||||
},
|
||||
get open() {
|
||||
return computed.repositories.issues_open
|
||||
},
|
||||
get closed() {
|
||||
return computed.repositories.issues_closed
|
||||
},
|
||||
},
|
||||
pr:{
|
||||
get count() { return this.open + this.merged },
|
||||
get open() { return computed.repositories.pr_open },
|
||||
get merged() { return computed.repositories.pr_merged }
|
||||
}
|
||||
get count() {
|
||||
return this.open + this.merged
|
||||
},
|
||||
get open() {
|
||||
return computed.repositories.pr_open
|
||||
},
|
||||
get merged() {
|
||||
return computed.repositories.pr_merged
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
//Results
|
||||
@@ -30,4 +42,4 @@
|
||||
catch (error) {
|
||||
throw {error:{message:"An error occured", instance:error}}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
//Setup
|
||||
export default async function ({login, data, graphql, q, imports, queries, account}, {enabled = false} = {}) {
|
||||
export default async function({login, data, graphql, q, imports, queries, account}, {enabled = false} = {}) {
|
||||
//Plugin execution
|
||||
try {
|
||||
//Check if plugin is enabled and requirements are met
|
||||
@@ -28,7 +28,7 @@
|
||||
|
||||
//Iterate through gists
|
||||
console.debug(`metrics/compute/${login}/plugins > gists > processing ${gists.length} gists`)
|
||||
let stargazers = 0, forks = 0, comments = 0, files = 0
|
||||
let comments = 0, files = 0, forks = 0, stargazers = 0
|
||||
for (const gist of gists) {
|
||||
//Skip forks
|
||||
if (gist.isFork)
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
//Setup
|
||||
export default async function ({login, data, rest, imports, q, account}, {enabled = false, ...defaults} = {}) {
|
||||
export default async function({login, data, rest, imports, q, account}, {enabled = false, ...defaults} = {}) {
|
||||
//Plugin execution
|
||||
try {
|
||||
//Check if plugin is enabled and requirements are met
|
||||
@@ -22,7 +22,10 @@
|
||||
console.debug(`metrics/compute/${login}/plugins > habits > loading page ${page}`)
|
||||
events.push(...(await rest.activity.listEventsForAuthenticatedUser({username:login, per_page:100, page})).data)
|
||||
}
|
||||
} catch { console.debug(`metrics/compute/${login}/plugins > habits > no more page to load`) }
|
||||
}
|
||||
catch {
|
||||
console.debug(`metrics/compute/${login}/plugins > habits > no more page to load`)
|
||||
}
|
||||
console.debug(`metrics/compute/${login}/plugins > habits > ${events.length} events loaded`)
|
||||
|
||||
//Get user recent commits
|
||||
@@ -36,8 +39,7 @@
|
||||
console.debug(`metrics/compute/${login}/plugins > habits > loading patches`)
|
||||
const patches = [...await Promise.allSettled(commits
|
||||
.flatMap(({payload}) => payload.commits).map(commit => commit.url)
|
||||
.map(async commit => (await rest.request(commit)).data.files)
|
||||
)]
|
||||
.map(async commit => (await rest.request(commit)).data.files))]
|
||||
.filter(({status}) => status === "fulfilled")
|
||||
.map(({value}) => value)
|
||||
.flatMap(files => files.map(file => ({name:imports.paths.basename(file.filename), patch:file.patch ?? ""})))
|
||||
@@ -52,7 +54,7 @@
|
||||
habits.commits.days[day] = (habits.commits.days[day] ?? 0) + 1
|
||||
habits.commits.days.max = Math.max(...Object.values(habits.commits.days))
|
||||
//Compute day with most commits
|
||||
habits.commits.day = days.length ? ["Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday"][Object.entries(habits.commits.days).sort(([an, a], [bn, b]) => b - a).map(([day, occurence]) => day)[0]] ?? NaN : NaN
|
||||
habits.commits.day = days.length ? ["Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday"][Object.entries(habits.commits.days).sort(([_an, a], [_bn, b]) => b - a).map(([day, _occurence]) => day)[0]] ?? NaN : NaN
|
||||
}
|
||||
|
||||
//Commit hour
|
||||
@@ -64,7 +66,7 @@
|
||||
habits.commits.hours[hour] = (habits.commits.hours[hour] ?? 0) + 1
|
||||
habits.commits.hours.max = Math.max(...Object.values(habits.commits.hours))
|
||||
//Compute hour with most commits
|
||||
habits.commits.hour = hours.length ? `${Object.entries(habits.commits.hours).sort(([an, a], [bn, b]) => b - a).map(([hour, occurence]) => hour)[0]}`.padStart(2, "0") : NaN
|
||||
habits.commits.hour = hours.length ? `${Object.entries(habits.commits.hours).sort(([_an, a], [_bn, b]) => b - a).map(([hour, _occurence]) => hour)[0]}`.padStart(2, "0") : NaN
|
||||
}
|
||||
|
||||
//Indent style
|
||||
@@ -72,7 +74,7 @@
|
||||
//Attempt to guess whether tabs or spaces are used in patches
|
||||
console.debug(`metrics/compute/${login}/plugins > habits > searching indent style`)
|
||||
patches
|
||||
.map(({patch}) => patch.match(/((?:\t)|(?: )) /gm) ?? [])
|
||||
.map(({patch}) => patch.match(/((?:\t)|(?:[ ]{2})) /gm) ?? []) //eslint-disable-line prefer-named-capture-group
|
||||
.forEach(indent => habits.indents[/^\t/.test(indent) ? "tabs" : "spaces"]++)
|
||||
habits.indents.style = habits.indents.spaces > habits.indents.tabs ? "spaces" : habits.indents.tabs > habits.indents.spaces ? "tabs" : ""
|
||||
}
|
||||
@@ -89,18 +91,18 @@
|
||||
//Create temporary directory and save patches
|
||||
console.debug(`metrics/compute/${login}/plugins > habits > creating temp dir ${path} with ${patches.length} files`)
|
||||
await imports.fs.mkdir(path, {recursive:true})
|
||||
await Promise.all(patches.map(async ({name, patch}, i) => await imports.fs.writeFile(imports.paths.join(path, `${i}${imports.paths.extname(name)}`), patch)))
|
||||
await Promise.all(patches.map(({name, patch}, i) => imports.fs.writeFile(imports.paths.join(path, `${i}${imports.paths.extname(name)}`), patch)))
|
||||
//Create temporary git repository
|
||||
console.debug(`metrics/compute/${login}/plugins > habits > creating temp git repository`)
|
||||
await imports.run(`git init && git add . && git config user.name "linguist" && git config user.email "<>" && git commit -m "linguist"`, {cwd:path}).catch(console.debug)
|
||||
await imports.run(`git status`, {cwd:path})
|
||||
await imports.run('git init && git add . && git config user.name "linguist" && git config user.email "<>" && git commit -m "linguist"', {cwd:path}).catch(console.debug)
|
||||
await imports.run("git status", {cwd:path})
|
||||
//Spawn linguist process
|
||||
console.debug(`metrics/compute/${login}/plugins > habits > running linguist`)
|
||||
;(await imports.run(`${prefix} github-linguist --breakdown`, {cwd:path}))
|
||||
//Parse linguist result
|
||||
.split("\n").map(line => line.match(/(?<value>[\d.]+)%\s+(?<language>\w+)/)?.groups).filter(line => line)
|
||||
.map(({value, language}) => habits.linguist.languages[language] = (habits.linguist.languages[language] ?? 0) + value/100)
|
||||
habits.linguist.ordered = Object.entries(habits.linguist.languages).sort(([an, a], [bn, b]) => b - a)
|
||||
habits.linguist.ordered = Object.entries(habits.linguist.languages).sort(([_an, a], [_bn, b]) => b - a)
|
||||
}
|
||||
else
|
||||
console.debug(`metrics/compute/${login}/plugins > habits > linguist not available`)
|
||||
@@ -115,4 +117,4 @@
|
||||
throw error
|
||||
throw {error:{message:"An error occured", instance:error}}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
//Setup
|
||||
export default async function ({login, data, graphql, q, imports, queries, account}, {enabled = false} = {}) {
|
||||
export default async function({login, data, graphql, q, imports, queries, account}, {enabled = false} = {}) {
|
||||
//Plugin execution
|
||||
try {
|
||||
//Check if plugin is enabled and requirements are met
|
||||
@@ -39,7 +39,7 @@
|
||||
|
||||
//Compute the highest contributions in a day, streaks and average commits per day
|
||||
console.debug(`metrics/compute/${login}/plugins > isocalendar > computing stats`)
|
||||
let max = 0, streak = {max:0, current:0}, values = [], average = 0
|
||||
let average = 0, max = 0, streak = {max:0, current:0}, values = []
|
||||
for (const week of calendar.weeks) {
|
||||
for (const day of week.contributionDays) {
|
||||
values.push(day.contributionCount)
|
||||
@@ -61,8 +61,8 @@
|
||||
<feComponentTransfer>
|
||||
${[..."RGB"].map(channel => `<feFunc${channel} type="linear" slope="${1-k*0.4}" />`).join("")}
|
||||
</feComponentTransfer>
|
||||
</filter>`
|
||||
).join("")}
|
||||
</filter>`)
|
||||
.join("")}
|
||||
<g transform="scale(4) translate(12, 0)">`
|
||||
//Iterate through weeks
|
||||
for (const week of calendar.weeks) {
|
||||
@@ -79,10 +79,10 @@
|
||||
</g>`
|
||||
j++
|
||||
}
|
||||
svg += `</g>`
|
||||
svg += "</g>"
|
||||
i++
|
||||
}
|
||||
svg += `</g></svg>`
|
||||
svg += "</g></svg>"
|
||||
|
||||
//Results
|
||||
return {streak, max, average, svg, duration}
|
||||
@@ -93,4 +93,4 @@
|
||||
throw error
|
||||
throw {error:{message:"An error occured", instance:error}}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
//Setup
|
||||
export default async function ({login, data, imports, q, account}, {enabled = false} = {}) {
|
||||
export default async function({login, data, imports, q, account}, {enabled = false} = {}) {
|
||||
//Plugin execution
|
||||
try {
|
||||
//Check if plugin is enabled and requirements are met
|
||||
@@ -42,7 +42,7 @@
|
||||
|
||||
//Compute languages stats
|
||||
console.debug(`metrics/compute/${login}/plugins > languages > computing stats`)
|
||||
languages.favorites = Object.entries(languages.stats).sort(([an, a], [bn, b]) => b - a).slice(0, 8).map(([name, value]) => ({name, value, size:value, color:languages.colors[name], x:0})).filter(({value}) => value/languages.total > threshold)
|
||||
languages.favorites = Object.entries(languages.stats).sort(([_an, a], [_bn, b]) => b - a).slice(0, 8).map(([name, value]) => ({name, value, size:value, color:languages.colors[name], x:0})).filter(({value}) => value/languages.total > threshold)
|
||||
const visible = {total:Object.values(languages.favorites).map(({size}) => size).reduce((a, b) => a + b, 0)}
|
||||
for (let i = 0; i < languages.favorites.length; i++) {
|
||||
languages.favorites[i].value /= visible.total
|
||||
|
||||
@@ -18,6 +18,7 @@ inputs:
|
||||
type: array
|
||||
format: comma-separated
|
||||
default: ""
|
||||
example: html, css, ...
|
||||
|
||||
# List of repositories that will be skipped
|
||||
plugin_languages_skipped:
|
||||
@@ -25,6 +26,7 @@ inputs:
|
||||
type: array
|
||||
format: comma-separated
|
||||
default: ""
|
||||
example: my-repo-1, my-repo-2, ...
|
||||
|
||||
# Overrides default languages colors
|
||||
# Use `${n}:${color}` to change the color of the n-th most used language (e.g. "0:red" to make your most used language red)
|
||||
@@ -38,6 +40,7 @@ inputs:
|
||||
- comma-separated
|
||||
- /((?<index>[0-9])|(?<language>[-+a-z0-9#])):(?<color>#?[-a-z0-9]+)/
|
||||
default: github
|
||||
example: javascript:red, 0:blue, 1:#ff00aa
|
||||
|
||||
# Languages additional details
|
||||
plugin_languages_details:
|
||||
@@ -48,6 +51,7 @@ inputs:
|
||||
- bytes-size # Languages total size written in bytes
|
||||
- percentage # Languages proportions in %
|
||||
default: ""
|
||||
example: bytes-size, percentage
|
||||
|
||||
# Minimum threshold (in percentage) to reach for languages to be displayed
|
||||
plugin_languages_threshold:
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
//Setup
|
||||
export default async function ({login, data, imports, rest, q, account}, {enabled = false} = {}) {
|
||||
export default async function({login, data, imports, rest, q, account}, {enabled = false} = {}) {
|
||||
//Plugin execution
|
||||
try {
|
||||
//Check if plugin is enabled and requirements are met
|
||||
@@ -22,7 +22,7 @@
|
||||
//Get contributors stats from repositories
|
||||
console.debug(`metrics/compute/${login}/plugins > lines > querying api`)
|
||||
const lines = {added:0, deleted:0}
|
||||
const response = await Promise.all(repositories.map(async ({repo, owner}) => await rest.repos.getContributorsStats({owner, repo})))
|
||||
const response = await Promise.all(repositories.map(({repo, owner}) => rest.repos.getContributorsStats({owner, repo})))
|
||||
|
||||
//Compute changed lines
|
||||
console.debug(`metrics/compute/${login}/plugins > lines > computing total diff`)
|
||||
|
||||
@@ -180,8 +180,8 @@ Register your API key to finish setup.
|
||||
plugin_music_provider: spotify # Use Spotify as provider
|
||||
plugin_music_mode: recent # Set plugin mode
|
||||
plugin_music_limit: 4 # Limit to 4 entries
|
||||
plugin_music_played_at: yes # Show timestamp (only works with spotify, 🚧 @master feature)
|
||||
plugin_music_token: "${{ secrets.SPOTIFY_CLIENT_ID }}, ${{ secrets.SPOTIFY_CLIENT_SECRET }}, ${{ secrets.SPOTIFY_REFRESH_TOKEN }}"
|
||||
plugin_music_played_at: yes # Only works with spotify.
|
||||
```
|
||||
|
||||
```yaml
|
||||
|
||||
@@ -20,7 +20,7 @@
|
||||
}
|
||||
|
||||
//Setup
|
||||
export default async function ({login, imports, data, q, account}, {enabled = false, token = ""} = {}) {
|
||||
export default async function({login, imports, data, q, account}, {enabled = false, token = ""} = {}) {
|
||||
//Plugin execution
|
||||
try {
|
||||
//Check if plugin is enabled and requirements are met
|
||||
@@ -29,34 +29,40 @@
|
||||
|
||||
//Initialization
|
||||
const raw = {
|
||||
get provider() { return providers[provider]?.name ?? "" },
|
||||
get mode() { return modes[mode] ?? "Unconfigured music plugin"},
|
||||
get provider() {
|
||||
return providers[provider]?.name ?? ""
|
||||
},
|
||||
get mode() {
|
||||
return modes[mode] ?? "Unconfigured music plugin"
|
||||
},
|
||||
}
|
||||
let tracks = null
|
||||
|
||||
//Load inputs
|
||||
let {provider, mode, playlist, limit, user, played_at} = imports.metadata.plugins.music.inputs({data, account, q})
|
||||
let {provider, mode, playlist, limit, user, "played.at":played_at} = imports.metadata.plugins.music.inputs({data, account, q})
|
||||
//Auto-guess parameters
|
||||
if ((playlist)&&(!mode))
|
||||
mode = "playlist"
|
||||
if ((playlist)&&(!provider))
|
||||
for (const [name, {embed}] of Object.entries(providers))
|
||||
if ((playlist)&&(!provider)) {
|
||||
for (const [name, {embed}] of Object.entries(providers)) {
|
||||
if (embed.test(playlist))
|
||||
provider = name
|
||||
}
|
||||
}
|
||||
if (!mode)
|
||||
mode = "recent"
|
||||
//Provider
|
||||
if (!(provider in providers))
|
||||
throw {error:{message:provider ? `Unsupported provider "${provider}"` : `Missing provider`}, ...raw}
|
||||
throw {error:{message:provider ? `Unsupported provider "${provider}"` : "Missing provider"}, ...raw}
|
||||
//Mode
|
||||
if (!(mode in modes))
|
||||
throw {error:{message:`Unsupported mode "${mode}"`}, ...raw}
|
||||
//Playlist mode
|
||||
if (mode === "playlist") {
|
||||
if (!playlist)
|
||||
throw {error:{message:`Missing playlist url`}, ...raw}
|
||||
throw {error:{message:"Missing playlist url"}, ...raw}
|
||||
if (!providers[provider].embed.test(playlist))
|
||||
throw {error:{message:`Unsupported playlist url format`}, ...raw}
|
||||
throw {error:{message:"Unsupported playlist url format"}, ...raw}
|
||||
}
|
||||
//Limit
|
||||
limit = Math.max(1, Math.min(100, Number(limit)))
|
||||
@@ -83,7 +89,7 @@
|
||||
tracks = [...await frame.evaluate(() => [...document.querySelectorAll(".tracklist li")].map(li => ({
|
||||
name:li.querySelector(".tracklist__track__name").innerText,
|
||||
artist:li.querySelector(".tracklist__track__sub").innerText,
|
||||
artwork:li.querySelector(".tracklist__track__artwork img").src
|
||||
artwork:li.querySelector(".tracklist__track__artwork img").src,
|
||||
})))]
|
||||
break
|
||||
}
|
||||
@@ -95,7 +101,7 @@
|
||||
name:tr.querySelector("td:nth-child(2) div:nth-child(1)").innerText,
|
||||
artist:tr.querySelector("td:nth-child(2) div:nth-child(2)").innerText,
|
||||
//Spotify doesn't provide artworks so we fallback on playlist artwork instead
|
||||
artwork:window.getComputedStyle(document.querySelector("button[title=Play]").parentNode, null).backgroundImage.match(/^url\("(https:...+)"\)$/)[1]
|
||||
artwork:window.getComputedStyle(document.querySelector("button[title=Play]").parentNode, null).backgroundImage.match(/^url\("(?<url>https:...+)"\)$/)?.groups?.url ?? null,
|
||||
})))]
|
||||
break
|
||||
}
|
||||
@@ -118,8 +124,6 @@
|
||||
}
|
||||
//Recently played
|
||||
case "recent":{
|
||||
//Initialisation
|
||||
const timestamp = Date.now()-24*60*60*1000
|
||||
//Handle provider
|
||||
switch (provider) {
|
||||
//Spotify
|
||||
@@ -127,28 +131,40 @@
|
||||
//Prepare credentials
|
||||
const [client_id, client_secret, refresh_token] = token.split(",").map(part => part.trim())
|
||||
if ((!client_id)||(!client_secret)||(!refresh_token))
|
||||
throw {error:{message:`Spotify token must contain client id/secret and refresh token`}}
|
||||
throw {error:{message:"Spotify token must contain client id/secret and refresh token"}}
|
||||
//API call and parse tracklist
|
||||
try {
|
||||
//Request access token
|
||||
console.debug(`metrics/compute/${login}/plugins > music > requesting access token with spotify refresh token`)
|
||||
const {data:{access_token:access}} = await imports.axios.post("https://accounts.spotify.com/api/token",
|
||||
`${new imports.url.URLSearchParams({grant_type:"refresh_token", refresh_token, client_id, client_secret})}`,
|
||||
{headers:{"Content-Type":"application/x-www-form-urlencoded"}},
|
||||
)
|
||||
const {data:{access_token:access}} = await imports.axios.post("https://accounts.spotify.com/api/token", `${new imports.url.URLSearchParams({grant_type:"refresh_token", refresh_token, client_id, client_secret})}`, {headers:{
|
||||
"Content-Type":"application/x-www-form-urlencoded",
|
||||
}})
|
||||
console.debug(`metrics/compute/${login}/plugins > music > got access token`)
|
||||
//Retrieve tracks
|
||||
console.debug(`metrics/compute/${login}/plugins > music > querying spotify api`)
|
||||
tracks = (await imports.axios.get(`https://api.spotify.com/v1/me/player/recently-played?limit=${limit}&after=${timestamp}`, {headers:{
|
||||
"Accept":"application/json",
|
||||
"Content-Type":"application/json",
|
||||
"Authorization":`Bearer ${access}`}
|
||||
})).data.items.map(({track, played_at}) => ({
|
||||
name:track.name,
|
||||
artist:track.artists[0].name,
|
||||
artwork:track.album.images[0].url,
|
||||
played_at: played_at ? imports.dayjs(played_at).format('[played at] HH:MM on DD/MM/YYYY') : ''
|
||||
}))
|
||||
tracks = []
|
||||
for (let hours = .5; hours <= 24; hours++) {
|
||||
//Load track half-hour by half-hour
|
||||
const timestamp = Date.now()-hours*60*60*1000
|
||||
const loaded = (await imports.axios.get(`https://api.spotify.com/v1/me/player/recently-played?after=${timestamp}`, {headers:{
|
||||
"Content-Type":"application/json",
|
||||
Accept:"application/json",
|
||||
Authorization:`Bearer ${access}`,
|
||||
}})).data.items.map(({track, played_at}) => ({
|
||||
name:track.name,
|
||||
artist:track.artists[0].name,
|
||||
artwork:track.album.images[0].url,
|
||||
played_at:played_at ? imports.dayjs(played_at).format("[played at] HH:MM on DD/MM/YYYY") : null,
|
||||
}))
|
||||
//Ensure no duplicate are added
|
||||
for (const track of loaded) {
|
||||
if (!tracks.map(({name}) => name).includes(track.name))
|
||||
tracks.push(track)
|
||||
}
|
||||
//Early break
|
||||
if (tracks.length >= limit)
|
||||
break
|
||||
}
|
||||
}
|
||||
//Handle errors
|
||||
catch (error) {
|
||||
@@ -169,9 +185,9 @@
|
||||
try {
|
||||
console.debug(`metrics/compute/${login}/plugins > music > querying lastfm api`)
|
||||
tracks = (await imports.axios.get(`https://ws.audioscrobbler.com/2.0/?method=user.getrecenttracks&user=${user}&api_key=${token}&limit=${limit}&format=json`, {headers:{
|
||||
"Accept":"application/json",
|
||||
"User-Agent":"lowlighter/metrics"}
|
||||
})).data.recenttracks.track.map((track) => ({
|
||||
"User-Agent":"lowlighter/metrics",
|
||||
Accept:"application/json",
|
||||
}})).data.recenttracks.track.map(track => ({
|
||||
name:track.name,
|
||||
artist:track.artist["#text"],
|
||||
artwork:track.image.reverse()[0]["#text"],
|
||||
@@ -215,11 +231,11 @@
|
||||
track.artwork = await imports.imgb64(track.artwork)
|
||||
}
|
||||
//Save results
|
||||
return {...raw, tracks}
|
||||
return {...raw, tracks, played_at}
|
||||
}
|
||||
|
||||
//Unhandled error
|
||||
throw {error:{message:`An error occured (could not retrieve tracks)`}}
|
||||
throw {error:{message:"An error occured (could not retrieve tracks)"}}
|
||||
}
|
||||
//Handle errors
|
||||
catch (error) {
|
||||
@@ -227,4 +243,4 @@
|
||||
throw error
|
||||
throw {error:{message:"An error occured", instance:error}}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -47,6 +47,7 @@ inputs:
|
||||
description: Embed playlist url
|
||||
type: string
|
||||
default: ""
|
||||
example: https://embed.music.apple.com/--/playlist/--------/--------
|
||||
|
||||
# Number of music tracks to display
|
||||
plugin_music_limit:
|
||||
@@ -55,6 +56,8 @@ inputs:
|
||||
default: 4
|
||||
min: 1
|
||||
max: 100
|
||||
|
||||
# Display when track was last played
|
||||
plugin_music_played_at:
|
||||
description: Display when the track was played
|
||||
type: boolean
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
token: NOT_NEEDED
|
||||
plugin_music_token: MOCKED_CLIENT_ID, MOCKED_CLIENT_SECRET, MOCKED_REFRESH_TOKEN
|
||||
plugin_music: yes
|
||||
plugin_music_played_at: yes
|
||||
plugin_music_provider: spotify
|
||||
|
||||
- name: Music plugin (recent - lastfm)
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
//Setup
|
||||
export default async function ({login, imports, data, q, account}, {enabled = false, token = null} = {}) {
|
||||
export default async function({login, imports, data, q, account}, {enabled = false, token = null} = {}) {
|
||||
//Plugin execution
|
||||
try {
|
||||
//Check if plugin is enabled and requirements are met
|
||||
@@ -53,4 +53,4 @@
|
||||
}
|
||||
throw {error:{message, instance:error}}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
//Setup
|
||||
export default async function ({login, data, graphql, rest, q, queries, imports, account}, {enabled = false} = {}) {
|
||||
export default async function({login, data, graphql, rest, q, queries, imports, account}, {enabled = false} = {}) {
|
||||
//Plugin execution
|
||||
try {
|
||||
//Check if plugin is enabled and requirements are met
|
||||
@@ -12,7 +12,7 @@
|
||||
types:account === "organization" ? ["sponsorshipsAsMaintainer", "sponsorshipsAsSponsor", "thanks"] : ["followers", "following", "sponsorshipsAsMaintainer", "sponsorshipsAsSponsor", "thanks"],
|
||||
default:"followers, following",
|
||||
alias:{followed:"following", sponsors:"sponsorshipsAsMaintainer", sponsored:"sponsorshipsAsSponsor", sponsoring:"sponsorshipsAsSponsor"},
|
||||
sponsorships:{sponsorshipsAsMaintainer:"sponsorEntity", sponsorshipsAsSponsor:"sponsorable"}
|
||||
sponsorships:{sponsorshipsAsMaintainer:"sponsorEntity", sponsorshipsAsSponsor:"sponsorable"},
|
||||
}
|
||||
if (q.repo) {
|
||||
console.debug(`metrics/compute/${login}/plugins > people > switched to repository mode`)
|
||||
|
||||
@@ -34,6 +34,7 @@ inputs:
|
||||
type: array
|
||||
format: comma-separated
|
||||
default: followers, following
|
||||
example: follower, following, sponsors, sponsoring
|
||||
values:
|
||||
- followers # For user metrics
|
||||
- following # For user metrics
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
//Setup
|
||||
export default async function ({login, data, imports, q, account}, {enabled = false} = {}) {
|
||||
export default async function({login, data, imports, q, account}, {enabled = false} = {}) {
|
||||
//Plugin execution
|
||||
try {
|
||||
//Check if plugin is enabled and requirements are met
|
||||
@@ -36,7 +36,7 @@
|
||||
}
|
||||
|
||||
//Unhandled error
|
||||
throw {error:{message:`An error occured (could not retrieve posts)`}}
|
||||
throw {error:{message:"An error occured (could not retrieve posts)"}}
|
||||
}
|
||||
//Handle errors
|
||||
catch (error) {
|
||||
@@ -44,4 +44,4 @@
|
||||
throw error
|
||||
throw {error:{message:"An error occured", instance:error}}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
//Setup
|
||||
export default async function ({login, data, imports, graphql, q, queries, account}, {enabled = false} = {}) {
|
||||
export default async function({login, data, imports, graphql, q, queries, account}, {enabled = false} = {}) {
|
||||
//Plugin execution
|
||||
try {
|
||||
//Check if plugin is enabled and requirements are met
|
||||
@@ -21,7 +21,7 @@
for (const identifier of repositories) {
//Querying repository project
console.debug(`metrics/compute/${login}/plugins > projects > querying api for ${identifier}`)
-const {user, repository, id} = identifier.match(/(?<user>[-\w]+)[/](?<repository>[-\w]+)[/]projects[/](?<id>\d+)/)?.groups
+const {user, repository, id} = identifier.match(/(?<user>[-\w]+)[/](?<repository>[-\w]+)[/]projects[/](?<id>\d+)/)?.groups ?? {}
const {[account]:{repository:{project}}} = await graphql(queries.projects.repository({user, repository, id, account}))
//Adding it to projects list
console.debug(`metrics/compute/${login}/plugins > projects > registering ${identifier}`)

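Note on the `?? {}` change above: `String.prototype.match` returns `null` when the identifier does not match, so destructuring `?.groups` alone would throw. A minimal sketch of the pattern, using a hypothetical identifier value rather than real input from the codebase:

    //Without the fallback, destructuring undefined (from `null?.groups`) throws a TypeError
    const identifier = "not-a-project-reference"
    const {user, repository, id} = identifier.match(/(?<user>[-\w]+)[/](?<repository>[-\w]+)[/]projects[/](?<id>\d+)/)?.groups ?? {}
    //All three bindings are simply undefined instead of crashing the plugin
    console.log(user, repository, id)
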
@@ -27,6 +27,7 @@ inputs:
plugin_projects_repositories:
description: List of repository project identifiers to disaplay
type: array
example: username/repo/projects/1, username/repo/projects/2, ...
format:
- comma-separated
- /(?<user>[-a-z0-9]+)[/](?<repo>[-a-z0-9]+)[/]projects[/](?<id>[0-9]+)/

@@ -1,5 +1,5 @@
//Setup
-export default async function ({login, graphql, data, imports, q, queries, account}, {enabled = false} = {}) {
+export default async function({login, graphql, data, imports, q, queries, account}, {enabled = false} = {}) {
//Plugin execution
try {
//Check if plugin is enabled and requirements are met
@@ -41,7 +41,7 @@
increments.max = Math.max(...Object.values(increments.dates))

//Compute total stargazers
-let stargazers = data.computed.repositories.stargazers
+let {stargazers} = data.computed.repositories
const total = {dates:{...increments.dates}, max:NaN, min:NaN}
{
const dates = Object.keys(total.dates)

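The stargazers change above follows the `prefer-destructuring` rule from the new configuration: a variable initialized from a property of the same name should be destructured. A small sketch with a hypothetical object shape (only what the diff itself shows):

    const data = {computed:{repositories:{stargazers:1234}}}
    //Instead of: let stargazers = data.computed.repositories.stargazers
    let {stargazers} = data.computed.repositories
    console.log(stargazers)
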
@@ -1,5 +1,5 @@
//Setup
-export default async function ({login, data, graphql, q, queries, imports, account}, {enabled = false} = {}) {
+export default async function({login, data, graphql, q, queries, imports, account}, {enabled = false} = {}) {
//Plugin execution
try {
//Check if plugin is enabled and requirements are met

@@ -1,5 +1,5 @@
//Setup
-export default async function ({login, data, imports, q, account}, {enabled = false} = {}) {
+export default async function({login, data, imports, q, account}, {enabled = false} = {}) {
//Plugin execution
try {
//Check if plugin is enabled and requirements are met
@@ -90,4 +90,4 @@
-throw error
+throw {error:{message:"An error occured", instance:error}}
}
}
}

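The `throw error` → `throw {error:{message, instance:error}}` change above makes unexpected failures use the same shape as the plugin's handled errors, so a caller gets a displayable message plus the original exception. A hedged sketch of the pattern with a hypothetical caller (not part of the codebase):

    async function examplePlugin() {
      try {
        throw new Error("network timeout")
      }
      //Wrap the raw exception into the common {error:{message, instance}} shape
      catch (error) {
        throw {error:{message:"An error occured", instance:error}}
      }
    }

    //Hypothetical caller: renders the message, keeps the original instance for debugging
    examplePlugin().catch(({error}) => console.log(error.message, error.instance))
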
@@ -1,5 +1,5 @@
//Setup
-export default async function ({login, imports, data, rest, q, account}, {enabled = false} = {}) {
+export default async function({login, imports, data, rest, q, account}, {enabled = false} = {}) {
//Plugin execution
try {
//Check if plugin is enabled and requirements are met
@@ -15,7 +15,7 @@
//Get views stats from repositories
console.debug(`metrics/compute/${login}/plugins > traffic > querying api`)
const views = {count:0, uniques:0}
-const response = await Promise.all(repositories.map(async ({repo, owner}) => await rest.repos.getViews({owner, repo})))
+const response = await Promise.all(repositories.map(({repo, owner}) => rest.repos.getViews({owner, repo})))

//Compute views
console.debug(`metrics/compute/${login}/plugins > traffic > computing stats`)
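The traffic change above drops a redundant `async`/`await` wrapper inside `Promise.all`: mapping straight to the promise returned by the REST call is equivalent and satisfies the `no-return-await` rule. A minimal sketch with a hypothetical `getViews` stub standing in for `rest.repos.getViews`:

    //Hypothetical stub standing in for rest.repos.getViews
    const getViews = async ({owner, repo}) => ({owner, repo, count:1, uniques:1})
    const repositories = [{owner:"octocat", repo:"hello-world"}]

    //Before: async ({repo, owner}) => await getViews({owner, repo}) re-awaits a promise that Promise.all already awaits
    //After: return the promise directly
    const response = await Promise.all(repositories.map(({repo, owner}) => getViews({owner, repo})))
    console.log(response)
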
@@ -31,4 +31,4 @@
message = "Insufficient token rights"
throw {error:{message, instance:error}}
}
}
}

@@ -1,5 +1,5 @@
//Setup
-export default async function ({login, imports, data, q, account}, {enabled = false, token = ""} = {}) {
+export default async function({login, imports, data, q, account}, {enabled = false, token = ""} = {}) {
//Plugin execution
try {
//Check if plugin is enabled and requirements are met
@@ -35,18 +35,17 @@
tweet.mentions = tweet.entities?.mentions.map(({username}) => username) ?? []
//Format text
console.debug(`metrics/compute/${login}/plugins > tweets > formatting tweet ${tweet.id}`)
-tweet.text = imports.htmlescape(
+tweet.text = imports.htmlescape( //eslint-disable-line function-paren-newline
//Escape tags
imports.htmlescape(tweet.text, {"<":true, ">":true})
//Mentions
-.replace(new RegExp(`@(${tweet.mentions.join("|")})`, "gi"), ` <span class="mention">@$1</span> `)
+.replace(new RegExp(`@(${tweet.mentions.join("|")})`, "gi"), ' <span class="mention">@$1</span> ')
//Hashtags (this regex comes from the twitter source code)
.replace(/(?<!&)[#|#]([a-z0-9_\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u00ff\u0100-\u024f\u0253-\u0254\u0256-\u0257\u0300-\u036f\u1e00-\u1eff\u0400-\u04ff\u0500-\u0527\u2de0-\u2dff\ua640-\ua69f\u0591-\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05d0-\u05ea\u05f0-\u05f4\ufb12-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufb4f\u0610-\u061a\u0620-\u065f\u066e-\u06d3\u06d5-\u06dc\u06de-\u06e8\u06ea-\u06ef\u06fa-\u06fc\u0750-\u077f\u08a2-\u08ac\u08e4-\u08fe\ufb50-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\u200c-\u200c\u0e01-\u0e3a\u0e40-\u0e4e\u1100-\u11ff\u3130-\u3185\ua960-\ua97f\uac00-\ud7af\ud7b0-\ud7ff\uffa1-\uffdc\u30a1-\u30fa\u30fc-\u30fe\uff66-\uff9f\uff10-\uff19\uff21-\uff3a\uff41-\uff5a\u3041-\u3096\u3099-\u309e\u3400-\u4dbf\u4e00-\u9fff\u20000-\u2a6df\u2a700-\u2b73f\u2b740-\u2b81f\u2f800-\u2fa1f]*[a-z_\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u00ff\u0100-\u024f\u0253-\u0254\u0256-\u0257\u0300-\u036f\u1e00-\u1eff\u0400-\u04ff\u0500-\u0527\u2de0-\u2dff\ua640-\ua69f\u0591-\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05d0-\u05ea\u05f0-\u05f4\ufb12-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufb4f\u0610-\u061a\u0620-\u065f\u066e-\u06d3\u06d5-\u06dc\u06de-\u06e8\u06ea-\u06ef\u06fa-\u06fc\u0750-\u077f\u08a2-\u08ac\u08e4-\u08fe\ufb50-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\u200c-\u200c\u0e01-\u0e3a\u0e40-\u0e4e\u1100-\u11ff\u3130-\u3185\ua960-\ua97f\uac00-\ud7af\ud7b0-\ud7ff\uffa1-\uffdc\u30a1-\u30fa\u30fc-\u30fe\uff66-\uff9f\uff10-\uff19\uff21-\uff3a\uff41-\uff5a\u3041-\u3096\u3099-\u309e\u3400-\u4dbf\u4e00-\u9fff\u20000-\u2a6df\u2a700-\u2b73f\u2b740-\u2b81f\u2f800-\u2fa1f][a-z0-9_\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u00ff\u0100-\u024f\u0253-\u0254\u0256-\u0257\u0300-\u036f\u1e00-\u1eff\u0400-\u04ff\u0500-\u0527\u2de0-\u2dff\ua640-\ua69f\u0591-\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05d0-\u05ea\u05f0-\u05f4\ufb12-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufb4f\u0610-\u061a\u0620-\u065f\u066e-\u06d3\u06d5-\u06dc\u06de-\u06e8\u06ea-\u06ef\u06fa-\u06fc\u0750-\u077f\u08a2-\u08ac\u08e4-\u08fe\ufb50-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\u200c-\u200c\u0e01-\u0e3a\u0e40-\u0e4e\u1100-\u11ff\u3130-\u3185\ua960-\ua97f\uac00-\ud7af\ud7b0-\ud7ff\uffa1-\uffdc\u30a1-\u30fa\u30fc-\u30fe\uff66-\uff9f\uff10-\uff19\uff21-\uff3a\uff41-\uff5a\u3041-\u3096\u3099-\u309e\u3400-\u4dbf\u4e00-\u9fff\u20000-\u2a6df\u2a700-\u2b73f\u2b740-\u2b81f\u2f800-\u2fa1f]*)/gi, ` <span class="hashtag">#$1</span> `)
.replace(/(?<!&)[#|#]([a-z0-9_\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u00ff\u0100-\u024f\u0253-\u0254\u0256-\u0257\u0300-\u036f\u1e00-\u1eff\u0400-\u04ff\u0500-\u0527\u2de0-\u2dff\ua640-\ua69f\u0591-\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05d0-\u05ea\u05f0-\u05f4\ufb12-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufb4f\u0610-\u061a\u0620-\u065f\u066e-\u06d3\u06d5-\u06dc\u06de-\u06e8\u06ea-\u06ef\u06fa-\u06fc\u0750-\u077f\u08a2-\u08ac\u08e4-\u08fe\ufb50-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\u200c-\u200c\u0e01-\u0e3a\u0e40-\u0e4e\u1100-\u11ff\u3130-\u3185\ua960-\ua97f\uac00-\ud7af\ud7b0-\ud7ff\uffa1-\uffdc\u30a1-\u30fa\u30fc-\u30fe\uff66-\uff9f\uff10-\uff19\uff21-\uff3a\uff41-\uff5a\u3041-\u3096\u3099-\u309e\u3400-\u4dbf\u4e00-\u9fff\u20000-\u2a6df\u2a700-\u2b73f\u2b740-\u2b81f\u2f800-\u2fa1f]*[a-z_\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u00ff\u0100-\u024f\u0253-\u0254\u0256-\u0257\u0300-\u036f\u1e00-\u1eff\u0400-\u04ff\u0500-\u0527\u2de0-\u2dff\ua640-\ua69f\u0591-\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05d0-\u05ea\u05f0-\u05f4\ufb12-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufb4f\u0610-\u061a\u0620-\u065f\u066e-\u06d3\u06d5-\u06dc\u06de-\u06e8\u06ea-\u06ef\u06fa-\u06fc\u0750-\u077f\u08a2-\u08ac\u08e4-\u08fe\ufb50-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\u200c-\u200c\u0e01-\u0e3a\u0e40-\u0e4e\u1100-\u11ff\u3130-\u3185\ua960-\ua97f\uac00-\ud7af\ud7b0-\ud7ff\uffa1-\uffdc\u30a1-\u30fa\u30fc-\u30fe\uff66-\uff9f\uff10-\uff19\uff21-\uff3a\uff41-\uff5a\u3041-\u3096\u3099-\u309e\u3400-\u4dbf\u4e00-\u9fff\u20000-\u2a6df\u2a700-\u2b73f\u2b740-\u2b81f\u2f800-\u2fa1f][a-z0-9_\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u00ff\u0100-\u024f\u0253-\u0254\u0256-\u0257\u0300-\u036f\u1e00-\u1eff\u0400-\u04ff\u0500-\u0527\u2de0-\u2dff\ua640-\ua69f\u0591-\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05d0-\u05ea\u05f0-\u05f4\ufb12-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufb4f\u0610-\u061a\u0620-\u065f\u066e-\u06d3\u06d5-\u06dc\u06de-\u06e8\u06ea-\u06ef\u06fa-\u06fc\u0750-\u077f\u08a2-\u08ac\u08e4-\u08fe\ufb50-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\u200c-\u200c\u0e01-\u0e3a\u0e40-\u0e4e\u1100-\u11ff\u3130-\u3185\ua960-\ua97f\uac00-\ud7af\ud7b0-\ud7ff\uffa1-\uffdc\u30a1-\u30fa\u30fc-\u30fe\uff66-\uff9f\uff10-\uff19\uff21-\uff3a\uff41-\uff5a\u3041-\u3096\u3099-\u309e\u3400-\u4dbf\u4e00-\u9fff\u20000-\u2a6df\u2a700-\u2b73f\u2b740-\u2b81f\u2f800-\u2fa1f]*)/gi, ' <span class="hashtag">#$1</span> ') //eslint-disable-line no-misleading-character-class, prefer-named-capture-group
//Line breaks
.replace(/\n/g, "<br/>")
//Links
-.replace(/https?:[/][/](t.co[/]\w+)/g, ` <span class="link">$1</span> `)
-, {"&":true})
+.replace(/https?:[/][/](?<link>t.co[/]\w+)/g, ' <span class="link">$<link></span> '), {"&":true})
}))

//Result
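The links replacement above now uses a named capture group, as encouraged by the `prefer-named-capture-group` rule, and refers to it as `$<link>` in the replacement string. A small sketch with a hypothetical tweet text:

    const text = "see https://t.co/abc123 for details"
    //The named group <link> captures the shortened URL and is reused via $<link>
    const formatted = text.replace(/https?:[/][/](?<link>t.co[/]\w+)/g, ' <span class="link">$<link></span> ')
    console.log(formatted)
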
@@ -63,4 +62,4 @@
}
throw {error:{message, instance:error}}
}
}
}

@@ -1,5 +1,5 @@
//Setup
-export default async function ({login, q, imports, data, account}, {enabled = false, token} = {}) {
+export default async function({login, q, imports, data, account}, {enabled = false, token} = {}) {
//Plugin execution
try {
//Check if plugin is enabled and requirements are met
@@ -12,8 +12,7 @@
limit = void(limit)
const range = {"7":"last_7_days", "30":"last_30_days", "180":"last_6_months", "365":"last_year"}[days] ?? "last_7_days"

-//Querying api and format result
-//https://wakatime.com/developers#stats
+//Querying api and format result (https://wakatime.com/developers#stats)
console.debug(`metrics/compute/${login}/plugins > wakatime > querying api`)
const {data:{data:stats}} = await imports.axios.get(`https://wakatime.com/api/v1/users/current/stats/${range}?api_key=${token}`)
const result = {
@@ -42,4 +41,4 @@
}
throw {error:{message, instance:error}}
}
}
}

@@ -22,12 +22,12 @@
<div class="tracklist">
<% for (const {name = "", artist = "", artwork = "", played_at = ""} of plugins.music.tracks) { %>
<div class="track">
-<img src="data:image/png;base64,<%= artwork %>" width="<%= !!played_at.length ? 48 : 32 %>" height="<%= !!played_at.length ? 48 : 32 %>" alt=""/>
+<img src="data:image/png;base64,<%= artwork %>" width="<%= plugins.music.played_at ? 48 : 32 %>" height="<%= plugins.music.played_at ? 48 : 32 %>" alt=""/>
<div class="infos">
-<div class="name"><b><%= name %><b></div>
+<div class="name"><%= name %></div>
<div class="artist"><%= artist %></div>
-<% if (played_at.length) { %>
-<div class="played_at"><%= played_at %></div>
+<% if (plugins.music.played_at) { %>
+<div class="played-at"><%= played_at %></div>
<% } %>
</div>
</div>

@@ -272,8 +272,9 @@
.track .name {
font-size: 14px;
line-height: 14px;
+font-weight: 600;
}
-.track .artist {
+.track .artist, .track .played-at {
font-size: 12px;
color: #666666;
}

@@ -1,5 +1,5 @@
-/** Template processor */
-export default async function ({login, q}, {conf, data, rest, graphql, plugins, queries}, {s, pending, imports}) {
+/**Template processor */
+export default async function(_, __, {imports}) {
//Core
await imports.plugins.core(...arguments)
}
}

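The rewritten template signature above drops parameters the classic template never used and names the remaining ignored positions `_` and `__`, which the new configuration tolerates through `no-unused-vars` with `argsIgnorePattern: "^_"`. A hedged sketch of the convention (hypothetical handler, not from the codebase):

    /* eslint no-unused-vars: ["error", {argsIgnorePattern: "^_"}] */
    //Underscore-prefixed parameters mark intentionally unused positional arguments
    function handler(_, __, payload) {
      return payload
    }
    console.log(handler("ignored", "ignored too", {ok:true}))
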
@@ -1,11 +1,11 @@
-/** Template processor */
-export default async function ({login, q}, {conf, data, rest, graphql, plugins, queries, account}, {s, pending, imports}) {
+/**Template processor */
+export default async function({login, q}, {data, rest, graphql, queries, account}, {pending, imports}) {
//Check arguments
const {repo} = q
if (!repo) {
console.debug(`metrics/compute/${login}/${repo} > error, repo was undefined`)
-data.errors.push({error:{message:`You must pass a "repo" argument to use this template`}})
-return await imports.plugins.core(...arguments)
+data.errors.push({error:{message:"You must pass a \"repo\" argument to use this template"}})
+return imports.plugins.core(...arguments)
}
console.debug(`metrics/compute/${login}/${repo} > switching to mode ${account}`)

@@ -73,4 +73,4 @@
//Reformat projects names
if (data.plugins.projects)
data.plugins.projects.list?.map(project => project.name = project.name.replace(`(${login}/${repo})`, "").trim())
}
}

@@ -1,7 +1,7 @@
-/** Template processor */
-export default async function ({login, q}, {conf, data, rest, graphql, plugins, queries}, {s, pending, imports}) {
+/**Template processor */
+export default async function({q}, _, {imports}) {
//Core
await imports.plugins.core(...arguments)
//Disable optimization to keep white-spaces
q.raw = true
}
}