chore: code formatting

github-actions[bot]
2022-01-16 01:41:24 +00:00
parent 4fa62aad67
commit d3228a126a
94 changed files with 3875 additions and 3822 deletions
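The formatting applied across these 94 files is consistent throughout the diff below: spaces inside object-literal braces, trailing commas in multiline literals, spaces around binary operators, double-quoted strings, and long call expressions broken across multiple lines. As a minimal before/after sketch (the identifiers and values here are illustrative only, not taken from the codebase):

    // Before formatting (kept as a comment so this snippet stays runnable):
    //   const options = {log:false, delay:120*1000, name:'demo'}
    // After formatting: spaced braces, spaced operators, double-quoted strings
    const options = { log: false, delay: 120 * 1000, name: "demo" }
    console.log(options)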

@@ -1,12 +1,12 @@
//Imports //Imports
import ejs from "ejs"
import fs from "fs/promises" import fs from "fs/promises"
import ejs from "ejs"
import fss from "fs" import fss from "fs"
import paths from "path"
import url from "url"
import sgit from "simple-git"
import metadata from "../../source/app/metrics/metadata.mjs"
import yaml from "js-yaml" import yaml from "js-yaml"
import paths from "path"
import sgit from "simple-git"
import url from "url"
import metadata from "../../source/app/metrics/metadata.mjs"
//Mode //Mode
const [mode = "dryrun"] = process.argv.slice(2) const [mode = "dryrun"] = process.argv.slice(2)
@@ -25,34 +25,36 @@ const __test_secrets = paths.join(paths.join(__metrics, "tests/secrets.json"))
//Git setup //Git setup
const git = sgit(__metrics) const git = sgit(__metrics)
const staged = new Set() const staged = new Set()
const secrets = Object.assign(JSON.parse(`${await fs.readFile(__test_secrets)}`), {$regex:/\$\{\{\s*secrets\.(?<secret>\w+)\s*\}\}/}) const secrets = Object.assign(JSON.parse(`${await fs.readFile(__test_secrets)}`), { $regex: /\$\{\{\s*secrets\.(?<secret>\w+)\s*\}\}/ })
const {plugins, templates} = await metadata({log:false, diff:true}) const { plugins, templates } = await metadata({ log: false, diff: true })
const workflow = [] const workflow = []
//Config and general documentation auto-generation //Config and general documentation auto-generation
for (const step of ["config", "documentation"]) { for (const step of ["config", "documentation"]) {
switch (step) { switch (step) {
case "config": case "config":
await update({source:paths.join(__action, "action.yml"), output:"action.yml"}) await update({ source: paths.join(__action, "action.yml"), output: "action.yml" })
await update({source:paths.join(__web, "settings.example.json"), output:"settings.example.json"}) await update({ source: paths.join(__web, "settings.example.json"), output: "settings.example.json" })
break break
case "documentation": case "documentation":
await update({source:paths.join(__readme, "README.md"), output:"README.md", options:{root:__readme}}) await update({ source: paths.join(__readme, "README.md"), output: "README.md", options: { root: __readme } })
await update({source:paths.join(__readme, "partials/documentation/plugins.md"), output:"source/plugins/README.md"}) await update({ source: paths.join(__readme, "partials/documentation/plugins.md"), output: "source/plugins/README.md" })
await update({source:paths.join(__readme, "partials/documentation/templates.md"), output:"source/templates/README.md"}) await update({ source: paths.join(__readme, "partials/documentation/templates.md"), output: "source/templates/README.md" })
break break
} }
} }
//Plugins //Plugins
for (const id of Object.keys(plugins)) { for (const id of Object.keys(plugins)) {
const {examples, options, readme, tests, header} = await plugin(id) const { examples, options, readme, tests, header } = await plugin(id)
//Readme //Readme
await fs.writeFile(readme.path, readme.content await fs.writeFile(
.replace(/(<!--header-->)[\s\S]*(<!--\/header-->)/g, `$1\n${header}\n$2`) readme.path,
.replace(/(<!--examples-->)[\s\S]*(<!--\/examples-->)/g, `$1\n${examples.map(({test, prod, ...step}) => ["```yaml", yaml.dump(step), "```"].join("\n")).join("\n")}\n$2`) readme.content
.replace(/(<!--options-->)[\s\S]*(<!--\/options-->)/g, `$1\n${options}\n$2`) .replace(/(<!--header-->)[\s\S]*(<!--\/header-->)/g, `$1\n${header}\n$2`)
.replace(/(<!--examples-->)[\s\S]*(<!--\/examples-->)/g, `$1\n${examples.map(({ test, prod, ...step }) => ["```yaml", yaml.dump(step), "```"].join("\n")).join("\n")}\n$2`)
.replace(/(<!--options-->)[\s\S]*(<!--\/options-->)/g, `$1\n${options}\n$2`),
) )
console.log(`Generating source/plugins/${id}/README.md`) console.log(`Generating source/plugins/${id}/README.md`)
@@ -64,12 +66,14 @@ for (const id of Object.keys(plugins)) {
//Templates //Templates
for (const id of Object.keys(templates)) { for (const id of Object.keys(templates)) {
const {examples, readme, tests, header} = await template(id) const { examples, readme, tests, header } = await template(id)
//Readme //Readme
await fs.writeFile(readme.path, readme.content await fs.writeFile(
.replace(/(<!--header-->)[\s\S]*(<!--\/header-->)/g, `$1\n${header}\n$2`) readme.path,
.replace(/(<!--examples-->)[\s\S]*(<!--\/examples-->)/g, `$1\n${examples.map(({test, prod, ...step}) => ["```yaml", yaml.dump(step), "```"].join("\n")).join("\n")}\n$2`) readme.content
.replace(/(<!--header-->)[\s\S]*(<!--\/header-->)/g, `$1\n${header}\n$2`)
.replace(/(<!--examples-->)[\s\S]*(<!--\/examples-->)/g, `$1\n${examples.map(({ test, prod, ...step }) => ["```yaml", yaml.dump(step), "```"].join("\n")).join("\n")}\n$2`),
) )
console.log(`Generating source/templates/${id}/README.md`) console.log(`Generating source/templates/${id}/README.md`)
@@ -80,7 +84,7 @@ for (const id of Object.keys(templates)) {
} }
//Example workflows //Example workflows
await update({source:paths.join(__metrics, ".github/scripts/files/examples.yml"), output:".github/workflows/examples.yml", context:{steps:yaml.dump(workflow)}}) await update({ source: paths.join(__metrics, ".github/scripts/files/examples.yml"), output: ".github/workflows/examples.yml", context: { steps: yaml.dump(workflow) } })
//Commit and push //Commit and push
if (mode === "publish") { if (mode === "publish") {
@@ -98,10 +102,10 @@ console.log("Success!")
//================================================================================== //==================================================================================
//Update generated files //Update generated files
async function update({source, output, context = {}, options = {}}) { async function update({ source, output, context = {}, options = {} }) {
console.log(`Generating ${output}`) console.log(`Generating ${output}`)
const {plugins, templates, packaged, descriptor} = await metadata({log:false}) const { plugins, templates, packaged, descriptor } = await metadata({ log: false })
const content = await ejs.renderFile(source, {plugins, templates, packaged, descriptor, ...context}, {async:true, ...options}) const content = await ejs.renderFile(source, { plugins, templates, packaged, descriptor, ...context }, { async: true, ...options })
const file = paths.join(__metrics, output) const file = paths.join(__metrics, output)
await fs.writeFile(file, content) await fs.writeFile(file, content)
staged.add(file) staged.add(file)
@@ -114,16 +118,16 @@ async function plugin(id) {
const examples = paths.join(path, "examples.yml") const examples = paths.join(path, "examples.yml")
const tests = paths.join(__test_cases, `${id}.plugin.yml`) const tests = paths.join(__test_cases, `${id}.plugin.yml`)
return { return {
readme:{ readme: {
path:readme, path: readme,
content:`${await fs.readFile(readme)}` content: `${await fs.readFile(readme)}`,
}, },
tests:{ tests: {
path:tests path: tests,
}, },
examples:fss.existsSync(examples) ? yaml.load(await fs.readFile(examples), "utf8") ?? [] : [], examples: fss.existsSync(examples) ? yaml.load(await fs.readFile(examples), "utf8") ?? [] : [],
options:plugins[id].readme.table, options: plugins[id].readme.table,
header:plugins[id].readme.header header: plugins[id].readme.header,
} }
} }
@@ -134,44 +138,44 @@ async function template(id) {
const examples = paths.join(path, "examples.yml") const examples = paths.join(path, "examples.yml")
const tests = paths.join(__test_cases, `${id}.template.yml`) const tests = paths.join(__test_cases, `${id}.template.yml`)
return { return {
readme:{ readme: {
path:readme, path: readme,
content:`${await fs.readFile(readme)}` content: `${await fs.readFile(readme)}`,
}, },
tests:{ tests: {
path:tests path: tests,
}, },
examples:fss.existsSync(examples) ? yaml.load(await fs.readFile(examples), "utf8") ?? [] : [], examples: fss.existsSync(examples) ? yaml.load(await fs.readFile(examples), "utf8") ?? [] : [],
header:templates[id].readme.header header: templates[id].readme.header,
} }
} }
//Testcase generator //Testcase generator
function testcase(name, env, args) { function testcase(name, env, args) {
const {prod = {}, test = {}, ...step} = JSON.parse(JSON.stringify(args)) const { prod = {}, test = {}, ...step } = JSON.parse(JSON.stringify(args))
const context = {prod, test}[env] ?? {} const context = { prod, test }[env] ?? {}
if (context.skip) if (context.skip)
return null return null
Object.assign(step.with, context.with ?? {}) Object.assign(step.with, context.with ?? {})
delete context.with delete context.with
const result = {...step, ...context, name:`${name} - ${step.name ?? "(unnamed)"}`} const result = { ...step, ...context, name: `${name} - ${step.name ?? "(unnamed)"}` }
for (const [k, v] of Object.entries(result.with)) { for (const [k, v] of Object.entries(result.with)) {
if ((env === "test")&&(secrets.$regex.test(v))) if ((env === "test") && (secrets.$regex.test(v)))
result.with[k] = v.replace(secrets.$regex, secrets[v.match(secrets.$regex)?.groups?.secret]) result.with[k] = v.replace(secrets.$regex, secrets[v.match(secrets.$regex)?.groups?.secret])
} }
if (env === "prod") { if (env === "prod") {
result.if = "${{ success() || failure() }}" result.if = "${{ success() || failure() }}"
result.uses = "lowlighter/metrics@master" result.uses = "lowlighter/metrics@master"
Object.assign(result.with, {plugins_errors_fatal:"yes", output_action:"none", delay:120}) Object.assign(result.with, { plugins_errors_fatal: "yes", output_action: "none", delay: 120 })
} }
if (env === "test") { if (env === "test") {
if (!result.with.base) if (!result.with.base)
delete result.with.base delete result.with.base
delete result.with.filename delete result.with.filename
Object.assign(result.with, {use_mocked_data:"yes", verify:"yes"}) Object.assign(result.with, { use_mocked_data: "yes", verify: "yes" })
} }
return result return result

@@ -3,19 +3,19 @@ import puppeteer from "puppeteer"
//Setup browser //Setup browser
const browser = await puppeteer.launch({ const browser = await puppeteer.launch({
headless:true, headless: true,
executablePath:process.env.PUPPETEER_BROWSER_PATH, executablePath: process.env.PUPPETEER_BROWSER_PATH,
args:["--no-sandbox", "--disable-extensions", "--disable-setuid-sandbox", "--disable-dev-shm-usage"], args: ["--no-sandbox", "--disable-extensions", "--disable-setuid-sandbox", "--disable-dev-shm-usage"],
ignoreDefaultArgs:["--disable-extensions"], ignoreDefaultArgs: ["--disable-extensions"],
}) })
const page = await browser.newPage() const page = await browser.newPage()
//Select markdown example and take screenshoot //Select markdown example and take screenshoot
await page.setViewport({width:600, height:600}) await page.setViewport({ width: 600, height: 600 })
await page.goto("https://github.com/lowlighter/metrics/blob/examples/metrics.markdown.md") await page.goto("https://github.com/lowlighter/metrics/blob/examples/metrics.markdown.md")
const clip = await page.evaluate(() => { const clip = await page.evaluate(() => {
const {x, y, width, height} = document.querySelector("#readme").getBoundingClientRect() const { x, y, width, height } = document.querySelector("#readme").getBoundingClientRect()
return {x, y, width, height} return { x, y, width, height }
}) })
await page.screenshot({type:"png", path:"metrics.markdown.png", clip, omitBackground:true}) await page.screenshot({ type: "png", path: "metrics.markdown.png", clip, omitBackground: true })
await browser.close() await browser.close()

@@ -19,27 +19,28 @@ const __preview_templates_ = paths.join(__preview, ".templates_")
const __preview_about = paths.join(__preview, "about/.statics") const __preview_about = paths.join(__preview, "about/.statics")
//Extract from web server //Extract from web server
const {conf, Templates} = await setup({nosettings:true, log:false}) const { conf, Templates } = await setup({ nosettings: true, log: false })
const templates = Object.entries(Templates).map(([name]) => ({name, enabled:true})) const templates = Object.entries(Templates).map(([name]) => ({ name, enabled: true }))
const metadata = Object.fromEntries(Object.entries(conf.metadata.plugins) const metadata = Object.fromEntries(
.map(([key, value]) => [key, Object.fromEntries(Object.entries(value).filter(([key]) => ["name", "icon", "category", "web", "supports"].includes(key)))]) Object.entries(conf.metadata.plugins)
.map(([key, value]) => [key, key === "core" ? {...value, web:Object.fromEntries(Object.entries(value.web).filter(([key]) => /^config[.]/.test(key)).map(([key, value]) => [key.replace(/^config[.]/, ""), value]))} : value])) .map(([key, value]) => [key, Object.fromEntries(Object.entries(value).filter(([key]) => ["name", "icon", "category", "web", "supports"].includes(key)))])
.map(([key, value]) => [key, key === "core" ? { ...value, web: Object.fromEntries(Object.entries(value.web).filter(([key]) => /^config[.]/.test(key)).map(([key, value]) => [key.replace(/^config[.]/, ""), value])) } : value]),
)
//Directories //Directories
await fs.mkdir(__preview, {recursive:true}) await fs.mkdir(__preview, { recursive: true })
await fs.mkdir(__preview_js, {recursive:true}) await fs.mkdir(__preview_js, { recursive: true })
await fs.mkdir(__preview_css, {recursive:true}) await fs.mkdir(__preview_css, { recursive: true })
await fs.mkdir(__preview_templates, {recursive:true}) await fs.mkdir(__preview_templates, { recursive: true })
await fs.mkdir(__preview_templates_, {recursive:true}) await fs.mkdir(__preview_templates_, { recursive: true })
await fs.mkdir(__preview_about, {recursive:true}) await fs.mkdir(__preview_about, { recursive: true })
//Web //Web
fs.copyFile(paths.join(__web, "index.html"), paths.join(__preview, "index.html")) fs.copyFile(paths.join(__web, "index.html"), paths.join(__preview, "index.html"))
fs.copyFile(paths.join(__web, "favicon.png"), paths.join(__preview, ".favicon.png")) fs.copyFile(paths.join(__web, "favicon.png"), paths.join(__preview, ".favicon.png"))
fs.copyFile(paths.join(__web, "opengraph.png"), paths.join(__preview, ".opengraph.png")) fs.copyFile(paths.join(__web, "opengraph.png"), paths.join(__preview, ".opengraph.png"))
//Plugins and templates //Plugins and templates
fs.writeFile(paths.join(__preview, ".plugins"), JSON.stringify(Object.entries(metadata).filter(([_name, {category}]) => category !== "core").map(([name]) => ({name, enabled:false})))) fs.writeFile(paths.join(__preview, ".plugins"), JSON.stringify(Object.entries(metadata).filter(([_name, { category }]) => category !== "core").map(([name]) => ({ name, enabled: false }))))
fs.writeFile(paths.join(__preview, ".plugins.base"), JSON.stringify(conf.settings.plugins.base.parts)) fs.writeFile(paths.join(__preview, ".plugins.base"), JSON.stringify(conf.settings.plugins.base.parts))
fs.writeFile(paths.join(__preview, ".plugins.metadata"), JSON.stringify(metadata)) fs.writeFile(paths.join(__preview, ".plugins.metadata"), JSON.stringify(metadata))
fs.writeFile(paths.join(__preview, ".templates__"), JSON.stringify(templates)) fs.writeFile(paths.join(__preview, ".templates__"), JSON.stringify(templates))
@@ -47,7 +48,7 @@ for (const template in conf.templates) {
fs.writeFile(paths.join(__preview_templates_, template), JSON.stringify(conf.templates[template])) fs.writeFile(paths.join(__preview_templates_, template), JSON.stringify(conf.templates[template]))
const __partials = paths.join(__templates, template, "partials") const __partials = paths.join(__templates, template, "partials")
const __preview_partials = paths.join(__preview_templates, template, "partials") const __preview_partials = paths.join(__preview_templates, template, "partials")
await fs.mkdir(__preview_partials, {recursive:true}) await fs.mkdir(__preview_partials, { recursive: true })
for (const file of await fs.readdir(__partials)) for (const file of await fs.readdir(__partials))
fs.copyFile(paths.join(__partials, file), paths.join(__preview_partials, file)) fs.copyFile(paths.join(__partials, file), paths.join(__preview_partials, file))
} }
@@ -71,9 +72,10 @@ fs.copyFile(paths.join(__node_modules, "prismjs/components/prism-markdown.min.js
fs.copyFile(paths.join(__node_modules, "clipboard/dist/clipboard.min.js"), paths.join(__preview_js, "clipboard.min.js")) fs.copyFile(paths.join(__node_modules, "clipboard/dist/clipboard.min.js"), paths.join(__preview_js, "clipboard.min.js"))
//Meta //Meta
fs.writeFile(paths.join(__preview, ".version"), JSON.stringify(`${conf.package.version}-preview`)) fs.writeFile(paths.join(__preview, ".version"), JSON.stringify(`${conf.package.version}-preview`))
fs.writeFile(paths.join(__preview, ".hosted"), JSON.stringify({by:"metrics", link:"https://github.com/lowlighter/metrics"})) fs.writeFile(paths.join(__preview, ".hosted"), JSON.stringify({ by: "metrics", link: "https://github.com/lowlighter/metrics" }))
//About //About
fs.copyFile(paths.join(__web, "about", "index.html"), paths.join(__preview, "about", "index.html")) fs.copyFile(paths.join(__web, "about", "index.html"), paths.join(__preview, "about", "index.html"))
for (const file of await fs.readdir(__web_about)) for (const file of await fs.readdir(__web_about)) {
if (file !== ".statics") if (file !== ".statics")
fs.copyFile(paths.join(__web_about, file), paths.join(__preview_about, file)) fs.copyFile(paths.join(__web_about, file), paths.join(__preview_about, file))
}

@@ -1,8 +1,8 @@
//Imports //Imports
import github from "@actions/github" import github from "@actions/github"
import paths from "path" import paths from "path"
import url from "url"
import sgit from "simple-git" import sgit from "simple-git"
import url from "url"
//Git setup //Git setup
const __metrics = paths.join(paths.dirname(url.fileURLToPath(import.meta.url)), "../..") const __metrics = paths.join(paths.dirname(url.fileURLToPath(import.meta.url)), "../..")
@@ -18,7 +18,7 @@ const repository = process.env.GITHUB_REPOSITORY.match(/^(?<owner>[\s\S]+)[/](?<
const version = process.env.GITHUB_COMMIT_MESSAGE.match(/(?<version>v\d+[.]\d+)/)?.groups?.version ?? null const version = process.env.GITHUB_COMMIT_MESSAGE.match(/(?<version>v\d+[.]\d+)/)?.groups?.version ?? null
//Check arguments //Check arguments
if ((!repository)||(!repository.name)||(!repository.owner)) if ((!repository) || (!repository.name) || (!repository.owner))
throw new Error(`Could not parse repository "${process.env.GITHUB_REPOSITORY}"`) throw new Error(`Could not parse repository "${process.env.GITHUB_REPOSITORY}"`)
console.log(`Repository: ${repository.owner}/${repository.name}`) console.log(`Repository: ${repository.owner}/${repository.name}`)
if (!version) if (!version)
@@ -26,8 +26,8 @@ if (!version)
console.log(`Version: ${version}`) console.log(`Version: ${version}`)
//Load related pr //Load related pr
const {data:{items:prs}} = await rest.search.issuesAndPullRequests({ const { data: { items: prs } } = await rest.search.issuesAndPullRequests({
q:`repo:${repository.owner}/${repository.name} is:pr is:merged author:${maintainer} assignee:${maintainer} Release ${version} in:title` q: `repo:${repository.owner}/${repository.name} is:pr is:merged author:${maintainer} assignee:${maintainer} Release ${version} in:title`,
}) })
//Ensure that there is exactly one pr matching //Ensure that there is exactly one pr matching
@@ -40,17 +40,17 @@ console.log(`Using pr#${patchnote.number}: ${patchnote.title}`)
//Check whether release already exists //Check whether release already exists
try { try {
const {data:{id}} = await rest.repos.getReleaseByTag({owner:repository.owner, repo:repository.name, tag:version}) const { data: { id } } = await rest.repos.getReleaseByTag({ owner: repository.owner, repo: repository.name, tag: version })
console.log(`Release ${version} already exists (#${id}), will replace it`) console.log(`Release ${version} already exists (#${id}), will replace it`)
await rest.repos.deleteRelease({owner:repository.owner, repo:repository.name, release_id:id}) await rest.repos.deleteRelease({ owner: repository.owner, repo: repository.name, release_id: id })
console.log(`Deleting tag ${version}`) console.log(`Deleting tag ${version}`)
await git.push(["--delete", "origin", version]) await git.push(["--delete", "origin", version])
await new Promise(solve => setTimeout(solve, 15*1000)) await new Promise(solve => setTimeout(solve, 15 * 1000))
} }
catch { catch {
console.log(`Release ${version} does not exists yet, will create it`) console.log(`Release ${version} does not exists yet, will create it`)
} }
//Publish release //Publish release
await rest.repos.createRelease({owner:repository.owner, repo:repository.name, tag_name:version, name:`Version ${version.replace(/^v/g, "")}`, body:patchnote.body}) await rest.repos.createRelease({ owner: repository.owner, repo: repository.name, tag_name: version, name: `Version ${version.replace(/^v/g, "")}`, body: patchnote.body })
console.log(`Successfully published`) console.log(`Successfully published`)

@@ -3,12 +3,12 @@ import core from "@actions/core"
import github from "@actions/github" import github from "@actions/github"
import octokit from "@octokit/graphql" import octokit from "@octokit/graphql"
import fs from "fs/promises" import fs from "fs/promises"
import processes from "child_process"
import paths from "path" import paths from "path"
import sgit from "simple-git" import sgit from "simple-git"
import processes from "child_process" import mocks from "../../../tests/mocks/index.mjs"
import metrics from "../metrics/index.mjs" import metrics from "../metrics/index.mjs"
import setup from "../metrics/setup.mjs" import setup from "../metrics/setup.mjs"
import mocks from "../../../tests/mocks/index.mjs"
process.on("unhandledRejection", error => { process.on("unhandledRejection", error => {
throw error throw error
}) })
@@ -278,8 +278,8 @@ async function retry(func, {retries = 1, delay = 0} = {}) {
try { try {
await new Promise(async (solve, reject) => { await new Promise(async (solve, reject) => {
let stdout = "" let stdout = ""
setTimeout(() => reject("Timeout while waiting for Insights webserver"), 5*60*1000) setTimeout(() => reject("Timeout while waiting for Insights webserver"), 5 * 60 * 1000)
const web = await processes.spawn("node", ["/metrics/source/app/web/index.mjs"], {env:{...process.env, NO_SETTINGS: true }}) const web = await processes.spawn("node", ["/metrics/source/app/web/index.mjs"], {env:{...process.env, NO_SETTINGS:true}})
web.stdout.on("data", data => (console.debug(`web > ${data}`), stdout += data, /Server ready !/.test(stdout) ? solve() : null)) web.stdout.on("data", data => (console.debug(`web > ${data}`), stdout += data, /Server ready !/.test(stdout) ? solve() : null))
web.stderr.on("data", data => console.debug(`web > ${data}`)) web.stderr.on("data", data => console.debug(`web > ${data}`))
}) })
@@ -339,7 +339,7 @@ async function retry(func, {retries = 1, delay = 0} = {}) {
info.break() info.break()
info.section("Saving") info.section("Saving")
info("Output condition", _output_condition) info("Output condition", _output_condition)
if ((_output_condition === "data-changed")&&((committer.commit) || (committer.pr))) { if ((_output_condition === "data-changed") && ((committer.commit) || (committer.pr))) {
const {svg} = await import("../metrics/utils.mjs") const {svg} = await import("../metrics/utils.mjs")
let data = "" let data = ""
await retry(async () => { await retry(async () => {
@@ -485,6 +485,7 @@ async function retry(func, {retries = 1, delay = 0} = {}) {
} }
else else
throw error throw error
} }
info("Pull request number", number) info("Pull request number", number)
}, {retries:retries_output_action, delay:retries_delay_output_action}) }, {retries:retries_output_action, delay:retries_delay_output_action})
@@ -532,7 +533,7 @@ async function retry(func, {retries = 1, delay = 0} = {}) {
if (delay) { if (delay) {
info.break() info.break()
info("Delay before ending job", `${delay}s`) info("Delay before ending job", `${delay}s`)
await new Promise(solve => setTimeout(solve, delay*1000)) await new Promise(solve => setTimeout(solve, delay * 1000))
} }
//Success //Success

@@ -78,11 +78,11 @@ export default async function metrics({login, q}, {graphql, rest, plugins, conf,
console.debug(`metrics/compute/${login} > json output`) console.debug(`metrics/compute/${login} > json output`)
const cache = new WeakSet() const cache = new WeakSet()
const rendered = JSON.parse(JSON.stringify(data, (key, value) => { const rendered = JSON.parse(JSON.stringify(data, (key, value) => {
if ((value instanceof Set)||(Array.isArray(value))) if ((value instanceof Set) || (Array.isArray(value)))
return [...value] return [...value]
if (value instanceof Map) if (value instanceof Map)
return Object.fromEntries(value) return Object.fromEntries(value)
if ((typeof value === "object")&&(value)) { if ((typeof value === "object") && (value)) {
if (cache.has(value)) if (cache.has(value))
return Object.fromEntries(Object.entries(value).map(([k, v]) => [k, cache.has(v) ? "[Circular]" : v])) return Object.fromEntries(Object.entries(value).map(([k, v]) => [k, cache.has(v) ? "[Circular]" : v]))
cache.add(value) cache.add(value)
@@ -227,14 +227,23 @@ metrics.insights = async function({login}, {graphql, rest, conf}, {Plugins, Temp
"habits.days":7, "habits.days":7,
"habits.facts":false, "habits.facts":false,
"habits.charts":true, "habits.charts":true,
introduction:true introduction:true,
}
const plugins = {
achievements:{enabled:true},
isocalendar:{enabled:true},
languages:{enabled:true, extras:false},
activity:{enabled:true, markdown:"extended"},
notable:{enabled:true},
followup:{enabled:true},
habits:{enabled:true, extras:false},
introduction:{enabled:true},
} }
const plugins = {achievements:{enabled:true}, isocalendar:{enabled:true}, languages:{enabled:true, extras:false}, activity:{enabled:true, markdown:"extended"}, notable:{enabled:true}, followup:{enabled:true}, habits:{enabled:true, extras:false}, introduction:{enabled:true}}
return metrics({login, q}, {graphql, rest, plugins, conf, convert:"json"}, {Plugins, Templates}) return metrics({login, q}, {graphql, rest, plugins, conf, convert:"json"}, {Plugins, Templates})
} }
//Metrics insights static render //Metrics insights static render
metrics.insights.output = async function ({login, imports, conf}, {graphql, rest, Plugins, Templates}) { metrics.insights.output = async function({login, imports, conf}, {graphql, rest, Plugins, Templates}) {
//Server //Server
console.debug(`metrics/compute/${login} > insights`) console.debug(`metrics/compute/${login} > insights`)
const server = `http://localhost:${conf.settings.port}` const server = `http://localhost:${conf.settings.port}`
@@ -248,7 +257,7 @@ metrics.insights.output = async function ({login, imports, conf}, {graphql, rest
await page.goto(`${server}/about/${login}?embed=1&localstorage=1`) await page.goto(`${server}/about/${login}?embed=1&localstorage=1`)
await page.evaluate(async json => localStorage.setItem("local.metrics", json), json) //eslint-disable-line no-undef await page.evaluate(async json => localStorage.setItem("local.metrics", json), json) //eslint-disable-line no-undef
await page.goto(`${server}/about/${login}?embed=1&localstorage=1`) await page.goto(`${server}/about/${login}?embed=1&localstorage=1`)
await page.waitForSelector(".container .user", {timeout:10*60*1000}) await page.waitForSelector(".container .user", {timeout:10 * 60 * 1000})
//Rendering //Rendering
console.debug(`metrics/compute/${login} > insights > rendering data`) console.debug(`metrics/compute/${login} > insights > rendering data`)

@@ -1,10 +1,10 @@
//Imports //Imports
import fs from "fs" import fs from "fs"
import yaml from "js-yaml" import yaml from "js-yaml"
import {marked} from "marked"
import fetch from "node-fetch"
import path from "path" import path from "path"
import url from "url" import url from "url"
import fetch from "node-fetch"
import {marked} from "marked"
//Defined categories //Defined categories
const categories = ["core", "github", "social", "community"] const categories = ["core", "github", "social", "community"]
@@ -293,23 +293,25 @@ metadata.plugin = async function({__plugins, __templates, name, logger}) {
` <td>${Object.entries(compatibility).filter(([_, value]) => value).map(([id]) => `<a href="/source/templates/${id}"><code>${templates[id].name ?? ""}</code></a>`).join(" ")}</td>`, ` <td>${Object.entries(compatibility).filter(([_, value]) => value).map(([id]) => `<a href="/source/templates/${id}"><code>${templates[id].name ?? ""}</code></a>`).join(" ")}</td>`,
" </tr>", " </tr>",
" <tr>", " <tr>",
` <td>${[ ` <td>${
meta.supports?.includes("user") ? "<code>👤 Users</code>" : "", [
meta.supports?.includes("organization") ? "<code>👥 Organizations</code>" : "", meta.supports?.includes("user") ? "<code>👤 Users</code>" : "",
meta.supports?.includes("repository") ? "<code>📓 Repositories</code>" : "" meta.supports?.includes("organization") ? "<code>👥 Organizations</code>" : "",
].filter(v => v).join(" ")}</td>`, meta.supports?.includes("repository") ? "<code>📓 Repositories</code>" : "",
].filter(v => v).join(" ")
}</td>`,
" </tr>", " </tr>",
" <tr>", " <tr>",
` <td>${[ ` <td>${[
...(meta.scopes ?? []).map(scope => `<code>🔑 ${{public_access:"(scopeless)"}[scope] ?? scope}</code>`), ...(meta.scopes ?? []).map(scope => `<code>🔑 ${{public_access:"(scopeless)"}[scope] ?? scope}</code>`),
...Object.entries(inputs).filter(([_, {type}]) => type === "token").map(([token]) => `<code>🗝 ${token}</code>`), ...Object.entries(inputs).filter(([_, {type}]) => type === "token").map(([token]) => `<code>🗝 ${token}</code>`),
...(meta.scopes?.length ? ["read:org", "read:user", "repo"].map(scope => !meta.scopes.includes(scope) ? `<code>${scope} (optional)</code>` : null).filter(v => v) : []) ...(meta.scopes?.length ? ["read:org", "read:user", "repo"].map(scope => !meta.scopes.includes(scope) ? `<code>${scope} (optional)</code>` : null).filter(v => v) : []),
].filter(v => v).join(" ") || "<i>No tokens are required for this plugin</i>"}</td>`, ].filter(v => v).join(" ") || "<i>No tokens are required for this plugin</i>"}</td>`,
" </tr>", " </tr>",
" <tr>", " <tr>",
demos({colspan:2, wrap:name === "base", examples:meta.examples}), demos({colspan:2, wrap:name === "base", examples:meta.examples}),
" </tr>", " </tr>",
"</table>" "</table>",
].join("\n") ].join("\n")
//Options table //Options table
@@ -339,14 +341,14 @@ metadata.plugin = async function({__plugins, __templates, name, logger}) {
cell.push(`<i>(${Array.isArray(o.format) ? o.format[0] : o.format})</i>`) cell.push(`<i>(${Array.isArray(o.format) ? o.format[0] : o.format})</i>`)
if ("min" in o) if ("min" in o)
cell.push(`<i>(${o.min}`) cell.push(`<i>(${o.min}`)
if (("min" in o)||("max" in o)) if (("min" in o) || ("max" in o))
cell.push(`${"min" in o ? "" : "<i>("}𝑥${"max" in o ? "" : ")</i>"}`) cell.push(`${"min" in o ? "" : "<i>("}𝑥${"max" in o ? "" : ")</i>"}`)
if ("max" in o) if ("max" in o)
cell.push(`${o.max})</i>`) cell.push(`${o.max})</i>`)
cell.push("<br>") cell.push("<br>")
if ("zero" in o) if ("zero" in o)
cell.push(`<b>zero behaviour:</b> ${o.zero}</br>`) cell.push(`<b>zero behaviour:</b> ${o.zero}</br>`)
if (("default" in o)&&(o.default !== "")) { if (("default" in o) && (o.default !== "")) {
let text = o.default let text = o.default
if (o.default === ".user.login") if (o.default === ".user.login")
text = "<code>→ User login</code>" text = "<code>→ User login</code>"
@@ -414,26 +416,30 @@ metadata.template = async function({__templates, name, plugins, logger}) {
` <td>${Object.entries(compatibility).filter(([_, value]) => value).map(([id]) => `<a href="/source/plugins/${id}" title="${plugins[id].name}">${plugins[id].icon}</a>`).join(" ")}${meta.formats?.includes("markdown") ? " <code>✓ embed()</code>" : ""}</td>`, ` <td>${Object.entries(compatibility).filter(([_, value]) => value).map(([id]) => `<a href="/source/plugins/${id}" title="${plugins[id].name}">${plugins[id].icon}</a>`).join(" ")}${meta.formats?.includes("markdown") ? " <code>✓ embed()</code>" : ""}</td>`,
" </tr>", " </tr>",
" <tr>", " <tr>",
` <td>${[ ` <td>${
meta.supports?.includes("user") ? "<code>👤 Users</code>" : "", [
meta.supports?.includes("organization") ? "<code>👥 Organizations</code>" : "", meta.supports?.includes("user") ? "<code>👤 Users</code>" : "",
meta.supports?.includes("repository") ? "<code>📓 Repositories</code>" : "" meta.supports?.includes("organization") ? "<code>👥 Organizations</code>" : "",
].filter(v => v).join(" ")}</td>`, meta.supports?.includes("repository") ? "<code>📓 Repositories</code>" : "",
].filter(v => v).join(" ")
}</td>`,
" </tr>", " </tr>",
" <tr>", " <tr>",
` <td>${[ ` <td>${
meta.formats?.includes("svg") ? "<code>*️⃣ SVG</code>" : "", [
meta.formats?.includes("png") ? "<code>*️⃣ PNG</code>" : "", meta.formats?.includes("svg") ? "<code>*️⃣ SVG</code>" : "",
meta.formats?.includes("jpeg") ? "<code>*️⃣ JPEG</code>" : "", meta.formats?.includes("png") ? "<code>*️⃣ PNG</code>" : "",
meta.formats?.includes("json") ? "<code>#️⃣ JSON</code>" : "", meta.formats?.includes("jpeg") ? "<code>*️⃣ JPEG</code>" : "",
meta.formats?.includes("markdown") ? "<code>🔠 Markdown</code>" : "", meta.formats?.includes("json") ? "<code>#️⃣ JSON</code>" : "",
meta.formats?.includes("markdown-pdf") ? "<code>🔠 Markdown (PDF)</code>" : "", meta.formats?.includes("markdown") ? "<code>🔠 Markdown</code>" : "",
].filter(v => v).join(" ")}</td>`, meta.formats?.includes("markdown-pdf") ? "<code>🔠 Markdown (PDF)</code>" : "",
].filter(v => v).join(" ")
}</td>`,
" </tr>", " </tr>",
" <tr>", " <tr>",
demos({colspan:2, examples:meta.examples}), demos({colspan:2, examples:meta.examples}),
" </tr>", " </tr>",
"</table>" "</table>",
].join("\n") ].join("\n")
//Result //Result
@@ -448,9 +454,9 @@ metadata.template = async function({__templates, name, plugins, logger}) {
compatibility:{ compatibility:{
...Object.fromEntries(Object.entries(compatibility).filter(([_, value]) => value)), ...Object.fromEntries(Object.entries(compatibility).filter(([_, value]) => value)),
...Object.fromEntries(Object.entries(compatibility).filter(([_, value]) => !value).map(([key, value]) => [key, meta.formats?.includes("markdown") ? "embed" : value])), ...Object.fromEntries(Object.entries(compatibility).filter(([_, value]) => !value).map(([key, value]) => [key, meta.formats?.includes("markdown") ? "embed" : value])),
base:true base:true,
}, },
header header,
}, },
check({q, account = "bypass", format = null}) { check({q, account = "bypass", format = null}) {
//Support check //Support check
@@ -481,31 +487,33 @@ metadata.to = {
//Demo for main and individual readmes //Demo for main and individual readmes
function demos({colspan = null, wrap = false, examples = {}} = {}) { function demos({colspan = null, wrap = false, examples = {}} = {}) {
if (("default1" in examples)&&("default2" in examples)) { if (("default1" in examples) && ("default2" in examples)) {
return [ return [
wrap ? '<td colspan="2"><table><tr>' : "", wrap ? '<td colspan="2"><table><tr>' : "",
'<td align="center">', '<td align="center">',
`<img src="${examples.default1}" alt=""></img>`, `<img src="${examples.default1}" alt=""></img>`,
"</td>", "</td>",
'<td align="center">', '<td align="center">',
`<img src="${examples.default2}" alt=""></img>`, `<img src="${examples.default2}" alt=""></img>`,
"</td>", "</td>",
wrap ? "</tr></table></td>" : "", wrap ? "</tr></table></td>" : "",
].filter(v => v).join("\n") ].filter(v => v).join("\n")
} }
return [ return [
` <td ${colspan ? `colspan="${colspan}"` : ""} align="center">`, ` <td ${colspan ? `colspan="${colspan}"` : ""} align="center">`,
`${Object.entries(examples).map(([text, link]) => { `${
let img = `<img src="${link}" alt=""></img>` Object.entries(examples).map(([text, link]) => {
if (text !== "default") { let img = `<img src="${link}" alt=""></img>`
const open = text.charAt(0) === "+" ? " open" : "" if (text !== "default") {
text = open ? text.substring(1) : text const open = text.charAt(0) === "+" ? " open" : ""
text = `${text.charAt(0).toLocaleUpperCase()}${text.substring(1)}` text = open ? text.substring(1) : text
img = `<details${open}><summary>${text}</summary>${img}</details>` text = `${text.charAt(0).toLocaleUpperCase()}${text.substring(1)}`
} img = `<details${open}><summary>${text}</summary>${img}</details>`
return ` ${img}` }
}).join("\n")}`, return ` ${img}`
}).join("\n")
}`,
' <img width="900" height="1" alt="">', ' <img width="900" height="1" alt="">',
" </td>" " </td>",
].filter(v => v).join("\n") ].filter(v => v).join("\n")
} }

@@ -3,38 +3,38 @@ import fs from "fs/promises"
import prism_lang from "prismjs/components/index.js" import prism_lang from "prismjs/components/index.js"
import axios from "axios" import axios from "axios"
import processes from "child_process" import processes from "child_process"
import crypto from "crypto"
import {minify as csso} from "csso"
import emoji from "emoji-name-map"
import fss from "fs" import fss from "fs"
import GIFEncoder from "gifencoder" import GIFEncoder from "gifencoder"
import jimp from "jimp" import jimp from "jimp"
import linguist from "linguist-js"
import {marked} from "marked" import {marked} from "marked"
import minimatch from "minimatch"
import nodechartist from "node-chartist" import nodechartist from "node-chartist"
import fetch from "node-fetch"
import opengraph from "open-graph-scraper" import opengraph from "open-graph-scraper"
import os from "os" import os from "os"
import paths from "path" import paths from "path"
import PNG from "png-js" import PNG from "png-js"
import prism from "prismjs" import prism from "prismjs"
import _puppeteer from "puppeteer" import _puppeteer from "puppeteer"
import purgecss from "purgecss"
import readline from "readline"
import rss from "rss-parser" import rss from "rss-parser"
import htmlsanitize from "sanitize-html" import htmlsanitize from "sanitize-html"
import git from "simple-git" import git from "simple-git"
import SVGO from "svgo"
import twemojis from "twemoji-parser" import twemojis from "twemoji-parser"
import url from "url" import url from "url"
import util from "util" import util from "util"
import fetch from "node-fetch"
import readline from "readline"
import emoji from "emoji-name-map"
import minimatch from "minimatch"
import crypto from "crypto"
import linguist from "linguist-js"
import purgecss from "purgecss"
import {minify as csso} from "csso"
import SVGO from "svgo"
import xmlformat from "xml-formatter" import xmlformat from "xml-formatter"
prism_lang() prism_lang()
//Exports //Exports
export {axios, fs, git, jimp, opengraph, os, paths, processes, rss, url, fetch, util, emoji, minimatch} export {axios, emoji, fetch, fs, git, jimp, minimatch, opengraph, os, paths, processes, rss, url, util}
/**Returns module __dirname */ /**Returns module __dirname */
export function __module(module) { export function __module(module) {
@@ -81,7 +81,7 @@ export function formatters({timeZone} = {}) {
} }
/**Bytes formatter */ /**Bytes formatter */
format.bytes = function (n) { format.bytes = function(n) {
for (const {u, v} of [{u:"E", v:10 ** 18}, {u:"P", v:10 ** 15}, {u:"T", v:10 ** 12}, {u:"G", v:10 ** 9}, {u:"M", v:10 ** 6}, {u:"k", v:10 ** 3}]) { for (const {u, v} of [{u:"E", v:10 ** 18}, {u:"P", v:10 ** 15}, {u:"T", v:10 ** 12}, {u:"G", v:10 ** 9}, {u:"M", v:10 ** 6}, {u:"k", v:10 ** 3}]) {
if (n / v >= 1) if (n / v >= 1)
return `${(n / v).toFixed(2).substr(0, 4).replace(/[.]0*$/, "")} ${u}B` return `${(n / v).toFixed(2).substr(0, 4).replace(/[.]0*$/, "")} ${u}B`
@@ -90,7 +90,7 @@ export function formatters({timeZone} = {}) {
} }
/**Percentage formatter */ /**Percentage formatter */
format.percentage = function (n, {rescale = true} = {}) { format.percentage = function(n, {rescale = true} = {}) {
return `${ return `${
(n * (rescale ? 100 : 1)).toFixed(2) (n * (rescale ? 100 : 1)).toFixed(2)
.replace(/(?<=[.])(?<decimal>[1-9]*)0+$/, "$<decimal>") .replace(/(?<=[.])(?<decimal>[1-9]*)0+$/, "$<decimal>")
@@ -166,7 +166,7 @@ export async function chartist() {
} }
/**Language analyzer (single file) */ /**Language analyzer (single file) */
export async function language({filename, patch, prefix = "", timeout = 20*1000}) { export async function language({filename, patch, prefix = "", timeout = 20 * 1000}) {
const path = paths.join(os.tmpdir(), `${prefix}-${Math.random()}`.replace(/[^\w-]/g, "")) const path = paths.join(os.tmpdir(), `${prefix}-${Math.random()}`.replace(/[^\w-]/g, ""))
return new Promise(async (solve, reject) => { return new Promise(async (solve, reject) => {
setTimeout(() => { setTimeout(() => {
@@ -222,9 +222,9 @@ export async function run(command, options, {prefixed = true, log = true} = {})
} }
/**Spawn command (use this to execute commands and process output on the fly) */ /**Spawn command (use this to execute commands and process output on the fly) */
export async function spawn(command, args = [], options = {}, {prefixed = true, timeout = 300*1000, stdout} = {}) { //eslint-disable-line max-params export async function spawn(command, args = [], options = {}, {prefixed = true, timeout = 300 * 1000, stdout} = {}) { //eslint-disable-line max-params
const prefix = {win32:"wsl"}[process.platform] ?? "" const prefix = {win32:"wsl"}[process.platform] ?? ""
if ((prefixed)&&(prefix)) { if ((prefixed) && (prefix)) {
args.unshift(command) args.unshift(command)
command = prefix command = prefix
} }
@@ -403,7 +403,7 @@ export const svg = {
if (Number.isFinite(Number(absolute))) if (Number.isFinite(Number(absolute)))
padding.absolute[dimension] = Number(absolute) padding.absolute[dimension] = Number(absolute)
if (Number.isFinite(Number(relative))) if (Number.isFinite(Number(relative)))
padding[dimension] = 1 + Number(relative/100) padding[dimension] = 1 + Number(relative / 100)
} }
console.debug(`metrics/svg/resize > padding width*${padding.width}+${padding.absolute.width}, height*${padding.height}+${padding.absolute.height}`) console.debug(`metrics/svg/resize > padding width*${padding.width}+${padding.absolute.width}, height*${padding.height}+${padding.absolute.height}`)
//Render through browser and resize height //Render through browser and resize height
@@ -433,7 +433,7 @@ export const svg = {
console.debug(`bounds after applying padding width=${width} (*${padding.width}+${padding.absolute.width}), height=${height} (*${padding.height}+${padding.absolute.height})`) console.debug(`bounds after applying padding width=${width} (*${padding.width}+${padding.absolute.width}), height=${height} (*${padding.height}+${padding.absolute.height})`)
//Resize svg //Resize svg
if (document.querySelector("svg").getAttribute("height") === "auto") if (document.querySelector("svg").getAttribute("height") === "auto")
console.debug("skipped height resizing because it was set to \"auto\"") console.debug('skipped height resizing because it was set to "auto"')
else else
document.querySelector("svg").setAttribute("height", height) document.querySelector("svg").setAttribute("height", height)
//Enable animations //Enable animations
@@ -575,8 +575,8 @@ export const svg = {
if (error) if (error)
throw new Error(`Could not optimize SVG: \n${error}`) throw new Error(`Could not optimize SVG: \n${error}`)
return optimized return optimized
} },
} },
} }
/**Wait */ /**Wait */

@@ -6,9 +6,9 @@ import express from "express"
import ratelimit from "express-rate-limit" import ratelimit from "express-rate-limit"
import cache from "memory-cache" import cache from "memory-cache"
import util from "util" import util from "util"
import mocks from "../../../tests/mocks/index.mjs"
import metrics from "../metrics/index.mjs" import metrics from "../metrics/index.mjs"
import setup from "../metrics/setup.mjs" import setup from "../metrics/setup.mjs"
import mocks from "../../../tests/mocks/index.mjs"
/**App */ /**App */
export default async function({mock, nosettings} = {}) { export default async function({mock, nosettings} = {}) {

@@ -31,8 +31,8 @@
//Plugins //Plugins
(async () => { (async () => {
const { data: plugins } = await axios.get("/.plugins") const { data: plugins } = await axios.get("/.plugins")
this.plugins.list = plugins.filter(({name}) => metadata[name]?.supports.includes("user") || metadata[name]?.supports.includes("organization")) this.plugins.list = plugins.filter(({ name }) => metadata[name]?.supports.includes("user") || metadata[name]?.supports.includes("organization"))
const categories = [...new Set(this.plugins.list.map(({category}) => category))] const categories = [...new Set(this.plugins.list.map(({ category }) => category))]
this.plugins.categories = Object.fromEntries(categories.map(category => [category, this.plugins.list.filter(value => category === value.category)])) this.plugins.categories = Object.fromEntries(categories.map(category => [category, this.plugins.list.filter(value => category === value.category)]))
})(), })(),
//Base //Base
@@ -68,8 +68,10 @@
tab: { tab: {
immediate: true, immediate: true,
handler(current) { handler(current) {
if (current === 'action') this.clipboard = new ClipboardJS('.copy-action') if (current === "action")
else this.clipboard?.destroy() this.clipboard = new ClipboardJS(".copy-action")
else
this.clipboard?.destroy()
}, },
}, },
palette: { palette: {
@@ -181,7 +183,7 @@
scopes() { scopes() {
return new Set([ return new Set([
...Object.entries(this.plugins.enabled).filter(([key, value]) => (key !== "base") && (value)).flatMap(([key]) => metadata[key].scopes), ...Object.entries(this.plugins.enabled).filter(([key, value]) => (key !== "base") && (value)).flatMap(([key]) => metadata[key].scopes),
...(Object.entries(this.plugins.enabled.base).filter(([key, value]) => value).length ? metadata.base.scopes : []) ...(Object.entries(this.plugins.enabled.base).filter(([key, value]) => value).length ? metadata.base.scopes : []),
]) ])
}, },
//GitHub action auto-generated code //GitHub action auto-generated code
@@ -201,17 +203,19 @@
` steps:`, ` steps:`,
` - uses: lowlighter/metrics@latest`, ` - uses: lowlighter/metrics@latest`,
` with:`, ` with:`,
...(this.scopes.size ? [ ...(this.scopes.size
` # Your GitHub token`, ? [
` # The following scopes are required:`, ` # Your GitHub token`,
...[...this.scopes].map(scope => ` # - ${scope}${scope === "public_access" ? " (default scope)" : ""}`), ` # The following scopes are required:`,
` # The following additional scopes may be required:`, ...[...this.scopes].map(scope => ` # - ${scope}${scope === "public_access" ? " (default scope)" : ""}`),
` # - read:org (for organization related metrics)`, ` # The following additional scopes may be required:`,
` # - read:user (for user related data)`, ` # - read:org (for organization related metrics)`,
` # - repo (optional, if you want to include private repositories)` ` # - read:user (for user related data)`,
] : [ ` # - repo (optional, if you want to include private repositories)`,
` # Current configuration doesn't require a GitHub token`, ]
]), : [
` # Current configuration doesn't require a GitHub token`,
]),
` token: ${this.scopes.size ? `${"$"}{{ secrets.METRICS_TOKEN }}` : "NOT_NEEDED"}`, ` token: ${this.scopes.size ? `${"$"}{{ secrets.METRICS_TOKEN }}` : "NOT_NEEDED"}`,
``, ``,
` # Options`, ` # Options`,
@@ -252,7 +256,7 @@
methods: { methods: {
//Refresh computed properties //Refresh computed properties
async refresh() { async refresh() {
const keys = {action:["scopes", "action"], markdown:["url", "embed"]}[this.tab] const keys = { action: ["scopes", "action"], markdown: ["url", "embed"] }[this.tab]
if (keys) { if (keys) {
for (const key of keys) for (const key of keys)
this._computedWatchers[key]?.run() this._computedWatchers[key]?.run()

@@ -51,7 +51,7 @@
return await ejs.render(partial, data, { async: true, rmWhitespace: true }) return await ejs.render(partial, data, { async: true, rmWhitespace: true })
}, },
//Meta-data //Meta-data
meta: { version: set.version, author: "lowlighter", generated:new Date().toGMTString().replace(/GMT$/g, "").trim() }, meta: { version: set.version, author: "lowlighter", generated: new Date().toGMTString().replace(/GMT$/g, "").trim() },
//Animated //Animated
animated: false, animated: false,
//Display size //Display size
@@ -60,14 +60,14 @@
//Config //Config
config: set.config, config: set.config,
//Extras //Extras
extras:{css:options["extras.css"] ?? ""}, extras: { css: options["extras.css"] ?? "" },
//Base elements //Base elements
base: set.plugins.enabled.base, base: set.plugins.enabled.base,
//Computed elements //Computed elements
computed: { computed: {
commits: faker.datatype.number(10000), commits: faker.datatype.number(10000),
sponsorships: faker.datatype.number(10), sponsorships: faker.datatype.number(10),
licenses: { favorite: [""], used: { MIT: 1 }, about:{} }, licenses: { favorite: [""], used: { MIT: 1 }, about: {} },
token: { scopes: [] }, token: { scopes: [] },
repositories: { repositories: {
watchers: faker.datatype.number(1000), watchers: faker.datatype.number(1000),
@@ -193,7 +193,7 @@
drafts: faker.datatype.number(this.drafts), drafts: faker.datatype.number(this.drafts),
skipped: faker.datatype.number(this.skipped), skipped: faker.datatype.number(this.skipped),
} }
} },
}, },
pr: { pr: {
get count() { get count() {
@@ -210,7 +210,7 @@
merged: faker.datatype.number(this.skipped), merged: faker.datatype.number(this.skipped),
drafts: faker.datatype.number(this.drafts), drafts: faker.datatype.number(this.drafts),
} }
} },
}, },
user: { user: {
issues: { issues: {
@@ -232,7 +232,7 @@
drafts: faker.datatype.number(100), drafts: faker.datatype.number(100),
}, },
}, },
indepth:options["followup.indepth"] ? {} : null indepth: options["followup.indepth"] ? {} : null,
}, },
}) })
: null), : null),
@@ -240,7 +240,10 @@
...(set.plugins.enabled.notable ...(set.plugins.enabled.notable
? ({ ? ({
notable: { notable: {
contributions: new Array(2 + faker.datatype.number(2)).fill(null).map(_ => ({ name: `${options["notable.repositories"] ? `${faker.lorem.slug()}/` : ""}${faker.lorem.slug()}`, avatar: "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mOcOnfpfwAGfgLYttYINwAAAABJRU5ErkJggg==" })), contributions: new Array(2 + faker.datatype.number(2)).fill(null).map(_ => ({
name: `${options["notable.repositories"] ? `${faker.lorem.slug()}/` : ""}${faker.lorem.slug()}`,
avatar: "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mOcOnfpfwAGfgLYttYINwAAAABJRU5ErkJggg==",
})),
}, },
}) })
: null), : null),
@@ -335,7 +338,7 @@
unlock: null, unlock: null,
text: faker.lorem.sentence(), text: faker.lorem.sentence(),
get icon() { get icon() {
const colors = {S:["#FF0000", "#FF8500"], A:["#B59151", "#FFD576"], B:["#7D6CFF", "#B2A8FF"], C:["#2088FF", "#79B8FF"], $:["#FF48BD", "#FF92D8"], X:["#7A7A7A", "#B0B0B0"]} const colors = { S: ["#FF0000", "#FF8500"], A: ["#B59151", "#FFD576"], B: ["#7D6CFF", "#B2A8FF"], C: ["#2088FF", "#79B8FF"], $: ["#FF48BD", "#FF92D8"], X: ["#7A7A7A", "#B0B0B0"] }
return `<g xmlns="http://www.w3.org/2000/svg" stroke-linecap="round" stroke-width="2" fill="none" fill-rule="evenodd"><g stroke-linejoin="round"><g stroke="#secondary"><path d="M8 43a3 3 0 100 6 3 3 0 000-6zm40 0a3.001 3.001 0 10.002 6.002A3.001 3.001 0 0048 43zm-18 3h-4.971m-11.045 0H11M45 46h-4"/></g><path stroke="#primary" d="M13 51h28M36.992 45.276l6.375-8.017c1.488.63 3.272.29 4.414-.977a3.883 3.883 0 00.658-4.193l-1.96 2.174-1.936-.151-.406-1.955 1.96-2.173a3.898 3.898 0 00-4.107 1.092 3.886 3.886 0 00-.512 4.485l-7.317 7.169c-1.32 1.314-.807 2.59-.236 3.105.67.601 1.888.845 3.067-.56z"/><g stroke="#primary"><path d="M12.652 31.063l9.442 12.578a.512.512 0 01-.087.716l-2.396 1.805a.512.512 0 01-.712-.114L9.46 33.47l-.176-3.557 3.37 1.15zM17.099 43.115l2.395-1.806"/></g></g><path d="M25.68 36.927v-2.54a2.227 2.227 0 01.37-1.265c-.526-.04-3.84-.371-3.84-4.302 0-1.013.305-1.839.915-2.477a4.989 4.989 0 01-.146-1.86c.087-.882.946-.823 2.577.178 1.277-.47 2.852-.47 4.725 0 .248-.303 2.434-1.704 2.658-.268.047.296.016.946-.093 1.95.516.524.776 1.358.78 2.501.007 2.261-1.26 3.687-3.8 4.278.24.436.355.857.346 1.264a117.57 117.57 0 000 2.614c2.43-.744 4.228-2.06 5.395-3.95.837-1.356 1.433-2.932 1.433-4.865 0-2.886-1.175-4.984-2.5-6.388C32.714 19.903 30.266 19 28 19a9.094 9.094 0 00-6.588 2.897C20.028 23.393 19 25.507 19 28.185c0 2.026.701 3.945 1.773 5.38 1.228 1.643 2.864 2.764 4.907 3.362zM52.98 25.002l-3.07 3.065-1.49-1.485M6.98 25.002l-3.07 3.065-1.49-1.485" stroke="#primary" stroke-linejoin="round"/><path d="M19.001 11V9a2 2 0 012-2h14a2 2 0 012 2v2m-21 12.028v-10.03a2 2 0 012-1.998h20a2 2 0 012 2v10.028" stroke="#secondary" stroke-linejoin="round"/><path stroke="#secondary" d="M28.001 7V3M15.039 7.797c-5.297 3.406-9.168 8.837-10.517 15.2m46.737-.936c-1.514-5.949-5.25-11.01-10.273-14.248"/></g>` return `<g xmlns="http://www.w3.org/2000/svg" stroke-linecap="round" stroke-width="2" fill="none" fill-rule="evenodd"><g stroke-linejoin="round"><g stroke="#secondary"><path d="M8 43a3 3 0 100 6 3 3 0 000-6zm40 0a3.001 3.001 0 10.002 6.002A3.001 3.001 0 0048 43zm-18 3h-4.971m-11.045 0H11M45 46h-4"/></g><path stroke="#primary" d="M13 51h28M36.992 45.276l6.375-8.017c1.488.63 3.272.29 4.414-.977a3.883 3.883 0 00.658-4.193l-1.96 2.174-1.936-.151-.406-1.955 1.96-2.173a3.898 3.898 0 00-4.107 1.092 3.886 3.886 0 00-.512 4.485l-7.317 7.169c-1.32 1.314-.807 2.59-.236 3.105.67.601 1.888.845 3.067-.56z"/><g stroke="#primary"><path d="M12.652 31.063l9.442 12.578a.512.512 0 01-.087.716l-2.396 1.805a.512.512 0 01-.712-.114L9.46 33.47l-.176-3.557 3.37 1.15zM17.099 43.115l2.395-1.806"/></g></g><path d="M25.68 36.927v-2.54a2.227 2.227 0 01.37-1.265c-.526-.04-3.84-.371-3.84-4.302 0-1.013.305-1.839.915-2.477a4.989 4.989 0 01-.146-1.86c.087-.882.946-.823 2.577.178 1.277-.47 2.852-.47 4.725 0 .248-.303 2.434-1.704 2.658-.268.047.296.016.946-.093 1.95.516.524.776 1.358.78 2.501.007 2.261-1.26 3.687-3.8 4.278.24.436.355.857.346 1.264a117.57 117.57 0 000 2.614c2.43-.744 4.228-2.06 5.395-3.95.837-1.356 1.433-2.932 1.433-4.865 0-2.886-1.175-4.984-2.5-6.388C32.714 19.903 30.266 19 28 19a9.094 9.094 0 00-6.588 2.897C20.028 23.393 19 25.507 19 28.185c0 2.026.701 3.945 1.773 5.38 1.228 1.643 2.864 2.764 4.907 3.362zM52.98 25.002l-3.07 3.065-1.49-1.485M6.98 25.002l-3.07 3.065-1.49-1.485" stroke="#primary" stroke-linejoin="round"/><path d="M19.001 11V9a2 2 0 012-2h14a2 2 0 012 2v2m-21 12.028v-10.03a2 2 0 012-1.998h20a2 2 0 012 2v10.028" stroke="#secondary" stroke-linejoin="round"/><path stroke="#secondary" d="M28.001 7V3M15.039 
7.797c-5.297 3.406-9.168 8.837-10.517 15.2m46.737-.936c-1.514-5.949-5.25-11.01-10.273-14.248"/></g>`
.replace(/#primary/g, colors[this.rank][0]) .replace(/#primary/g, colors[this.rank][0])
.replace(/#secondary/g, colors[this.rank][1]) .replace(/#secondary/g, colors[this.rank][1])
@@ -362,20 +365,21 @@
}) })
: null), : null),
//Code snippet //Code snippet
...(set.plugins.enabled.code ...(set.plugins.enabled.code
? ({ ? ({
code: { code: {
snippet: { snippet: {
sha: faker.git.shortSha(), sha: faker.git.shortSha(),
message: faker.lorem.sentence(), message: faker.lorem.sentence(),
filename: 'docs/specifications.html', filename: "docs/specifications.html",
status: "modified", status: "modified",
additions: faker.datatype.number(50), additions: faker.datatype.number(50),
deletions: faker.datatype.number(50), deletions: faker.datatype.number(50),
patch: `<span class="token coord">@@ -0,0 +1,5 @@</span><br> //Imports<br><span class="token inserted">+ import app from "./src/app.mjs"</span><br><span class="token deleted">- import app from "./src/app.js"</span><br> //Start app<br> await app()<br>\\ No newline at end of file`, patch:
`<span class="token coord">@@ -0,0 +1,5 @@</span><br> //Imports<br><span class="token inserted">+ import app from "./src/app.mjs"</span><br><span class="token deleted">- import app from "./src/app.js"</span><br> //Start app<br> await app()<br>\\ No newline at end of file`,
repo: `${faker.random.word()}/${faker.random.word()}`, repo: `${faker.random.word()}/${faker.random.word()}`,
}, },
} },
}) })
: null), : null),
//Sponsors //Sponsors
@@ -392,10 +396,10 @@
count: faker.datatype.number(100), count: faker.datatype.number(100),
goal: { goal: {
progress: faker.datatype.number(100), progress: faker.datatype.number(100),
title: `$${faker.datatype.number(100)*10} per month`, title: `$${faker.datatype.number(100) * 10} per month`,
description: "Invest in the software that powers your world" description: "Invest in the software that powers your world",
} },
} },
}) })
: null), : null),
//Languages //Languages
@@ -412,7 +416,7 @@
get stats() {
return Object.fromEntries(Object.entries(this.favorites).map(([key, { value }]) => [key, value]))
},
["stats.recent"]: {
total: faker.datatype.number(10000),
get lines() {
return Object.fromEntries(Object.entries(this.favorites).map(([key, { value }]) => [key, value]))
@@ -420,9 +424,9 @@
get stats() {
return Object.fromEntries(Object.entries(this.favorites).map(([key, { value }]) => [key, value]))
},
commits: faker.datatype.number(500),
files: faker.datatype.number(1000),
days: Number(options["languages.recent.days"]),
},
favorites: distribution(7).map((value, index, array) => ({ name: faker.lorem.word(), color: faker.internet.color(), value, size: faker.datatype.number(1000000), x: array.slice(0, index).reduce((a, b) => a + b, 0) })),
recent: distribution(7).map((value, index, array) => ({ name: faker.lorem.word(), color: faker.internet.color(), value, size: faker.datatype.number(1000000), x: array.slice(0, index).reduce((a, b) => a + b, 0) })),
@@ -470,8 +474,8 @@
trim: options["habits.trim"],
lines: {
average: {
chars: faker.datatype.number(1000) / 10,
},
},
commits: {
get hour() {
@@ -655,10 +659,10 @@
? ({
discussions: {
categories: {
stats: { "🙏 Q&A": faker.datatype.number(100), "📣 Announcements": faker.datatype.number(100), "💡 Ideas": faker.datatype.number(100), "💬 General": faker.datatype.number(100) },
favorite: "📣 Announcements",
},
upvotes: { discussions: faker.datatype.number(1000), comments: faker.datatype.number(1000) },
started: faker.datatype.number(1000),
comments: faker.datatype.number(1000),
answers: faker.datatype.number(1000),
@@ -753,9 +757,11 @@
description: faker.lorem.sentence(),
count: faker.datatype.number(100),
repositories: new Array(Number(options["starlists.limit.repositories"])).fill(null).map((_, i) => ({
description: !i
? "📊 An image generator with 20+ metrics about your GitHub account such as activity, community, repositories, coding habits, website performances, music played, starred topics, etc. that you can put on your profile or elsewhere !"
: faker.lorem.sentence(),
name: !i ? "lowlighter/metrics" : `${faker.random.word()}/${faker.random.word()}`,
})),
})),
},
})
@@ -1062,7 +1068,7 @@
? ({
support: {
stats: { solutions: faker.datatype.number(100), posts: faker.datatype.number(1000), topics: faker.datatype.number(1000), received: faker.datatype.number(1000), hearts: faker.datatype.number(1000) },
badges: { uniques: [], multiples: [], count: faker.datatype.number(1000) },
},
})
: null),
@@ -1070,7 +1076,7 @@
...(set.plugins.enabled.screenshot
? ({
screenshot: {
image: "data:image/jpg;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mOcOnfpfwAGfgLYttYINwAAAABJRU5ErkJggg==",
title: options["screenshot.title"],
height: 440,
width: 454,
@@ -1081,10 +1087,10 @@
...(set.plugins.enabled.skyline
? ({
skyline: {
animation: "data:image/jpg;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mOcOnfpfwAGfgLYttYINwAAAABJRU5ErkJggg==",
width: 454,
height: 284,
compatibility: false,
},
})
: null),
@@ -1175,11 +1181,11 @@
data.f.date = function(string, options) {
if (options.date) {
delete options.date
Object.assign(options, { day: "numeric", month: "short", year: "numeric" })
}
if (options.time) {
delete options.time
Object.assign(options, { hour: "2-digit", minute: "2-digit", second: "2-digit" })
}
return new Intl.DateTimeFormat("en-GB", options).format(new Date(string))
}

View File

@@ -143,7 +143,7 @@ export default async function({list, login, data, computed, imports, graphql, qu
//Member
{
const {years:value} = computed.registered
const unlock = null
list.push({

View File

@@ -219,7 +219,7 @@ export default async function({list, login, data, computed, imports, graphql, qu
//Member
{
const {years:value} = computed.registered
const unlock = null
list.push({

View File

@@ -95,7 +95,7 @@ export default async function({login, graphql, rest, data, q, queries, imports},
}
catch (error) {
console.debug(`metrics/compute/${login}/base > failed to retrieve ${_batch} repositories after ${cursor}, this is probably due to an API timeout, halving batch`)
_batch = Math.floor(_batch / 2)
if (_batch < 1) {
console.debug(`metrics/compute/${login}/base > failed to retrieve repositories, cannot halve batch anymore`)
throw error

View File

@@ -3,7 +3,7 @@ export default async function({login, q, imports, data, rest, account}, {enabled
//Plugin execution
try {
//Check if plugin is enabled and requirements are met
if ((!enabled) || (!q.code))
return null
//Context
@@ -25,15 +25,22 @@ export default async function({login, q, imports, data, rest, account}, {enabled
try {
for (let page = 1; page <= pages; page++) {
console.debug(`metrics/compute/${login}/plugins > code > loading page ${page}/${pages}`)
events.push(
...[
...await Promise.all([
...(context.mode === "repository"
? await rest.activity.listRepoEvents({owner:context.owner, repo:context.repo})
: await rest.activity.listEventsForAuthenticatedUser({username:login, per_page:100, page})).data
.filter(({type}) => type === "PushEvent")
.filter(({actor}) => account === "organization" ? true : actor.login?.toLocaleLowerCase() === login.toLocaleLowerCase())
.filter(({repo:{name:repo}}) => !((skipped.includes(repo.split("/").pop())) || (skipped.includes(repo))))
.filter(event => visibility === "public" ? event.public : true)
.flatMap(({payload}) => Promise.all(payload.commits.map(async commit => (await rest.request(commit.url)).data))),
]),
]
.flat()
.filter(({parents}) => parents.length <= 1)
.filter(({author}) => data.shared["commits.authoring"].filter(authoring => author?.login?.toLocaleLowerCase().includes(authoring) || author?.email?.toLocaleLowerCase().includes(authoring) || author?.name?.toLocaleLowerCase().includes(authoring)).length),
)
}
}
@@ -48,8 +55,8 @@ export default async function({login, q, imports, data, rest, account}, {enabled
.filter(({patch}) => (patch ? (patch.match(/\n/mg)?.length ?? 1) : Infinity) < lines)
for (const file of files)
file.language = await imports.language({...file, prefix:login}).catch(() => "unknown")
files = files.filter(({language}) => (!languages.length) || (languages.includes(language.toLocaleLowerCase())))
const snippet = files[Math.floor(Math.random() * files.length)] ?? null
if (snippet) {
//Trim common indent from content and change line feed
if (!snippet.patch.split("\n").shift().endsWith("@@"))

View File

@@ -69,7 +69,7 @@ export default async function({login, q, imports, data, rest, graphql, queries,
//Contributions categories
const types = Object.fromEntries([...new Set(Object.keys(categories))].map(type => [type, new Set()]))
if ((sections.includes("categories")) && (extras)) {
//Temporary directory
const repository = `${repo.owner}/${repo.repo}`
const path = imports.paths.join(imports.os.tmpdir(), `${repository.replace(/[^\w]/g, "_")}`)
@@ -90,10 +90,11 @@ export default async function({login, q, imports, data, rest, graphql, queries,
stdout(line) {
if (line.trim().length)
files.push(line)
},
})
//Search for contributions type in specified categories
filesloop:
for (const file of files) {
for (const [category, globs] of Object.entries(categories)) {
for (const glob of [globs].flat(Infinity)) {
if (imports.minimatch(file, glob, {nocase:true})) {

View File

@@ -16,7 +16,13 @@ export default async function({login, q}, {conf, data, rest, graphql, plugins, q
}
//Init
const computed = {
commits:0,
sponsorships:0,
licenses:{favorite:"", used:{}, about:{}},
token:{},
repositories:{watchers:0, stargazers:0, issues_open:0, issues_closed:0, pr_open:0, pr_closed:0, pr_merged:0, forks:0, forked:0, releases:0, deployments:0, environments:0},
}
const avatar = imports.imgb64(data.user.avatarUrl)
data.computed = computed
console.debug(`metrics/compute/${login} > formatting common metrics`)
@@ -38,6 +44,7 @@ export default async function({login, q}, {conf, data, rest, graphql, plugins, q
else if (process?.env?.TZ)
data.config.timezone = {name:process.env.TZ, offset}
//Display
data.large = display === "large"
data.columns = display === "columns"
@@ -101,7 +108,7 @@ export default async function({login, q}, {conf, data, rest, graphql, plugins, q
const months = diff.getUTCMonth() - new Date(0).getUTCMonth()
const days = diff.getUTCDate() - new Date(0).getUTCDate()
computed.registered = {years:years + days / 365.25, months}
computed.registration = years ? `${years} year${imports.s(years)} ago` : months ? `${months} month${imports.s(months)} ago` : `${days} day${imports.s(days)} ago`
computed.cakeday = (years >= 1 && months === 0 && days === 0) ? true : false
@@ -124,7 +131,7 @@ export default async function({login, q}, {conf, data, rest, graphql, plugins, q
data.meta = {
version:conf.package.version,
author:conf.package.author,
generated:imports.format.date(new Date(), {date:true, time:true}),
}
//Debug flags

View File

@@ -1,69 +1,69 @@
//Setup
export default async function({login, q, imports, graphql, queries, data, account}, {enabled = false} = {}) {
//Plugin execution
try {
//Check if plugin is enabled and requirements are met
if ((!enabled) || (!q.discussions))
return null
//Load inputs
const {categories:_categories, "categories.limit":_categories_limit} = imports.metadata.plugins.discussions.inputs({data, account, q})
const discussions = {categories:{}, upvotes:{discussions:0, comments:0}}
discussions.display = {categories:_categories ? {limit:_categories_limit || Infinity} : null}
//Fetch general statistics
const stats = Object.fromEntries(Object.entries((await graphql(queries.discussions.statistics({login}))).user).map(([key, value]) => [key, value.totalCount]))
Object.assign(discussions, stats)
//Load started discussions
{
const fetched = []
const categories = {}
let cursor = null
let pushed = 0
do {
console.debug(`metrics/compute/${login}/discussions > retrieving discussions after ${cursor}`)
const {user:{repositoryDiscussions:{edges = [], nodes = []} = {}}} = await graphql(queries.discussions.categories({login, after:cursor ? `after: "${cursor}"` : ""}))
cursor = edges?.[edges?.length - 1]?.cursor
fetched.push(...nodes)
pushed = nodes.length
console.debug(`metrics/compute/${login}/discussions > retrieved ${pushed} discussions after ${cursor}`)
} while ((pushed) && (cursor))
//Compute upvotes
fetched.map(({upvoteCount}) => discussions.upvotes.discussions += upvoteCount)
//Compute favorite category
for (const category of [...fetched.map(({category:{emoji, name}}) => `${imports.emoji.get(emoji) ?? emoji} ${name}`)])
categories[category] = (categories[category] ?? 0) + 1
const categoryEntries = Object.entries(categories).sort((a, b) => b[1] - a[1])
discussions.categories.stats = Object.fromEntries(categoryEntries)
discussions.categories.favorite = categoryEntries[0]?.[0] ?? null
}
//Load comments
{
const fetched = []
let cursor = null
let pushed = 0
do {
console.debug(`metrics/compute/${login}/discussions > retrieving comments after ${cursor}`)
const {user:{repositoryDiscussionComments:{edges = [], nodes = []} = {}}} = await graphql(queries.discussions.comments({login, after:cursor ? `after: "${cursor}"` : ""}))
cursor = edges?.[edges?.length - 1]?.cursor
fetched.push(...nodes)
pushed = nodes.length
console.debug(`metrics/compute/${login}/discussions > retrieved ${pushed} comments after ${cursor}`)
} while ((pushed) && (cursor))
//Compute upvotes
fetched.map(({upvoteCount}) => discussions.upvotes.comments += upvoteCount)
}
//Results
return discussions
}
//Handle errors
catch (error) {
throw {error:{message:"An error occured", instance:error}}
}
}

View File

@@ -29,7 +29,7 @@ export default async function({login, data, computed, imports, q, graphql, queri
closed:0,
drafts:0,
skipped:0,
},
},
pr:{
get count() {
@@ -50,13 +50,12 @@ export default async function({login, data, computed, imports, q, graphql, queri
closed:0,
merged:0,
drafts:0,
},
},
}
//Extras features
if (extras) {
//Indepth mode
if (indepth) {
console.debug(`metrics/compute/${login}/plugins > followup > indepth`)
@@ -90,7 +89,7 @@ export default async function({login, data, computed, imports, q, graphql, queri
}
//Load user issues and pull requests
if ((account === "user") && (sections.includes("user"))) {
const search = await graphql(queries.followup.user({login}))
followup.user = {
issues:{

View File

@@ -1,5 +1,5 @@
//Legacy import
import {recent as recent_analyzer} from "./../languages/analyzers.mjs"
//Setup
export default async function({login, data, rest, imports, q, account}, {enabled = false, extras = false, ...defaults} = {}) {
@@ -45,7 +45,7 @@ export default async function({login, data, rest, imports, q, account}, {enabled
...await Promise.allSettled(
commits
.flatMap(({payload}) => payload.commits)
.filter(({author}) => data.shared["commits.authoring"].filter(authoring => author?.login?.toLocaleLowerCase().includes(authoring) || author?.email?.toLocaleLowerCase().includes(authoring) || author?.name?.toLocaleLowerCase().includes(authoring)).length)
.map(async commit => (await rest.request(commit)).data.files),
),
]
@@ -93,22 +93,23 @@ export default async function({login, data, rest, imports, q, account}, {enabled
//Compute average number of characters per line of code fetched
console.debug(`metrics/compute/${login}/plugins > habits > computing average number of characters per line of code`)
const lines = patches.flatMap(({patch}) => patch.split("\n").map(line => line.length))
habits.lines.average.chars = lines.reduce((a, b) => a + b, 0) / lines.length
}
//Linguist
if ((extras) && (charts)) {
//Check if linguist exists
console.debug(`metrics/compute/${login}/plugins > habits > searching recently used languages using linguist`)
if (patches.length) {
//Call language analyzer (note: using content from other plugin is usually disallowed, this is mostly for legacy purposes)
habits.linguist.available = true
const {total, stats} = await recent_analyzer({login, data, imports, rest, account}, {days, load:from || 1000, tempdir:"habits"})
habits.linguist.languages = Object.fromEntries(Object.entries(stats).map(([language, value]) => [language, value / total]))
habits.linguist.ordered = Object.entries(habits.linguist.languages).sort(([_an, a], [_bn, b]) => b - a)
}
else
console.debug(`metrics/compute/${login}/plugins > habits > linguist not available`)
}
//Results

View File

@@ -61,15 +61,16 @@ export async function recent({login, data, imports, rest, account}, {skipped = [
//Get user recent activity
console.debug(`metrics/compute/${login}/plugins > languages > querying api`)
const commits = [], pages = Math.ceil(load / 100), results = {total:0, lines:{}, stats:{}, colors:{}, commits:0, files:0, missed:0, days}
try {
for (let page = 1; page <= pages; page++) {
console.debug(`metrics/compute/${login}/plugins > languages > loading page ${page}`)
commits.push(
...(await rest.activity.listEventsForAuthenticatedUser({username:login, per_page:100, page})).data
.filter(({type}) => type === "PushEvent")
.filter(({actor}) => account === "organization" ? true : actor.login?.toLocaleLowerCase() === login.toLocaleLowerCase())
.filter(({repo:{name:repo}}) => (!skipped.includes(repo.toLocaleLowerCase())) && (!skipped.includes(repo.toLocaleLowerCase().split("/").pop())))
.filter(({created_at}) => new Date(created_at) > new Date(Date.now() - days * 24 * 60 * 60 * 1000)),
)
}
}
@@ -86,17 +87,17 @@ export async function recent({login, data, imports, rest, account}, {skipped = [
...await Promise.allSettled(
commits
.flatMap(({payload}) => payload.commits)
.filter(({author}) => data.shared["commits.authoring"].filter(authoring => author?.login?.toLocaleLowerCase().includes(authoring) || author?.email?.toLocaleLowerCase().includes(authoring) || author?.name?.toLocaleLowerCase().includes(authoring)).length)
.map(commit => commit.url)
.map(async commit => (await rest.request(commit)).data),
),
]
.filter(({status}) => status === "fulfilled")
.map(({value}) => value)
.filter(({parents}) => parents.length <= 1)
.map(({files}) => files)
.flatMap(files => files.map(file => ({name:imports.paths.basename(file.filename), directory:imports.paths.dirname(file.filename), patch:file.patch ?? "", repo:file.raw_url?.match(/(?<=^https:..github.com\/)(?<repo>.*)(?=\/raw)/)?.groups.repo ?? "_"})))
.map(({name, directory, patch, repo}) => ({name, directory:`${repo.replace(/[/]/g, "@")}/${directory}`, patch:patch.split("\n").filter(line => /^[+]/.test(line)).map(line => line.substring(1)).join("\n")}))
//Temporary directory
const path = imports.paths.join(imports.os.tmpdir(), `${data.user.databaseId}-${tempdir}`)
@@ -164,13 +165,13 @@ async function analyze({login, imports, data}, {results, path, categories = ["pr
console.debug(`metrics/compute/${login}/plugins > languages > indepth > checking git log`)
for (let page = 0; ; page++) {
try {
console.debug(`metrics/compute/${login}/plugins > languages > indepth > processing commits ${page * per_page} from ${(page + 1) * per_page}`)
let empty = true, file = null, lang = null
await imports.spawn("git", ["log", ...data.shared["commits.authoring"].map(authoring => `--author="${authoring}"`), "--regexp-ignore-case", "--format=short", "--patch", `--max-count=${per_page}`, `--skip=${page * per_page}`], {cwd:path}, {
stdout(line) {
try {
//Unflag empty output
if ((empty) && (line.trim().length))
empty = false
//Commits counter
if (/^commit [0-9a-f]{40}$/.test(line)) {
@@ -178,13 +179,13 @@ async function analyze({login, imports, data}, {results, path, categories = ["pr
return
}
//Ignore empty lines or unneeded lines
if ((!/^[+]/.test(line)) || (!line.length))
return
//File marker
if (/^[+]{3}\sb[/](?<file>[\s\S]+)$/.test(line)) {
file = `${path}/${line.match(/^[+]{3}\sb[/](?<file>[\s\S]+)$/)?.groups?.file}`.replace(/\\/g, "/")
lang = files[file] ?? null
if ((lang) && (!categories.includes(languageResults[lang].type)))
lang = null
edited.add(file)
return
@@ -203,7 +204,7 @@ async function analyze({login, imports, data}, {results, path, categories = ["pr
catch (error) {
console.debug(`metrics/compute/${login}/plugins > languages > indepth > an error occured while processing line (${error.message}), skipping...`)
}
},
})
if (empty) {
console.debug(`metrics/compute/${login}/plugins > languages > indepth > no more commits`)
@@ -223,7 +224,7 @@ if (/languages.analyzers.mjs$/.test(process.argv[1])) {
(async function() {
//Parse inputs
const [_authoring, path] = process.argv.slice(2)
if ((!_authoring) || (!path)) {
console.log("Usage is:\n npm run indepth -- <commits authoring> <repository local path>\n\n")
process.exit(1)
}
@@ -235,7 +236,7 @@ if (/languages.analyzers.mjs$/.test(process.argv[1])) {
//Prepare call
const imports = await import("../../app/metrics/utils.mjs")
const results = {total:0, lines:{}, colors:{}, stats:{}, missed:0}
console.debug = log => /exited with code null/.test(log) ? console.error(log.replace(/^.*--max-count=(?<step>\d+) --skip=(?<start>\d+).*$/, (_, step, start) => `error: skipped commits ${start} from ${Number(start) + Number(step)}`)) : null
//Analyze repository
console.log(`commits authoring | ${authoring}\nrepository path | ${path}\n`)

View File

@@ -17,7 +17,11 @@ export default async function({login, data, imports, q, rest, account}, {enabled
}
//Load inputs
let {ignored, skipped, colors, aliases, details, threshold, limit, indepth, "analysis.timeout":timeout, sections, categories, "recent.categories":_recent_categories, "recent.load":_recent_load, "recent.days":_recent_days} = imports.metadata.plugins.languages.inputs({
data,
account,
q,
})
threshold = (Number(threshold.replace(/%$/, "")) || 0) / 100
skipped.push(...data.shared["repositories.skipped"])
if (!limit)
@@ -59,7 +63,7 @@ export default async function({login, data, imports, q, rest, account}, {enabled
//Extras features
if (extras) {
//Recently used languages
if ((sections.includes("recently-used")) && (context.mode === "user")) {
try {
console.debug(`metrics/compute/${login}/plugins > languages > using recent analyzer`)
languages["stats.recent"] = await recent_analyzer({login, data, imports, rest, account}, {skipped, categories:_recent_categories ?? categories, days:_recent_days, load:_recent_load, timeout})
@@ -102,7 +106,8 @@ export default async function({login, data, imports, q, rest, account}, {enabled
//Compute languages stats
for (const {section, stats = {}, lines = {}, total = 0} of [{section:"favorites", stats:languages.stats, lines:languages.lines, total:languages.total}, {section:"recent", ...languages["stats.recent"]}]) {
console.debug(`metrics/compute/${login}/plugins > languages > computing stats ${section}`)
languages[section] = Object.entries(stats).filter(([name]) => !ignored.includes(name.toLocaleLowerCase())).sort(([_an, a], [_bn, b]) => b - a).slice(0, limit).map(([name, value]) => ({name, value, size:value, color:languages.colors[name], x:0})).filter(({value}) => value / total > threshold
)
const visible = {total:Object.values(languages[section]).map(({size}) => size).reduce((a, b) => a + b, 0)}
for (let i = 0; i < languages[section].length; i++) {
const {name} = languages[section][i]

View File

@@ -23,7 +23,8 @@ export default async function({login, data, imports, rest, q, account}, {enabled
//Get contributors stats from repositories
console.debug(`metrics/compute/${login}/plugins > lines > querying api`)
const lines = {added:0, deleted:0}
const response = [...await Promise.allSettled(repositories.map(({repo, owner}) => (skipped.includes(repo.toLocaleLowerCase())) || (skipped.includes(`${owner}/${repo}`.toLocaleLowerCase())) ? {} : rest.repos.getContributorsStats({owner, repo})))].filter(({status}) => status === "fulfilled"
).map(({value}) => value)
//Compute changed lines
console.debug(`metrics/compute/${login}/plugins > lines > computing total diff`)

View File

@@ -132,11 +132,11 @@ export default async function({login, imports, data, q, account}, {enabled = fal
//Parse tracklist
tracks = [
...await frame.evaluate(() => [...document.querySelectorAll("ytmusic-playlist-shelf-renderer ytmusic-responsive-list-item-renderer")].map(item => ({
name:item.querySelector("yt-formatted-string.title > a")?.innerText ?? "",
artist:item.querySelector(".secondary-flex-columns > yt-formatted-string > a")?.innerText ?? "",
artwork:item.querySelector("img").src,
}))
),
]
break
}
@@ -257,12 +257,11 @@ export default async function({login, imports, data, q, account}, {enabled = fal
try {
//Request access token
console.debug(`metrics/compute/${login}/plugins > music > requesting access token with youtube refresh token`)
const res = await imports.axios.post("https://music.youtube.com/youtubei/v1/browse?alt=json&key=AIzaSyC9XL3ZjWddXya6X74dJoCTL-WEYFDNX30", {
browseEndpointContextSupportedConfigs:{
browseEndpointContextMusicConfig:{
pageType:"MUSIC_PAGE_TYPE_PLAYLIST",
},
},
context:{
client:{
@@ -272,9 +271,8 @@ export default async function({login, imports, data, q, account}, {enabled = fal
hl:"en", hl:"en",
}, },
}, },
browseId:"FEmusic_history" browseId:"FEmusic_history",
}, }, {
{
headers:{ headers:{
Authorization:SAPISIDHASH, Authorization:SAPISIDHASH,
Cookie:token, Cookie:token,
@@ -337,14 +335,14 @@ export default async function({login, imports, data, q, account}, {enabled = fal
Object.defineProperty(modes, "top", {
get() {
return `Top played artists ${time_msg}`
},
})
}
else {
Object.defineProperty(modes, "top", {
get() {
return `Top played tracks ${time_msg}`
},
})
}
@@ -355,7 +353,7 @@ export default async function({login, imports, data, q, account}, {enabled = fal
//Prepare credentials
const [client_id, client_secret, refresh_token] = token.split(",").map(part => part.trim())
if ((!client_id) || (!client_secret) || (!refresh_token))
throw {error:{message:"Spotify token must contain client id/secret and refresh token"}}
else if (limit > 50)
throw {error:{message:"Spotify top limit cannot be greater than 50"}}
@@ -372,40 +370,39 @@ export default async function({login, imports, data, q, account}, {enabled = fal
//Retrieve tracks
console.debug(`metrics/compute/${login}/plugins > music > querying spotify api`)
tracks = []
const loaded = top_type === "artists"
? (
await imports.axios.get(
`https://api.spotify.com/v1/me/top/artists?time_range=${time_range}_term&limit=${limit}`,
{
headers:{
"Content-Type":"application/json",
Accept:"application/json",
Authorization:`Bearer ${access}`,
},
},
)
).data.items.map(({name, genres, images}) => ({
name,
artist:genres.join(" • "),
artwork:images[0].url,
}))
: (
await imports.axios.get(
`https://api.spotify.com/v1/me/top/tracks?time_range=${time_range}_term&limit=${limit}`,
{
headers:{
"Content-Type":"application/json",
Accept:"application/json",
Authorization:`Bearer ${access}`,
},
},
)
).data.items.map(({name, artists, album}) => ({
name,
artist:artists[0].name,
artwork:album.images[0].url,
}))
//Ensure no duplicate are added
for (const track of loaded) {
if (!tracks.map(({name}) => name).includes(track.name))
@@ -431,38 +428,37 @@ export default async function({login, imports, data, q, account}, {enabled = fal
try {
console.debug(`metrics/compute/${login}/plugins > music > querying lastfm api`)
const period = time_range === "short" ? "1month" : time_range === "medium" ? "6month" : "overall"
tracks = top_type === "artists"
? (
await imports.axios.get(
`https://ws.audioscrobbler.com/2.0/?method=user.gettopartists&user=${user}&api_key=${token}&limit=${limit}&period=${period}&format=json`,
{
headers:{
"User-Agent":"lowlighter/metrics",
Accept:"application/json",
},
},
)
).data.topartists.artist.map(artist => ({
name:artist.name,
artist:`Play count: ${artist.playcount}`,
artwork:artist.image.reverse()[0]["#text"],
}))
: (
await imports.axios.get(
`https://ws.audioscrobbler.com/2.0/?method=user.gettoptracks&user=${user}&api_key=${token}&limit=${limit}&period=${period}&format=json`,
{
headers:{
"User-Agent":"lowlighter/metrics",
Accept:"application/json",
},
},
)
).data.toptracks.track.map(track => ({
name:track.name,
artist:track.artist.name,
artwork:track.image.reverse()[0]["#text"],
}))
}
//Handle errors
catch (error) {

View File

@@ -55,11 +55,11 @@ export default async function({login, q, imports, rest, graphql, data, account,
//Save user data
contribution.user = {
commits,
percentage:commits / contribution.history,
maintainer:maintainers.includes(login),
get stars() {
return this.maintainer ? stars : this.percentage * stars
},
}
console.debug(`metrics/compute/${login}/plugins > notable > indepth > successfully processed ${owner}/${repo}`)
}
@@ -91,6 +91,7 @@ export default async function({login, q, imports, rest, graphql, data, account,
}
else
aggregated.set(key, {name:key, handle, avatar, organization, stars, aggregated:1, ..._extras})
}
contributions = [...aggregated.values()]
if (extras) {
@@ -100,7 +101,6 @@ export default async function({login, q, imports, rest, graphql, data, account,
contributions = contributions.sort((a, b) => ((b.user?.percentage + b.user?.maintainer) || 0) - ((a.user?.percentage + a.user?.maintainer) || 0))
}
//Results
return {contributions}
}

View File

@@ -3,10 +3,11 @@ export default async function({q, imports, data, account}, {enabled = false, tok
//Plugin execution
try {
//Check if plugin is enabled and requirements are met
if ((!enabled) || (!q.poopmap))
return null
if (!token)
return {poops:[], days:7}
const {days} = imports.metadata.plugins.poopmap.inputs({data, account, q})
const {data:{poops}} = await imports.axios.get(`https://api.poopmap.net/api/v1/public_links/${token}`)

View File

@@ -3,7 +3,7 @@ export default async function({login, q, imports, graphql, queries, data, accoun
//Plugin execution
try {
//Check if plugin is enabled and requirements are met
if ((!enabled) || (!q.repositories))
return null
//Load inputs

View File

@@ -3,7 +3,7 @@ export default async function({login, q, imports, data, graphql, queries, accoun
//Plugin execution
try {
//Check if plugin is enabled and requirements are met
if ((!enabled) || (!q.sponsors))
return null
//Load inputs

View File

@@ -31,7 +31,7 @@ export default async function({login, graphql, data, imports, q, queries, accoun
console.debug(`metrics/compute/${login}/plugins > stargazers > loaded ${dates.length} stargazers in total`)
//Compute stargazers increments
const days = 14 * (1 + data.large / 2)
const increments = {dates:Object.fromEntries([...new Array(days).fill(null).map((_, i) => [new Date(Date.now() - i * 24 * 60 * 60 * 1000).toISOString().slice(0, 10), 0]).reverse()]), max:NaN, min:NaN}
dates
.map(date => date.toISOString().slice(0, 10))

View File

@@ -3,7 +3,7 @@ export default async function({login, q, imports, data, account}, {enabled = fal
//Plugin execution
try {
//Check if plugin is enabled and requirements are met
if ((!enabled) || (!q.starlists))
return null
//Load inputs
@@ -19,17 +19,18 @@ export default async function({login, q, imports, data, account}, {enabled = fal
console.debug(`metrics/compute/${login}/plugins > starlists > fetching lists`)
await page.goto(`https://github.com/${login}?tab=stars`)
let lists = (await page.evaluate(() => [...document.querySelectorAll("[href^='/stars/lowlighter/lists']")].map(element => ({
link:element.href,
name:element.querySelector("h3")?.innerText ?? "",
description:element.querySelector("span")?.innerText ?? "",
count:Number(element.querySelector("div")?.innerText.match(/(?<count>\d+)/)?.groups.count),
repositories:[],
}))
))
const count = lists.length
console.debug(`metrics/compute/${login}/plugins > starlists > found [${lists.map(({name}) => name)}]`)
lists = lists
.filter(({name}) => name)
.filter(({name}) => (!only.length) || (only.includes(name.toLocaleLowerCase())))
.filter(({name}) => !ignored.includes(name.toLocaleLowerCase()))
.slice(0, limit)
console.debug(`metrics/compute/${login}/plugins > starlists > extracted ${lists.length} lists`)
@@ -39,9 +40,10 @@ export default async function({login, q, imports, data, account}, {enabled = fal
console.debug(`metrics/compute/${login}/plugins > starlists > fetching ${list.name}`)
await page.goto(list.link)
const repositories = await page.evaluate(() => [...document.querySelectorAll("#user-list-repositories > div")].map(element => ({
name:element.querySelector("div:first-child")?.innerText.replace(" / ", "/") ?? "",
description:element.querySelector(".py-1")?.innerText ?? "",
}))
)
list.repositories.push(...repositories)
if (_shuffle)
list.repositories = imports.shuffle(list.repositories)

View File

@@ -16,7 +16,8 @@ export default async function({login, imports, data, rest, q, account}, {enabled
//Get views stats from repositories
console.debug(`metrics/compute/${login}/plugins > traffic > querying api`)
const views = {count:0, uniques:0}
const response = [...await Promise.allSettled(repositories.map(({repo, owner}) => (skipped.includes(repo.toLocaleLowerCase())) || (skipped.includes(`${owner}/${repo}`.toLocaleLowerCase())) ? {} : rest.repos.getViews({owner, repo})))].filter(({status}) => status === "fulfilled"
).map(({value}) => value)
//Compute views
console.debug(`metrics/compute/${login}/plugins > traffic > computing stats`)

View File

@@ -70,7 +70,7 @@ placeholder.run = async vars => {
//Setup
beforeAll(async () => {
//Clean community template
await fs.promises.rm(path.join(__dirname, "../source/templates/@classic"), { recursive: true, force: true })
//Start web instance
await web.start()
})
@@ -79,7 +79,7 @@ afterAll(async () => {
//Stop web instance //Stop web instance
await web.stop() await web.stop()
//Clean community template //Clean community template
await fs.promises.rm(path.join(__dirname, "../source/templates/@classic"), { recursive: true, force:true }) await fs.promises.rm(path.join(__dirname, "../source/templates/@classic"), { recursive: true, force: true })
}) })
//Load metadata (as jest doesn't support ESM modules, we use this dirty hack) //Load metadata (as jest doesn't support ESM modules, we use this dirty hack)

View File

@@ -1,5 +1,5 @@
/**Mocked data */ /**Mocked data */
export default function({faker, url, options, login = faker.internet.userName()}) { export default function({ faker, url, options, login = faker.internet.userName() }) {
//Last.fm api //Last.fm api
if (/^https:..ws.audioscrobbler.com.*$/.test(url)) { if (/^https:..ws.audioscrobbler.com.*$/.test(url)) {
//Get recently played tracks //Get recently played tracks
@@ -10,146 +10,146 @@ export default function({faker, url, options, login = faker.internet.userName()}
const track = faker.random.words(5) const track = faker.random.words(5)
const date = faker.date.recent() const date = faker.date.recent()
return ({ return ({
status:200, status: 200,
data:{ data: {
recenttracks:{ recenttracks: {
"@attr":{ "@attr": {
page:"1", page: "1",
perPage:"1", perPage: "1",
user:"RJ", user: "RJ",
total:"100", total: "100",
pages:"100", pages: "100",
}, },
track:[ track: [
{ {
artist:{ artist: {
mbid:"", mbid: "",
"#text":artist, "#text": artist,
}, },
album:{ album: {
mbid:"", mbid: "",
"#text":album, "#text": album,
}, },
image:[ image: [
{ {
size:"small", size: "small",
"#text":faker.image.abstract(), "#text": faker.image.abstract(),
}, },
{ {
size:"medium", size: "medium",
"#text":faker.image.abstract(), "#text": faker.image.abstract(),
}, },
{ {
size:"large", size: "large",
"#text":faker.image.abstract(), "#text": faker.image.abstract(),
}, },
{ {
size:"extralarge", size: "extralarge",
"#text":faker.image.abstract(), "#text": faker.image.abstract(),
}, },
], ],
streamable:"0", streamable: "0",
date:{ date: {
uts:Math.floor(date.getTime() / 1000), uts: Math.floor(date.getTime() / 1000),
"#text":date.toUTCString().slice(5, 22), "#text": date.toUTCString().slice(5, 22),
}, },
url:faker.internet.url(), url: faker.internet.url(),
name:track, name: track,
mbid:"", mbid: "",
}, },
], ],
}, },
}, },
}) })
} }
else if (/user.gettoptracks/.test(url)) { else if (/user.gettoptracks/.test(url)) {
console.debug(`metrics/compute/mocks > mocking lastfm api result > ${url}`) console.debug(`metrics/compute/mocks > mocking lastfm api result > ${url}`)
const artist = faker.random.word() const artist = faker.random.word()
const track = faker.random.words(5) const track = faker.random.words(5)
return ({ return ({
status:200, status: 200,
data:{ data: {
toptracks:{ toptracks: {
"@attr":{ "@attr": {
page:"1", page: "1",
perPage:"1", perPage: "1",
user:"RJ", user: "RJ",
total:"100", total: "100",
pages:"100", pages: "100",
}, },
track:[ track: [
{ {
artist:{ artist: {
mbid:"", mbid: "",
name:artist, name: artist,
}, },
image:[ image: [
{ {
size:"small", size: "small",
"#text":faker.image.abstract(), "#text": faker.image.abstract(),
}, },
{ {
size:"medium", size: "medium",
"#text":faker.image.abstract(), "#text": faker.image.abstract(),
}, },
{ {
size:"large", size: "large",
"#text":faker.image.abstract(), "#text": faker.image.abstract(),
}, },
{ {
size:"extralarge", size: "extralarge",
"#text":faker.image.abstract(), "#text": faker.image.abstract(),
}, },
], ],
url:faker.internet.url(), url: faker.internet.url(),
name:track, name: track,
mbid:"", mbid: "",
}, },
], ],
}, },
}, },
}) })
} }
else if (/user.gettopartists/.test(url)) { else if (/user.gettopartists/.test(url)) {
console.debug(`metrics/compute/mocks > mocking lastfm api result > ${url}`) console.debug(`metrics/compute/mocks > mocking lastfm api result > ${url}`)
const artist = faker.random.word() const artist = faker.random.word()
const playcount = faker.random.number() const playcount = faker.random.number()
return ({ return ({
status:200, status: 200,
data:{ data: {
topartists:{ topartists: {
"@attr":{ "@attr": {
page:"1", page: "1",
perPage:"1", perPage: "1",
user:"RJ", user: "RJ",
total:"100", total: "100",
pages:"100", pages: "100",
}, },
artist:[ artist: [
{ {
image:[ image: [
{ {
size:"small", size: "small",
"#text":faker.image.abstract(), "#text": faker.image.abstract(),
}, },
{ {
size:"medium", size: "medium",
"#text":faker.image.abstract(), "#text": faker.image.abstract(),
}, },
{ {
size:"large", size: "large",
"#text":faker.image.abstract(), "#text": faker.image.abstract(),
}, },
{ {
size:"extralarge", size: "extralarge",
"#text":faker.image.abstract(), "#text": faker.image.abstract(),
}, },
], ],
streamable:"0", streamable: "0",
playcount, playcount,
url:faker.internet.url(), url: faker.internet.url(),
name:artist, name: artist,
mbid:"", mbid: "",
}, },
], ],
}, },

View File

@@ -1,27 +1,27 @@
/**Mocked data */ /**Mocked data */
export default function({faker, url}) { export default function({ faker, url }) {
//Last.fm api //Last.fm api
if (/^https:..testapp.herokuapp.com.*$/.test(url)) { if (/^https:..testapp.herokuapp.com.*$/.test(url)) {
//Get Nightscout Data //Get Nightscout Data
console.debug(`metrics/compute/mocks > mocking nightscout api result > ${url}`) console.debug(`metrics/compute/mocks > mocking nightscout api result > ${url}`)
const lastInterval = Math.floor(new Date() / 300000) * 300000 const lastInterval = Math.floor(new Date() / 300000) * 300000
return ({ return ({
status:200, status: 200,
data:new Array(12).fill(null).map(_ => ({ data: new Array(12).fill(null).map(_ => ({
_id:faker.git.commitSha().substring(0, 23), _id: faker.git.commitSha().substring(0, 23),
device:"xDrip-DexcomG5", device: "xDrip-DexcomG5",
date:lastInterval, date: lastInterval,
dateString:new Date(lastInterval).toISOString(), dateString: new Date(lastInterval).toISOString(),
sgv:faker.datatype.number({min:40, max:400}), sgv: faker.datatype.number({ min: 40, max: 400 }),
delta:faker.datatype.number({min:-10, max:10}), delta: faker.datatype.number({ min: -10, max: 10 }),
direction:faker.random.arrayElement(["SingleUp", "DoubleUp", "FortyFiveUp", "Flat", "FortyFiveDown", "SingleDown", "DoubleDown"]), direction: faker.random.arrayElement(["SingleUp", "DoubleUp", "FortyFiveUp", "Flat", "FortyFiveDown", "SingleDown", "DoubleDown"]),
type:"sgv", type: "sgv",
filtered:0, filtered: 0,
unfiltered:0, unfiltered: 0,
rssi:100, rssi: 100,
noise:1, noise: 1,
sysTime:new Date(lastInterval).toISOString(), sysTime: new Date(lastInterval).toISOString(),
utcOffset:faker.datatype.number({min:-12, max:14}) * 60, utcOffset: faker.datatype.number({ min: -12, max: 14 }) * 60,
})), })),
}) })
} }

View File

@@ -1,5 +1,5 @@
/**Mocked data */ /**Mocked data */
export default function({faker, url, options, login = faker.internet.userName()}) { export default function({ faker, url, options, login = faker.internet.userName() }) {
//Tested url //Tested url
const tested = url.match(/&url=(?<tested>.*?)(?:&|$)/)?.groups?.tested ?? faker.internet.url() const tested = url.match(/&url=(?<tested>.*?)(?:&|$)/)?.groups?.tested ?? faker.internet.url()
//Pagespeed api //Pagespeed api
@@ -8,96 +8,96 @@ export default function({faker, url, options, login = faker.internet.userName()}
if (/v5.runPagespeed.*&key=MOCKED_TOKEN/.test(url)) { if (/v5.runPagespeed.*&key=MOCKED_TOKEN/.test(url)) {
console.debug(`metrics/compute/mocks > mocking pagespeed api result > ${url}`) console.debug(`metrics/compute/mocks > mocking pagespeed api result > ${url}`)
return ({ return ({
status:200, status: 200,
data:{ data: {
captchaResult:"CAPTCHA_NOT_NEEDED", captchaResult: "CAPTCHA_NOT_NEEDED",
id:tested, id: tested,
lighthouseResult:{ lighthouseResult: {
requestedUrl:tested, requestedUrl: tested,
finalUrl:tested, finalUrl: tested,
lighthouseVersion:"6.3.0", lighthouseVersion: "6.3.0",
audits:{ audits: {
"final-screenshot":{ "final-screenshot": {
id:"final-screenshot", id: "final-screenshot",
title:"Final Screenshot", title: "Final Screenshot",
score:null, score: null,
details:{ details: {
data:null, data: null,
type:"screenshot", type: "screenshot",
timestamp:Date.now(), timestamp: Date.now(),
}, },
}, },
metrics:{ metrics: {
id:"metrics", id: "metrics",
title:"Metrics", title: "Metrics",
score:null, score: null,
details:{ details: {
items:[ items: [
{ {
observedFirstContentfulPaint:faker.datatype.number(500), observedFirstContentfulPaint: faker.datatype.number(500),
observedFirstVisualChangeTs:faker.time.recent(), observedFirstVisualChangeTs: faker.time.recent(),
observedFirstContentfulPaintTs:faker.time.recent(), observedFirstContentfulPaintTs: faker.time.recent(),
firstContentfulPaint:faker.datatype.number(500), firstContentfulPaint: faker.datatype.number(500),
observedDomContentLoaded:faker.datatype.number(500), observedDomContentLoaded: faker.datatype.number(500),
observedFirstMeaningfulPaint:faker.datatype.number(1000), observedFirstMeaningfulPaint: faker.datatype.number(1000),
maxPotentialFID:faker.datatype.number(500), maxPotentialFID: faker.datatype.number(500),
observedLoad:faker.datatype.number(500), observedLoad: faker.datatype.number(500),
firstMeaningfulPaint:faker.datatype.number(500), firstMeaningfulPaint: faker.datatype.number(500),
observedCumulativeLayoutShift:faker.datatype.float({max:1}), observedCumulativeLayoutShift: faker.datatype.float({ max: 1 }),
observedSpeedIndex:faker.datatype.number(1000), observedSpeedIndex: faker.datatype.number(1000),
observedSpeedIndexTs:faker.time.recent(), observedSpeedIndexTs: faker.time.recent(),
observedTimeOriginTs:faker.time.recent(), observedTimeOriginTs: faker.time.recent(),
observedLargestContentfulPaint:faker.datatype.number(1000), observedLargestContentfulPaint: faker.datatype.number(1000),
cumulativeLayoutShift:faker.datatype.float({max:1}), cumulativeLayoutShift: faker.datatype.float({ max: 1 }),
observedFirstPaintTs:faker.time.recent(), observedFirstPaintTs: faker.time.recent(),
observedTraceEndTs:faker.time.recent(), observedTraceEndTs: faker.time.recent(),
largestContentfulPaint:faker.datatype.number(2000), largestContentfulPaint: faker.datatype.number(2000),
observedTimeOrigin:faker.datatype.number(10), observedTimeOrigin: faker.datatype.number(10),
speedIndex:faker.datatype.number(1000), speedIndex: faker.datatype.number(1000),
observedTraceEnd:faker.datatype.number(2000), observedTraceEnd: faker.datatype.number(2000),
observedDomContentLoadedTs:faker.time.recent(), observedDomContentLoadedTs: faker.time.recent(),
observedFirstPaint:faker.datatype.number(500), observedFirstPaint: faker.datatype.number(500),
totalBlockingTime:faker.datatype.number(500), totalBlockingTime: faker.datatype.number(500),
observedLastVisualChangeTs:faker.time.recent(), observedLastVisualChangeTs: faker.time.recent(),
observedFirstVisualChange:faker.datatype.number(500), observedFirstVisualChange: faker.datatype.number(500),
observedLargestContentfulPaintTs:faker.time.recent(), observedLargestContentfulPaintTs: faker.time.recent(),
estimatedInputLatency:faker.datatype.number(100), estimatedInputLatency: faker.datatype.number(100),
observedLoadTs:faker.time.recent(), observedLoadTs: faker.time.recent(),
observedLastVisualChange:faker.datatype.number(1000), observedLastVisualChange: faker.datatype.number(1000),
firstCPUIdle:faker.datatype.number(1000), firstCPUIdle: faker.datatype.number(1000),
interactive:faker.datatype.number(1000), interactive: faker.datatype.number(1000),
observedNavigationStartTs:faker.time.recent(), observedNavigationStartTs: faker.time.recent(),
observedNavigationStart:faker.datatype.number(10), observedNavigationStart: faker.datatype.number(10),
observedFirstMeaningfulPaintTs:faker.time.recent(), observedFirstMeaningfulPaintTs: faker.time.recent(),
}, },
], ],
}, },
}, },
}, },
categories:{ categories: {
"best-practices":{ "best-practices": {
id:"best-practices", id: "best-practices",
title:"Best Practices", title: "Best Practices",
score:faker.datatype.float({max:1}), score: faker.datatype.float({ max: 1 }),
}, },
seo:{ seo: {
id:"seo", id: "seo",
title:"SEO", title: "SEO",
score:faker.datatype.float({max:1}), score: faker.datatype.float({ max: 1 }),
}, },
accessibility:{ accessibility: {
id:"accessibility", id: "accessibility",
title:"Accessibility", title: "Accessibility",
score:faker.datatype.float({max:1}), score: faker.datatype.float({ max: 1 }),
}, },
performance:{ performance: {
id:"performance", id: "performance",
title:"Performance", title: "Performance",
score:faker.datatype.float({max:1}), score: faker.datatype.float({ max: 1 }),
}, },
}, },
}, },
analysisUTCTimestamp:`${faker.date.recent()}`, analysisUTCTimestamp: `${faker.date.recent()}`,
}, },
}) })
} }

View File

@@ -1,24 +1,24 @@
/**Mocked data */ /**Mocked data */
export default function({faker, url, options, login = faker.internet.userName()}) { export default function({ faker, url, options, login = faker.internet.userName() }) {
//Poopmap api //Poopmap api
if (/^https:..api.poopmap.net$/.test(url)) { if (/^https:..api.poopmap.net$/.test(url)) {
//Get user profile //Get user profile
if (/public_links\/MOCKED_TOKEN/.test(url)) { if (/public_links\/MOCKED_TOKEN/.test(url)) {
console.debug(`metrics/compute/mocks > mocking poopmap api result > ${url}`) console.debug(`metrics/compute/mocks > mocking poopmap api result > ${url}`)
return ({ return ({
status:200, status: 200,
data:{ data: {
poops:new Array(12 + faker.datatype.number(6)).fill(null).map(_ => ({ poops: new Array(12 + faker.datatype.number(6)).fill(null).map(_ => ({
id:79744699, id: 79744699,
latitude:faker.address.latitude(), latitude: faker.address.latitude(),
longitude:faker.address.longitude(), longitude: faker.address.longitude(),
created_at:faker.date.past().toISOString(), created_at: faker.date.past().toISOString(),
note:"", note: "",
place:"", place: "",
rating:faker.datatype.number(5), rating: faker.datatype.number(5),
followers_count:faker.datatype.number(100), followers_count: faker.datatype.number(100),
comments_count:faker.datatype.number(12) comments_count: faker.datatype.number(12),
})) })),
}, },
}) })
} }

View File

@@ -1,5 +1,5 @@
/**Mocked data */ /**Mocked data */
export default function({faker, url, options, login = faker.internet.userName()}) { export default function({ faker, url, options, login = faker.internet.userName() }) {
//Spotify api //Spotify api
if (/^https:..api.spotify.com.*$/.test(url)) { if (/^https:..api.spotify.com.*$/.test(url)) {
//Get recently played tracks //Get recently played tracks
@@ -8,140 +8,140 @@ export default function({faker, url, options, login = faker.internet.userName()}
const artist = faker.random.words() const artist = faker.random.words()
const track = faker.random.words(5) const track = faker.random.words(5)
return ({ return ({
status:200, status: 200,
data:{ data: {
items:[ items: [
{ {
track:{ track: {
album:{ album: {
album_type:"single", album_type: "single",
artists:[ artists: [
{ {
name:artist, name: artist,
type:"artist", type: "artist",
}, },
], ],
images:[ images: [
{ {
height:640, height: 640,
url:faker.image.abstract(), url: faker.image.abstract(),
width:640, width: 640,
}, },
{ {
height:300, height: 300,
url:faker.image.abstract(), url: faker.image.abstract(),
width:300, width: 300,
}, },
{ {
height:64, height: 64,
url:faker.image.abstract(), url: faker.image.abstract(),
width:64, width: 64,
}, },
], ],
name:track, name: track,
release_date:`${faker.date.past()}`.substring(0, 10), release_date: `${faker.date.past()}`.substring(0, 10),
type:"album", type: "album",
}, },
artists:[ artists: [
{ {
name:artist, name: artist,
type:"artist", type: "artist",
}, },
], ],
name:track, name: track,
preview_url:faker.internet.url(), preview_url: faker.internet.url(),
type:"track", type: "track",
}, },
played_at:`${faker.date.recent()}`, played_at: `${faker.date.recent()}`,
context:{ context: {
type:"album", type: "album",
}, },
}, },
], ],
}, },
}) })
} }
else if (/me.top.tracks/.test(url) && (options?.headers?.Authorization === "Bearer MOCKED_TOKEN_ACCESS")) { else if (/me.top.tracks/.test(url) && (options?.headers?.Authorization === "Bearer MOCKED_TOKEN_ACCESS")) {
console.debug(`metrics/compute/mocks > mocking spotify api result > ${url}`) console.debug(`metrics/compute/mocks > mocking spotify api result > ${url}`)
const artist = faker.random.words() const artist = faker.random.words()
const track = faker.random.words(5) const track = faker.random.words(5)
return ({ return ({
status:200, status: 200,
data:{ data: {
items:[ items: [
{ {
album:{ album: {
album_type:"single", album_type: "single",
artists:[ artists: [
{ {
name:artist, name: artist,
type:"artist", type: "artist",
}, },
], ],
images:[ images: [
{ {
height:640, height: 640,
url:faker.image.abstract(), url: faker.image.abstract(),
width:640, width: 640,
}, },
{ {
height:300, height: 300,
url:faker.image.abstract(), url: faker.image.abstract(),
width:300, width: 300,
}, },
{ {
height:64, height: 64,
url:faker.image.abstract(), url: faker.image.abstract(),
width:64, width: 64,
}, },
], ],
name:track, name: track,
release_date:`${faker.date.past()}`.substring(0, 10), release_date: `${faker.date.past()}`.substring(0, 10),
type:"album", type: "album",
}, },
artists:[ artists: [
{ {
name:artist, name: artist,
type:"artist", type: "artist",
}, },
], ],
name:track, name: track,
preview_url:faker.internet.url(), preview_url: faker.internet.url(),
type:"track", type: "track",
}, },
], ],
}, },
}) })
} }
else if (/me.top.artists/.test(url) && (options?.headers?.Authorization === "Bearer MOCKED_TOKEN_ACCESS")) { else if (/me.top.artists/.test(url) && (options?.headers?.Authorization === "Bearer MOCKED_TOKEN_ACCESS")) {
console.debug(`metrics/compute/mocks > mocking spotify api result > ${url}`) console.debug(`metrics/compute/mocks > mocking spotify api result > ${url}`)
const genre = faker.random.words() const genre = faker.random.words()
const track = faker.random.words(5) const track = faker.random.words(5)
return ({ return ({
status:200, status: 200,
data:{ data: {
items:[ items: [
{ {
genres: [genre], genres: [genre],
images:[ images: [
{ {
height:640, height: 640,
url:faker.image.abstract(), url: faker.image.abstract(),
width:640, width: 640,
}, },
{ {
height:300, height: 300,
url:faker.image.abstract(), url: faker.image.abstract(),
width:300, width: 300,
}, },
{ {
height:64, height: 64,
url:faker.image.abstract(), url: faker.image.abstract(),
width:64, width: 64,
}, },
], ],
name:track, name: track,
type:"artist", type: "artist",
}, },
], ],
}, },

View File

@@ -1,5 +1,5 @@
/**Mocked data */ /**Mocked data */
export default function({faker, url, options, login = faker.internet.userName()}) { export default function({ faker, url, options, login = faker.internet.userName() }) {
//Stackoverflow api //Stackoverflow api
if (/^https:..api.stackexchange.com.2.2.*$/.test(url)) { if (/^https:..api.stackexchange.com.2.2.*$/.test(url)) {
//Extract user id //Extract user id
@@ -9,24 +9,24 @@ export default function({faker, url, options, login = faker.internet.userName()}
if (/users[/]\d+[/][?]site=stackoverflow$/.test(url)) { if (/users[/]\d+[/][?]site=stackoverflow$/.test(url)) {
console.debug(`metrics/compute/mocks > mocking stackoverflow api result > ${url}`) console.debug(`metrics/compute/mocks > mocking stackoverflow api result > ${url}`)
return ({ return ({
status:200, status: 200,
data:{ data: {
items:[ items: [
{ {
badge_counts:{bronze:faker.datatype.number(500), silver:faker.datatype.number(300), gold:faker.datatype.number(100)}, badge_counts: { bronze: faker.datatype.number(500), silver: faker.datatype.number(300), gold: faker.datatype.number(100) },
accept_rate:faker.datatype.number(100), accept_rate: faker.datatype.number(100),
answer_count:faker.datatype.number(1000), answer_count: faker.datatype.number(1000),
question_count:faker.datatype.number(1000), question_count: faker.datatype.number(1000),
view_count:faker.datatype.number(10000), view_count: faker.datatype.number(10000),
creation_date:faker.date.past(), creation_date: faker.date.past(),
display_name:faker.internet.userName(), display_name: faker.internet.userName(),
user_id, user_id,
reputation:faker.datatype.number(100000), reputation: faker.datatype.number(100000),
}, },
], ],
has_more:false, has_more: false,
quota_max:300, quota_max: 300,
quota_remaining:faker.datatype.number(300), quota_remaining: faker.datatype.number(300),
}, },
}) })
} }
@@ -34,9 +34,9 @@ export default function({faker, url, options, login = faker.internet.userName()}
if (/[?]site=stackoverflow&filter=total$/.test(url)) { if (/[?]site=stackoverflow&filter=total$/.test(url)) {
console.debug(`metrics/compute/mocks > mocking stackoverflow api result > ${url}`) console.debug(`metrics/compute/mocks > mocking stackoverflow api result > ${url}`)
return ({ return ({
status:200, status: 200,
data:{ data: {
total:faker.datatype.number(10000), total: faker.datatype.number(10000),
}, },
}) })
} }
@@ -44,29 +44,29 @@ export default function({faker, url, options, login = faker.internet.userName()}
if ((/questions[?]site=stackoverflow/.test(url)) || (/questions[/][\d;]+[?]site=stackoverflow/.test(url))) { if ((/questions[?]site=stackoverflow/.test(url)) || (/questions[/][\d;]+[?]site=stackoverflow/.test(url))) {
console.debug(`metrics/compute/mocks > mocking stackoverflow api result > ${url}`) console.debug(`metrics/compute/mocks > mocking stackoverflow api result > ${url}`)
return ({ return ({
status:200, status: 200,
data:{ data: {
items:new Array(pagesize).fill(null).map(_ => ({ items: new Array(pagesize).fill(null).map(_ => ({
tags:new Array(5).fill(null).map(_ => faker.lorem.slug()), tags: new Array(5).fill(null).map(_ => faker.lorem.slug()),
owner:{display_name:faker.internet.userName()}, owner: { display_name: faker.internet.userName() },
is_answered:faker.datatype.boolean(), is_answered: faker.datatype.boolean(),
view_count:faker.datatype.number(10000), view_count: faker.datatype.number(10000),
accepted_answer_id:faker.datatype.number(1000000), accepted_answer_id: faker.datatype.number(1000000),
answer_count:faker.datatype.number(100), answer_count: faker.datatype.number(100),
score:faker.datatype.number(1000), score: faker.datatype.number(1000),
creation_date:faker.time.recent(), creation_date: faker.time.recent(),
down_vote_count:faker.datatype.number(1000), down_vote_count: faker.datatype.number(1000),
up_vote_count:faker.datatype.number(1000), up_vote_count: faker.datatype.number(1000),
comment_count:faker.datatype.number(1000), comment_count: faker.datatype.number(1000),
favorite_count:faker.datatype.number(1000), favorite_count: faker.datatype.number(1000),
title:faker.lorem.sentence(), title: faker.lorem.sentence(),
body_markdown:faker.lorem.paragraphs(), body_markdown: faker.lorem.paragraphs(),
link:faker.internet.url(), link: faker.internet.url(),
question_id:faker.datatype.number(1000000), question_id: faker.datatype.number(1000000),
})), })),
has_more:false, has_more: false,
quota_max:300, quota_max: 300,
quota_remaining:faker.datatype.number(300), quota_remaining: faker.datatype.number(300),
}, },
}) })
} }
@@ -74,24 +74,24 @@ export default function({faker, url, options, login = faker.internet.userName()}
if ((/answers[?]site=stackoverflow/.test(url)) || (/answers[/][\d;]+[?]site=stackoverflow/.test(url))) { if ((/answers[?]site=stackoverflow/.test(url)) || (/answers[/][\d;]+[?]site=stackoverflow/.test(url))) {
console.debug(`metrics/compute/mocks > mocking stackoverflow api result > ${url}`) console.debug(`metrics/compute/mocks > mocking stackoverflow api result > ${url}`)
return ({ return ({
status:200, status: 200,
data:{ data: {
items:new Array(pagesize).fill(null).map(_ => ({ items: new Array(pagesize).fill(null).map(_ => ({
owner:{display_name:faker.internet.userName()}, owner: { display_name: faker.internet.userName() },
link:faker.internet.url(), link: faker.internet.url(),
is_accepted:faker.datatype.boolean(), is_accepted: faker.datatype.boolean(),
score:faker.datatype.number(1000), score: faker.datatype.number(1000),
down_vote_count:faker.datatype.number(1000), down_vote_count: faker.datatype.number(1000),
up_vote_count:faker.datatype.number(1000), up_vote_count: faker.datatype.number(1000),
comment_count:faker.datatype.number(1000), comment_count: faker.datatype.number(1000),
creation_date:faker.time.recent(), creation_date: faker.time.recent(),
question_id:faker.datatype.number(1000000), question_id: faker.datatype.number(1000000),
body_markdown:faker.lorem.paragraphs(), body_markdown: faker.lorem.paragraphs(),
answer_id:faker.datatype.number(1000000), answer_id: faker.datatype.number(1000000),
})), })),
has_more:false, has_more: false,
quota_max:300, quota_max: 300,
quota_remaining:faker.datatype.number(300), quota_remaining: faker.datatype.number(300),
}, },
}) })
} }

View File

@@ -1,5 +1,5 @@
/**Mocked data */ /**Mocked data */
export default function({faker, url, options, login = faker.internet.userName()}) { export default function({ faker, url, options, login = faker.internet.userName() }) {
//Twitter api //Twitter api
if (/^https:..api.twitter.com.*$/.test(url)) { if (/^https:..api.twitter.com.*$/.test(url)) {
//Get user profile //Get user profile
@@ -7,13 +7,13 @@ export default function({faker, url, options, login = faker.internet.userName()}
console.debug(`metrics/compute/mocks > mocking twitter api result > ${url}`) console.debug(`metrics/compute/mocks > mocking twitter api result > ${url}`)
const username = url.match(/username[/](?<username>.*?)[?]/)?.groups?.username ?? faker.internet.userName() const username = url.match(/username[/](?<username>.*?)[?]/)?.groups?.username ?? faker.internet.userName()
return ({ return ({
status:200, status: 200,
data:{ data: {
data:{ data: {
profile_image_url:faker.image.people(), profile_image_url: faker.image.people(),
name:faker.name.findName(), name: faker.name.findName(),
verified:faker.datatype.boolean(), verified: faker.datatype.boolean(),
id:faker.datatype.number(1000000).toString(), id: faker.datatype.number(1000000).toString(),
username, username,
}, },
}, },
@@ -23,39 +23,39 @@ export default function({faker, url, options, login = faker.internet.userName()}
if ((/tweets.search.recent/.test(url)) && (options?.headers?.Authorization === "Bearer MOCKED_TOKEN")) { if ((/tweets.search.recent/.test(url)) && (options?.headers?.Authorization === "Bearer MOCKED_TOKEN")) {
console.debug(`metrics/compute/mocks > mocking twitter api result > ${url}`) console.debug(`metrics/compute/mocks > mocking twitter api result > ${url}`)
return ({ return ({
status:200, status: 200,
data:{ data: {
data:[ data: [
{ {
id:faker.datatype.number(100000000000000).toString(), id: faker.datatype.number(100000000000000).toString(),
created_at:`${faker.date.recent()}`, created_at: `${faker.date.recent()}`,
entities:{ entities: {
mentions:[ mentions: [
{start:22, end:33, username:"lowlighter"}, { start: 22, end: 33, username: "lowlighter" },
], ],
}, },
text:"Checkout metrics from @lowlighter ! #GitHub", text: "Checkout metrics from @lowlighter ! #GitHub",
}, },
{ {
id:faker.datatype.number(100000000000000).toString(), id: faker.datatype.number(100000000000000).toString(),
created_at:`${faker.date.recent()}`, created_at: `${faker.date.recent()}`,
text:faker.lorem.paragraph(), text: faker.lorem.paragraph(),
}, },
], ],
includes:{ includes: {
users:[ users: [
{ {
id:faker.datatype.number(100000000000000).toString(), id: faker.datatype.number(100000000000000).toString(),
name:"lowlighter", name: "lowlighter",
username:"lowlighter", username: "lowlighter",
}, },
], ],
}, },
meta:{ meta: {
newest_id:faker.datatype.number(100000000000000).toString(), newest_id: faker.datatype.number(100000000000000).toString(),
oldest_id:faker.datatype.number(100000000000000).toString(), oldest_id: faker.datatype.number(100000000000000).toString(),
result_count:2, result_count: 2,
next_token:"MOCKED_CURSOR", next_token: "MOCKED_CURSOR",
}, },
}, },
}) })

View File

@@ -1,5 +1,5 @@
/**Mocked data */ /**Mocked data */
export default function({faker, url, options, login = faker.internet.userName()}) { export default function({ faker, url, options, login = faker.internet.userName() }) {
//Wakatime api //Wakatime api
if (/^https:..wakatime.com.api.v1.users..*.stats.*$/.test(url)) { if (/^https:..wakatime.com.api.v1.users..*.stats.*$/.test(url)) {
//Get user profile //Get user profile
@@ -11,13 +11,13 @@ export default function({faker, url, options, login = faker.internet.userName()}
get digital() { get digital() {
return `${this.hours}:${this.minutes}` return `${this.hours}:${this.minutes}`
}, },
hours:faker.datatype.number(1000), hours: faker.datatype.number(1000),
minutes:faker.datatype.number(1000), minutes: faker.datatype.number(1000),
name:array ? faker.random.arrayElement(array) : faker.random.words(2).replace(/ /g, "-").toLocaleLowerCase(), name: array ? faker.random.arrayElement(array) : faker.random.words(2).replace(/ /g, "-").toLocaleLowerCase(),
percent:0, percent: 0,
total_seconds:faker.datatype.number(1000000), total_seconds: faker.datatype.number(1000000),
})) }))
results = results.filter(({name}) => elements.includes(name) ? false : (elements.push(name), true)) results = results.filter(({ name }) => elements.includes(name) ? false : (elements.push(name), true))
let percents = 100 let percents = 100
for (const result of results) { for (const result of results) {
result.percent = 1 + faker.datatype.number(percents - 1) result.percent = 1 + faker.datatype.number(percents - 1)
@@ -26,26 +26,26 @@ export default function({faker, url, options, login = faker.internet.userName()}
return results return results
} }
return ({ return ({
status:200, status: 200,
data:{ data: {
data:{ data: {
best_day:{ best_day: {
created_at:faker.date.recent(), created_at: faker.date.recent(),
date:`${faker.date.recent()}`.substring(0, 10), date: `${faker.date.recent()}`.substring(0, 10),
total_seconds:faker.datatype.number(1000000), total_seconds: faker.datatype.number(1000000),
}, },
categories:stats(), categories: stats(),
daily_average:faker.datatype.number(12 * 60 * 60), daily_average: faker.datatype.number(12 * 60 * 60),
daily_average_including_other_language:faker.datatype.number(12 * 60 * 60), daily_average_including_other_language: faker.datatype.number(12 * 60 * 60),
dependencies:stats(), dependencies: stats(),
editors:stats(["VS Code", "Chrome", "IntelliJ", "PhpStorm", "WebStorm", "Android Studio", "Visual Studio", "Sublime Text", "PyCharm", "Vim", "Atom", "Xcode"]), editors: stats(["VS Code", "Chrome", "IntelliJ", "PhpStorm", "WebStorm", "Android Studio", "Visual Studio", "Sublime Text", "PyCharm", "Vim", "Atom", "Xcode"]),
languages:stats(["JavaScript", "TypeScript", "PHP", "Java", "Python", "Vue.js", "HTML", "C#", "JSON", "Dart", "SCSS", "Kotlin", "JSX", "Go", "Ruby", "YAML"]), languages: stats(["JavaScript", "TypeScript", "PHP", "Java", "Python", "Vue.js", "HTML", "C#", "JSON", "Dart", "SCSS", "Kotlin", "JSX", "Go", "Ruby", "YAML"]),
machines:stats(), machines: stats(),
operating_systems:stats(["Mac", "Windows", "Linux"]), operating_systems: stats(["Mac", "Windows", "Linux"]),
project:null, project: null,
projects:/api_key=MOCKED_TOKEN_NO_PROJECTS/.test(url) ? null : stats(), projects: /api_key=MOCKED_TOKEN_NO_PROJECTS/.test(url) ? null : stats(),
total_seconds:faker.datatype.number(1000000000), total_seconds: faker.datatype.number(1000000000),
total_seconds_including_other_language:faker.datatype.number(1000000000), total_seconds_including_other_language: faker.datatype.number(1000000000),
}, },
}, },
}) })

View File

@@ -1,32 +1,32 @@
/**Mocked data */ /**Mocked data */
export default function({faker, url, options, login = faker.internet.userName()}) { export default function({ faker, url, options, login = faker.internet.userName() }) {
//Yahoo Finance api //Yahoo Finance api
if (/^https:..apidojo-yahoo-finance-v1.p.rapidapi.com.stock.v2.*$/.test(url)) { if (/^https:..apidojo-yahoo-finance-v1.p.rapidapi.com.stock.v2.*$/.test(url)) {
//Get company profile //Get company profile
if (/get-profile/.test(url)) { if (/get-profile/.test(url)) {
console.debug(`metrics/compute/mocks > mocking yahoo finance api result > ${url}`) console.debug(`metrics/compute/mocks > mocking yahoo finance api result > ${url}`)
return ({ return ({
status:200, status: 200,
data:{ data: {
price:{ price: {
marketCap:{ marketCap: {
raw:faker.datatype.number(1000000000), raw: faker.datatype.number(1000000000),
}, },
symbol:"OCTO", symbol: "OCTO",
}, },
quoteType:{ quoteType: {
shortName:faker.company.companyName(), shortName: faker.company.companyName(),
longName:faker.company.companyName(), longName: faker.company.companyName(),
exchangeTimezoneName:faker.address.timeZone(), exchangeTimezoneName: faker.address.timeZone(),
symbol:"OCTO", symbol: "OCTO",
}, },
calendarEvents:{}, calendarEvents: {},
summaryDetail:{}, summaryDetail: {},
symbol:"OCTO", symbol: "OCTO",
assetProfile:{ assetProfile: {
fullTimeEmployees:faker.datatype.number(10000), fullTimeEmployees: faker.datatype.number(10000),
city:faker.address.city(), city: faker.address.city(),
country:faker.address.country(), country: faker.address.country(),
}, },
}, },
}) })
@@ -35,23 +35,23 @@ export default function({faker, url, options, login = faker.internet.userName()}
if (/get-chart/.test(url)) { if (/get-chart/.test(url)) {
console.debug(`metrics/compute/mocks > mocking yahoo finance api result > ${url}`) console.debug(`metrics/compute/mocks > mocking yahoo finance api result > ${url}`)
return ({ return ({
status:200, status: 200,
data:{ data: {
chart:{ chart: {
result:[ result: [
{ {
meta:{ meta: {
currency:"USD", currency: "USD",
symbol:"OCTO", symbol: "OCTO",
regularMarketPrice:faker.datatype.number(10000) / 100, regularMarketPrice: faker.datatype.number(10000) / 100,
chartPreviousClose:faker.datatype.number(10000) / 100, chartPreviousClose: faker.datatype.number(10000) / 100,
previousClose:faker.datatype.number(10000) / 100, previousClose: faker.datatype.number(10000) / 100,
}, },
timestamp:new Array(1000).fill(Date.now()).map((x, i) => x + i * 60000), timestamp: new Array(1000).fill(Date.now()).map((x, i) => x + i * 60000),
indicators:{ indicators: {
quote:[ quote: [
{ {
close:new Array(1000).fill(null).map(_ => faker.datatype.number(10000) / 100), close: new Array(1000).fill(null).map(_ => faker.datatype.number(10000) / 100),
get low() { get low() {
return this.close return this.close
}, },
@@ -61,7 +61,7 @@ export default function({faker, url, options, login = faker.internet.userName()}
get open() { get open() {
return this.close return this.close
}, },
volume:[], volume: [],
}, },
], ],
}, },

View File

@@ -1,45 +1,45 @@
/**Mocked data */ /**Mocked data */
export default function({faker, url, body, login = faker.internet.userName()}) { export default function({ faker, url, body, login = faker.internet.userName() }) {
if (/^https:..graphql.anilist.co.*$/.test(url)) { if (/^https:..graphql.anilist.co.*$/.test(url)) {
//Initialization and media generator //Initialization and media generator
const {query} = body const { query } = body
const media = ({type}) => ({ const media = ({ type }) => ({
title:{romaji:faker.lorem.words(), english:faker.lorem.words(), native:faker.lorem.words()}, title: { romaji: faker.lorem.words(), english: faker.lorem.words(), native: faker.lorem.words() },
description:faker.lorem.paragraphs(), description: faker.lorem.paragraphs(),
type, type,
status:faker.random.arrayElement(["FINISHED", "RELEASING", "NOT_YET_RELEASED", "CANCELLED", "HIATUS"]), status: faker.random.arrayElement(["FINISHED", "RELEASING", "NOT_YET_RELEASED", "CANCELLED", "HIATUS"]),
episodes:100 + faker.datatype.number(100), episodes: 100 + faker.datatype.number(100),
volumes:faker.datatype.number(100), volumes: faker.datatype.number(100),
chapters:100 + faker.datatype.number(1000), chapters: 100 + faker.datatype.number(1000),
averageScore:faker.datatype.number(100), averageScore: faker.datatype.number(100),
countryOfOrigin:"JP", countryOfOrigin: "JP",
genres:new Array(6).fill(null).map(_ => faker.lorem.word()), genres: new Array(6).fill(null).map(_ => faker.lorem.word()),
coverImage:{medium:null}, coverImage: { medium: null },
startDate:{year:faker.date.past(20).getFullYear()}, startDate: { year: faker.date.past(20).getFullYear() },
}) })
//User statistics query //User statistics query
if (/^query Statistics /.test(query)) { if (/^query Statistics /.test(query)) {
console.debug("metrics/compute/mocks > mocking anilist api result > Statistics") console.debug("metrics/compute/mocks > mocking anilist api result > Statistics")
return ({ return ({
status:200, status: 200,
data:{ data: {
data:{ data: {
User:{ User: {
id:faker.datatype.number(100000), id: faker.datatype.number(100000),
name:faker.internet.userName(), name: faker.internet.userName(),
about:null, about: null,
statistics:{ statistics: {
anime:{ anime: {
count:faker.datatype.number(1000), count: faker.datatype.number(1000),
minutesWatched:faker.datatype.number(100000), minutesWatched: faker.datatype.number(100000),
episodesWatched:faker.datatype.number(10000), episodesWatched: faker.datatype.number(10000),
genres:new Array(4).fill(null).map(_ => ({genre:faker.lorem.word()})), genres: new Array(4).fill(null).map(_ => ({ genre: faker.lorem.word() })),
}, },
manga:{ manga: {
count:faker.datatype.number(1000), count: faker.datatype.number(1000),
chaptersRead:faker.datatype.number(100000), chaptersRead: faker.datatype.number(100000),
volumesRead:faker.datatype.number(10000), volumesRead: faker.datatype.number(10000),
genres:new Array(4).fill(null).map(_ => ({genre:faker.lorem.word()})), genres: new Array(4).fill(null).map(_ => ({ genre: faker.lorem.word() })),
}, },
}, },
}, },
@@ -51,17 +51,17 @@ export default function({faker, url, body, login = faker.internet.userName()}) {
if (/^query FavoritesCharacters /.test(query)) { if (/^query FavoritesCharacters /.test(query)) {
console.debug("metrics/compute/mocks > mocking anilist api result > Favorites characters") console.debug("metrics/compute/mocks > mocking anilist api result > Favorites characters")
return ({ return ({
status:200, status: 200,
data:{ data: {
data:{ data: {
User:{ User: {
favourites:{ favourites: {
characters:{ characters: {
nodes:new Array(2 + faker.datatype.number(16)).fill(null).map(_ => ({ nodes: new Array(2 + faker.datatype.number(16)).fill(null).map(_ => ({
name:{full:faker.name.findName(), native:faker.name.findName()}, name: { full: faker.name.findName(), native: faker.name.findName() },
image:{medium:null}, image: { medium: null },
})), })),
pageInfo:{currentPage:1, hasNextPage:false}, pageInfo: { currentPage: 1, hasNextPage: false },
}, },
}, },
}, },
@@ -74,14 +74,14 @@ export default function({faker, url, body, login = faker.internet.userName()}) {
console.debug("metrics/compute/mocks > mocking anilist api result > Favorites") console.debug("metrics/compute/mocks > mocking anilist api result > Favorites")
const type = /anime[(]/.test(query) ? "ANIME" : /manga[(]/.test(query) ? "MANGA" : "OTHER" const type = /anime[(]/.test(query) ? "ANIME" : /manga[(]/.test(query) ? "MANGA" : "OTHER"
return ({ return ({
status:200, status: 200,
data:{ data: {
data:{ data: {
User:{ User: {
favourites:{ favourites: {
[type.toLocaleLowerCase()]:{ [type.toLocaleLowerCase()]: {
nodes:new Array(16).fill(null).map(_ => media({type})), nodes: new Array(16).fill(null).map(_ => media({ type })),
pageInfo:{currentPage:1, hasNextPage:false}, pageInfo: { currentPage: 1, hasNextPage: false },
}, },
}, },
}, },
@@ -92,24 +92,24 @@ export default function({faker, url, body, login = faker.internet.userName()}) {
//Medias query //Medias query
if (/^query Medias /.test(query)) { if (/^query Medias /.test(query)) {
console.debug("metrics/compute/mocks > mocking anilist api result > Medias") console.debug("metrics/compute/mocks > mocking anilist api result > Medias")
const {type} = body.variables const { type } = body.variables
return ({ return ({
status:200, status: 200,
data:{ data: {
data:{ data: {
MediaListCollection:{ MediaListCollection: {
lists:[ lists: [
{ {
name:{ANIME:"Watching", MANGA:"Reading", OTHER:"Completed"}[type], name: { ANIME: "Watching", MANGA: "Reading", OTHER: "Completed" }[type],
isCustomList:false, isCustomList: false,
entries:new Array(16).fill(null).map(_ => ({ entries: new Array(16).fill(null).map(_ => ({
status:faker.random.arrayElement(["CURRENT", "PLANNING", "COMPLETED", "DROPPED", "PAUSED", "REPEATING"]), status: faker.random.arrayElement(["CURRENT", "PLANNING", "COMPLETED", "DROPPED", "PAUSED", "REPEATING"]),
progress:faker.datatype.number(100), progress: faker.datatype.number(100),
progressVolumes:null, progressVolumes: null,
score:0, score: 0,
startedAt:{year:null, month:null, day:null}, startedAt: { year: null, month: null, day: null },
completedAt:{year:null, month:null, day:null}, completedAt: { year: null, month: null, day: null },
media:media({type}), media: media({ type }),
})), })),
}, },
], ],

View File

@@ -1,18 +1,18 @@
/**Mocked data */ /**Mocked data */
export default function({faker, url, body, login = faker.internet.userName()}) { export default function({ faker, url, body, login = faker.internet.userName() }) {
if (/^https:..api.hashnode.com.*$/.test(url)) { if (/^https:..api.hashnode.com.*$/.test(url)) {
console.debug(`metrics/compute/mocks > mocking hashnode result > ${url}`) console.debug(`metrics/compute/mocks > mocking hashnode result > ${url}`)
return ({ return ({
status:200, status: 200,
data:{ data: {
data:{ data: {
user:{ user: {
publication:{ publication: {
posts:new Array(30).fill(null).map(_ => ({ posts: new Array(30).fill(null).map(_ => ({
title:faker.lorem.sentence(), title: faker.lorem.sentence(),
brief:faker.lorem.paragraph(), brief: faker.lorem.paragraph(),
coverImage:null, coverImage: null,
dateAdded:faker.date.recent(), dateAdded: faker.date.recent(),
})), })),
}, },
}, },

View File

@@ -2,19 +2,19 @@
import urls from "url" import urls from "url"
/**Mocked data */ /**Mocked data */
export default function({faker, url, body, login = faker.internet.userName()}) { export default function({ faker, url, body, login = faker.internet.userName() }) {
if (/^https:..accounts.spotify.com.api.token.*$/.test(url)) { if (/^https:..accounts.spotify.com.api.token.*$/.test(url)) {
//Access token generator //Access token generator
const params = new urls.URLSearchParams(body) const params = new urls.URLSearchParams(body)
if ((params.get("grant_type") === "refresh_token") && (params.get("client_id") === "MOCKED_CLIENT_ID") && (params.get("client_secret") === "MOCKED_CLIENT_SECRET") && (params.get("refresh_token") === "MOCKED_REFRESH_TOKEN")) { if ((params.get("grant_type") === "refresh_token") && (params.get("client_id") === "MOCKED_CLIENT_ID") && (params.get("client_secret") === "MOCKED_CLIENT_SECRET") && (params.get("refresh_token") === "MOCKED_REFRESH_TOKEN")) {
console.debug(`metrics/compute/mocks > mocking spotify api result > ${url}`) console.debug(`metrics/compute/mocks > mocking spotify api result > ${url}`)
return ({ return ({
status:200, status: 200,
data:{ data: {
access_token:"MOCKED_TOKEN_ACCESS", access_token: "MOCKED_TOKEN_ACCESS",
token_type:"Bearer", token_type: "Bearer",
expires_in:3600, expires_in: 3600,
scope:"user-read-recently-played user-read-private", scope: "user-read-recently-played user-read-private",
}, },
}) })
} }

View File

@@ -1,59 +1,58 @@
/**Mocked data */ /**Mocked data */
export default function({faker, url, options, login = faker.internet.userName()}) { export default function({ faker, url, options, login = faker.internet.userName() }) {
if (/^https:..music.youtube.com.youtubei.v1.*$/.test(url)) { if (/^https:..music.youtube.com.youtubei.v1.*$/.test(url)) {
//Get recently played tracks //Get recently played tracks
if (/browse/.test(url)) { if (/browse/.test(url)) {
console.debug(`metrics/compute/mocks > mocking yt music api result > ${url}`) console.debug(`metrics/compute/mocks > mocking yt music api result > ${url}`)
const artist = faker.random.word() const artist = faker.random.word()
const track = faker.random.words(5) const track = faker.random.words(5)
const artwork = faker.image.imageUrl() const artwork = faker.image.imageUrl()
return ({ return ({
contents:{ contents: {
singleColumnBrowseResultsRenderer:{ singleColumnBrowseResultsRenderer: {
tabs:[{ tabs: [{
tabRenderer:{ tabRenderer: {
content:{ content: {
sectionListRenderer:{ sectionListRenderer: {
contents:[{ contents: [{
contents:[{ contents: [{
musicResponsiveListItemRenderer:{ musicResponsiveListItemRenderer: {
thumbnail:{ thumbnail: {
musicThumbnailRenderer:{ musicThumbnailRenderer: {
thumbnail:{ thumbnail: {
thumbnails:[{ thumbnails: [{
url:artwork, url: artwork,
}] }],
},
}
},
flexColumns:[{
musicResponsiveListItemFlexColumnRenderer:{
text:{
runs:[{
text:track,
}]
},
}
},
{
musicResponsiveListItemFlexColumnRenderer:{
text:{
runs:[{
text:artist,
}]
},
}
}],
}
}],
}],
},
}, },
},
}, },
flexColumns: [{
musicResponsiveListItemFlexColumnRenderer: {
text: {
runs: [{
text: track,
}],
},
},
}, {
musicResponsiveListItemFlexColumnRenderer: {
text: {
runs: [{
text: artist,
}],
},
},
}],
},
}], }],
}],
}, },
},
}, },
}) }],
} },
},
})
}
} }
} }

View File

@@ -1,70 +1,70 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > achievements/metrics") console.debug("metrics/compute/mocks > mocking graphql api result > achievements/metrics")
return ({ return ({
user:{ user: {
repositories:{ repositories: {
nodes:[ nodes: [
{ {
createdAt:faker.date.recent(), createdAt: faker.date.recent(),
nameWithOwner:`${faker.internet.userName()}/${faker.lorem.slug()}`, nameWithOwner: `${faker.internet.userName()}/${faker.lorem.slug()}`,
}, },
], ],
totalCount:faker.datatype.number(100), totalCount: faker.datatype.number(100),
}, },
forks:{ forks: {
nodes:[ nodes: [
{ {
createdAt:faker.date.recent(), createdAt: faker.date.recent(),
nameWithOwner:`${faker.internet.userName()}/${faker.lorem.slug()}`, nameWithOwner: `${faker.internet.userName()}/${faker.lorem.slug()}`,
}, },
], ],
totalCount:faker.datatype.number(100), totalCount: faker.datatype.number(100),
}, },
popular:{ popular: {
nodes:[{stargazers:{totalCount:faker.datatype.number(50000)}}], nodes: [{ stargazers: { totalCount: faker.datatype.number(50000) } }],
}, },
pullRequests:{ pullRequests: {
nodes:[ nodes: [
{ {
createdAt:faker.date.recent(), createdAt: faker.date.recent(),
title:faker.lorem.sentence(), title: faker.lorem.sentence(),
repository:{nameWithOwner:`${faker.internet.userName()}/${faker.lorem.slug()}`}, repository: { nameWithOwner: `${faker.internet.userName()}/${faker.lorem.slug()}` },
}, },
], ],
totalCount:faker.datatype.number(50000), totalCount: faker.datatype.number(50000),
}, },
contributionsCollection:{ contributionsCollection: {
pullRequestReviewContributions:{ pullRequestReviewContributions: {
nodes:[ nodes: [
{ {
occurredAt:faker.date.recent(), occurredAt: faker.date.recent(),
pullRequest:{ pullRequest: {
title:faker.lorem.sentence(), title: faker.lorem.sentence(),
number:faker.datatype.number(1000), number: faker.datatype.number(1000),
repository:{nameWithOwner:`${faker.internet.userName()}/${faker.lorem.slug()}`}, repository: { nameWithOwner: `${faker.internet.userName()}/${faker.lorem.slug()}` },
}, },
}, },
], ],
totalCount:faker.datatype.number(1000), totalCount: faker.datatype.number(1000),
}, },
}, },
projects:{totalCount:faker.datatype.number(100)}, projects: { totalCount: faker.datatype.number(100) },
packages:{totalCount:faker.datatype.number(100)}, packages: { totalCount: faker.datatype.number(100) },
organizations:{nodes:[], totalCount:faker.datatype.number(5)}, organizations: { nodes: [], totalCount: faker.datatype.number(5) },
gists:{ gists: {
nodes:[{createdAt:faker.date.recent(), name:faker.lorem.slug()}], nodes: [{ createdAt: faker.date.recent(), name: faker.lorem.slug() }],
totalCount:faker.datatype.number(1000), totalCount: faker.datatype.number(1000),
}, },
starredRepositories:{totalCount:faker.datatype.number(1000)}, starredRepositories: { totalCount: faker.datatype.number(1000) },
followers:{totalCount:faker.datatype.number(10000)}, followers: { totalCount: faker.datatype.number(10000) },
following:{totalCount:faker.datatype.number(10000)}, following: { totalCount: faker.datatype.number(10000) },
bio:faker.lorem.sentence(), bio: faker.lorem.sentence(),
status:{message:faker.lorem.paragraph()}, status: { message: faker.lorem.paragraph() },
sponsorshipsAsSponsor:{totalCount:faker.datatype.number(100)}, sponsorshipsAsSponsor: { totalCount: faker.datatype.number(100) },
discussionsStarted:{totalCount:faker.datatype.number(1000)}, discussionsStarted: { totalCount: faker.datatype.number(1000) },
discussionsComments:{totalCount:faker.datatype.number(1000)}, discussionsComments: { totalCount: faker.datatype.number(1000) },
discussionAnswers:{totalCount:faker.datatype.number(1000)}, discussionAnswers: { totalCount: faker.datatype.number(1000) },
}, },
}) })
} }

View File

@@ -1,8 +1,8 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > achievements/metrics") console.debug("metrics/compute/mocks > mocking graphql api result > achievements/metrics")
return ({ return ({
repository:{viewerHasStarred:faker.datatype.boolean()}, repository: { viewerHasStarred: faker.datatype.boolean() },
viewer:{login}, viewer: { login },
}) })
} }

View File

@@ -1,8 +1,8 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > achievements/octocat") console.debug("metrics/compute/mocks > mocking graphql api result > achievements/octocat")
return ({ return ({
user:{viewerIsFollowing:faker.datatype.boolean()}, user: { viewerIsFollowing: faker.datatype.boolean() },
viewer:{login}, viewer: { login },
}) })
} }

View File

@@ -1,33 +1,33 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > achievements/organizations") console.debug("metrics/compute/mocks > mocking graphql api result > achievements/organizations")
return ({ return ({
organization:{ organization: {
repositories:{ repositories: {
nodes:[ nodes: [
{ {
createdAt:faker.date.recent(), createdAt: faker.date.recent(),
nameWithOwner:`${faker.internet.userName()}/${faker.lorem.slug()}`, nameWithOwner: `${faker.internet.userName()}/${faker.lorem.slug()}`,
}, },
], ],
totalCount:faker.datatype.number(100), totalCount: faker.datatype.number(100),
}, },
forks:{ forks: {
nodes:[ nodes: [
{ {
createdAt:faker.date.recent(), createdAt: faker.date.recent(),
nameWithOwner:`${faker.internet.userName()}/${faker.lorem.slug()}`, nameWithOwner: `${faker.internet.userName()}/${faker.lorem.slug()}`,
}, },
], ],
totalCount:faker.datatype.number(100), totalCount: faker.datatype.number(100),
}, },
popular:{ popular: {
nodes:[{stargazers:{totalCount:faker.datatype.number(50000)}}], nodes: [{ stargazers: { totalCount: faker.datatype.number(50000) } }],
}, },
projects:{totalCount:faker.datatype.number(100)}, projects: { totalCount: faker.datatype.number(100) },
packages:{totalCount:faker.datatype.number(100)}, packages: { totalCount: faker.datatype.number(100) },
membersWithRole:{totalCount:faker.datatype.number(100)}, membersWithRole: { totalCount: faker.datatype.number(100) },
sponsorshipsAsSponsor:{totalCount:faker.datatype.number(100)}, sponsorshipsAsSponsor: { totalCount: faker.datatype.number(100) },
}, },
}) })
} }

View File

@@ -1,12 +1,12 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > achievements/ranking") console.debug("metrics/compute/mocks > mocking graphql api result > achievements/ranking")
return ({ return ({
repo_rank:{repositoryCount:faker.datatype.number(100000)}, repo_rank: { repositoryCount: faker.datatype.number(100000) },
forks_rank:{repositoryCount:faker.datatype.number(100000)}, forks_rank: { repositoryCount: faker.datatype.number(100000) },
created_rank:{userCount:faker.datatype.number(100000)}, created_rank: { userCount: faker.datatype.number(100000) },
user_rank:{userCount:faker.datatype.number(100000)}, user_rank: { userCount: faker.datatype.number(100000) },
repo_total:{repositoryCount:faker.datatype.number(100000)}, repo_total: { repositoryCount: faker.datatype.number(100000) },
user_total:{userCount:faker.datatype.number(100000)}, user_total: { userCount: faker.datatype.number(100000) },
}) })
} }

View File

@@ -1,36 +1,36 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > base/user") console.debug("metrics/compute/mocks > mocking graphql api result > base/user")
return ({ return ({
user:{ user: {
calendar:{ calendar: {
contributionCalendar:{ contributionCalendar: {
weeks:[ weeks: [
{ {
contributionDays:[ contributionDays: [
{color:faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"])}, { color: faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"]) },
{color:faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"])}, { color: faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"]) },
{color:faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"])}, { color: faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"]) },
{color:faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"])}, { color: faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"]) },
{color:faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"])}, { color: faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"]) },
], ],
}, },
{ {
contributionDays:[ contributionDays: [
{color:faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"])}, { color: faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"]) },
{color:faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"])}, { color: faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"]) },
{color:faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"])}, { color: faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"]) },
{color:faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"])}, { color: faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"]) },
{color:faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"])}, { color: faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"]) },
{color:faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"])}, { color: faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"]) },
{color:faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"])}, { color: faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"]) },
], ],
}, },
{ {
contributionDays:[ contributionDays: [
{color:faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"])}, { color: faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"]) },
{color:faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"])}, { color: faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"]) },
{color:faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"])}, { color: faker.random.arrayElement(["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"]) },
], ],
}, },
], ],

View File

@@ -1,15 +1,15 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > base/user") console.debug("metrics/compute/mocks > mocking graphql api result > base/user")
return ({ return ({
user:{ user: {
contributionsCollection:{ contributionsCollection: {
totalRepositoriesWithContributedCommits:faker.datatype.number(100), totalRepositoriesWithContributedCommits: faker.datatype.number(100),
totalCommitContributions:faker.datatype.number(10000), totalCommitContributions: faker.datatype.number(10000),
restrictedContributionsCount:faker.datatype.number(10000), restrictedContributionsCount: faker.datatype.number(10000),
totalIssueContributions:faker.datatype.number(100), totalIssueContributions: faker.datatype.number(100),
totalPullRequestContributions:faker.datatype.number(1000), totalPullRequestContributions: faker.datatype.number(1000),
totalPullRequestReviewContributions:faker.datatype.number(1000), totalPullRequestReviewContributions: faker.datatype.number(1000),
}, },
}, },
}) })

View File

@@ -1,18 +1,18 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > base/user") console.debug("metrics/compute/mocks > mocking graphql api result > base/user")
return ({ return ({
user:{ user: {
packages:{totalCount:faker.datatype.number(10)}, packages: { totalCount: faker.datatype.number(10) },
starredRepositories:{totalCount:faker.datatype.number(1000)}, starredRepositories: { totalCount: faker.datatype.number(1000) },
watching:{totalCount:faker.datatype.number(100)}, watching: { totalCount: faker.datatype.number(100) },
sponsorshipsAsSponsor:{totalCount:faker.datatype.number(10)}, sponsorshipsAsSponsor: { totalCount: faker.datatype.number(10) },
sponsorshipsAsMaintainer:{totalCount:faker.datatype.number(10)}, sponsorshipsAsMaintainer: { totalCount: faker.datatype.number(10) },
repositoriesContributedTo:{totalCount:faker.datatype.number(100)}, repositoriesContributedTo: { totalCount: faker.datatype.number(100) },
followers:{totalCount:faker.datatype.number(1000)}, followers: { totalCount: faker.datatype.number(1000) },
following:{totalCount:faker.datatype.number(1000)}, following: { totalCount: faker.datatype.number(1000) },
issueComments:{totalCount:faker.datatype.number(1000)}, issueComments: { totalCount: faker.datatype.number(1000) },
organizations:{totalCount:faker.datatype.number(10)}, organizations: { totalCount: faker.datatype.number(10) },
}, },
}) })
} }

View File

@@ -1,9 +1,9 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > base/user") console.debug("metrics/compute/mocks > mocking graphql api result > base/user")
return ({ return ({
user:{ user: {
repositories:{totalCount:faker.datatype.number(100), totalDiskUsage:faker.datatype.number(100000)}, repositories: { totalCount: faker.datatype.number(100), totalDiskUsage: faker.datatype.number(100000) },
}, },
}) })
} }

View File

@@ -1,56 +1,56 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > base/repositories") console.debug("metrics/compute/mocks > mocking graphql api result > base/repositories")
return /after: "MOCKED_CURSOR"/m.test(query) return /after: "MOCKED_CURSOR"/m.test(query)
? ({ ? ({
user:{ user: {
get repositoriesContributedTo() { get repositoriesContributedTo() {
return this.repositories return this.repositories
}, },
repositories:{ repositories: {
edges:[], edges: [],
nodes:[], nodes: [],
}, },
}, },
}) })
: ({ : ({
user:{ user: {
get repositoriesContributedTo() { get repositoriesContributedTo() {
return this.repositories return this.repositories
}, },
repositories:{ repositories: {
edges:[ edges: [
{ {
cursor:"MOCKED_CURSOR", cursor: "MOCKED_CURSOR",
}, },
], ],
nodes:[ nodes: [
{ {
name:faker.random.words(), name: faker.random.words(),
watchers:{totalCount:faker.datatype.number(1000)}, watchers: { totalCount: faker.datatype.number(1000) },
stargazers:{totalCount:faker.datatype.number(10000)}, stargazers: { totalCount: faker.datatype.number(10000) },
owner:{login}, owner: { login },
languages:{ languages: {
edges:[ edges: [
{size:faker.datatype.number(100000), node:{color:faker.internet.color(), name:faker.lorem.word()}}, { size: faker.datatype.number(100000), node: { color: faker.internet.color(), name: faker.lorem.word() } },
{size:faker.datatype.number(100000), node:{color:faker.internet.color(), name:faker.lorem.word()}}, { size: faker.datatype.number(100000), node: { color: faker.internet.color(), name: faker.lorem.word() } },
{size:faker.datatype.number(100000), node:{color:faker.internet.color(), name:faker.lorem.word()}}, { size: faker.datatype.number(100000), node: { color: faker.internet.color(), name: faker.lorem.word() } },
{size:faker.datatype.number(100000), node:{color:faker.internet.color(), name:faker.lorem.word()}}, { size: faker.datatype.number(100000), node: { color: faker.internet.color(), name: faker.lorem.word() } },
{size:faker.datatype.number(100000), node:{color:faker.internet.color(), name:faker.lorem.word()}}, { size: faker.datatype.number(100000), node: { color: faker.internet.color(), name: faker.lorem.word() } },
{size:faker.datatype.number(100000), node:{color:faker.internet.color(), name:faker.lorem.word()}}, { size: faker.datatype.number(100000), node: { color: faker.internet.color(), name: faker.lorem.word() } },
{size:faker.datatype.number(100000), node:{color:faker.internet.color(), name:faker.lorem.word()}}, { size: faker.datatype.number(100000), node: { color: faker.internet.color(), name: faker.lorem.word() } },
], ],
}, },
issues_open:{totalCount:faker.datatype.number(100)}, issues_open: { totalCount: faker.datatype.number(100) },
issues_closed:{totalCount:faker.datatype.number(100)}, issues_closed: { totalCount: faker.datatype.number(100) },
pr_open:{totalCount:faker.datatype.number(100)}, pr_open: { totalCount: faker.datatype.number(100) },
pr_closed:{totalCount:faker.datatype.number(100)}, pr_closed: { totalCount: faker.datatype.number(100) },
pr_merged:{totalCount:faker.datatype.number(100)}, pr_merged: { totalCount: faker.datatype.number(100) },
releases:{totalCount:faker.datatype.number(100)}, releases: { totalCount: faker.datatype.number(100) },
forkCount:faker.datatype.number(100), forkCount: faker.datatype.number(100),
licenseInfo:{spdxId:"MIT"}, licenseInfo: { spdxId: "MIT" },
deployments:{totalCount:faker.datatype.number(100)}, deployments: { totalCount: faker.datatype.number(100) },
environments:{totalCount:faker.datatype.number(100)}, environments: { totalCount: faker.datatype.number(100) },
}, },
], ],
}, },

View File

@@ -1,37 +1,37 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > base/repository") console.debug("metrics/compute/mocks > mocking graphql api result > base/repository")
return ({ return ({
user:{ user: {
repository:{ repository: {
name:"metrics", name: "metrics",
owner:{login}, owner: { login },
createdAt:new Date().toISOString(), createdAt: new Date().toISOString(),
diskUsage:Math.floor(Math.random() * 10000), diskUsage: Math.floor(Math.random() * 10000),
homepageUrl:faker.internet.url(), homepageUrl: faker.internet.url(),
watchers:{totalCount:faker.datatype.number(1000)}, watchers: { totalCount: faker.datatype.number(1000) },
stargazers:{totalCount:faker.datatype.number(10000)}, stargazers: { totalCount: faker.datatype.number(10000) },
languages:{ languages: {
edges:[ edges: [
{size:faker.datatype.number(100000), node:{color:faker.internet.color(), name:faker.lorem.word()}}, { size: faker.datatype.number(100000), node: { color: faker.internet.color(), name: faker.lorem.word() } },
{size:faker.datatype.number(100000), node:{color:faker.internet.color(), name:faker.lorem.word()}}, { size: faker.datatype.number(100000), node: { color: faker.internet.color(), name: faker.lorem.word() } },
{size:faker.datatype.number(100000), node:{color:faker.internet.color(), name:faker.lorem.word()}}, { size: faker.datatype.number(100000), node: { color: faker.internet.color(), name: faker.lorem.word() } },
{size:faker.datatype.number(100000), node:{color:faker.internet.color(), name:faker.lorem.word()}}, { size: faker.datatype.number(100000), node: { color: faker.internet.color(), name: faker.lorem.word() } },
{size:faker.datatype.number(100000), node:{color:faker.internet.color(), name:faker.lorem.word()}}, { size: faker.datatype.number(100000), node: { color: faker.internet.color(), name: faker.lorem.word() } },
{size:faker.datatype.number(100000), node:{color:faker.internet.color(), name:faker.lorem.word()}}, { size: faker.datatype.number(100000), node: { color: faker.internet.color(), name: faker.lorem.word() } },
{size:faker.datatype.number(100000), node:{color:faker.internet.color(), name:faker.lorem.word()}}, { size: faker.datatype.number(100000), node: { color: faker.internet.color(), name: faker.lorem.word() } },
], ],
}, },
issues_open:{totalCount:faker.datatype.number(100)}, issues_open: { totalCount: faker.datatype.number(100) },
issues_closed:{totalCount:faker.datatype.number(100)}, issues_closed: { totalCount: faker.datatype.number(100) },
pr_open:{totalCount:faker.datatype.number(100)}, pr_open: { totalCount: faker.datatype.number(100) },
pr_closed:{totalCount:faker.datatype.number(100)}, pr_closed: { totalCount: faker.datatype.number(100) },
pr_merged:{totalCount:faker.datatype.number(100)}, pr_merged: { totalCount: faker.datatype.number(100) },
releases:{totalCount:faker.datatype.number(100)}, releases: { totalCount: faker.datatype.number(100) },
forkCount:faker.datatype.number(100), forkCount: faker.datatype.number(100),
licenseInfo:{spdxId:"MIT"}, licenseInfo: { spdxId: "MIT" },
deployments:{totalCount:faker.datatype.number(100)}, deployments: { totalCount: faker.datatype.number(100) },
environments:{totalCount:faker.datatype.number(100)}, environments: { totalCount: faker.datatype.number(100) },
}, },
}, },
}) })

View File

@@ -1,16 +1,16 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > base/user") console.debug("metrics/compute/mocks > mocking graphql api result > base/user")
return ({ return ({
user:{ user: {
databaseId:faker.datatype.number(10000000), databaseId: faker.datatype.number(10000000),
name:faker.name.findName(), name: faker.name.findName(),
login, login,
createdAt:`${faker.date.past(10)}`, createdAt: `${faker.date.past(10)}`,
avatarUrl:faker.image.people(), avatarUrl: faker.image.people(),
websiteUrl:faker.internet.url(), websiteUrl: faker.internet.url(),
isHireable:faker.datatype.boolean(), isHireable: faker.datatype.boolean(),
twitterUsername:login, twitterUsername: login,
}, },
}) })
} }

View File

@@ -1,13 +1,13 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > contributors/commit") console.debug("metrics/compute/mocks > mocking graphql api result > contributors/commit")
return ({ return ({
repository:{ repository: {
object:{ object: {
oid:"MOCKED_SHA", oid: "MOCKED_SHA",
abbreviatedOid:"MOCKED_SHA", abbreviatedOid: "MOCKED_SHA",
messageHeadline:faker.lorem.sentence(), messageHeadline: faker.lorem.sentence(),
committedDate:faker.date.recent(), committedDate: faker.date.recent(),
}, },
}, },
}) })

View File

@@ -1,27 +1,26 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > discussions/categories") console.debug("metrics/compute/mocks > mocking graphql api result > discussions/categories")
return /after: "MOCKED_CURSOR"/m.test(query) return /after: "MOCKED_CURSOR"/m.test(query)
? ({ ? ({
user:{ user: {
repositoryDiscussions:{ repositoryDiscussions: {
edges:[], edges: [],
nodes:[], nodes: [],
} },
} },
}) })
: ({ : ({
user:{ user: {
repositoryDiscussions:{ repositoryDiscussions: {
edges:new Array(100).fill(null).map(_ => ({cursor:"MOCKED_CURSOR"})), edges: new Array(100).fill(null).map(_ => ({ cursor: "MOCKED_CURSOR" })),
nodes:new Array(100).fill(null).map(_ => ({ nodes: new Array(100).fill(null).map(_ => ({
category:{ category: {
emoji:faker.random.arrayElement([":chart_with_upwards_trend:", ":chart_with_downwards_trend:", ":bar_chart:"]), emoji: faker.random.arrayElement([":chart_with_upwards_trend:", ":chart_with_downwards_trend:", ":bar_chart:"]),
name:faker.lorem.slug() name: faker.lorem.slug(),
} },
})) })),
} },
} },
}) })
} }

View File

@@ -1,22 +1,21 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > discussions/comments") console.debug("metrics/compute/mocks > mocking graphql api result > discussions/comments")
return /after: "MOCKED_CURSOR"/m.test(query) return /after: "MOCKED_CURSOR"/m.test(query)
? ({ ? ({
user:{ user: {
repositoryDiscussionsComments:{ repositoryDiscussionsComments: {
edges:[], edges: [],
nodes:[], nodes: [],
} },
} },
}) })
: ({ : ({
user:{ user: {
repositoryDiscussionsComments:{ repositoryDiscussionsComments: {
edges:new Array(100).fill(null).map(_ => ({cursor:"MOCKED_CURSOR"})), edges: new Array(100).fill(null).map(_ => ({ cursor: "MOCKED_CURSOR" })),
nodes:new Array(100).fill(null).map(_ => ({upvoteCount: faker.datatype.number(10)})) nodes: new Array(100).fill(null).map(_ => ({ upvoteCount: faker.datatype.number(10) })),
} },
} },
}) })
} }
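
A minimal usage sketch, not part of this commit, of how a paginated mock like the one above could be exhausted in a test; the relative import path and the faker dependency are assumptions:

//Illustrative sketch only: drain the paginated discussions/comments mock (import paths assumed)
import faker from "faker"
import mocked from "./discussions/comments.mjs"

const nodes = []
let query = "repositoryDiscussionsComments(first: 100)"
for (let page = 0; page < 2; page++) {
  const result = mocked({ faker, query })
  nodes.push(...result.user.repositoryDiscussionsComments.nodes)
  //Once the query carries the mocked cursor, the mock returns empty edges and pagination stops
  query = 'repositoryDiscussionsComments(first: 100, after: "MOCKED_CURSOR")'
}
console.assert(nodes.length === 100)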

View File

@@ -1,11 +1,11 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > discussions/statistics") console.debug("metrics/compute/mocks > mocking graphql api result > discussions/statistics")
return ({ return ({
user:{ user: {
started:{totalCount:faker.datatype.number(1000)}, started: { totalCount: faker.datatype.number(1000) },
comments:{totalCount:faker.datatype.number(1000)}, comments: { totalCount: faker.datatype.number(1000) },
answers:{totalCount:faker.datatype.number(1000)} answers: { totalCount: faker.datatype.number(1000) },
} },
}) })
} }

View File

@@ -1,11 +1,11 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > followup/repository/collaborators") console.debug("metrics/compute/mocks > mocking graphql api result > followup/repository/collaborators")
return ({ return ({
repository:{ repository: {
collaborators:{ collaborators: {
nodes:["github-user"] nodes: ["github-user"],
} },
}, },
}) })
} }

View File

@@ -1,14 +1,14 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > followup/repository") console.debug("metrics/compute/mocks > mocking graphql api result > followup/repository")
return ({ return ({
issues_open:{issueCount:faker.datatype.number(100)}, issues_open: { issueCount: faker.datatype.number(100) },
issues_drafts:{issueCount:faker.datatype.number(100)}, issues_drafts: { issueCount: faker.datatype.number(100) },
issues_skipped:{issueCount:faker.datatype.number(100)}, issues_skipped: { issueCount: faker.datatype.number(100) },
issues_closed:{issueCount:faker.datatype.number(100)}, issues_closed: { issueCount: faker.datatype.number(100) },
pr_open:{issueCount:faker.datatype.number(100)}, pr_open: { issueCount: faker.datatype.number(100) },
pr_drafts:{issueCount:faker.datatype.number(100)}, pr_drafts: { issueCount: faker.datatype.number(100) },
pr_closed:{issueCount:faker.datatype.number(100)}, pr_closed: { issueCount: faker.datatype.number(100) },
pr_merged:{issueCount:faker.datatype.number(100)}, pr_merged: { issueCount: faker.datatype.number(100) },
}) })
} }

View File

@@ -1,14 +1,14 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > followup/user") console.debug("metrics/compute/mocks > mocking graphql api result > followup/user")
return ({ return ({
issues_open:{issueCount:faker.datatype.number(100)}, issues_open: { issueCount: faker.datatype.number(100) },
issues_drafts:{issueCount:faker.datatype.number(100)}, issues_drafts: { issueCount: faker.datatype.number(100) },
issues_skipped:{issueCount:faker.datatype.number(100)}, issues_skipped: { issueCount: faker.datatype.number(100) },
issues_closed:{issueCount:faker.datatype.number(100)}, issues_closed: { issueCount: faker.datatype.number(100) },
pr_open:{issueCount:faker.datatype.number(100)}, pr_open: { issueCount: faker.datatype.number(100) },
pr_drafts:{issueCount:faker.datatype.number(100)}, pr_drafts: { issueCount: faker.datatype.number(100) },
pr_closed:{issueCount:faker.datatype.number(100)}, pr_closed: { issueCount: faker.datatype.number(100) },
pr_merged:{issueCount:faker.datatype.number(100)}, pr_merged: { issueCount: faker.datatype.number(100) },
}) })
} }

View File

@@ -1,38 +1,38 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > gists/default") console.debug("metrics/compute/mocks > mocking graphql api result > gists/default")
return /after: "MOCKED_CURSOR"/m.test(query) return /after: "MOCKED_CURSOR"/m.test(query)
? ({ ? ({
user:{ user: {
gists:{ gists: {
edges:[], edges: [],
nodes:[], nodes: [],
}, },
}, },
}) })
: ({ : ({
user:{ user: {
gists:{ gists: {
edges:[ edges: [
{ {
cursor:"MOCKED_CURSOR", cursor: "MOCKED_CURSOR",
}, },
], ],
totalCount:faker.datatype.number(100), totalCount: faker.datatype.number(100),
nodes:[ nodes: [
{ {
stargazerCount:faker.datatype.number(10), stargazerCount: faker.datatype.number(10),
isFork:false, isFork: false,
forks:{totalCount:faker.datatype.number(10)}, forks: { totalCount: faker.datatype.number(10) },
files:[{name:faker.system.fileName()}], files: [{ name: faker.system.fileName() }],
comments:{totalCount:faker.datatype.number(10)}, comments: { totalCount: faker.datatype.number(10) },
}, },
{ {
stargazerCount:faker.datatype.number(10), stargazerCount: faker.datatype.number(10),
isFork:false, isFork: false,
forks:{totalCount:faker.datatype.number(10)}, forks: { totalCount: faker.datatype.number(10) },
files:[{name:faker.system.fileName()}], files: [{ name: faker.system.fileName() }],
comments:{totalCount:faker.datatype.number(10)}, comments: { totalCount: faker.datatype.number(10) },
}, },
], ],
}, },

View File

@@ -1,9 +1,9 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > introduction/organization") console.debug("metrics/compute/mocks > mocking graphql api result > introduction/organization")
return ({ return ({
organization:{ organization: {
description:faker.lorem.sentences(), description: faker.lorem.sentences(),
}, },
}) })
} }

View File

@@ -1,9 +1,9 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > introduction/repository") console.debug("metrics/compute/mocks > mocking graphql api result > introduction/repository")
return ({ return ({
repository:{ repository: {
description:faker.lorem.sentences(), description: faker.lorem.sentences(),
}, },
}) })
} }

View File

@@ -1,9 +1,9 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > introduction/user") console.debug("metrics/compute/mocks > mocking graphql api result > introduction/user")
return ({ return ({
user:{ user: {
bio:faker.lorem.sentences(), bio: faker.lorem.sentences(),
}, },
}) })
} }

View File

@@ -1,5 +1,5 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > isocalendar/calendar") console.debug("metrics/compute/mocks > mocking graphql api result > isocalendar/calendar")
//Generate calendar //Generate calendar
const date = new Date(query.match(/from: "(?<date>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z)"/)?.groups?.date) const date = new Date(query.match(/from: "(?<date>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z)"/)?.groups?.date)
@@ -9,21 +9,21 @@ export default function({faker, query, login = faker.internet.userName()}) {
for (; date <= to; date.setDate(date.getDate() + 1)) { for (; date <= to; date.setDate(date.getDate() + 1)) {
//Create new week on sunday //Create new week on sunday
if (date.getDay() === 0) { if (date.getDay() === 0) {
weeks.push({contributionDays}) weeks.push({ contributionDays })
contributionDays = [] contributionDays = []
} }
//Random contributions //Random contributions
const contributionCount = Math.min(10, Math.max(0, faker.datatype.number(14) - 4)) const contributionCount = Math.min(10, Math.max(0, faker.datatype.number(14) - 4))
contributionDays.push({ contributionDays.push({
contributionCount, contributionCount,
color:["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"][Math.ceil(contributionCount / 10 / 0.25)], color: ["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"][Math.ceil(contributionCount / 10 / 0.25)],
date:date.toISOString().substring(0, 10), date: date.toISOString().substring(0, 10),
}) })
} }
return ({ return ({
user:{ user: {
calendar:{ calendar: {
contributionCalendar:{ contributionCalendar: {
weeks, weeks,
}, },
}, },
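
A small worked sketch, not part of this commit, of the palette lookup the calendar mock above applies to each generated day (taken from the color expression a few lines up):

//Illustrative sketch only: daily contribution count in [0, 10] mapped to the five heatmap shades
const PALETTE = ["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"]
const colorFor = contributionCount => PALETTE[Math.ceil(contributionCount / 10 / 0.25)]
console.assert(colorFor(0) === "#ebedf0") //0 → index 0 → lightest shade
console.assert(colorFor(5) === "#40c463") //5/10/0.25 = 2 → middle shade
console.assert(colorFor(10) === "#216e39") //10/10/0.25 = 4 → darkest shade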

View File

@@ -1,276 +1,276 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > licenses/default") console.debug("metrics/compute/mocks > mocking graphql api result > licenses/default")
return ({ return ({
licenses:[ licenses: [
{ {
spdxId:"AGPL-3.0", spdxId: "AGPL-3.0",
name:"GNU Affero General Public License v3.0", name: "GNU Affero General Public License v3.0",
nickname:"GNU AGPLv3", nickname: "GNU AGPLv3",
key:"agpl-3.0", key: "agpl-3.0",
limitations:[ limitations: [
{key:"liability", label:"Liability"}, { key: "liability", label: "Liability" },
{key:"warranty", label:"Warranty"}, { key: "warranty", label: "Warranty" },
], ],
conditions:[ conditions: [
{key:"include-copyright", label:"License and copyright notice"}, { key: "include-copyright", label: "License and copyright notice" },
{key:"document-changes", label:"State changes"}, { key: "document-changes", label: "State changes" },
{key:"disclose-source", label:"Disclose source"}, { key: "disclose-source", label: "Disclose source" },
{key:"network-use-disclose", label:"Network use is distribution"}, { key: "network-use-disclose", label: "Network use is distribution" },
{key:"same-license", label:"Same license"}, { key: "same-license", label: "Same license" },
], ],
permissions:[ permissions: [
{key:"commercial-use", label:"Commercial use"}, { key: "commercial-use", label: "Commercial use" },
{key:"modifications", label:"Modification"}, { key: "modifications", label: "Modification" },
{key:"distribution", label:"Distribution"}, { key: "distribution", label: "Distribution" },
{key:"patent-use", label:"Patent use"}, { key: "patent-use", label: "Patent use" },
{key:"private-use", label:"Private use"}, { key: "private-use", label: "Private use" },
], ],
}, },
{ {
spdxId:"Apache-2.0", spdxId: "Apache-2.0",
name:"Apache License 2.0", name: "Apache License 2.0",
nickname:null, nickname: null,
key:"apache-2.0", key: "apache-2.0",
limitations:[ limitations: [
{key:"trademark-use", label:"Trademark use"}, { key: "trademark-use", label: "Trademark use" },
{key:"liability", label:"Liability"}, { key: "liability", label: "Liability" },
{key:"warranty", label:"Warranty"}, { key: "warranty", label: "Warranty" },
], ],
conditions:[ conditions: [
{key:"include-copyright", label:"License and copyright notice"}, { key: "include-copyright", label: "License and copyright notice" },
{key:"document-changes", label:"State changes"}, { key: "document-changes", label: "State changes" },
], ],
permissions:[ permissions: [
{key:"commercial-use", label:"Commercial use"}, { key: "commercial-use", label: "Commercial use" },
{key:"modifications", label:"Modification"}, { key: "modifications", label: "Modification" },
{key:"distribution", label:"Distribution"}, { key: "distribution", label: "Distribution" },
{key:"patent-use", label:"Patent use"}, { key: "patent-use", label: "Patent use" },
{key:"private-use", label:"Private use"}, { key: "private-use", label: "Private use" },
], ],
}, },
{ {
spdxId:"BSD-2-Clause", spdxId: "BSD-2-Clause",
name:'BSD 2-Clause "Simplified" License', name: 'BSD 2-Clause "Simplified" License',
nickname:null, nickname: null,
key:"bsd-2-clause", key: "bsd-2-clause",
limitations:[ limitations: [
{key:"liability", label:"Liability"}, { key: "liability", label: "Liability" },
{key:"warranty", label:"Warranty"}, { key: "warranty", label: "Warranty" },
], ],
conditions:[ conditions: [
{key:"include-copyright", label:"License and copyright notice"}, { key: "include-copyright", label: "License and copyright notice" },
], ],
permissions:[ permissions: [
{key:"commercial-use", label:"Commercial use"}, { key: "commercial-use", label: "Commercial use" },
{key:"modifications", label:"Modification"}, { key: "modifications", label: "Modification" },
{key:"distribution", label:"Distribution"}, { key: "distribution", label: "Distribution" },
{key:"private-use", label:"Private use"}, { key: "private-use", label: "Private use" },
], ],
}, },
{ {
spdxId:"BSD-3-Clause", spdxId: "BSD-3-Clause",
name:'BSD 3-Clause "New" or "Revised" License', name: 'BSD 3-Clause "New" or "Revised" License',
nickname:null, nickname: null,
key:"bsd-3-clause", key: "bsd-3-clause",
limitations:[ limitations: [
{key:"liability", label:"Liability"}, { key: "liability", label: "Liability" },
{key:"warranty", label:"Warranty"}, { key: "warranty", label: "Warranty" },
], ],
conditions:[ conditions: [
{key:"include-copyright", label:"License and copyright notice"}, { key: "include-copyright", label: "License and copyright notice" },
], ],
permissions:[ permissions: [
{key:"commercial-use", label:"Commercial use"}, { key: "commercial-use", label: "Commercial use" },
{key:"modifications", label:"Modification"}, { key: "modifications", label: "Modification" },
{key:"distribution", label:"Distribution"}, { key: "distribution", label: "Distribution" },
{key:"private-use", label:"Private use"}, { key: "private-use", label: "Private use" },
], ],
}, },
{ {
spdxId:"BSL-1.0", spdxId: "BSL-1.0",
name:"Boost Software License 1.0", name: "Boost Software License 1.0",
nickname:null, nickname: null,
key:"bsl-1.0", key: "bsl-1.0",
limitations:[ limitations: [
{key:"liability", label:"Liability"}, { key: "liability", label: "Liability" },
{key:"warranty", label:"Warranty"}, { key: "warranty", label: "Warranty" },
], ],
conditions:[ conditions: [
{key:"include-copyright--source", label:"License and copyright notice for source"}, { key: "include-copyright--source", label: "License and copyright notice for source" },
], ],
permissions:[ permissions: [
{key:"commercial-use", label:"Commercial use"}, { key: "commercial-use", label: "Commercial use" },
{key:"modifications", label:"Modification"}, { key: "modifications", label: "Modification" },
{key:"distribution", label:"Distribution"}, { key: "distribution", label: "Distribution" },
{key:"private-use", label:"Private use"}, { key: "private-use", label: "Private use" },
], ],
}, },
{ {
spdxId:"CC0-1.0", spdxId: "CC0-1.0",
name:"Creative Commons Zero v1.0 Universal", name: "Creative Commons Zero v1.0 Universal",
nickname:null, nickname: null,
key:"cc0-1.0", key: "cc0-1.0",
limitations:[ limitations: [
{key:"liability", label:"Liability"}, { key: "liability", label: "Liability" },
{key:"trademark-use", label:"Trademark use"}, { key: "trademark-use", label: "Trademark use" },
{key:"patent-use", label:"Patent use"}, { key: "patent-use", label: "Patent use" },
{key:"warranty", label:"Warranty"}, { key: "warranty", label: "Warranty" },
], ],
conditions:[], conditions: [],
permissions:[ permissions: [
{key:"commercial-use", label:"Commercial use"}, { key: "commercial-use", label: "Commercial use" },
{key:"modifications", label:"Modification"}, { key: "modifications", label: "Modification" },
{key:"distribution", label:"Distribution"}, { key: "distribution", label: "Distribution" },
{key:"private-use", label:"Private use"}, { key: "private-use", label: "Private use" },
], ],
}, },
{ {
spdxId:"EPL-2.0", spdxId: "EPL-2.0",
name:"Eclipse Public License 2.0", name: "Eclipse Public License 2.0",
nickname:null, nickname: null,
key:"epl-2.0", key: "epl-2.0",
limitations:[ limitations: [
{key:"liability", label:"Liability"}, { key: "liability", label: "Liability" },
{key:"warranty", label:"Warranty"}, { key: "warranty", label: "Warranty" },
], ],
conditions:[ conditions: [
{key:"disclose-source", label:"Disclose source"}, { key: "disclose-source", label: "Disclose source" },
{key:"include-copyright", label:"License and copyright notice"}, { key: "include-copyright", label: "License and copyright notice" },
{key:"same-license", label:"Same license"}, { key: "same-license", label: "Same license" },
], ],
permissions:[ permissions: [
{key:"commercial-use", label:"Commercial use"}, { key: "commercial-use", label: "Commercial use" },
{key:"distribution", label:"Distribution"}, { key: "distribution", label: "Distribution" },
{key:"modifications", label:"Modification"}, { key: "modifications", label: "Modification" },
{key:"patent-use", label:"Patent use"}, { key: "patent-use", label: "Patent use" },
{key:"private-use", label:"Private use"}, { key: "private-use", label: "Private use" },
], ],
}, },
{ {
spdxId:"GPL-2.0", spdxId: "GPL-2.0",
name:"GNU General Public License v2.0", name: "GNU General Public License v2.0",
nickname:"GNU GPLv2", nickname: "GNU GPLv2",
key:"gpl-2.0", key: "gpl-2.0",
limitations:[ limitations: [
{key:"liability", label:"Liability"}, { key: "liability", label: "Liability" },
{key:"warranty", label:"Warranty"}, { key: "warranty", label: "Warranty" },
], ],
conditions:[ conditions: [
{key:"include-copyright", label:"License and copyright notice"}, { key: "include-copyright", label: "License and copyright notice" },
{key:"document-changes", label:"State changes"}, { key: "document-changes", label: "State changes" },
{key:"disclose-source", label:"Disclose source"}, { key: "disclose-source", label: "Disclose source" },
{key:"same-license", label:"Same license"}, { key: "same-license", label: "Same license" },
], ],
permissions:[ permissions: [
{key:"commercial-use", label:"Commercial use"}, { key: "commercial-use", label: "Commercial use" },
{key:"modifications", label:"Modification"}, { key: "modifications", label: "Modification" },
{key:"distribution", label:"Distribution"}, { key: "distribution", label: "Distribution" },
{key:"private-use", label:"Private use"}, { key: "private-use", label: "Private use" },
], ],
}, },
{ {
spdxId:"GPL-3.0", spdxId: "GPL-3.0",
name:"GNU General Public License v3.0", name: "GNU General Public License v3.0",
nickname:"GNU GPLv3", nickname: "GNU GPLv3",
key:"gpl-3.0", key: "gpl-3.0",
limitations:[ limitations: [
{key:"liability", label:"Liability"}, { key: "liability", label: "Liability" },
{key:"warranty", label:"Warranty"}, { key: "warranty", label: "Warranty" },
], ],
conditions:[ conditions: [
{key:"include-copyright", label:"License and copyright notice"}, { key: "include-copyright", label: "License and copyright notice" },
{key:"document-changes", label:"State changes"}, { key: "document-changes", label: "State changes" },
{key:"disclose-source", label:"Disclose source"}, { key: "disclose-source", label: "Disclose source" },
{key:"same-license", label:"Same license"}, { key: "same-license", label: "Same license" },
], ],
permissions:[ permissions: [
{key:"commercial-use", label:"Commercial use"}, { key: "commercial-use", label: "Commercial use" },
{key:"modifications", label:"Modification"}, { key: "modifications", label: "Modification" },
{key:"distribution", label:"Distribution"}, { key: "distribution", label: "Distribution" },
{key:"patent-use", label:"Patent use"}, { key: "patent-use", label: "Patent use" },
{key:"private-use", label:"Private use"}, { key: "private-use", label: "Private use" },
], ],
}, },
{ {
spdxId:"LGPL-2.1", spdxId: "LGPL-2.1",
name:"GNU Lesser General Public License v2.1", name: "GNU Lesser General Public License v2.1",
nickname:"GNU LGPLv2.1", nickname: "GNU LGPLv2.1",
key:"lgpl-2.1", key: "lgpl-2.1",
limitations:[ limitations: [
{key:"liability", label:"Liability"}, { key: "liability", label: "Liability" },
{key:"warranty", label:"Warranty"}, { key: "warranty", label: "Warranty" },
], ],
conditions:[ conditions: [
{key:"include-copyright", label:"License and copyright notice"}, { key: "include-copyright", label: "License and copyright notice" },
{key:"disclose-source", label:"Disclose source"}, { key: "disclose-source", label: "Disclose source" },
{key:"document-changes", label:"State changes"}, { key: "document-changes", label: "State changes" },
{key:"same-license--library", label:"Same license (library)"}, { key: "same-license--library", label: "Same license (library)" },
], ],
permissions:[ permissions: [
{key:"commercial-use", label:"Commercial use"}, { key: "commercial-use", label: "Commercial use" },
{key:"modifications", label:"Modification"}, { key: "modifications", label: "Modification" },
{key:"distribution", label:"Distribution"}, { key: "distribution", label: "Distribution" },
{key:"private-use", label:"Private use"}, { key: "private-use", label: "Private use" },
], ],
}, },
{ {
spdxId:"MIT", spdxId: "MIT",
name:"MIT License", name: "MIT License",
nickname:null, nickname: null,
key:"mit", key: "mit",
limitations:[ limitations: [
{key:"liability", label:"Liability"}, { key: "liability", label: "Liability" },
{key:"warranty", label:"Warranty"}, { key: "warranty", label: "Warranty" },
], ],
conditions:[ conditions: [
{key:"include-copyright", label:"License and copyright notice"}, { key: "include-copyright", label: "License and copyright notice" },
], ],
permissions:[ permissions: [
{key:"commercial-use", label:"Commercial use"}, { key: "commercial-use", label: "Commercial use" },
{key:"modifications", label:"Modification"}, { key: "modifications", label: "Modification" },
{key:"distribution", label:"Distribution"}, { key: "distribution", label: "Distribution" },
{key:"private-use", label:"Private use"}, { key: "private-use", label: "Private use" },
], ],
}, },
{ {
spdxId:"MPL-2.0", spdxId: "MPL-2.0",
name:"Mozilla Public License 2.0", name: "Mozilla Public License 2.0",
nickname:null, nickname: null,
key:"mpl-2.0", key: "mpl-2.0",
limitations:[ limitations: [
{key:"liability", label:"Liability"}, { key: "liability", label: "Liability" },
{key:"trademark-use", label:"Trademark use"}, { key: "trademark-use", label: "Trademark use" },
{key:"warranty", label:"Warranty"}, { key: "warranty", label: "Warranty" },
], ],
conditions:[ conditions: [
{key:"disclose-source", label:"Disclose source"}, { key: "disclose-source", label: "Disclose source" },
{key:"include-copyright", label:"License and copyright notice"}, { key: "include-copyright", label: "License and copyright notice" },
{key:"same-license--file", label:"Same license (file)"}, { key: "same-license--file", label: "Same license (file)" },
], ],
permissions:[ permissions: [
{key:"commercial-use", label:"Commercial use"}, { key: "commercial-use", label: "Commercial use" },
{key:"modifications", label:"Modification"}, { key: "modifications", label: "Modification" },
{key:"distribution", label:"Distribution"}, { key: "distribution", label: "Distribution" },
{key:"patent-use", label:"Patent use"}, { key: "patent-use", label: "Patent use" },
{key:"private-use", label:"Private use"}, { key: "private-use", label: "Private use" },
], ],
}, },
{ {
spdxId:"Unlicense", spdxId: "Unlicense",
name:"The Unlicense", name: "The Unlicense",
nickname:null, nickname: null,
key:"unlicense", key: "unlicense",
limitations:[ limitations: [
{key:"liability", label:"Liability"}, { key: "liability", label: "Liability" },
{key:"warranty", label:"Warranty"}, { key: "warranty", label: "Warranty" },
], ],
conditions:[], conditions: [],
permissions:[ permissions: [
{key:"private-use", label:"Private use"}, { key: "private-use", label: "Private use" },
{key:"commercial-use", label:"Commercial use"}, { key: "commercial-use", label: "Commercial use" },
{key:"modifications", label:"Modification"}, { key: "modifications", label: "Modification" },
{key:"distribution", label:"Distribution"}, { key: "distribution", label: "Distribution" },
], ],
}, },
], ],

View File

@@ -1,12 +1,12 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > licenses/repository") console.debug("metrics/compute/mocks > mocking graphql api result > licenses/repository")
return ({ return ({
user:{ user: {
repository:{ repository: {
licenseInfo:{spdxId:"MIT", name:"MIT License", nickname:null, key:"mit"}, licenseInfo: { spdxId: "MIT", name: "MIT License", nickname: null, key: "mit" },
url:"https://github.com/lowlighter/metrics", url: "https://github.com/lowlighter/metrics",
databaseId:293860197, databaseId: 293860197,
}, },
}, },
}) })

View File

@@ -1,30 +1,30 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > notable/contributions") console.debug("metrics/compute/mocks > mocking graphql api result > notable/contributions")
return /after: "MOCKED_CURSOR"/m.test(query) return /after: "MOCKED_CURSOR"/m.test(query)
? ({ ? ({
user:{ user: {
repositoriesContributedTo:{ repositoriesContributedTo: {
edges:[], edges: [],
}, },
}, },
}) })
: ({ : ({
user:{ user: {
repositoriesContributedTo:{ repositoriesContributedTo: {
edges:[ edges: [
{ {
cursor:"MOCKED_CURSOR", cursor: "MOCKED_CURSOR",
node:{ node: {
isInOrganization:true, isInOrganization: true,
owner:{ owner: {
login:faker.internet.userName(), login: faker.internet.userName(),
avatarUrl:null, avatarUrl: null,
}, },
nameWithOwner:`${faker.internet.userName()}/${faker.lorem.slug()}`, nameWithOwner: `${faker.internet.userName()}/${faker.lorem.slug()}`,
stargazers:{totalCount:faker.datatype.number(1000)}, stargazers: { totalCount: faker.datatype.number(1000) },
watchers:{totalCount:faker.datatype.number(1000)}, watchers: { totalCount: faker.datatype.number(1000) },
forks:{totalCount:faker.datatype.number(1000)}, forks: { totalCount: faker.datatype.number(1000) },
}, },
}, },
], ],

View File

@@ -1,23 +1,23 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > people/default") console.debug("metrics/compute/mocks > mocking graphql api result > people/default")
const type = query.match(/(?<type>followers|following)[(]/)?.groups?.type ?? "(unknown type)" const type = query.match(/(?<type>followers|following)[(]/)?.groups?.type ?? "(unknown type)"
return /after: "MOCKED_CURSOR"/m.test(query) return /after: "MOCKED_CURSOR"/m.test(query)
? ({ ? ({
user:{ user: {
[type]:{ [type]: {
edges:[], edges: [],
}, },
}, },
}) })
: ({ : ({
user:{ user: {
[type]:{ [type]: {
edges:new Array(Math.ceil(20 + 80 * Math.random())).fill(null).map((login = faker.internet.userName()) => ({ edges: new Array(Math.ceil(20 + 80 * Math.random())).fill(null).map((login = faker.internet.userName()) => ({
cursor:"MOCKED_CURSOR", cursor: "MOCKED_CURSOR",
node:{ node: {
login, login,
avatarUrl:null, avatarUrl: null,
}, },
})), })),
}, },

View File

@@ -1,26 +1,26 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > people/repository") console.debug("metrics/compute/mocks > mocking graphql api result > people/repository")
const type = query.match(/(?<type>stargazers|watchers)[(]/)?.groups?.type ?? "(unknown type)" const type = query.match(/(?<type>stargazers|watchers)[(]/)?.groups?.type ?? "(unknown type)"
return /after: "MOCKED_CURSOR"/m.test(query) return /after: "MOCKED_CURSOR"/m.test(query)
? ({ ? ({
user:{ user: {
repository:{ repository: {
[type]:{ [type]: {
edges:[], edges: [],
}, },
}, },
}, },
}) })
: ({ : ({
user:{ user: {
repository:{ repository: {
[type]:{ [type]: {
edges:new Array(Math.ceil(20 + 80 * Math.random())).fill(null).map((login = faker.internet.userName()) => ({ edges: new Array(Math.ceil(20 + 80 * Math.random())).fill(null).map((login = faker.internet.userName()) => ({
cursor:"MOCKED_CURSOR", cursor: "MOCKED_CURSOR",
node:{ node: {
login, login,
avatarUrl:null, avatarUrl: null,
}, },
})), })),
}, },

View File

@@ -1,30 +1,30 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > people/sponsors") console.debug("metrics/compute/mocks > mocking graphql api result > people/sponsors")
const type = query.match(/(?<type>sponsorshipsAsSponsor|sponsorshipsAsMaintainer)[(]/)?.groups?.type ?? "(unknown type)" const type = query.match(/(?<type>sponsorshipsAsSponsor|sponsorshipsAsMaintainer)[(]/)?.groups?.type ?? "(unknown type)"
return /after: "MOCKED_CURSOR"/m.test(query) return /after: "MOCKED_CURSOR"/m.test(query)
? ({ ? ({
user:{ user: {
login, login,
[type]:{ [type]: {
edges:[], edges: [],
}, },
}, },
}) })
: ({ : ({
user:{ user: {
login, login,
[type]:{ [type]: {
edges:new Array(Math.ceil(20 + 80 * Math.random())).fill(null).map((login = faker.internet.userName()) => ({ edges: new Array(Math.ceil(20 + 80 * Math.random())).fill(null).map((login = faker.internet.userName()) => ({
cursor:"MOCKED_CURSOR", cursor: "MOCKED_CURSOR",
node:{ node: {
sponsorEntity:{ sponsorEntity: {
login:faker.internet.userName(), login: faker.internet.userName(),
avatarUrl:null, avatarUrl: null,
}, },
sponsorable:{ sponsorable: {
login:faker.internet.userName(), login: faker.internet.userName(),
avatarUrl:null, avatarUrl: null,
}, },
}, },
})), })),

View File

@@ -1,18 +1,18 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > projects/repository") console.debug("metrics/compute/mocks > mocking graphql api result > projects/repository")
return ({ return ({
user:{ user: {
repository:{ repository: {
project:{ project: {
name:"Repository project example", name: "Repository project example",
updatedAt:`${faker.date.recent()}`, updatedAt: `${faker.date.recent()}`,
body:faker.lorem.paragraph(), body: faker.lorem.paragraph(),
progress:{ progress: {
doneCount:faker.datatype.number(10), doneCount: faker.datatype.number(10),
inProgressCount:faker.datatype.number(10), inProgressCount: faker.datatype.number(10),
todoCount:faker.datatype.number(10), todoCount: faker.datatype.number(10),
enabled:true, enabled: true,
}, },
}, },
}, },

View File

@@ -1,20 +1,20 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > projects/user") console.debug("metrics/compute/mocks > mocking graphql api result > projects/user")
return ({ return ({
user:{ user: {
projects:{ projects: {
totalCount:1, totalCount: 1,
nodes:[ nodes: [
{ {
name:"User-owned project", name: "User-owned project",
updatedAt:`${faker.date.recent()}`, updatedAt: `${faker.date.recent()}`,
body:faker.lorem.paragraph(), body: faker.lorem.paragraph(),
progress:{ progress: {
doneCount:faker.datatype.number(10), doneCount: faker.datatype.number(10),
inProgressCount:faker.datatype.number(10), inProgressCount: faker.datatype.number(10),
todoCount:faker.datatype.number(10), todoCount: faker.datatype.number(10),
enabled:true, enabled: true,
}, },
}, },
], ],

View File

@@ -1,27 +1,27 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > reactions/default") console.debug("metrics/compute/mocks > mocking graphql api result > reactions/default")
const type = query.match(/(?<type>issues|issueComments)[(]/)?.groups?.type ?? "(unknown type)" const type = query.match(/(?<type>issues|issueComments)[(]/)?.groups?.type ?? "(unknown type)"
return /after: "MOCKED_CURSOR"/m.test(query) return /after: "MOCKED_CURSOR"/m.test(query)
? ({ ? ({
user:{ user: {
[type]:{ [type]: {
edges:[], edges: [],
nodes:[], nodes: [],
}, },
}, },
}) })
: ({ : ({
user:{ user: {
[type]:{ [type]: {
edges:new Array(100).fill(null).map(_ => ({ edges: new Array(100).fill(null).map(_ => ({
cursor:"MOCKED_CURSOR", cursor: "MOCKED_CURSOR",
node:{ node: {
createdAt:faker.date.recent(), createdAt: faker.date.recent(),
reactions:{ reactions: {
nodes:new Array(50).fill(null).map(_ => ({ nodes: new Array(50).fill(null).map(_ => ({
user:{login:faker.internet.userName()}, user: { login: faker.internet.userName() },
content:faker.random.arrayElement(["HEART", "THUMBS_UP", "THUMBS_DOWN", "LAUGH", "CONFUSED", "EYES", "ROCKET", "HOORAY"]), content: faker.random.arrayElement(["HEART", "THUMBS_UP", "THUMBS_DOWN", "LAUGH", "CONFUSED", "EYES", "ROCKET", "HOORAY"]),
})), })),
}, },
}, },

View File

@@ -1,28 +1,28 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > stars/default") console.debug("metrics/compute/mocks > mocking graphql api result > stars/default")
return ({ return ({
repository:{ repository: {
createdAt: faker.date.past(), createdAt: faker.date.past(),
description:"📊 An image generator with 20+ metrics about your GitHub account such as activity, community, repositories, coding habits, website performances, music played, starred topics, etc. that you can put on your profile or elsewhere !", description: "📊 An image generator with 20+ metrics about your GitHub account such as activity, community, repositories, coding habits, website performances, music played, starred topics, etc. that you can put on your profile or elsewhere !",
forkCount:faker.datatype.number(100), forkCount: faker.datatype.number(100),
isFork:false, isFork: false,
issues:{ issues: {
totalCount:faker.datatype.number(100), totalCount: faker.datatype.number(100),
}, },
nameWithOwner:"lowlighter/metrics", nameWithOwner: "lowlighter/metrics",
openGraphImageUrl:"https://repository-images.githubusercontent.com/293860197/7fd72080-496d-11eb-8fe0-238b38a0746a", openGraphImageUrl: "https://repository-images.githubusercontent.com/293860197/7fd72080-496d-11eb-8fe0-238b38a0746a",
pullRequests:{ pullRequests: {
totalCount:faker.datatype.number(100), totalCount: faker.datatype.number(100),
}, },
stargazerCount:faker.datatype.number(10000), stargazerCount: faker.datatype.number(10000),
licenseInfo:{ licenseInfo: {
nickname:null, nickname: null,
name:"MIT License", name: "MIT License",
}, },
primaryLanguage:{ primaryLanguage: {
color:"#f1e05a", color: "#f1e05a",
name:"JavaScript", name: "JavaScript",
}, },
}, },
}) })

View File

@@ -1,28 +1,28 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > sponsors/default") console.debug("metrics/compute/mocks > mocking graphql api result > sponsors/default")
return ({ return ({
user:{ user: {
sponsorsListing:{ sponsorsListing: {
fullDescription:faker.lorem.sentences(), fullDescription: faker.lorem.sentences(),
activeGoal:{ activeGoal: {
percentComplete:faker.datatype.number(100), percentComplete: faker.datatype.number(100),
title:faker.lorem.sentence(), title: faker.lorem.sentence(),
description:faker.lorem.sentence(), description: faker.lorem.sentence(),
} },
}, },
sponsorshipsAsMaintainer:{ sponsorshipsAsMaintainer: {
totalCount:faker.datatype.number(100), totalCount: faker.datatype.number(100),
nodes:new Array(10).fill(null).map(_ => ({ nodes: new Array(10).fill(null).map(_ => ({
sponsorEntity:{ sponsorEntity: {
login:faker.internet.userName(), login: faker.internet.userName(),
avatarUrl:null, avatarUrl: null,
}, },
tier:{ tier: {
monthlyPriceInDollars:faker.datatype.number(10), monthlyPriceInDollars: faker.datatype.number(10),
} },
})) })),
} },
}, },
}) })
} }

View File

@@ -1,20 +1,20 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > stargazers/default") console.debug("metrics/compute/mocks > mocking graphql api result > stargazers/default")
return /after: "MOCKED_CURSOR"/m.test(query) return /after: "MOCKED_CURSOR"/m.test(query)
? ({ ? ({
repository:{ repository: {
stargazers:{ stargazers: {
edges:[], edges: [],
}, },
}, },
}) })
: ({ : ({
repository:{ repository: {
stargazers:{ stargazers: {
edges:new Array(faker.datatype.number({min:50, max:100})).fill(null).map(() => ({ edges: new Array(faker.datatype.number({ min: 50, max: 100 })).fill(null).map(() => ({
starredAt:`${faker.date.recent(30)}`, starredAt: `${faker.date.recent(30)}`,
cursor:"MOCKED_CURSOR", cursor: "MOCKED_CURSOR",
})), })),
}, },
}, },

View File

@@ -1,32 +1,32 @@
/**Mocked data */ /**Mocked data */
export default function({faker, query, login = faker.internet.userName()}) { export default function({ faker, query, login = faker.internet.userName() }) {
console.debug("metrics/compute/mocks > mocking graphql api result > stars/default") console.debug("metrics/compute/mocks > mocking graphql api result > stars/default")
return ({ return ({
user:{ user: {
starredRepositories:{ starredRepositories: {
edges:[ edges: [
{ {
starredAt:`${faker.date.recent(14)}`, starredAt: `${faker.date.recent(14)}`,
node:{ node: {
description:"📊 An image generator with 20+ metrics about your GitHub account such as activity, community, repositories, coding habits, website performances, music played, starred topics, etc. that you can put on your profile or elsewhere !", description: "📊 An image generator with 20+ metrics about your GitHub account such as activity, community, repositories, coding habits, website performances, music played, starred topics, etc. that you can put on your profile or elsewhere !",
forkCount:faker.datatype.number(100), forkCount: faker.datatype.number(100),
isFork:false, isFork: false,
issues:{ issues: {
totalCount:faker.datatype.number(100), totalCount: faker.datatype.number(100),
}, },
nameWithOwner:"lowlighter/metrics", nameWithOwner: "lowlighter/metrics",
openGraphImageUrl:"https://repository-images.githubusercontent.com/293860197/7fd72080-496d-11eb-8fe0-238b38a0746a", openGraphImageUrl: "https://repository-images.githubusercontent.com/293860197/7fd72080-496d-11eb-8fe0-238b38a0746a",
pullRequests:{ pullRequests: {
totalCount:faker.datatype.number(100), totalCount: faker.datatype.number(100),
}, },
stargazerCount:faker.datatype.number(10000), stargazerCount: faker.datatype.number(10000),
licenseInfo:{ licenseInfo: {
nickname:null, nickname: null,
name:"MIT License", name: "MIT License",
}, },
primaryLanguage:{ primaryLanguage: {
color:"#f1e05a", color: "#f1e05a",
name:"JavaScript", name: "JavaScript",
}, },
}, },
}, },

View File

@@ -1,340 +1,340 @@
/**Mocked data */ /**Mocked data */
export default function({faker}, target, that, [{username:login, page, per_page}]) { export default function({ faker }, target, that, [{ username: login, page, per_page }]) {
console.debug("metrics/compute/mocks > mocking rest api result > rest.activity.listEventsForAuthenticatedUser") console.debug("metrics/compute/mocks > mocking rest api result > rest.activity.listEventsForAuthenticatedUser")
return ({ return ({
status:200, status: 200,
url:`https://api.github.com/users/${login}/events?per_page=${per_page}&page=${page}`, url: `https://api.github.com/users/${login}/events?per_page=${per_page}&page=${page}`,
headers:{ headers: {
server:"GitHub.com", server: "GitHub.com",
status:"200 OK", status: "200 OK",
"x-oauth-scopes":"repo", "x-oauth-scopes": "repo",
}, },
data:page < 1 ? [] : [ data: page < 1 ? [] : [
{ {
id:"10000000000", id: "10000000000",
type:"CommitCommentEvent", type: "CommitCommentEvent",
actor:{ actor: {
login, login,
}, },
repo:{ repo: {
name:`${faker.random.word()}/${faker.random.word()}`, name: `${faker.random.word()}/${faker.random.word()}`,
}, },
payload:{ payload: {
comment:{ comment: {
user:{ user: {
login, login,
}, },
path:faker.system.fileName(), path: faker.system.fileName(),
commit_id:"MOCKED_SHA", commit_id: "MOCKED_SHA",
body:faker.lorem.sentence(), body: faker.lorem.sentence(),
}, },
}, },
created_at:faker.date.recent(7), created_at: faker.date.recent(7),
public:true, public: true,
}, },
{ {
id:"10000000001", id: "10000000001",
type:"PullRequestReviewCommentEvent", type: "PullRequestReviewCommentEvent",
actor:{ actor: {
login, login,
}, },
repo:{ repo: {
name:`${faker.random.word()}/${faker.random.word()}`, name: `${faker.random.word()}/${faker.random.word()}`,
}, },
payload:{ payload: {
action:"created", action: "created",
comment:{ comment: {
user:{ user: {
login, login,
}, },
body:faker.lorem.paragraph(), body: faker.lorem.paragraph(),
}, },
pull_request:{ pull_request: {
title:faker.lorem.sentence(), title: faker.lorem.sentence(),
number:1, number: 1,
user:{ user: {
login:faker.internet.userName(), login: faker.internet.userName(),
}, },
body:"", body: "",
}, },
}, },
created_at:faker.date.recent(7), created_at: faker.date.recent(7),
public:true, public: true,
}, },
{ {
id:"10000000002", id: "10000000002",
type:"IssuesEvent", type: "IssuesEvent",
actor:{ actor: {
login, login,
}, },
repo:{ repo: {
name:`${faker.random.word()}/${faker.random.word()}`, name: `${faker.random.word()}/${faker.random.word()}`,
}, },
payload:{ payload: {
action:faker.random.arrayElement(["opened", "closed", "reopened"]), action: faker.random.arrayElement(["opened", "closed", "reopened"]),
issue:{ issue: {
number:2, number: 2,
title:faker.lorem.sentence(), title: faker.lorem.sentence(),
user:{ user: {
login, login,
}, },
body:faker.lorem.paragraph(), body: faker.lorem.paragraph(),
performed_via_github_app:null, performed_via_github_app: null,
}, },
}, },
created_at:faker.date.recent(7), created_at: faker.date.recent(7),
public:true, public: true,
}, },
{ {
id:"10000000003", id: "10000000003",
type:"GollumEvent", type: "GollumEvent",
actor:{ actor: {
login, login,
}, },
repo:{ repo: {
name:`${faker.random.word()}/${faker.random.word()}`, name: `${faker.random.word()}/${faker.random.word()}`,
}, },
payload:{ payload: {
pages:[ pages: [
{ {
page_name:faker.lorem.sentence(), page_name: faker.lorem.sentence(),
title:faker.lorem.sentence(), title: faker.lorem.sentence(),
summary:null, summary: null,
action:"created", action: "created",
sha:"MOCKED_SHA", sha: "MOCKED_SHA",
}, },
], ],
}, },
created_at:faker.date.recent(7), created_at: faker.date.recent(7),
public:true, public: true,
}, },
{ {
id:"10000000004", id: "10000000004",
type:"IssueCommentEvent", type: "IssueCommentEvent",
actor:{ actor: {
login, login,
}, },
repo:{ repo: {
name:`${faker.random.word()}/${faker.random.word()}`, name: `${faker.random.word()}/${faker.random.word()}`,
}, },
payload:{ payload: {
action:"created", action: "created",
issue:{ issue: {
number:3, number: 3,
title:faker.lorem.sentence(), title: faker.lorem.sentence(),
user:{ user: {
login, login,
}, },
labels:[ labels: [
{ {
name:"lorem ipsum", name: "lorem ipsum",
color:"d876e3", color: "d876e3",
}, },
], ],
state:"open", state: "open",
}, },
comment:{ comment: {
body:faker.lorem.paragraph(), body: faker.lorem.paragraph(),
performed_via_github_app:null, performed_via_github_app: null,
}, },
}, },
created_at:faker.date.recent(7), created_at: faker.date.recent(7),
public:true, public: true,
}, },
{ {
id:"10000000005", id: "10000000005",
type:"ForkEvent", type: "ForkEvent",
actor:{ actor: {
login, login,
}, },
repo:{ repo: {
name:`${faker.random.word()}/${faker.random.word()}`, name: `${faker.random.word()}/${faker.random.word()}`,
}, },
payload:{ payload: {
forkee:{ forkee: {
name:faker.random.word(), name: faker.random.word(),
full_name:`${faker.random.word()}/${faker.random.word()}`, full_name: `${faker.random.word()}/${faker.random.word()}`,
}, },
}, },
created_at:faker.date.recent(7), created_at: faker.date.recent(7),
public:true, public: true,
}, },
{ {
id:"10000000006", id: "10000000006",
type:"PullRequestReviewEvent", type: "PullRequestReviewEvent",
actor:{ actor: {
login, login,
}, },
repo:{ repo: {
name:`${faker.random.word()}/${faker.random.word()}`, name: `${faker.random.word()}/${faker.random.word()}`,
}, },
payload:{ payload: {
action:"created", action: "created",
review:{ review: {
user:{ user: {
login, login,
}, },
state:"approved", state: "approved",
}, },
pull_request:{ pull_request: {
state:"open", state: "open",
number:4, number: 4,
locked:false, locked: false,
title:faker.lorem.sentence(), title: faker.lorem.sentence(),
user:{ user: {
login:faker.internet.userName(), login: faker.internet.userName(),
}, },
}, },
}, },
created_at:faker.date.recent(7), created_at: faker.date.recent(7),
public:true, public: true,
}, },
{ {
id:"10000000007", id: "10000000007",
type:"ReleaseEvent", type: "ReleaseEvent",
actor:{ actor: {
login, login,
}, },
repo:{ repo: {
name:`${faker.random.word()}/${faker.random.word()}`, name: `${faker.random.word()}/${faker.random.word()}`,
}, },
payload:{ payload: {
action:"published", action: "published",
release:{ release: {
tag_name:`v${faker.datatype.number()}.${faker.datatype.number()}`, tag_name: `v${faker.datatype.number()}.${faker.datatype.number()}`,
name:faker.random.words(4), name: faker.random.words(4),
draft:faker.datatype.boolean(), draft: faker.datatype.boolean(),
prerelease:faker.datatype.boolean(), prerelease: faker.datatype.boolean(),
}, },
}, },
created_at:faker.date.recent(7), created_at: faker.date.recent(7),
public:true, public: true,
}, },
{ {
id:"10000000008", id: "10000000008",
type:"CreateEvent", type: "CreateEvent",
actor:{ actor: {
login, login,
}, },
repo:{ repo: {
name:`${faker.random.word()}/${faker.random.word()}`, name: `${faker.random.word()}/${faker.random.word()}`,
}, },
payload:{ payload: {
ref:faker.lorem.slug(), ref: faker.lorem.slug(),
ref_type:faker.random.arrayElement(["tag", "branch"]), ref_type: faker.random.arrayElement(["tag", "branch"]),
master_branch:"master", master_branch: "master",
}, },
created_at:faker.date.recent(7), created_at: faker.date.recent(7),
public:true, public: true,
}, },
{ {
id:"100000000009", id: "100000000009",
type:"WatchEvent", type: "WatchEvent",
actor:{ actor: {
login, login,
}, },
repo:{ repo: {
name:"lowlighter/metrics", name: "lowlighter/metrics",
}, },
payload:{action:"started"}, payload: { action: "started" },
created_at:faker.date.recent(7), created_at: faker.date.recent(7),
public:true, public: true,
}, },
{ {
id:"10000000010", id: "10000000010",
type:"DeleteEvent", type: "DeleteEvent",
actor:{ actor: {
login, login,
}, },
repo:{ repo: {
name:`${faker.random.word()}/${faker.random.word()}`, name: `${faker.random.word()}/${faker.random.word()}`,
}, },
payload:{ payload: {
ref:faker.lorem.slug(), ref: faker.lorem.slug(),
ref_type:faker.random.arrayElement(["tag", "branch"]), ref_type: faker.random.arrayElement(["tag", "branch"]),
}, },
created_at:faker.date.recent(7), created_at: faker.date.recent(7),
public:true, public: true,
}, },
{ {
id:"10000000011", id: "10000000011",
type:"PushEvent", type: "PushEvent",
actor:{ actor: {
login, login,
}, },
repo:{ repo: {
name:`${faker.random.word()}/${faker.random.word()}`, name: `${faker.random.word()}/${faker.random.word()}`,
}, },
payload:{ payload: {
size:1, size: 1,
ref:"refs/heads/master", ref: "refs/heads/master",
commits:[ commits: [
{ {
sha:"MOCKED_SHA", sha: "MOCKED_SHA",
message:faker.lorem.sentence(), message: faker.lorem.sentence(),
url:"https://api.github.com/repos/lowlighter/metrics/commits/MOCKED_SHA", url: "https://api.github.com/repos/lowlighter/metrics/commits/MOCKED_SHA",
}, },
], ],
}, },
created_at:faker.date.recent(7), created_at: faker.date.recent(7),
public:true, public: true,
}, },
{ {
id:"10000000012", id: "10000000012",
type:"PullRequestEvent", type: "PullRequestEvent",
actor:{ actor: {
login, login,
}, },
repo:{ repo: {
name:`${faker.random.word()}/${faker.random.word()}`, name: `${faker.random.word()}/${faker.random.word()}`,
}, },
payload:{ payload: {
action:faker.random.arrayElement(["opened", "closed"]), action: faker.random.arrayElement(["opened", "closed"]),
number:5, number: 5,
pull_request:{ pull_request: {
user:{ user: {
login, login,
}, },
state:"open", state: "open",
title:faker.lorem.sentence(), title: faker.lorem.sentence(),
additions:faker.datatype.number(1000), additions: faker.datatype.number(1000),
deletions:faker.datatype.number(1000), deletions: faker.datatype.number(1000),
changed_files:faker.datatype.number(10), changed_files: faker.datatype.number(10),
}, },
}, },
created_at:faker.date.recent(7), created_at: faker.date.recent(7),
public:true, public: true,
}, },
{ {
id:"10000000013", id: "10000000013",
type:"MemberEvent", type: "MemberEvent",
actor:{ actor: {
login, login,
}, },
repo:{ repo: {
name:`${faker.random.word()}/${faker.random.word()}`, name: `${faker.random.word()}/${faker.random.word()}`,
}, },
payload:{ payload: {
member:{ member: {
login:faker.internet.userName(), login: faker.internet.userName(),
}, },
action:"added", action: "added",
}, },
created_at:faker.date.recent(7), created_at: faker.date.recent(7),
public:true, public: true,
}, },
{ {
id:"10000000014", id: "10000000014",
type:"PublicEvent", type: "PublicEvent",
actor:{ actor: {
login, login,
}, },
repo:{ repo: {
name:`${faker.random.word()}/${faker.random.word()}`, name: `${faker.random.word()}/${faker.random.word()}`,
}, },
payload:{}, payload: {},
created_at:faker.date.recent(7), created_at: faker.date.recent(7),
public:true, public: true,
}, },
], ],
}) })
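For readers skimming this formatting pass: the module above is consumed through the Proxy wiring in `mocks/index.mjs` (reformatted further below), so the corresponding Octokit call resolves with this synthetic payload instead of reaching the GitHub API. A minimal, hedged sketch — `rest` is assumed to be an Octokit instance already passed through the mocker, and the username/pagination values are arbitrary:

```js
// Illustrative call against the mocked endpoint; note the mock returns [] for page < 1.
const { status, data } = await rest.activity.listEventsForAuthenticatedUser({ username: "octocat", per_page: 100, page: 1 })
console.assert(status === 200)
console.log(data[0].type) // "CommitCommentEvent", with faker-generated repository names and dates
```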


@@ -2,7 +2,7 @@
import listEventsForAuthenticatedUser from "./listEventsForAuthenticatedUser.mjs" import listEventsForAuthenticatedUser from "./listEventsForAuthenticatedUser.mjs"
/**Mocked data */ /**Mocked data */
export default function({faker}, target, that, [{username:login, page, per_page}]) { export default function({ faker }, target, that, [{ username: login, page, per_page }]) {
console.debug("metrics/compute/mocks > mocking rest api result > rest.activity.listRepoEvents") console.debug("metrics/compute/mocks > mocking rest api result > rest.activity.listRepoEvents")
return listEventsForAuthenticatedUser(...arguments) return listEventsForAuthenticatedUser(...arguments)
} }

File diff suppressed because it is too large


@@ -1,23 +1,23 @@
/**Mocked data */ /**Mocked data */
export default function({faker}, target, that, args) { export default function({ faker }, target, that, args) {
return ({ return ({
status:200, status: 200,
url:"https://api.github.com/rate_limit", url: "https://api.github.com/rate_limit",
headers:{ headers: {
server:"GitHub.com", server: "GitHub.com",
status:"200 OK", status: "200 OK",
"x-oauth-scopes":"repo", "x-oauth-scopes": "repo",
}, },
data:{ data: {
resources:{ resources: {
core:{limit:5000, used:0, remaining:5000, reset:0}, core: { limit: 5000, used: 0, remaining: 5000, reset: 0 },
search:{limit:30, used:0, remaining:30, reset:0}, search: { limit: 30, used: 0, remaining: 30, reset: 0 },
graphql:{limit:5000, used:0, remaining:5000, reset:0}, graphql: { limit: 5000, used: 0, remaining: 5000, reset: 0 },
integration_manifest:{limit:5000, used:0, remaining:5000, reset:0}, integration_manifest: { limit: 5000, used: 0, remaining: 5000, reset: 0 },
source_import:{limit:100, used:0, remaining:100, reset:0}, source_import: { limit: 100, used: 0, remaining: 100, reset: 0 },
code_scanning_upload:{limit:500, used:0, remaining:500, reset:0}, code_scanning_upload: { limit: 500, used: 0, remaining: 500, reset: 0 },
}, },
rate:{limit:5000, used:0, remaining:"MOCKED", reset:0}, rate: { limit: 5000, used: 0, remaining: "MOCKED", reset: 0 },
}, },
}) })
} }
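This rate-limit mock always reports an untouched quota and sets `rate.remaining` to the literal string "MOCKED", which makes mocked responses easy to spot in logs. A hedged usage sketch — `rest.rateLimit.get` is the standard Octokit endpoint this file is assumed to shadow, and `rest` is again a pre-mocked instance:

```js
const { data } = await rest.rateLimit.get()
console.log(data.resources.core.remaining) // 5000 — quota is never consumed under mocking
console.log(data.rate.remaining)           // "MOCKED" — sentinel value identifying mocked responses
```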


@@ -1,25 +1,25 @@
/**Mocked data */ /**Mocked data */
export default function({faker}, target, that, [{owner, repo}]) { export default function({ faker }, target, that, [{ owner, repo }]) {
console.debug("metrics/compute/mocks > mocking rest api result > rest.repos.getContributorsStats") console.debug("metrics/compute/mocks > mocking rest api result > rest.repos.getContributorsStats")
return ({ return ({
status:200, status: 200,
url:`https://api.github.com/repos/${owner}/${repo}/stats/contributors`, url: `https://api.github.com/repos/${owner}/${repo}/stats/contributors`,
headers:{ headers: {
server:"GitHub.com", server: "GitHub.com",
status:"200 OK", status: "200 OK",
"x-oauth-scopes":"repo", "x-oauth-scopes": "repo",
}, },
data:[ data: [
{ {
total:faker.datatype.number(10000), total: faker.datatype.number(10000),
weeks:[ weeks: [
{w:1, a:faker.datatype.number(10000), d:faker.datatype.number(10000), c:faker.datatype.number(10000)}, { w: 1, a: faker.datatype.number(10000), d: faker.datatype.number(10000), c: faker.datatype.number(10000) },
{w:2, a:faker.datatype.number(10000), d:faker.datatype.number(10000), c:faker.datatype.number(10000)}, { w: 2, a: faker.datatype.number(10000), d: faker.datatype.number(10000), c: faker.datatype.number(10000) },
{w:3, a:faker.datatype.number(10000), d:faker.datatype.number(10000), c:faker.datatype.number(10000)}, { w: 3, a: faker.datatype.number(10000), d: faker.datatype.number(10000), c: faker.datatype.number(10000) },
{w:4, a:faker.datatype.number(10000), d:faker.datatype.number(10000), c:faker.datatype.number(10000)}, { w: 4, a: faker.datatype.number(10000), d: faker.datatype.number(10000), c: faker.datatype.number(10000) },
], ],
author:{ author: {
login:owner, login: owner,
}, },
}, },
], ],


@@ -1,22 +1,22 @@
/**Mocked data */ /**Mocked data */
export default function({faker}, target, that, [{owner, repo}]) { export default function({ faker }, target, that, [{ owner, repo }]) {
console.debug("metrics/compute/mocks > mocking rest api result > rest.repos.getViews") console.debug("metrics/compute/mocks > mocking rest api result > rest.repos.getViews")
const count = faker.datatype.number(10000) * 2 const count = faker.datatype.number(10000) * 2
const uniques = faker.datatype.number(count) * 2 const uniques = faker.datatype.number(count) * 2
return ({ return ({
status:200, status: 200,
url:`https://api.github.com/repos/${owner}/${repo}/traffic/views`, url: `https://api.github.com/repos/${owner}/${repo}/traffic/views`,
headers:{ headers: {
server:"GitHub.com", server: "GitHub.com",
status:"200 OK", status: "200 OK",
"x-oauth-scopes":"repo", "x-oauth-scopes": "repo",
}, },
data:{ data: {
count, count,
uniques, uniques,
views:[ views: [
{timestamp:`${faker.date.recent()}`, count:count / 2, uniques:uniques / 2}, { timestamp: `${faker.date.recent()}`, count: count / 2, uniques: uniques / 2 },
{timestamp:`${faker.date.recent()}`, count:count / 2, uniques:uniques / 2}, { timestamp: `${faker.date.recent()}`, count: count / 2, uniques: uniques / 2 },
], ],
}, },
}) })


@@ -1,33 +1,33 @@
/**Mocked data */ /**Mocked data */
export default function({faker}, target, that, [{page, per_page, owner, repo}]) { export default function({ faker }, target, that, [{ page, per_page, owner, repo }]) {
console.debug("metrics/compute/mocks > mocking rest api result > rest.repos.listCommits") console.debug("metrics/compute/mocks > mocking rest api result > rest.repos.listCommits")
return ({ return ({
status:200, status: 200,
url:`https://api.github.com/repos/${owner}/${repo}/commits?per_page=${per_page}&page=${page}`, url: `https://api.github.com/repos/${owner}/${repo}/commits?per_page=${per_page}&page=${page}`,
headers:{ headers: {
server:"GitHub.com", server: "GitHub.com",
status:"200 OK", status: "200 OK",
"x-oauth-scopes":"repo", "x-oauth-scopes": "repo",
}, },
data:page < 2 data: page < 2
? new Array(per_page).fill(null).map(() => ({ ? new Array(per_page).fill(null).map(() => ({
sha:"MOCKED_SHA", sha: "MOCKED_SHA",
get author() { get author() {
return this.commit.author return this.commit.author
}, },
commit:{ commit: {
message:faker.lorem.sentence(), message: faker.lorem.sentence(),
author:{ author: {
name:owner, name: owner,
login:faker.internet.userName(), login: faker.internet.userName(),
avatar_url:null, avatar_url: null,
date:`${faker.date.recent(14)}`, date: `${faker.date.recent(14)}`,
}, },
committer:{ committer: {
name:owner, name: owner,
login:faker.internet.userName(), login: faker.internet.userName(),
avatar_url:null, avatar_url: null,
date:`${faker.date.recent(14)}`, date: `${faker.date.recent(14)}`,
}, },
}, },
})) }))


@@ -1,18 +1,18 @@
/**Mocked data */ /**Mocked data */
export default function({faker}, target, that, [{owner, repo}]) { export default function({ faker }, target, that, [{ owner, repo }]) {
console.debug("metrics/compute/mocks > mocking rest api result > rest.repos.listContributors") console.debug("metrics/compute/mocks > mocking rest api result > rest.repos.listContributors")
return ({ return ({
status:200, status: 200,
url:`https://api.github.com/repos/${owner}/${repo}/contributors`, url: `https://api.github.com/repos/${owner}/${repo}/contributors`,
headers:{ headers: {
server:"GitHub.com", server: "GitHub.com",
status:"200 OK", status: "200 OK",
"x-oauth-scopes":"repo", "x-oauth-scopes": "repo",
}, },
data:new Array(40 + faker.datatype.number(60)).fill(null).map(() => ({ data: new Array(40 + faker.datatype.number(60)).fill(null).map(() => ({
login:faker.internet.userName(), login: faker.internet.userName(),
avatar_url:null, avatar_url: null,
contributions:faker.datatype.number(1000), contributions: faker.datatype.number(1000),
})), })),
}) })
} }


@@ -1,55 +1,55 @@
/**Mocked data */ /**Mocked data */
export default function({faker}, target, that, args) { export default function({ faker }, target, that, args) {
//Arguments //Arguments
const [url] = args const [url] = args
//Head request //Head request
if (/^HEAD .$/.test(url)) { if (/^HEAD .$/.test(url)) {
console.debug("metrics/compute/mocks > mocking rest api result > rest.request HEAD") console.debug("metrics/compute/mocks > mocking rest api result > rest.request HEAD")
return ({ return ({
status:200, status: 200,
url:"https://api.github.com/", url: "https://api.github.com/",
headers:{ headers: {
server:"GitHub.com", server: "GitHub.com",
status:"200 OK", status: "200 OK",
"x-oauth-scopes":"repo", "x-oauth-scopes": "repo",
}, },
data:undefined, data: undefined,
}) })
} }
//Commit content //Commit content
if (/^https:..api.github.com.repos.lowlighter.metrics.commits.MOCKED_SHA/.test(url)) { if (/^https:..api.github.com.repos.lowlighter.metrics.commits.MOCKED_SHA/.test(url)) {
console.debug(`metrics/compute/mocks > mocking rest api result > rest.request ${url}`) console.debug(`metrics/compute/mocks > mocking rest api result > rest.request ${url}`)
return ({ return ({
status:200, status: 200,
url:"https://api.github.com/repos/lowlighter/metrics/commits/MOCKED_SHA", url: "https://api.github.com/repos/lowlighter/metrics/commits/MOCKED_SHA",
data:{ data: {
sha:"MOCKED_SHA", sha: "MOCKED_SHA",
commit:{ commit: {
author:{ author: {
name:faker.internet.userName(), name: faker.internet.userName(),
email:faker.internet.email(), email: faker.internet.email(),
date:`${faker.date.recent(7)}`, date: `${faker.date.recent(7)}`,
}, },
committer:{ committer: {
name:faker.internet.userName(), name: faker.internet.userName(),
email:faker.internet.email(), email: faker.internet.email(),
date:`${faker.date.recent(7)}`, date: `${faker.date.recent(7)}`,
}, },
url:"https://api.github.com/repos/lowlighter/metrics/commits/MOCKED_SHA", url: "https://api.github.com/repos/lowlighter/metrics/commits/MOCKED_SHA",
}, },
author:{ author: {
login:faker.internet.userName(), login: faker.internet.userName(),
id:faker.datatype.number(100000000), id: faker.datatype.number(100000000),
}, },
committer:{ committer: {
login:faker.internet.userName(), login: faker.internet.userName(),
id:faker.datatype.number(100000000), id: faker.datatype.number(100000000),
}, },
files:[ files: [
{ {
sha:"MOCKED_SHA", sha: "MOCKED_SHA",
filename:faker.system.fileName(), filename: faker.system.fileName(),
patch:'@@ -0,0 +1,5 @@\n+//Imports\n+ import app from "./src/app.mjs"\n+\n+//Start app\n+ await app()\n\\ No newline at end of file', patch: '@@ -0,0 +1,5 @@\n+//Imports\n+ import app from "./src/app.mjs"\n+\n+//Start app\n+ await app()\n\\ No newline at end of file',
}, },
], ],
}, },
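Unlike the endpoint-specific mocks, this handler intercepts raw `rest.request` calls and dispatches on the request string itself: a bare `HEAD` probe (matched by `/^HEAD .$/`) gets an empty 200, while commit URLs pointing at `lowlighter/metrics/commits/MOCKED_SHA` get a synthetic commit carrying a single patched file. An illustrative dispatch, with arguments chosen for the example rather than copied from the codebase:

```js
const probe = await rest.request("HEAD /") // matches /^HEAD .$/ → { status: 200, data: undefined }
const commit = await rest.request("https://api.github.com/repos/lowlighter/metrics/commits/MOCKED_SHA")
console.log(commit.data.files[0].patch)    // fixed patch string embedded in the mock above
```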


@@ -1,18 +1,18 @@
/**Mocked data */ /**Mocked data */
export default function({faker}, target, that, [{username}]) { export default function({ faker }, target, that, [{ username }]) {
console.debug("metrics/compute/mocks > mocking rest api result > rest.repos.getByUsername") console.debug("metrics/compute/mocks > mocking rest api result > rest.repos.getByUsername")
return ({ return ({
status:200, status: 200,
url:`https://api.github.com/users/${username}/`, url: `https://api.github.com/users/${username}/`,
headers:{ headers: {
server:"GitHub.com", server: "GitHub.com",
status:"200 OK", status: "200 OK",
"x-oauth-scopes":"repo", "x-oauth-scopes": "repo",
}, },
data:{ data: {
login:faker.internet.userName(), login: faker.internet.userName(),
avatar_url:null, avatar_url: null,
contributions:faker.datatype.number(1000), contributions: faker.datatype.number(1000),
}, },
}) })
} }


@@ -10,29 +10,29 @@ import urls from "url"
let mocked = false let mocked = false
//Mocking //Mocking
export default async function({graphql, rest}) { export default async function({ graphql, rest }) {
//Check if already mocked //Check if already mocked
if (mocked) if (mocked)
return {graphql, rest} return { graphql, rest }
mocked = true mocked = true
console.debug("metrics/compute/mocks > mocking") console.debug("metrics/compute/mocks > mocking")
//Load mocks //Load mocks
const __mocks = paths.join(paths.dirname(urls.fileURLToPath(import.meta.url))) const __mocks = paths.join(paths.dirname(urls.fileURLToPath(import.meta.url)))
const mock = async ({directory, mocks}) => { const mock = async ({ directory, mocks }) => {
for (const entry of await fs.readdir(directory)) { for (const entry of await fs.readdir(directory)) {
if ((await fs.lstat(paths.join(directory, entry))).isDirectory()) { if ((await fs.lstat(paths.join(directory, entry))).isDirectory()) {
if (!mocks[entry]) if (!mocks[entry])
mocks[entry] = {} mocks[entry] = {}
await mock({directory:paths.join(directory, entry), mocks:mocks[entry]}) await mock({ directory: paths.join(directory, entry), mocks: mocks[entry] })
} }
else else {
mocks[entry.replace(/[.]mjs$/, "")] = (await import(urls.pathToFileURL(paths.join(directory, entry)).href)).default mocks[entry.replace(/[.]mjs$/, "")] = (await import(urls.pathToFileURL(paths.join(directory, entry)).href)).default
}
} }
return mocks return mocks
} }
const mocks = await mock({directory:paths.join(__mocks, "api"), mocks:{}}) const mocks = await mock({ directory: paths.join(__mocks, "api"), mocks: {} })
//GraphQL API mocking //GraphQL API mocking
{ {
@@ -49,7 +49,7 @@ export default async function({graphql, rest}) {
//Search for mocked query //Search for mocked query
for (const mocked of Object.keys(mocks.github.graphql)) { for (const mocked of Object.keys(mocks.github.graphql)) {
if (new RegExp(`^query ${mocked.replace(/([.]\w)/g, (_, g) => g.toLocaleUpperCase().substring(1)).replace(/^(\w)/g, (_, g) => g.toLocaleUpperCase())} `).test(query)) if (new RegExp(`^query ${mocked.replace(/([.]\w)/g, (_, g) => g.toLocaleUpperCase().substring(1)).replace(/^(\w)/g, (_, g) => g.toLocaleUpperCase())} `).test(query))
return mocks.github.graphql[mocked]({faker, query, login}) return mocks.github.graphql[mocked]({ faker, query, login })
} }
//Unmocked call //Unmocked call
@@ -64,26 +64,26 @@ export default async function({graphql, rest}) {
console.debug("metrics/compute/mocks > mocking rest api") console.debug("metrics/compute/mocks > mocking rest api")
const unmocked = {} const unmocked = {}
//Mocked //Mocked
const mocker = ({path = "rest", mocks, mocked}) => { const mocker = ({ path = "rest", mocks, mocked }) => {
for (const [key, value] of Object.entries(mocks)) { for (const [key, value] of Object.entries(mocks)) {
console.debug(`metrics/compute/mocks > mocking rest api > mocking ${path}.${key}`) console.debug(`metrics/compute/mocks > mocking rest api > mocking ${path}.${key}`)
if (typeof value === "function") { if (typeof value === "function") {
unmocked[path] = value unmocked[path] = value
mocked[key] = new Proxy(unmocked[path], {apply:value.bind(null, {faker})}) mocked[key] = new Proxy(unmocked[path], { apply: value.bind(null, { faker }) })
}
else {
mocker({ path: `${path}.${key}`, mocks: mocks[key], mocked: mocked[key] })
} }
else
mocker({path:`${path}.${key}`, mocks:mocks[key], mocked:mocked[key]})
} }
} }
mocker({mocks:mocks.github.rest, mocked:rest}) mocker({ mocks: mocks.github.rest, mocked: rest })
} }
//Axios mocking //Axios mocking
{ {
//Unmocked //Unmocked
console.debug("metrics/compute/mocks > mocking axios") console.debug("metrics/compute/mocks > mocking axios")
const unmocked = {get:axios.get, post:axios.post} const unmocked = { get: axios.get, post: axios.post }
//Mocked post requests //Mocked post requests
axios.post = new Proxy(unmocked.post, { axios.post = new Proxy(unmocked.post, {
@@ -93,7 +93,7 @@ export default async function({graphql, rest}) {
//Search for mocked request //Search for mocked request
for (const service of Object.keys(mocks.axios.post)) { for (const service of Object.keys(mocks.axios.post)) {
const mocked = mocks.axios.post[service]({faker, url, body}) const mocked = mocks.axios.post[service]({ faker, url, body })
if (mocked) if (mocked)
return mocked return mocked
} }
@@ -111,7 +111,7 @@ export default async function({graphql, rest}) {
//Search for mocked request //Search for mocked request
for (const service of Object.keys(mocks.axios.get)) { for (const service of Object.keys(mocks.axios.get)) {
const mocked = mocks.axios.get[service]({faker, url, options}) const mocked = mocks.axios.get[service]({ faker, url, options })
if (mocked) if (mocked)
return mocked return mocked
} }
@@ -131,20 +131,20 @@ export default async function({graphql, rest}) {
rss.prototype.parseURL = function(url) { rss.prototype.parseURL = function(url) {
console.debug(`metrics/compute/mocks > mocking rss feed result > ${url}`) console.debug(`metrics/compute/mocks > mocking rss feed result > ${url}`)
return ({ return ({
items:new Array(30).fill(null).map(_ => ({ items: new Array(30).fill(null).map(_ => ({
title:faker.lorem.sentence(), title: faker.lorem.sentence(),
link:faker.internet.url(), link: faker.internet.url(),
content:faker.lorem.paragraphs(), content: faker.lorem.paragraphs(),
contentSnippet:faker.lorem.paragraph(), contentSnippet: faker.lorem.paragraph(),
isoDate:faker.date.recent(), isoDate: faker.date.recent(),
})), })),
title:faker.lorem.words(), title: faker.lorem.words(),
description:faker.lorem.paragraph(), description: faker.lorem.paragraph(),
link:url, link: url,
}) })
} }
} }
//Return mocked elements //Return mocked elements
return {graphql, rest} return { graphql, rest }
} }
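Taken together, the loader above walks the `api/` directory, mirrors its layout into a nested `mocks` object, and wraps each matching REST endpoint in a `Proxy` whose `apply` trap substitutes the mock handler pre-bound with `{ faker }`; axios and the RSS parser are patched globally in the same pass. A minimal sketch of how a test might opt in — import paths and client construction are placeholders, not verbatim from this repository:

```js
// Hypothetical wiring: build real clients, then swap their behaviour for faker-backed mocks.
import { Octokit } from "@octokit/rest"
import { graphql } from "@octokit/graphql"
import setup from "./mocks/index.mjs" // assumed location of the module reformatted above

const rest = new Octokit()
const gql = graphql.defaults({ headers: { authorization: "token MOCKED_TOKEN" } })

// Subsequent calls on the returned clients resolve with synthetic data instead of reaching api.github.com.
const { graphql: mockedGraphql, rest: mockedRest } = await setup({ graphql: gql, rest })
```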