Feat miscellaneous 1 (#28)

* Improve logs and better handling of plugin errors

* Add support for timezones

* Prepare next release
Simon Lecoq authored on 2020-12-27 22:30:53 +01:00, committed by GitHub
parent 619113295c
commit 016ab9aca1
24 changed files with 242 additions and 148 deletions

View File

@@ -8,6 +8,7 @@
import setup from "./setup.mjs"
import metrics from "./metrics.mjs"
import Templates from "./templates/index.mjs"
import util from "util"
/** App */
export default async function () {
@@ -107,7 +108,7 @@
//Compute rendering
try {
//Render
console.debug(`metrics/app/${login} > ${JSON.stringify(req.query)}`)
console.debug(`metrics/app/${login} > ${util.inspect(req.query, {depth:Infinity, maxStringLength:256})}`)
const rendered = await metrics({login, q:parse(req.query)}, {graphql, rest, plugins, conf})
//Cache
if ((!debug)&&(cached)&&(login !== "placeholder"))
@@ -140,7 +141,7 @@
`Debug mode | ${debug}`,
`Restricted to users | ${restricted.size ? [...restricted].join(", ") : "(unrestricted)"}`,
`Cached time | ${cached} seconds`,
`Rate limiter | ${ratelimiter ? JSON.stringify(ratelimiter) : "(enabled)"}`,
`Rate limiter | ${ratelimiter ? util.inspect(ratelimiter, {depth:Infinity, maxStringLength:256}) : "(enabled)"}`,
`Max simultaneous users | ${maxusers ? `${maxusers} users` : "(unrestricted)"}`,
`Plugins enabled | ${enabled.join(", ")}`
].join("\n")))
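
As an aside, a minimal standalone sketch (illustrative only, not part of this commit) of the swap from JSON.stringify to util.inspect used for logging throughout this commit, assuming Node's built-in util module:

//Sketch: util.inspect vs JSON.stringify for logging
import util from "util"
const query = {user:"octocat", nested:{a:{b:{c:1}}}, token:"x".repeat(1000)}
//JSON.stringify prints strings in full and has no depth or length limits
console.debug(JSON.stringify(query))
//util.inspect truncates long strings (maxStringLength), tolerates circular references, and depth:Infinity keeps full nesting
console.debug(util.inspect(query, {depth:Infinity, maxStringLength:256}))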

View File

@@ -11,6 +11,7 @@
import fs from "fs/promises"
import os from "os"
import paths from "path"
import util from "util"
//Setup
export default async function metrics({login, q, dflags = []}, {graphql, rest, plugins, conf, die = false}) {
@@ -19,7 +20,7 @@
//Init
console.debug(`metrics/compute/${login} > start`)
console.debug(JSON.stringify(q))
console.debug(util.inspect(q, {depth:Infinity, maxStringLength:256}))
const template = q.template || conf.settings.templates.default
const repositories = Math.max(0, Number(q.repositories)) || conf.settings.repositories || 100
const pending = []
@@ -27,7 +28,7 @@
if ((!(template in Templates))||(!(template in conf.templates))||((conf.settings.templates.enabled.length)&&(!conf.settings.templates.enabled.includes(template))))
throw new Error("unsupported template")
const {query, image, style, fonts} = conf.templates[template]
const data = {base:{}}
const data = {base:{}, config:{}}
//Base parts
{
@@ -53,13 +54,10 @@
//Compute metrics
console.debug(`metrics/compute/${login} > compute`)
const computer = Templates[template].default || Templates[template]
await computer({login, q, dflags}, {conf, data, rest, graphql, plugins}, {s, pending, imports:{plugins:Plugins, url, imgb64, axios, puppeteer, run, fs, os, paths, format, bytes, shuffle, htmlescape, urlexpand}})
await computer({login, q, dflags}, {conf, data, rest, graphql, plugins}, {s, pending, imports:{plugins:Plugins, url, imgb64, axios, puppeteer, run, fs, os, paths, util, format, bytes, shuffle, htmlescape, urlexpand}})
const promised = await Promise.all(pending)
//Check plugins errors
if (conf.settings.debug)
for (const {name, result = null} of promised)
console.debug(`plugin ${name} ${result ? result.error ? "failed" : "success" : "ignored"} : ${JSON.stringify(result).replace(/^(.{888}).+/, "$1...")}`)
if (die) {
const errors = promised.filter(({result = null}) => !!result?.error).length
if (errors)

View File

@@ -23,7 +23,6 @@
}
//Handle errors
catch (error) {
console.debug(error)
throw {error:{message:`An error occured`}}
throw {error:{message:"An error occured", instance:error}}
}
}

View File

@@ -5,7 +5,8 @@
//Check if plugin is enabled and requirements are met
if ((!enabled)||(!q.gists))
return null
//Retrieve contribution calendar from graphql api
//Retrieve gists from graphql api
console.debug(`metrics/compute/${login}/plugins > gists > querying api`)
const {user:{gists}} = await graphql(`
query Gists {
user(login: "${login}") {
@@ -27,6 +28,7 @@
`
)
//Iterate through gists
console.debug(`metrics/compute/${login}/plugins > gists > processing ${gists.nodes.length} gists`)
let stargazers = 0, forks = 0, comments = 0
for (const gist of gists.nodes) {
//Skip forks
@@ -42,7 +44,6 @@
}
//Handle errors
catch (error) {
console.debug(error)
throw {error:{message:`An error occured`}}
throw {error:{message:"An error occured", instance:error}}
}
}

View File

@@ -1,5 +1,5 @@
//Setup
export default async function ({login, rest, imports, q}, {enabled = false, from:defaults = 100} = {}) {
export default async function ({login, rest, imports, data, q}, {enabled = false, from:defaults = 100} = {}) {
//Plugin execution
try {
//Check if plugin is enabled and requirements are met
@@ -14,23 +14,25 @@
//Initialization
const habits = {facts, charts, commits:{hour:NaN, hours:{}, day:NaN, days:{}}, indents:{style:"", spaces:0, tabs:0}, linguist:{available:false, ordered:[], languages:{}}}
const pages = Math.ceil(from/100)
const offset = data.config.timezone?.offset ?? 0
//Get user recent activity
console.debug(`metrics/compute/${login}/plugins > habits > querying api`)
const events = []
try {
for (let page = 0; page < pages; page++) {
console.debug(`metrics/compute/${login}/plugins > habits > loaded page ${page}`)
console.debug(`metrics/compute/${login}/plugins > habits > loading page ${page}`)
events.push(...(await rest.activity.listEventsForAuthenticatedUser({username:login, per_page:100, page})).data)
}
} catch { console.debug(`metrics/compute/${login}/plugins > habits > no more events to load`) }
console.debug(`metrics/compute/${login}/plugins > habits > no more events to load (${events.length} loaded)`)
} catch { console.debug(`metrics/compute/${login}/plugins > habits > no more page to load`) }
console.debug(`metrics/compute/${login}/plugins > habits > ${events.length} events loaded`)
//Get user recent commits
const commits = events
.filter(({type}) => type === "PushEvent")
.filter(({actor}) => actor.login === login)
.filter(({created_at}) => new Date(created_at) > new Date(Date.now()-days*24*60*60*1000))
console.debug(`metrics/compute/${login}/plugins > habits > filtered out ${commits.length} commits`)
const actor = commits[0]?.actor?.id ?? 0
console.debug(`metrics/compute/${login}/plugins > habits > filtered out ${commits.length} push events over last ${days} days`)
//Retrieve edited files and filter edited lines (those starting with +/-) from patches
console.debug(`metrics/compute/${login}/plugins > habits > loading patches`)
const patches = [...await Promise.allSettled(commits
.flatMap(({payload}) => payload.commits).map(commit => commit.url)
.map(async commit => (await rest.request(commit)).data.files)
@@ -42,7 +44,8 @@
//Commit day
{
//Compute commit days
const days = commits.map(({created_at}) => (new Date(created_at)).getDay())
console.debug(`metrics/compute/${login}/plugins > habits > searching most active day of week`)
const days = commits.map(({created_at}) => (new Date(new Date(created_at).getTime() + offset)).getDay())
for (const day of days)
habits.commits.days[day] = (habits.commits.days[day] ?? 0) + 1
habits.commits.days.max = Math.max(...Object.values(habits.commits.days))
@@ -52,7 +55,8 @@
//Commit hour
{
//Compute commit hours
const hours = commits.map(({created_at}) => (new Date(created_at)).getHours())
console.debug(`metrics/compute/${login}/plugins > habits > searching most active time of day`)
const hours = commits.map(({created_at}) => (new Date(new Date(created_at).getTime() + offset)).getHours())
for (const hour of hours)
habits.commits.hours[hour] = (habits.commits.hours[hour] ?? 0) + 1
habits.commits.hours.max = Math.max(...Object.values(habits.commits.hours))
@@ -62,6 +66,7 @@
//Indent style
{
//Attempt to guess whether tabs or spaces are used in patches
console.debug(`metrics/compute/${login}/plugins > habits > searching indent style`)
patches
.map(({patch}) => patch.match(/((?:\t)|(?: )) /gm) ?? [])
.forEach(indent => habits.indents[/^\t/.test(indent) ? "tabs" : "spaces"]++)
@@ -70,36 +75,38 @@
//Linguist
if (charts) {
//Check if linguist exists
console.debug(`metrics/compute/${login}/plugins > habits > searching recently used languages using linguist`)
const prefix = {win32:"wsl"}[process.platform] ?? ""
if ((patches.length)&&(await imports.run(`${prefix} which github-linguist`))) {
//Setup for linguist
habits.linguist.available = true
const path = imports.paths.join(imports.os.tmpdir(), `${actor}`)
const path = imports.paths.join(imports.os.tmpdir(), `${commits[0]?.actor?.id ?? 0}`)
//Create temporary directory and save patches
console.debug(`metrics/compute/${login}/plugins > habits > creating temp dir ${path} with ${patches.length} files`)
await imports.fs.mkdir(path, {recursive:true})
await Promise.all(patches.map(async ({name, patch}, i) => await imports.fs.writeFile(imports.paths.join(path, `${i}${imports.paths.extname(name)}`), patch)))
console.debug(`metrics/compute/${login}/plugins > habits > created temp dir ${path} with ${patches.length} files`)
//Create temporary git repository
console.debug(`metrics/compute/${login}/plugins > habits > creating temp git repository`)
await imports.run(`git init && git add . && git config user.name "linguist" && git config user.email "null@github.com" && git commit -m "linguist"`, {cwd:path}).catch(console.debug)
await imports.run(`git status`, {cwd:path})
console.debug(`metrics/compute/${login}/plugins > habits > created temp git repository`)
//Spawn linguist process
console.debug(`metrics/compute/${login}/plugins > habits > running linguist`)
;(await imports.run(`${prefix} github-linguist`, {cwd:path}))
;(await imports.run(`${prefix} github-linguist --breakdown`, {cwd:path}))
//Parse linguist result
.split("\n").map(line => line.match(/(?<value>[\d.]+)%\s+(?<language>\w+)/)?.groups).filter(line => line)
.map(({value, language}) => habits.linguist.languages[language] = (habits.linguist.languages[language] ?? 0) + value/100)
habits.linguist.ordered = Object.entries(habits.linguist.languages).sort(([an, a], [bn, b]) => b - a)
}
else
console.debug(`metrics/compute/${login}/plugins > habits > linguist is not available`)
console.debug(`metrics/compute/${login}/plugins > habits > linguist not available`)
}
//Results
return habits
}
//Handle errors
catch (error) {
console.debug(error)
throw {error:{message:`An error occured`}}
if (error.error?.message)
throw error
throw {error:{message:"An error occured", instance:error}}
}
}
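
A small illustrative sketch (not part of this commit) of how the millisecond offset above shifts a commit timestamp before it is bucketed; getUTCHours is used here so the result does not depend on the machine running the example:

//Sketch: applying a timezone offset before extracting the commit hour
const created_at = "2020-12-27T21:30:53Z" //Commit timestamp as returned by the API (UTC)
const offset = 1*60*60*1000 //Hypothetical offset for UTC+1, in milliseconds
const shifted = new Date(new Date(created_at).getTime() + offset)
console.debug(shifted.getUTCHours()) //22 (21 UTC + 1 hour)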

View File

@@ -20,9 +20,10 @@
const padding = new Date(start)
padding.setHours(-14*24)
//Retrieve contribution calendar from graphql api
console.debug(`metrics/compute/${login}/plugins > isocalendar > querying api`)
const calendar = {}
for (const [name, from, to] of [["padding", padding, start], ["weeks", start, now]]) {
console.debug(`metrics/compute/${login}/plugins > isocalendar > loading "${name}" from "${from.toISOString()}" to "${to.toISOString()}"`)
console.debug(`metrics/compute/${login}/plugins > isocalendar > loading ${name} from "${from.toISOString()}" to "${to.toISOString()}"`)
const {user:{calendar:{contributionCalendar:{weeks}}}} = await graphql(`
query Calendar {
user(login: "${login}") {
@@ -44,11 +45,13 @@
calendar[name] = weeks
}
//Apply padding
console.debug(`metrics/compute/${login}/plugins > isocalendar > applying padding`)
const firstweek = calendar.weeks[0].contributionDays
const padded = calendar.padding.flatMap(({contributionDays}) => contributionDays).filter(({date}) => !firstweek.map(({date}) => date).includes(date))
while (firstweek.length < 7)
firstweek.unshift(padded.pop())
//Compute the highest contributions in a day, streaks and average commits per day
console.debug(`metrics/compute/${login}/plugins > isocalendar > computing stats`)
let max = 0, streak = {max:0, current:0}, values = [], average = 0
for (const week of calendar.weeks) {
for (const day of week.contributionDays) {
@@ -60,6 +63,7 @@
}
average = (values.reduce((a, b) => a + b, 0)/values.length).toFixed(2).replace(/[.]0+$/, "")
//Compute SVG
console.debug(`metrics/compute/${login}/plugins > isocalendar > computing svg render`)
const size = 6
let i = 0, j = 0
let svg = `
@@ -96,7 +100,6 @@
}
//Handle errors
catch (error) {
console.debug(error)
throw {error:{message:`An error occured`}}
throw {error:{message:"An error occured", instance:error}}
}
}

View File

@@ -12,6 +12,7 @@
//Skipped repositories
skipped = decodeURIComponent(skipped).split(",").map(x => x.trim().toLocaleLowerCase()).filter(x => x)
//Iterate through user's repositories and retrieve languages data
console.debug(`metrics/compute/${login}/plugins > languages > processing ${data.user.repositories.nodes.length} repositories`)
const languages = {colors:{}, total:0, stats:{}}
for (const repository of data.user.repositories.nodes) {
//Skip repository if asked
@@ -33,6 +34,7 @@
}
}
//Compute languages stats
console.debug(`metrics/compute/${login}/plugins > languages > computing stats`)
Object.keys(languages.stats).map(name => languages.stats[name] /= languages.total)
languages.favorites = Object.entries(languages.stats).sort(([an, a], [bn, b]) => b - a).slice(0, 8).map(([name, value]) => ({name, value, color:languages.colors[name], x:0}))
for (let i = 1; i < languages.favorites.length; i++)
@@ -42,7 +44,6 @@
}
//Handle errors
catch (error) {
console.debug(error)
throw {error:{message:`An error occured`}}
throw {error:{message:"An error occured", instance:error}}
}
}

View File

@@ -8,9 +8,11 @@
//Repositories
const repositories = data.user.repositories.nodes.map(({name}) => name) ?? []
//Get contributors stats from repositories
console.debug(`metrics/compute/${login}/plugins > lines > querying api`)
const lines = {added:0, deleted:0}
const response = await Promise.all(repositories.map(async repo => await rest.repos.getContributorsStats({owner:login, repo})))
//Compute changed lines
console.debug(`metrics/compute/${login}/plugins > lines > computing total diff`)
response.map(({data:repository}) => {
//Check if data are available
if (!Array.isArray(repository))
@@ -29,8 +31,7 @@
}
//Handle errors
catch (error) {
console.debug(error)
throw {error:{message:`An error occured`}}
throw {error:{message:"An error occured", instance:error}}
}
}

View File

@@ -56,13 +56,14 @@
//Limit
limit = Math.max(1, Math.min(100, Number(limit)))
//Handle mode
console.debug(`metrics/compute/${login}/plugins > music > processing mode ${mode} with provider ${provider}`)
switch (mode) {
//Playlist mode
case "playlist":{
//Start puppeteer and navigate to playlist
console.debug(`metrics/compute/${login}/plugins > music > starting browser`)
const browser = await imports.puppeteer.launch({headless:true, executablePath:process.env.PUPPETEER_BROWSER_PATH, args:["--no-sandbox", "--disable-extensions", "--disable-setuid-sandbox", "--disable-dev-shm-usage"]})
console.debug(`metrics/compute/${login}/plugins > music > loaded ${await browser.version()}`)
console.debug(`metrics/compute/${login}/plugins > music > started ${await browser.version()}`)
const page = await browser.newPage()
console.debug(`metrics/compute/${login}/plugins > music > loading page`)
await page.goto(playlist)
@@ -103,7 +104,7 @@
if (Array.isArray(tracks)) {
//Tracks
console.debug(`metrics/compute/${login}/plugins > music > found ${tracks.length} tracks`)
console.debug(JSON.stringify(tracks))
console.debug(imports.util.inspect(tracks, {depth:Infinity, maxStringLength:256}))
//Shuffle tracks
tracks = imports.shuffle(tracks)
}
@@ -120,17 +121,18 @@
//Prepare credentials
const [client_id, client_secret, refresh_token] = token.split(",").map(part => part.trim())
if ((!client_id)||(!client_secret)||(!refresh_token))
throw {error:`Spotify token must contain client id/secret and refresh token`}
throw {error:{message:`Spotify token must contain client id/secret and refresh token`}}
//API call and parse tracklist
try {
//Request access token
console.debug(`metrics/compute/${login}/plugins > music > requesting access token with refresh token for spotify`)
console.debug(`metrics/compute/${login}/plugins > music > requesting access token with spotify refresh token`)
const {data:{access_token:access}} = await imports.axios.post("https://accounts.spotify.com/api/token",
`${new imports.url.URLSearchParams({grant_type:"refresh_token", refresh_token, client_id, client_secret})}`,
{headers:{"Content-Type":"application/x-www-form-urlencoded"}},
)
console.debug(`metrics/compute/${login}/plugins > music > got new access token`)
console.debug(`metrics/compute/${login}/plugins > music > got access token`)
//Retrieve tracks
console.debug(`metrics/compute/${login}/plugins > music > querying spotify api`)
tracks = (await imports.axios(`https://api.spotify.com/v1/me/player/recently-played?limit=${limit}&after=${timestamp}`, {headers:{
"Accept":"application/json",
"Content-Type":"application/json",
@@ -143,8 +145,13 @@
}
//Handle errors
catch (error) {
if ((error.response?.status))
throw {error:{message:`API returned ${error.response.status}${error.response.data?.error_description ? ` (${error.response.data.error_description})` : ""}`}, ...raw}
if (error.isAxiosError) {
const status = error.response?.status
const description = error.response?.data?.error_description ?? null
const message = `API returned ${status}${description ? ` (${description})` : ""}`
error = error.response?.data ?? null
throw {error:{message, instance:error}, ...raw}
}
throw error
}
break
@@ -173,7 +180,6 @@
track.artwork = await imports.imgb64(track.artwork)
}
//Save results
console.debug(`metrics/compute/${login}/plugins > music > success`)
return {...raw, tracks}
}
//Unhandled error
@@ -183,7 +189,6 @@
catch (error) {
if (error.error?.message)
throw error
console.debug(error)
throw {error:{message:`An error occured`}}
throw {error:{message:"An error occured", instance:error}}
}
}
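
A self-contained sketch (illustrative only, not part of this commit) of the axios error narrowing pattern now shared by the music, pagespeed and tweets plugins:

//Sketch: turning an axios error into the {error:{message, instance}} shape thrown by plugins
import axios from "axios"
async function recentlyPlayed(access) {
  try {
    return (await axios.get("https://api.spotify.com/v1/me/player/recently-played", {headers:{Authorization:`Bearer ${access}`}})).data
  }
  catch (error) {
    //Axios errors expose the HTTP response (when one was received) on error.response
    if (error.isAxiosError) {
      const status = error.response?.status
      const description = error.response?.data?.error_description ?? null
      const message = `API returned ${status}${description ? ` (${description})` : ""}`
      throw {error:{message, instance:error.response?.data ?? null}}
    }
    //Non-HTTP failures (network errors, programming errors) are rethrown untouched
    throw error
  }
}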

View File

@@ -15,29 +15,37 @@
url = `https://${url}`
const result = {url, detailed, scores:[], metrics:{}}
//Load scores from API
console.debug(`metrics/compute/${login}/plugins > pagespeed > querying api for ${url}`)
const scores = new Map()
await Promise.all(["performance", "accessibility", "best-practices", "seo"].map(async category => {
const {score, title} = (await imports.axios.get(`https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category=${category}&url=${url}&key=${token}`)).data.lighthouseResult.categories[category]
console.debug(`metrics/compute/${login}/plugins > pagespeed > performing audit ${category}`)
const request = await imports.axios.get(`https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category=${category}&url=${url}&key=${token}`)
console.debug(request.data)
const {score, title} = request.data.lighthouseResult.categories[category]
scores.set(category, {score, title})
console.debug(`metrics/compute/${login}/plugins > pagespeed > ${category} audit performed`)
console.debug(`metrics/compute/${login}/plugins > pagespeed > performed audit ${category} (status code ${request.status})`)
}))
result.scores = [scores.get("performance"), scores.get("accessibility"), scores.get("best-practices"), scores.get("seo")]
//Detailed metrics
if (detailed)
Object.assign(result.metrics, ...(await imports.axios.get(`https://www.googleapis.com/pagespeedonline/v5/runPagespeed?&url=${url}&key=${token}`)).data.lighthouseResult.audits.metrics.details.items)
//Integrity check
if (result.scores.filter(score => score).length < 4)
throw {error:{message:"Incomplete PageSpeed results"}, url}
if (detailed) {
console.debug(`metrics/compute/${login}/plugins > pagespeed > performing detailed audit`)
const request = await imports.axios.get(`https://www.googleapis.com/pagespeedonline/v5/runPagespeed?&url=${url}&key=${token}`)
console.debug(request.data)
Object.assign(result.metrics, ...request.data.lighthouseResult.audits.metrics.details.items)
console.debug(`metrics/compute/${login}/plugins > pagespeed > performed detailed audit (status code ${request.status})`)
}
//Results
return result
}
//Handle errors
catch (error) {
if (error.response?.status)
throw {error:{message:`PageSpeed token error (code ${error.response.status})`}, url}
if (error.error?.message)
throw error
console.debug(error)
throw {error:{message:`An error occured`}}
let message = "An error occured"
if (error.isAxiosError) {
const status = error.response?.status
const description = error.response?.data?.error?.message?.match(/Lighthouse returned error: (?<description>[A-Z_]+)/)?.groups?.description ?? null
message = `API returned ${status}${description ? ` (${description})` : ""}`
error = error.response?.data ?? null
}
throw {error:{message, instance:error}}
}
}

View File

@@ -11,10 +11,12 @@
//Limit
limit = Math.max(1, Math.min(30, Number(limit)))
//Retrieve posts
console.debug(`metrics/compute/${login}/plugins > posts > processing with source ${source}`)
let posts = null
switch (source) {
//Dev.to
case "dev.to":{
console.debug(`metrics/compute/${login}/plugins > posts > querying api`)
posts = (await imports.axios.get(`https://dev.to/api/articles?username=${login}&state=fresh`)).data.map(({title, readable_publish_date:date}) => ({title, date}))
break
}
@@ -37,7 +39,6 @@
}
//Handle errors
catch (error) {
console.debug(error)
throw {error:{message:`An error occured`}}
throw {error:{message:"An error occured", instance:error}}
}
}

View File

@@ -10,6 +10,7 @@
//Limit
limit = Math.max(1, Math.min(100, Number(limit)))
//Retrieve contribution calendar from graphql api
console.debug(`metrics/compute/${login}/plugins > projects > querying api`)
const {user:{projects}} = await graphql(`
query Projects {
user(login: "${login}") {
@@ -31,6 +32,7 @@
`
)
//Iterate through projects and format them
console.debug(`metrics/compute/${login}/plugins > projects > processing ${projects.nodes.length} projects`)
const list = []
for (const project of projects.nodes) {
//Format date
@@ -52,7 +54,9 @@
}
//Handle errors
catch (error) {
console.debug(error)
throw {error:{message:`An error occured`}}
let message = "An error occured"
if (error.errors?.map(({type}) => type)?.includes("INSUFFICIENT_SCOPES"))
message = "Insufficient token rights"
throw {error:{message, instance:error}}
}
}

View File

@@ -14,10 +14,11 @@
//Limit
limit = Math.max(1, Math.min(20, Number(limit)))
//Start puppeteer and navigate to topics
console.debug(`metrics/compute/${login}/plugins > topics > searching starred topics`)
let topics = []
console.debug(`metrics/compute/${login}/plugins > topics > starting browser`)
const browser = await imports.puppeteer.launch({headless:true, executablePath:process.env.PUPPETEER_BROWSER_PATH, args:["--no-sandbox", "--disable-extensions", "--disable-setuid-sandbox", "--disable-dev-shm-usage"]})
console.debug(`metrics/compute/${login}/plugins > topics > loaded ${await browser.version()}`)
console.debug(`metrics/compute/${login}/plugins > topics > started ${await browser.version()}`)
const page = await browser.newPage()
//Iterate through pages
for (let i = 1; i <= 100; i++) {
@@ -32,17 +33,22 @@
description:li.querySelector(".f5").innerText,
icon:li.querySelector("img")?.src ?? null,
})))
console.debug(`metrics/compute/${login}/plugins > topics > extracted ${starred.length} starred topics`)
//Check if next page exists
if (!starred.length)
if (!starred.length) {
console.debug(`metrics/compute/${login}/plugins > topics > no more page to load`)
break
}
topics.push(...starred)
}
//Close browser
console.debug(`metrics/compute/${login}/plugins > topics > closing browser`)
await browser.close()
//Shuffle topics
if (shuffle)
if (shuffle) {
console.debug(`metrics/compute/${login}/plugins > topics > shuffling topics`)
topics = imports.shuffle(topics)
}
//Limit topics
if (limit > 0) {
console.debug(`metrics/compute/${login}/plugins > topics > keeping only ${limit} topics`)
@@ -51,6 +57,7 @@
topics.push({name:`And ${removed.length} more...`, description:removed.map(({name}) => name).join(", "), icon:null})
}
//Convert icons to base64
console.debug(`metrics/compute/${login}/plugins > topics > loading artworks`)
for (const topic of topics) {
if (topic.icon) {
console.debug(`metrics/compute/${login}/plugins > topics > processing ${topic.name}`)
@@ -64,7 +71,6 @@
}
//Handle errors
catch (error) {
console.debug(error)
throw {error:{message:`An error occured`}}
throw {error:{message:"An error occured", instance:error}}
}
}

View File

@@ -8,9 +8,11 @@
//Repositories
const repositories = data.user.repositories.nodes.map(({name}) => name) ?? []
//Get views stats from repositories
console.debug(`metrics/compute/${login}/plugins > traffic > querying api`)
const views = {count:0, uniques:0}
const response = await Promise.all(repositories.map(async repo => await rest.repos.getViews({owner:login, repo})))
//Compute views
console.debug(`metrics/compute/${login}/plugins > traffic > computing stats`)
response.filter(({data}) => data).map(({data:{count, uniques}}) => (views.count += count, views.uniques += uniques))
//Format values
views.count = imports.format(views.count)
@@ -20,9 +22,9 @@
}
//Handle errors
catch (error) {
let message = "An error occured"
if (error.status === 403)
throw {error:{message:`Insufficient token rights`}}
console.debug(error)
throw {error:{message:`An error occured`}}
message = "Insufficient token rights"
throw {error:{message, instance:error}}
}
}

View File

@@ -14,7 +14,7 @@
console.debug(`metrics/compute/${login}/plugins > tweets > loading twitter profile (@${username})`)
const {data:{data:profile = null}} = await imports.axios.get(`https://api.twitter.com/2/users/by/username/${username}?user.fields=profile_image_url,verified`, {headers:{Authorization:`Bearer ${token}`}})
//Load tweets
console.debug(`metrics/compute/${login}/plugins > tweets > loading tweets`)
console.debug(`metrics/compute/${login}/plugins > tweets > querying api`)
const {data:{data:tweets = []}} = await imports.axios.get(`https://api.twitter.com/2/tweets/search/recent?query=from:${username}&tweet.fields=created_at&expansions=entities.mentions.username`, {headers:{Authorization:`Bearer ${token}`}})
//Load profile image
if (profile?.profile_image_url) {
@@ -50,7 +50,13 @@
}
//Handle errors
catch (error) {
console.debug(error)
throw {error:{message:`An error occured`}}
let message = "An error occured"
if (error.isAxiosError) {
const status = error.response?.status
const description = error.response?.data?.errors?.[0]?.message ?? null
message = `API returned ${status}${description ? ` (${description})` : ""}`
error = error.response?.data ?? null
}
throw {error:{message, instance:error}}
}
}

View File

@@ -1,6 +1,7 @@
//Imports
import fs from "fs"
import path from "path"
import util from "util"
/** Setup */
export default async function ({log = true} = {}) {
@@ -30,7 +31,7 @@
conf.settings.plugins = {}
conf.settings.plugins.base = {parts:["header", "activity", "community", "repositories", "metadata"]}
if (conf.settings.debug)
logger(conf.settings)
logger(util.inspect(conf.settings, {depth:Infinity, maxStringLength:256}))
//Load package settings
logger(`metrics/setup > load package.json`)

View File

@@ -13,7 +13,7 @@
+ (!!plugins.isocalendar)*192 + (plugins.isocalendar?.duration === 'full-year')*100
+ (!!plugins.gists)*68
+ (!!plugins.topics)*160
+ (!!plugins.projects)*22 + (plugins.projects?.list?.length ?? 0)*60
+ (!!plugins.projects)*22 + (plugins.projects?.list?.length ?? 0)*60 + (!!plugins.projects?.error)*22
+ (!!plugins.tweets)*64 + (plugins.tweets?.list?.length ?? 0)*90
+ Math.max(0, (((!!base.metadata)+(!!base.header)+((!!base.activity)||(!!base.community))+(!!base.repositories)+((!!plugins.habits))+(!!plugins.pagespeed)+(!!plugins.languages)+(!!plugins.music)+(!!plugins.posts)+(!!plugins.isocalendar)+(!!plugins.gists)+(!!plugins.topics)+(!!plugins.projects))-1))*4
%>">
@@ -774,7 +774,7 @@
<% if (base.metadata) { %>
<footer>
<span>These metrics <%= !computed.token.scopes.includes("repo") ? "does not include" : "includes" %> private contributions</span>
<span>These metrics <%= !computed.token.scopes.includes("repo") ? "does not include all" : "includes" %> private contributions<% if ((config.timezone?.name)&&(!config.timezone?.error)) { %>, timezone <%= config.timezone.name %><% } %></span>
<span>Last updated <%= new Date().toGMTString() %> with lowlighter/metrics@<%= meta.version %></span>
</footer>
<% } %>

(binary image file updated: 73 KiB before and after)

View File

@@ -5,18 +5,36 @@
const computed = data.computed = {commits:0, sponsorships:0, licenses:{favorite:"", used:{}}, token:{}, repositories:{watchers:0, stargazers:0, issues_open:0, issues_closed:0, pr_open:0, pr_merged:0, forks:0, releases:0}}
const avatar = imports.imgb64(data.user.avatarUrl)
data.plugins = {}
console.debug(`metrics/compute/${login} > formatting common metrics`)
//Timezone config
if (q["config.timezone"]) {
const timezone = data.config.timezone = {name:q["config.timezone"], offset:0}
try {
timezone.offset = Number(new Date().toLocaleString("fr", {timeZoneName:"short", timeZone:timezone.name}).match(/UTC[+](?<offset>\d+)/)?.groups?.offset*60*60*1000) || 0
console.debug(`metrics/compute/${login} > timezone set to ${timezone.name} (${timezone.offset > 0 ? "+" : ""}${Math.round(timezone.offset/(60*60*1000))} hours)`)
} catch {
timezone.error = `Failed to use timezone "${timezone.name}"`
console.debug(`metrics/compute/${login} > failed to use timezone "${timezone.name}"`)
}
}
//Plugins
for (const name of Object.keys(imports.plugins)) {
pending.push((async () => {
try {
console.debug(`metrics/compute/${login}/plugins > ${name} > started`)
data.plugins[name] = await imports.plugins[name]({login, q, imports, data, computed, rest, graphql}, plugins[name])
console.debug(`metrics/compute/${login}/plugins > ${name} > completed (${data.plugins[name] !== null ? "success" : "skipped"})`)
}
catch (error) {
console.debug(`metrics/compute/${login}/plugins > ${name} > completed (error)`)
data.plugins[name] = error
}
finally {
return {name, result:data.plugins[name]}
const result = {name, result:data.plugins[name]}
console.debug(imports.util.inspect(result, {depth:Infinity, maxStringLength:256}))
return result
}
})())
}
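
Note that the offset regex above only matches positive whole-hour offsets ("UTC+1" style); below is a hedged sketch (an assumption, not part of this commit) of a parser that would also handle negative and half-hour offsets in the French localized label:

//Sketch: parsing a UTC offset (including negative and half-hour ones) from the localized timezone label
function timezoneOffset(name) {
  //e.g. "27/12/2020 à 16:30:53 UTC−5" for America/New_York or "… UTC+5:30" for Asia/Kolkata (exact label depends on ICU data)
  const label = new Date().toLocaleString("fr", {timeZoneName:"short", timeZone:name})
  const match = label.match(/UTC(?<sign>[-+−])(?<hours>\d+)(?::(?<minutes>\d+))?/)
  if (!match)
    return 0
  const {sign, hours, minutes = "0"} = match.groups
  return (sign === "+" ? 1 : -1)*(Number(hours)*60 + Number(minutes))*60*1000
}
console.debug(timezoneOffset("Europe/Paris")) //3600000 or 7200000 depending on daylight saving time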
@@ -30,14 +48,14 @@
computed.repositories.forks += repository.forkCount
//License
if (repository.licenseInfo)
computed.licenses.used[repository.licenseInfo.spdxId] = (computed.licenses.used[repository.licenseInfo.spdxId] || 0) + 1
computed.licenses.used[repository.licenseInfo.spdxId] = (computed.licenses.used[repository.licenseInfo.spdxId] ?? 0) + 1
}
//Total disk usage
computed.diskUsage = `${imports.bytes(data.user.repositories.totalDiskUsage*1000)}`
//Compute licenses stats
computed.licenses.favorite = Object.entries(computed.licenses.used).sort(([an, a], [bn, b]) => b - a).slice(0, 1).map(([name, value]) => name) || ""
computed.licenses.favorite = Object.entries(computed.licenses.used).sort(([an, a], [bn, b]) => b - a).slice(0, 1).map(([name, value]) => name) ?? ""
//Compute total commits
computed.commits += data.user.contributionsCollection.totalCommitContributions + data.user.contributionsCollection.restrictedContributionsCount
@@ -62,11 +80,11 @@
data.meta = {version:conf.package.version, author:conf.package.author}
//Debug flags
if (dflags.includes("--cakeday")||q["dflag.cakeday"]) {
if ((dflags.includes("--cakeday"))||(q["dflag.cakeday"])) {
console.debug(`metrics/compute/${login} > applying dflag --cakeday`)
computed.cakeday = true
}
if (dflags.includes("--hireable")||q["dflag.hireable"]) {
if ((dflags.includes("--hireable"))||(q["dflag.hireable"])) {
console.debug(`metrics/compute/${login} > applying dflag --hireable`)
data.user.isHireable = true
}