feat(plugins/pagespeed): improved performance and PWA support, fix(plugins/posts): "dev.to" lowercase username, fix(plugins/base): repositories count, fix(plugins/followup): issues and pull requests count from archived repositories (#1101)

Jayant Goel
2022-06-24 22:57:51 +05:30
committed by GitHub
parent c5105a132f
commit f544663a3f
15 changed files with 170 additions and 38 deletions

View File

@@ -32,7 +32,7 @@ export default async function({login, graphql, rest, data, q, queries, imports},
Object.assign(data, {user: queried[account]})
postprocess?.[account]({login, data})
try {
Object.assign(data.user, (await graphql(queries.base[`${account}.x`]({login, account, "calendar.from": new Date(Date.now() - 14 * 24 * 60 * 60 * 1000).toISOString(), "calendar.to": (new Date()).toISOString()})))[account])
Object.assign(data.user, (await graphql(queries.base[`${account}.x`]({login, account, "calendar.from": new Date(Date.now() - 14 * 24 * 60 * 60 * 1000).toISOString(), "calendar.to": (new Date()).toISOString(), affiliations})))[account])
console.debug(`metrics/compute/${login}/base > successfully loaded bulk query`)
}
catch {
@@ -54,7 +54,7 @@ export default async function({login, graphql, rest, data, q, queries, imports},
//Query repositories fields
for (const field of ["totalCount", "totalDiskUsage"]) {
try {
Object.assign(data.user.repositories, (await graphql(queries.base["field.repositories"]({login, account, field})))[account].repositories)
Object.assign(data.user.repositories, (await graphql(queries.base["field.repositories"]({login, account, field, affiliations})))[account].repositories)
}
catch (error) {
console.debug(`metrics/compute/${login}/base > failed to retrieve repositories.${field}`)
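In effect, the base plugin now forwards the user's repository affiliations into both the bulk account query and the per-field repository queries, so the repositories counts are limited to the selected affiliations. A minimal sketch of the idea, with illustrative names (the exact option wiring in the plugin may differ):

// Sketch only: turning an affiliations setting into the "$affiliations" placeholder
// consumed by the queries below (values are illustrative, not the plugin's defaults)
const repositoriesAffiliations = ["OWNER", "COLLABORATOR"]
const affiliations = repositoriesAffiliations.length
  ? `affiliations: [${repositoriesAffiliations.join(", ")}]`
  : ""
// Passed along with {login, account}, so repositories(last: 0 $affiliations)
// counts only repositories matching those affiliations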

View File

@@ -1,6 +1,6 @@
query BaseFieldRepositories {
query BaseFieldRepositories{
$account(login: "$login") {
repositories(last: 0) {
repositories(last: 0 $affiliations) {
$field
}
}
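For reference, once the $account, $login, $affiliations and $field placeholders are substituted, the query sent to the GraphQL API looks roughly like this (illustrative values; the real substitution happens in the queries loader):

// Illustrative resolved query
const query = `query BaseFieldRepositories {
  user(login: "octocat") {
    repositories(last: 0, affiliations: [OWNER]) {
      totalCount
    }
  }
}`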

View File

@@ -12,7 +12,7 @@ query BaseOrganizationX {
membersWithRole {
totalCount
}
repositories(last: 0) {
repositories(last: 0 $affiliations) {
totalCount
totalDiskUsage
}

View File

@@ -30,7 +30,7 @@ query BaseUserX {
repositoriesContributedTo(includeUserRepositories: true) {
totalCount
}
repositories(last: 0) {
repositories(last: 0 $affiliations) {
totalCount
totalDiskUsage
}

View File

@@ -22,4 +22,13 @@
token: ${{ secrets.METRICS_TOKEN }}
base: ""
plugin_followup: yes
plugin_followup_indepth: yes
plugin_followup_indepth: yes
- name: Exclude Archived
uses: lowlighter/metrics@latest
with:
filename: metrics.plugin.followup.archived.svg
token: ${{ secrets.METRICS_TOKEN }}
base: ""
plugin_followup: yes
plugin_followup_archived: no

View File

@@ -7,7 +7,9 @@ export default async function({login, data, computed, imports, q, graphql, queri
return null
//Load inputs
let {sections, indepth} = imports.metadata.plugins.followup.inputs({data, account, q})
let {sections, indepth, archived} = imports.metadata.plugins.followup.inputs({data, account, q})
archived = archived === false ? "archived:false" : ""
//Define getters
const followup = {
@@ -90,7 +92,7 @@ export default async function({login, data, computed, imports, q, graphql, queri
//Load user issues and pull requests
if ((account === "user") && (sections.includes("user"))) {
const search = await graphql(queries.followup.user({login}))
const search = await graphql(queries.followup.user({login, archived}))
followup.user = {
issues: {
get count() {
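The new archived input is converted into a GitHub search qualifier before being injected into the FollowupUser query further below, roughly like this (a sketch with hypothetical values):

// plugin_followup_archived: no  ->  archived === false  ->  exclude archived repositories
let archived = false
archived = archived === false ? "archived:false" : ""
const q = `is:issue author:octocat is:open ${archived}`.trim()
// => "is:issue author:octocat is:open archived:false"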

View File

@@ -36,3 +36,8 @@ inputs:
type: boolean
default: no
extras: yes
plugin_followup_archived:
description: Include issues and pull requests from archived repositories.
type: boolean
default: yes

View File

@@ -1,26 +1,26 @@
query FollowupUser {
issues_open:search(query: "is:issue author:$login is:open", type: ISSUE, first: 0) {
issues_open:search(query: "is:issue author:$login is:open $archived", type: ISSUE, first: 0) {
issueCount
}
issues_drafts:search(query: "is:issue author:$login draft:true", type: ISSUE, first: 0) {
issues_drafts:search(query: "is:issue author:$login draft:true $archived", type: ISSUE, first: 0) {
issueCount
}
issues_skipped:search(query: "is:issue author:$login is:closed label:wontfix,duplicate", type: ISSUE, first: 0) {
issues_skipped:search(query: "is:issue author:$login is:closed label:wontfix,duplicate $archived", type: ISSUE, first: 0) {
issueCount
}
issues_closed:search(query: "is:issue author:$login is:closed", type: ISSUE, first: 0) {
issues_closed:search(query: "is:issue author:$login is:closed $archived", type: ISSUE, first: 0) {
issueCount
}
pr_open:search(query: "is:pr author:$login is:open draft:false", type: ISSUE, first: 0) {
pr_open:search(query: "is:pr author:$login is:open draft:false $archived", type: ISSUE, first: 0) {
issueCount
}
pr_drafts:search(query: "is:pr author:$login draft:true", type: ISSUE, first: 0) {
pr_drafts:search(query: "is:pr author:$login draft:true $archived", type: ISSUE, first: 0) {
issueCount
}
pr_closed:search(query: "is:pr author:$login is:unmerged is:closed draft:false", type: ISSUE, first: 0) {
pr_closed:search(query: "is:pr author:$login is:unmerged is:closed draft:false $archived", type: ISSUE, first: 0) {
issueCount
}
pr_merged:search(query: "is:pr author:$login is:merged", type: ISSUE, first: 0) {
pr_merged:search(query: "is:pr author:$login is:merged $archived", type: ISSUE, first: 0) {
issueCount
}
}
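Each aliased search above returns only an issueCount, which the plugin getters then fold into the displayed totals; conceptually (a sketch, not the plugin's exact code):

// Sketch: reading the aliased counters returned by FollowupUser (example payload)
const search = {issues_open: {issueCount: 12}, issues_closed: {issueCount: 30}}
const issues = {
  open: search.issues_open.issueCount,
  closed: search.issues_closed.issueCount,
  get count() { return this.open + this.closed }
}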

View File

@@ -30,3 +30,13 @@
plugin_pagespeed_token: ${{ secrets.PAGESPEED_TOKEN }}
plugin_pagespeed_url: https://lecoq.io
- name: Succinct report with PWA
uses: lowlighter/metrics@latest
with:
filename: metrics.plugin.pagespeed.svg
token: NOT_NEEDED
base: ""
plugin_pagespeed: yes
plugin_pagespeed_token: ${{ secrets.PAGESPEED_TOKEN }}
plugin_pagespeed_url: https://lecoq.io
plugin_pagespeed_pwa: yes

View File

@@ -7,7 +7,7 @@ export default async function({login, imports, data, q, account}, {enabled = fal
return null
//Load inputs
let {detailed, screenshot, url} = imports.metadata.plugins.pagespeed.inputs({data, account, q})
let {detailed, screenshot, url, pwa} = imports.metadata.plugins.pagespeed.inputs({data, account, q})
//Format url if needed
if (!/^https?:[/][/]/.test(url))
url = `https://${url}`
@@ -15,22 +15,26 @@ export default async function({login, imports, data, q, account}, {enabled = fal
const result = {url: `${protocol}//${host}`, detailed, scores: [], metrics: {}}
//Load scores from API
console.debug(`metrics/compute/${login}/plugins > pagespeed > querying api for ${result.url}`)
const scores = new Map()
await Promise.all(["performance", "accessibility", "best-practices", "seo"].map(async category => {
//Perform audit
console.debug(`metrics/compute/${login}/plugins > pagespeed > performing audit ${category}`)
const request = await imports.axios.get(`https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category=${category}&url=${url}${token ? `&key=${token}` : ""}`)
const categories = ["performance", "accessibility", "best-practices", "seo"]
if (pwa){
categories.push("pwa")
}
let categories_required = ""
for (const category of categories){
categories_required += `&category=${category}`
}
//Perform audit
console.debug(`metrics/compute/${login}/plugins > pagespeed > performing audit ${categories_required}`)
const request = await imports.axios.get(`https://www.googleapis.com/pagespeedonline/v5/runPagespeed?url=${url}${categories_required}${token ? `&key=${token}` : ""}`)
for (const category of categories){
const {score, title} = request.data.lighthouseResult.categories[category]
scores.set(category, {score, title})
result.scores.push({score, title})
console.debug(`metrics/compute/${login}/plugins > pagespeed > performed audit ${category} (status code ${request.status})`)
//Store screenshot
if ((screenshot) && (category === "performance")) {
result.screenshot = request.data.lighthouseResult.audits["final-screenshot"].details.data
console.debug(`metrics/compute/${login}/plugins > pagespeed > performed audit ${category} (status code ${request.status})`)
}
}))
result.scores = [scores.get("performance"), scores.get("accessibility"), scores.get("best-practices"), scores.get("seo")]
}
//Store screenshot
if (screenshot) {
result.screenshot = request.data.lighthouseResult.audits["final-screenshot"].details.data
}
//Detailed metrics
if (detailed) {
console.debug(`metrics/compute/${login}/plugins > pagespeed > performing detailed audit`)
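This rework replaces the previous four parallel runPagespeed calls (one per category) with a single request that lists every requested category, optionally including pwa. Sketched below with an illustrative URL:

// Sketch: building the single batched PageSpeed request (values are illustrative)
const url = "https://lecoq.io"
const pwa = true
const categories = ["performance", "accessibility", "best-practices", "seo", ...(pwa ? ["pwa"] : [])]
const endpoint = `https://www.googleapis.com/pagespeedonline/v5/runPagespeed?url=${url}`
  + categories.map(category => `&category=${category}`).join("")
// One HTTP request instead of one per category; each score is then read from
// response.data.lighthouseResult.categories[category]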

View File

@@ -54,4 +54,9 @@ inputs:
Although not mandatory, it is strongly advised to create one to avoid triggering the rate limiter. See [PageSpeed documentation](https://developers.google.com/speed/docs/insights/v5/get-started) for more information.
type: token
default: ""
default: ""
plugin_pagespeed_pwa:
description: Display PWA Status
type: boolean
default: no

View File

@@ -8,7 +8,6 @@ export default async function({login, data, imports, q, queries, account}, {enab
//Load inputs
let {source, descriptions, covers, limit, user} = imports.metadata.plugins.posts.inputs({data, account, q})
//Retrieve posts
console.debug(`metrics/compute/${login}/plugins > posts > processing with source ${source}`)
let posts = null
@@ -16,6 +15,7 @@ export default async function({login, data, imports, q, queries, account}, {enab
switch (source) {
//Dev.to
case "dev.to": {
user = user.toLowerCase()
console.debug(`metrics/compute/${login}/plugins > posts > querying api`)
posts = (await imports.axios.get(`https://dev.to/api/articles?username=${user}&state=fresh`)).data.map(({title, description, published_at: date, cover_image: image, url: link}) => ({title, description, date, image, link}))
link = `https://dev.to/${user}`
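Lowercasing the handle before calling the dev.to API avoids a mismatch when the configured username contains capital letters; for instance (illustrative handle):

// Illustrative only: a profile configured as "OctoCat" is queried as "octocat"
let user = "OctoCat"
user = user.toLowerCase()
const endpoint = `https://dev.to/api/articles?username=${user}&state=fresh`
// => "https://dev.to/api/articles?username=octocat&state=fresh"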