chore: code formatting
@@ -143,7 +143,7 @@ export default async function({list, login, data, computed, imports, graphql, qu
 
 //Member
 {
-const { years: value } = computed.registered
+const {years:value} = computed.registered
 const unlock = null
 
 list.push({
@@ -219,7 +219,7 @@ export default async function({list, login, data, computed, imports, graphql, qu
 
 //Member
 {
-const { years: value } = computed.registered
+const {years:value} = computed.registered
 const unlock = null
 
 list.push({
@@ -95,7 +95,7 @@ export default async function({login, graphql, rest, data, q, queries, imports},
 }
 catch (error) {
 console.debug(`metrics/compute/${login}/base > failed to retrieve ${_batch} repositories after ${cursor}, this is probably due to an API timeout, halving batch`)
-_batch = Math.floor(_batch/2)
+_batch = Math.floor(_batch / 2)
 if (_batch < 1) {
 console.debug(`metrics/compute/${login}/base > failed to retrieve repositories, cannot halve batch anymore`)
 throw error
@@ -3,7 +3,7 @@ export default async function({login, q, imports, data, rest, account}, {enabled
 //Plugin execution
 try {
 //Check if plugin is enabled and requirements are met
-if ((!enabled)||(!q.code))
+if ((!enabled) || (!q.code))
 return null
 
 //Context
@@ -25,15 +25,22 @@ export default async function({login, q, imports, data, rest, account}, {enabled
 try {
 for (let page = 1; page <= pages; page++) {
 console.debug(`metrics/compute/${login}/plugins > code > loading page ${page}/${pages}`)
-events.push(...[...await Promise.all([...(context.mode === "repository" ? await rest.activity.listRepoEvents({owner:context.owner, repo:context.repo}) : await rest.activity.listEventsForAuthenticatedUser({username:login, per_page:100, page})).data
-.filter(({type}) => type === "PushEvent")
-.filter(({actor}) => account === "organization" ? true : actor.login?.toLocaleLowerCase() === login.toLocaleLowerCase())
-.filter(({repo:{name:repo}}) => !((skipped.includes(repo.split("/").pop())) || (skipped.includes(repo))))
-.filter(event => visibility === "public" ? event.public : true)
-.flatMap(({payload}) => Promise.all(payload.commits.map(async commit => (await rest.request(commit.url)).data)))])]
-.flat()
-.filter(({parents}) => parents.length <= 1)
-.filter(({author}) => data.shared["commits.authoring"].filter(authoring => author?.login?.toLocaleLowerCase().includes(authoring)||author?.email?.toLocaleLowerCase().includes(authoring)||author?.name?.toLocaleLowerCase().includes(authoring)).length)
+events.push(
+...[
+...await Promise.all([
+...(context.mode === "repository"
+? await rest.activity.listRepoEvents({owner:context.owner, repo:context.repo})
+: await rest.activity.listEventsForAuthenticatedUser({username:login, per_page:100, page})).data
+.filter(({type}) => type === "PushEvent")
+.filter(({actor}) => account === "organization" ? true : actor.login?.toLocaleLowerCase() === login.toLocaleLowerCase())
+.filter(({repo:{name:repo}}) => !((skipped.includes(repo.split("/").pop())) || (skipped.includes(repo))))
+.filter(event => visibility === "public" ? event.public : true)
+.flatMap(({payload}) => Promise.all(payload.commits.map(async commit => (await rest.request(commit.url)).data))),
+]),
+]
+.flat()
+.filter(({parents}) => parents.length <= 1)
+.filter(({author}) => data.shared["commits.authoring"].filter(authoring => author?.login?.toLocaleLowerCase().includes(authoring) || author?.email?.toLocaleLowerCase().includes(authoring) || author?.name?.toLocaleLowerCase().includes(authoring)).length),
+)
 }
 }
@@ -48,8 +55,8 @@ export default async function({login, q, imports, data, rest, account}, {enabled
 .filter(({patch}) => (patch ? (patch.match(/\n/mg)?.length ?? 1) : Infinity) < lines)
 for (const file of files)
 file.language = await imports.language({...file, prefix:login}).catch(() => "unknown")
-files = files.filter(({language}) => (!languages.length)||(languages.includes(language.toLocaleLowerCase())))
-const snippet = files[Math.floor(Math.random()*files.length)] ?? null
+files = files.filter(({language}) => (!languages.length) || (languages.includes(language.toLocaleLowerCase())))
+const snippet = files[Math.floor(Math.random() * files.length)] ?? null
 if (snippet) {
 //Trim common indent from content and change line feed
 if (!snippet.patch.split("\n").shift().endsWith("@@"))
@@ -68,4 +75,4 @@ export default async function({login, q, imports, data, rest, account}, {enabled
 catch (error) {
 throw {error:{message:"An error occured", instance:error}}
 }
-}
+}
@@ -69,7 +69,7 @@ export default async function({login, q, imports, data, rest, graphql, queries,
 
 //Contributions categories
 const types = Object.fromEntries([...new Set(Object.keys(categories))].map(type => [type, new Set()]))
-if ((sections.includes("categories"))&&(extras)) {
+if ((sections.includes("categories")) && (extras)) {
 //Temporary directory
 const repository = `${repo.owner}/${repo.repo}`
 const path = imports.paths.join(imports.os.tmpdir(), `${repository.replace(/[^\w]/g, "_")}`)
@@ -90,10 +90,11 @@ export default async function({login, q, imports, data, rest, graphql, queries,
 stdout(line) {
 if (line.trim().length)
 files.push(line)
-}
+},
 })
 //Search for contributions type in specified categories
-filesloop: for (const file of files) {
+filesloop:
+for (const file of files) {
 for (const [category, globs] of Object.entries(categories)) {
 for (const glob of [globs].flat(Infinity)) {
 if (imports.minimatch(file, glob, {nocase:true})) {
@@ -16,7 +16,13 @@ export default async function({login, q}, {conf, data, rest, graphql, plugins, q
 }
 
 //Init
-const computed = {commits:0, sponsorships:0, licenses:{favorite:"", used:{}, about:{}}, token:{}, repositories:{watchers:0, stargazers:0, issues_open:0, issues_closed:0, pr_open:0, pr_closed:0, pr_merged:0, forks:0, forked:0, releases:0, deployments:0, environments:0}}
+const computed = {
+commits:0,
+sponsorships:0,
+licenses:{favorite:"", used:{}, about:{}},
+token:{},
+repositories:{watchers:0, stargazers:0, issues_open:0, issues_closed:0, pr_open:0, pr_closed:0, pr_merged:0, forks:0, forked:0, releases:0, deployments:0, environments:0},
+}
 const avatar = imports.imgb64(data.user.avatarUrl)
 data.computed = computed
 console.debug(`metrics/compute/${login} > formatting common metrics`)
@@ -38,6 +44,7 @@ export default async function({login, q}, {conf, data, rest, graphql, plugins, q
 else if (process?.env?.TZ)
 data.config.timezone = {name:process.env.TZ, offset}
 
+
 //Display
 data.large = display === "large"
 data.columns = display === "columns"
@@ -101,7 +108,7 @@ export default async function({login, q}, {conf, data, rest, graphql, plugins, q
 const months = diff.getUTCMonth() - new Date(0).getUTCMonth()
 const days = diff.getUTCDate() - new Date(0).getUTCDate()
 
-computed.registered = {years: years + days / 365.25, months}
+computed.registered = {years:years + days / 365.25, months}
 computed.registration = years ? `${years} year${imports.s(years)} ago` : months ? `${months} month${imports.s(months)} ago` : `${days} day${imports.s(days)} ago`
 computed.cakeday = (years >= 1 && months === 0 && days === 0) ? true : false
 
@@ -124,7 +131,7 @@ export default async function({login, q}, {conf, data, rest, graphql, plugins, q
 data.meta = {
 version:conf.package.version,
 author:conf.package.author,
-generated:imports.format.date(new Date(), {date:true, time:true})
+generated:imports.format.date(new Date(), {date:true, time:true}),
 }
 
 //Debug flags
@@ -1,69 +1,69 @@
 //Setup
-export default async function({login, q, imports, graphql, queries, data, account}, {enabled = false} = {}) {
-//Plugin execution
-try {
-//Check if plugin is enabled and requirements are met
-if ((!enabled)||(!q.discussions))
-return null
+export default async function({login, q, imports, graphql, queries, data, account}, {enabled = false} = {}) {
+//Plugin execution
+try {
+//Check if plugin is enabled and requirements are met
+if ((!enabled) || (!q.discussions))
+return null
 
-//Load inputs
-const {categories:_categories, "categories.limit":_categories_limit} = imports.metadata.plugins.discussions.inputs({data, account, q})
-const discussions = {categories:{}, upvotes:{discussions:0, comments:0}}
-discussions.display = {categories:_categories ? {limit:_categories_limit || Infinity} : null}
+//Load inputs
+const {categories:_categories, "categories.limit":_categories_limit} = imports.metadata.plugins.discussions.inputs({data, account, q})
+const discussions = {categories:{}, upvotes:{discussions:0, comments:0}}
+discussions.display = {categories:_categories ? {limit:_categories_limit || Infinity} : null}
 
-//Fetch general statistics
-const stats = Object.fromEntries(Object.entries((await graphql(queries.discussions.statistics({login}))).user).map(([key, value]) => [key, value.totalCount]))
-Object.assign(discussions, stats)
+//Fetch general statistics
+const stats = Object.fromEntries(Object.entries((await graphql(queries.discussions.statistics({login}))).user).map(([key, value]) => [key, value.totalCount]))
+Object.assign(discussions, stats)
 
-//Load started discussions
-{
-const fetched = []
-const categories = {}
-let cursor = null
-let pushed = 0
-do {
-console.debug(`metrics/compute/${login}/discussions > retrieving discussions after ${cursor}`)
-const {user:{repositoryDiscussions:{edges = [], nodes = []} = {}}} = await graphql(queries.discussions.categories({login, after:cursor ? `after: "${cursor}"` : ""}))
-cursor = edges?.[edges?.length - 1]?.cursor
-fetched.push(...nodes)
-pushed = nodes.length
-console.debug(`metrics/compute/${login}/discussions > retrieved ${pushed} discussions after ${cursor}`)
-} while ((pushed) && (cursor))
+//Load started discussions
+{
+const fetched = []
+const categories = {}
+let cursor = null
+let pushed = 0
+do {
+console.debug(`metrics/compute/${login}/discussions > retrieving discussions after ${cursor}`)
+const {user:{repositoryDiscussions:{edges = [], nodes = []} = {}}} = await graphql(queries.discussions.categories({login, after:cursor ? `after: "${cursor}"` : ""}))
+cursor = edges?.[edges?.length - 1]?.cursor
+fetched.push(...nodes)
+pushed = nodes.length
+console.debug(`metrics/compute/${login}/discussions > retrieved ${pushed} discussions after ${cursor}`)
+} while ((pushed) && (cursor))
 
-//Compute upvotes
-fetched.map(({upvoteCount}) => discussions.upvotes.discussions += upvoteCount)
+//Compute upvotes
+fetched.map(({upvoteCount}) => discussions.upvotes.discussions += upvoteCount)
 
-//Compute favorite category
-for (const category of [...fetched.map(({category:{emoji, name}}) => `${imports.emoji.get(emoji) ?? emoji} ${name}`)])
-categories[category] = (categories[category] ?? 0) + 1
-const categoryEntries = Object.entries(categories).sort((a, b) => b[1] - a[1])
-discussions.categories.stats = Object.fromEntries(categoryEntries)
-discussions.categories.favorite = categoryEntries[0]?.[0] ?? null
-}
+//Compute favorite category
+for (const category of [...fetched.map(({category:{emoji, name}}) => `${imports.emoji.get(emoji) ?? emoji} ${name}`)])
+categories[category] = (categories[category] ?? 0) + 1
+const categoryEntries = Object.entries(categories).sort((a, b) => b[1] - a[1])
+discussions.categories.stats = Object.fromEntries(categoryEntries)
+discussions.categories.favorite = categoryEntries[0]?.[0] ?? null
+}
 
-//Load comments
-{
-const fetched = []
-let cursor = null
-let pushed = 0
-do {
-console.debug(`metrics/compute/${login}/discussions > retrieving comments after ${cursor}`)
-const {user:{repositoryDiscussionComments:{edges = [], nodes = []} = {}}} = await graphql(queries.discussions.comments({login, after:cursor ? `after: "${cursor}"` : ""}))
-cursor = edges?.[edges?.length - 1]?.cursor
-fetched.push(...nodes)
-pushed = nodes.length
-console.debug(`metrics/compute/${login}/discussions > retrieved ${pushed} comments after ${cursor}`)
-} while ((pushed) && (cursor))
+//Load comments
+{
+const fetched = []
+let cursor = null
+let pushed = 0
+do {
+console.debug(`metrics/compute/${login}/discussions > retrieving comments after ${cursor}`)
+const {user:{repositoryDiscussionComments:{edges = [], nodes = []} = {}}} = await graphql(queries.discussions.comments({login, after:cursor ? `after: "${cursor}"` : ""}))
+cursor = edges?.[edges?.length - 1]?.cursor
+fetched.push(...nodes)
+pushed = nodes.length
+console.debug(`metrics/compute/${login}/discussions > retrieved ${pushed} comments after ${cursor}`)
+} while ((pushed) && (cursor))
 
-//Compute upvotes
-fetched.map(({upvoteCount}) => discussions.upvotes.comments += upvoteCount)
-}
+//Compute upvotes
+fetched.map(({upvoteCount}) => discussions.upvotes.comments += upvoteCount)
+}
 
-//Results
-return discussions
-}
-//Handle errors
-catch (error) {
-throw {error:{message:"An error occured", instance:error}}
-}
+//Results
+return discussions
+}
+//Handle errors
+catch (error) {
+throw {error:{message:"An error occured", instance:error}}
+}
 }
@@ -29,7 +29,7 @@ export default async function({login, data, computed, imports, q, graphql, queri
 closed:0,
 drafts:0,
 skipped:0,
-}
+},
 },
 pr:{
 get count() {
@@ -50,13 +50,12 @@ export default async function({login, data, computed, imports, q, graphql, queri
 closed:0,
 merged:0,
 drafts:0,
-}
+},
 },
 }
 
 //Extras features
 if (extras) {
-
 //Indepth mode
 if (indepth) {
 console.debug(`metrics/compute/${login}/plugins > followup > indepth`)
@@ -90,7 +89,7 @@ export default async function({login, data, computed, imports, q, graphql, queri
 }
 
 //Load user issues and pull requests
-if ((account === "user")&&(sections.includes("user"))) {
+if ((account === "user") && (sections.includes("user"))) {
 const search = await graphql(queries.followup.user({login}))
 followup.user = {
 issues:{
@@ -1,5 +1,5 @@
 //Legacy import
-import { recent as recent_analyzer } from "./../languages/analyzers.mjs"
+import {recent as recent_analyzer} from "./../languages/analyzers.mjs"
 
 //Setup
 export default async function({login, data, rest, imports, q, account}, {enabled = false, extras = false, ...defaults} = {}) {
@@ -45,7 +45,7 @@ export default async function({login, data, rest, imports, q, account}, {enabled
 ...await Promise.allSettled(
 commits
 .flatMap(({payload}) => payload.commits)
-.filter(({author}) => data.shared["commits.authoring"].filter(authoring => author?.login?.toLocaleLowerCase().includes(authoring)||author?.email?.toLocaleLowerCase().includes(authoring)||author?.name?.toLocaleLowerCase().includes(authoring)).length)
+.filter(({author}) => data.shared["commits.authoring"].filter(authoring => author?.login?.toLocaleLowerCase().includes(authoring) || author?.email?.toLocaleLowerCase().includes(authoring) || author?.name?.toLocaleLowerCase().includes(authoring)).length)
 .map(async commit => (await rest.request(commit)).data.files),
 ),
 ]
@@ -93,22 +93,23 @@ export default async function({login, data, rest, imports, q, account}, {enabled
 //Compute average number of characters per line of code fetched
 console.debug(`metrics/compute/${login}/plugins > habits > computing average number of characters per line of code`)
 const lines = patches.flatMap(({patch}) => patch.split("\n").map(line => line.length))
-habits.lines.average.chars = lines.reduce((a, b) => a + b, 0)/lines.length
+habits.lines.average.chars = lines.reduce((a, b) => a + b, 0) / lines.length
 }
 
 //Linguist
-if ((extras)&&(charts)) {
+if ((extras) && (charts)) {
 //Check if linguist exists
 console.debug(`metrics/compute/${login}/plugins > habits > searching recently used languages using linguist`)
 if (patches.length) {
 //Call language analyzer (note: using content from other plugin is usually disallowed, this is mostly for legacy purposes)
 habits.linguist.available = true
 const {total, stats} = await recent_analyzer({login, data, imports, rest, account}, {days, load:from || 1000, tempdir:"habits"})
-habits.linguist.languages = Object.fromEntries(Object.entries(stats).map(([language, value]) => [language, value/total]))
+habits.linguist.languages = Object.fromEntries(Object.entries(stats).map(([language, value]) => [language, value / total]))
 habits.linguist.ordered = Object.entries(habits.linguist.languages).sort(([_an, a], [_bn, b]) => b - a)
 }
 else
 console.debug(`metrics/compute/${login}/plugins > habits > linguist not available`)
+
 }
 
 //Results
@@ -114,4 +114,4 @@ async function statistics({login, graphql, queries, start, end, calendar}) {
 //Compute average
 average = (values.reduce((a, b) => a + b, 0) / values.length).toFixed(2).replace(/[.]0+$/, "")
 return {streak, max, average}
-}
+}
@@ -61,15 +61,16 @@ export async function recent({login, data, imports, rest, account}, {skipped = [
 
 //Get user recent activity
 console.debug(`metrics/compute/${login}/plugins > languages > querying api`)
-const commits = [], pages = Math.ceil(load/100), results = {total:0, lines:{}, stats:{}, colors:{}, commits:0, files:0, missed:0, days}
+const commits = [], pages = Math.ceil(load / 100), results = {total:0, lines:{}, stats:{}, colors:{}, commits:0, files:0, missed:0, days}
 try {
 for (let page = 1; page <= pages; page++) {
 console.debug(`metrics/compute/${login}/plugins > languages > loading page ${page}`)
-commits.push(...(await rest.activity.listEventsForAuthenticatedUser({username:login, per_page:100, page})).data
-.filter(({type}) => type === "PushEvent")
-.filter(({actor}) => account === "organization" ? true : actor.login?.toLocaleLowerCase() === login.toLocaleLowerCase())
-.filter(({repo:{name:repo}}) => (!skipped.includes(repo.toLocaleLowerCase())) && (!skipped.includes(repo.toLocaleLowerCase().split("/").pop())))
-.filter(({created_at}) => new Date(created_at) > new Date(Date.now() - days * 24 * 60 * 60 * 1000))
+commits.push(
+...(await rest.activity.listEventsForAuthenticatedUser({username:login, per_page:100, page})).data
+.filter(({type}) => type === "PushEvent")
+.filter(({actor}) => account === "organization" ? true : actor.login?.toLocaleLowerCase() === login.toLocaleLowerCase())
+.filter(({repo:{name:repo}}) => (!skipped.includes(repo.toLocaleLowerCase())) && (!skipped.includes(repo.toLocaleLowerCase().split("/").pop())))
+.filter(({created_at}) => new Date(created_at) > new Date(Date.now() - days * 24 * 60 * 60 * 1000)),
+)
 }
 }
@@ -86,17 +87,17 @@ export async function recent({login, data, imports, rest, account}, {skipped = [
 ...await Promise.allSettled(
 commits
 .flatMap(({payload}) => payload.commits)
-.filter(({author}) => data.shared["commits.authoring"].filter(authoring => author?.login?.toLocaleLowerCase().includes(authoring)||author?.email?.toLocaleLowerCase().includes(authoring)||author?.name?.toLocaleLowerCase().includes(authoring)).length)
+.filter(({author}) => data.shared["commits.authoring"].filter(authoring => author?.login?.toLocaleLowerCase().includes(authoring) || author?.email?.toLocaleLowerCase().includes(authoring) || author?.name?.toLocaleLowerCase().includes(authoring)).length)
 .map(commit => commit.url)
 .map(async commit => (await rest.request(commit)).data),
-)
+),
 ]
-.filter(({status}) => status === "fulfilled")
-.map(({value}) => value)
-.filter(({parents}) => parents.length <= 1)
-.map(({files}) => files)
-.flatMap(files => files.map(file => ({name:imports.paths.basename(file.filename), directory:imports.paths.dirname(file.filename), patch:file.patch ?? "", repo:file.raw_url?.match(/(?<=^https:..github.com\/)(?<repo>.*)(?=\/raw)/)?.groups.repo ?? "_"})))
-.map(({name, directory, patch, repo}) => ({name, directory:`${repo.replace(/[/]/g, "@")}/${directory}`, patch:patch.split("\n").filter(line => /^[+]/.test(line)).map(line => line.substring(1)).join("\n")}))
+.filter(({status}) => status === "fulfilled")
+.map(({value}) => value)
+.filter(({parents}) => parents.length <= 1)
+.map(({files}) => files)
+.flatMap(files => files.map(file => ({name:imports.paths.basename(file.filename), directory:imports.paths.dirname(file.filename), patch:file.patch ?? "", repo:file.raw_url?.match(/(?<=^https:..github.com\/)(?<repo>.*)(?=\/raw)/)?.groups.repo ?? "_"})))
+.map(({name, directory, patch, repo}) => ({name, directory:`${repo.replace(/[/]/g, "@")}/${directory}`, patch:patch.split("\n").filter(line => /^[+]/.test(line)).map(line => line.substring(1)).join("\n")}))
 
 //Temporary directory
 const path = imports.paths.join(imports.os.tmpdir(), `${data.user.databaseId}-${tempdir}`)
@@ -164,13 +165,13 @@ async function analyze({login, imports, data}, {results, path, categories = ["pr
 console.debug(`metrics/compute/${login}/plugins > languages > indepth > checking git log`)
 for (let page = 0; ; page++) {
 try {
-console.debug(`metrics/compute/${login}/plugins > languages > indepth > processing commits ${page*per_page} from ${(page+1)*per_page}`)
+console.debug(`metrics/compute/${login}/plugins > languages > indepth > processing commits ${page * per_page} from ${(page + 1) * per_page}`)
 let empty = true, file = null, lang = null
-await imports.spawn("git", ["log", ...data.shared["commits.authoring"].map(authoring => `--author="${authoring}"`), "--regexp-ignore-case", "--format=short", "--patch", `--max-count=${per_page}`, `--skip=${page*per_page}`], {cwd:path}, {
+await imports.spawn("git", ["log", ...data.shared["commits.authoring"].map(authoring => `--author="${authoring}"`), "--regexp-ignore-case", "--format=short", "--patch", `--max-count=${per_page}`, `--skip=${page * per_page}`], {cwd:path}, {
 stdout(line) {
 try {
 //Unflag empty output
-if ((empty)&&(line.trim().length))
+if ((empty) && (line.trim().length))
 empty = false
 //Commits counter
 if (/^commit [0-9a-f]{40}$/.test(line)) {
@@ -178,13 +179,13 @@ async function analyze({login, imports, data}, {results, path, categories = ["pr
 return
 }
 //Ignore empty lines or unneeded lines
-if ((!/^[+]/.test(line))||(!line.length))
+if ((!/^[+]/.test(line)) || (!line.length))
 return
 //File marker
 if (/^[+]{3}\sb[/](?<file>[\s\S]+)$/.test(line)) {
 file = `${path}/${line.match(/^[+]{3}\sb[/](?<file>[\s\S]+)$/)?.groups?.file}`.replace(/\\/g, "/")
 lang = files[file] ?? null
-if ((lang)&&(!categories.includes(languageResults[lang].type)))
+if ((lang) && (!categories.includes(languageResults[lang].type)))
 lang = null
 edited.add(file)
 return
@@ -203,7 +204,7 @@ async function analyze({login, imports, data}, {results, path, categories = ["pr
 catch (error) {
 console.debug(`metrics/compute/${login}/plugins > languages > indepth > an error occured while processing line (${error.message}), skipping...`)
 }
-}
+},
 })
 if (empty) {
 console.debug(`metrics/compute/${login}/plugins > languages > indepth > no more commits`)
@@ -223,7 +224,7 @@ if (/languages.analyzers.mjs$/.test(process.argv[1])) {
 (async function() {
 //Parse inputs
 const [_authoring, path] = process.argv.slice(2)
-if ((!_authoring)||(!path)) {
+if ((!_authoring) || (!path)) {
 console.log("Usage is:\n npm run indepth -- <commits authoring> <repository local path>\n\n")
 process.exit(1)
 }
@@ -235,7 +236,7 @@ if (/languages.analyzers.mjs$/.test(process.argv[1])) {
 //Prepare call
 const imports = await import("../../app/metrics/utils.mjs")
 const results = {total:0, lines:{}, colors:{}, stats:{}, missed:0}
-console.debug = log => /exited with code null/.test(log) ? console.error(log.replace(/^.*--max-count=(?<step>\d+) --skip=(?<start>\d+).*$/, (_, step, start) => `error: skipped commits ${start} from ${Number(start)+Number(step)}`)) : null
+console.debug = log => /exited with code null/.test(log) ? console.error(log.replace(/^.*--max-count=(?<step>\d+) --skip=(?<start>\d+).*$/, (_, step, start) => `error: skipped commits ${start} from ${Number(start) + Number(step)}`)) : null
 
 //Analyze repository
 console.log(`commits authoring | ${authoring}\nrepository path | ${path}\n`)
@@ -17,7 +17,11 @@ export default async function({login, data, imports, q, rest, account}, {enabled
 }
 
 //Load inputs
-let {ignored, skipped, colors, aliases, details, threshold, limit, indepth, "analysis.timeout":timeout, sections, categories, "recent.categories":_recent_categories, "recent.load":_recent_load, "recent.days":_recent_days} = imports.metadata.plugins.languages.inputs({data, account, q})
+let {ignored, skipped, colors, aliases, details, threshold, limit, indepth, "analysis.timeout":timeout, sections, categories, "recent.categories":_recent_categories, "recent.load":_recent_load, "recent.days":_recent_days} = imports.metadata.plugins.languages.inputs({
+data,
+account,
+q,
+})
 threshold = (Number(threshold.replace(/%$/, "")) || 0) / 100
 skipped.push(...data.shared["repositories.skipped"])
 if (!limit)
@@ -59,7 +63,7 @@ export default async function({login, data, imports, q, rest, account}, {enabled
 //Extras features
 if (extras) {
 //Recently used languages
-if ((sections.includes("recently-used"))&&(context.mode === "user")) {
+if ((sections.includes("recently-used")) && (context.mode === "user")) {
 try {
 console.debug(`metrics/compute/${login}/plugins > languages > using recent analyzer`)
 languages["stats.recent"] = await recent_analyzer({login, data, imports, rest, account}, {skipped, categories:_recent_categories ?? categories, days:_recent_days, load:_recent_load, timeout})
@@ -102,7 +106,8 @@ export default async function({login, data, imports, q, rest, account}, {enabled
 //Compute languages stats
 for (const {section, stats = {}, lines = {}, total = 0} of [{section:"favorites", stats:languages.stats, lines:languages.lines, total:languages.total}, {section:"recent", ...languages["stats.recent"]}]) {
 console.debug(`metrics/compute/${login}/plugins > languages > computing stats ${section}`)
-languages[section] = Object.entries(stats).filter(([name]) => !ignored.includes(name.toLocaleLowerCase())).sort(([_an, a], [_bn, b]) => b - a).slice(0, limit).map(([name, value]) => ({name, value, size:value, color:languages.colors[name], x:0})).filter(({value}) => value / total > threshold)
+languages[section] = Object.entries(stats).filter(([name]) => !ignored.includes(name.toLocaleLowerCase())).sort(([_an, a], [_bn, b]) => b - a).slice(0, limit).map(([name, value]) => ({name, value, size:value, color:languages.colors[name], x:0})).filter(({value}) => value / total > threshold
+)
 const visible = {total:Object.values(languages[section]).map(({size}) => size).reduce((a, b) => a + b, 0)}
 for (let i = 0; i < languages[section].length; i++) {
 const {name} = languages[section][i]
@@ -23,7 +23,8 @@ export default async function({login, data, imports, rest, q, account}, {enabled
 //Get contributors stats from repositories
 console.debug(`metrics/compute/${login}/plugins > lines > querying api`)
 const lines = {added:0, deleted:0}
-const response = [...await Promise.allSettled(repositories.map(({repo, owner}) => (skipped.includes(repo.toLocaleLowerCase())) || (skipped.includes(`${owner}/${repo}`.toLocaleLowerCase())) ? {} : rest.repos.getContributorsStats({owner, repo})))].filter(({status}) => status === "fulfilled").map(({value}) => value)
+const response = [...await Promise.allSettled(repositories.map(({repo, owner}) => (skipped.includes(repo.toLocaleLowerCase())) || (skipped.includes(`${owner}/${repo}`.toLocaleLowerCase())) ? {} : rest.repos.getContributorsStats({owner, repo})))].filter(({status}) => status === "fulfilled"
+).map(({value}) => value)
 
 //Compute changed lines
 console.debug(`metrics/compute/${login}/plugins > lines > computing total diff`)
@@ -132,11 +132,11 @@ export default async function({login, imports, data, q, account}, {enabled = fal
 //Parse tracklist
 tracks = [
 ...await frame.evaluate(() => [...document.querySelectorAll("ytmusic-playlist-shelf-renderer ytmusic-responsive-list-item-renderer")].map(item => ({
-name:item.querySelector("yt-formatted-string.title > a")?.innerText ?? "",
-artist:item.querySelector(".secondary-flex-columns > yt-formatted-string > a")?.innerText ?? "",
-artwork:item.querySelector("img").src,
-})
-)),
+name:item.querySelector("yt-formatted-string.title > a")?.innerText ?? "",
+artist:item.querySelector(".secondary-flex-columns > yt-formatted-string > a")?.innerText ?? "",
+artwork:item.querySelector("img").src,
+}))
+),
 ]
 break
 }
@@ -257,12 +257,11 @@ export default async function({login, imports, data, q, account}, {enabled = fal
 try {
 //Request access token
 console.debug(`metrics/compute/${login}/plugins > music > requesting access token with youtube refresh token`)
-const res = await imports.axios.post("https://music.youtube.com/youtubei/v1/browse?alt=json&key=AIzaSyC9XL3ZjWddXya6X74dJoCTL-WEYFDNX30",
-{
+const res = await imports.axios.post("https://music.youtube.com/youtubei/v1/browse?alt=json&key=AIzaSyC9XL3ZjWddXya6X74dJoCTL-WEYFDNX30", {
 browseEndpointContextSupportedConfigs:{
-browseEndpointContextMusicConfig:{
-pageType:"MUSIC_PAGE_TYPE_PLAYLIST",
-}
+browseEndpointContextMusicConfig:{
+pageType:"MUSIC_PAGE_TYPE_PLAYLIST",
+},
 },
 context:{
 client:{
@@ -272,9 +271,8 @@ export default async function({login, imports, data, q, account}, {enabled = fal
 hl:"en",
 },
 },
-browseId:"FEmusic_history"
-},
-{
+browseId:"FEmusic_history",
+}, {
 headers:{
 Authorization:SAPISIDHASH,
 Cookie:token,
@@ -337,14 +335,14 @@ export default async function({login, imports, data, q, account}, {enabled = fal
 Object.defineProperty(modes, "top", {
 get() {
 return `Top played artists ${time_msg}`
-}
+},
 })
 }
 else {
 Object.defineProperty(modes, "top", {
 get() {
 return `Top played tracks ${time_msg}`
-}
+},
 })
 }
 
@@ -355,7 +353,7 @@ export default async function({login, imports, data, q, account}, {enabled = fal
 //Prepare credentials
 const [client_id, client_secret, refresh_token] = token.split(",").map(part => part.trim())
 if ((!client_id) || (!client_secret) || (!refresh_token))
-throw { error: { message: "Spotify token must contain client id/secret and refresh token" } }
+throw {error:{message:"Spotify token must contain client id/secret and refresh token"}}
 else if (limit > 50)
 throw {error:{message:"Spotify top limit cannot be greater than 50"}}
 
@@ -372,40 +370,39 @@ export default async function({login, imports, data, q, account}, {enabled = fal
 //Retrieve tracks
 console.debug(`metrics/compute/${login}/plugins > music > querying spotify api`)
 tracks = []
-const loaded =
-top_type === "artists"
-? (
-await imports.axios.get(
-`https://api.spotify.com/v1/me/top/artists?time_range=${time_range}_term&limit=${limit}`,
-{
-headers: {
-"Content-Type": "application/json",
-Accept: "application/json",
-Authorization: `Bearer ${access}`,
-},
-}
-)
-).data.items.map(({ name, genres, images }) => ({
-name,
-artist: genres.join(" • "),
-artwork: images[0].url,
-}))
-: (
-await imports.axios.get(
-`https://api.spotify.com/v1/me/top/tracks?time_range=${time_range}_term&limit=${limit}`,
-{
-headers: {
-"Content-Type": "application/json",
-Accept: "application/json",
-Authorization: `Bearer ${access}`,
-},
-}
-)
-).data.items.map(({ name, artists, album }) => ({
-name,
-artist: artists[0].name,
-artwork: album.images[0].url,
-}))
+const loaded = top_type === "artists"
+? (
+await imports.axios.get(
+`https://api.spotify.com/v1/me/top/artists?time_range=${time_range}_term&limit=${limit}`,
+{
+headers:{
+"Content-Type":"application/json",
+Accept:"application/json",
+Authorization:`Bearer ${access}`,
+},
+},
+)
+).data.items.map(({name, genres, images}) => ({
+name,
+artist:genres.join(" • "),
+artwork:images[0].url,
+}))
+: (
+await imports.axios.get(
+`https://api.spotify.com/v1/me/top/tracks?time_range=${time_range}_term&limit=${limit}`,
+{
+headers:{
+"Content-Type":"application/json",
+Accept:"application/json",
+Authorization:`Bearer ${access}`,
+},
+},
+)
+).data.items.map(({name, artists, album}) => ({
+name,
+artist:artists[0].name,
+artwork:album.images[0].url,
+}))
 //Ensure no duplicate are added
 for (const track of loaded) {
 if (!tracks.map(({name}) => name).includes(track.name))
@@ -431,38 +428,37 @@ export default async function({login, imports, data, q, account}, {enabled = fal
 try {
 console.debug(`metrics/compute/${login}/plugins > music > querying lastfm api`)
 const period = time_range === "short" ? "1month" : time_range === "medium" ? "6month" : "overall"
-tracks =
-top_type === "artists"
-? (
-await imports.axios.get(
-`https://ws.audioscrobbler.com/2.0/?method=user.gettopartists&user=${user}&api_key=${token}&limit=${limit}&period=${period}&format=json`,
-{
-headers: {
-"User-Agent": "lowlighter/metrics",
-Accept: "application/json",
-},
-}
-)
-).data.topartists.artist.map(artist => ({
-name: artist.name,
-artist: `Play count: ${artist.playcount}`,
-artwork: artist.image.reverse()[0]["#text"],
-}))
-: (
-await imports.axios.get(
-`https://ws.audioscrobbler.com/2.0/?method=user.gettoptracks&user=${user}&api_key=${token}&limit=${limit}&period=${period}&format=json`,
-{
-headers: {
-"User-Agent": "lowlighter/metrics",
-Accept: "application/json",
-},
-}
-)
-).data.toptracks.track.map(track => ({
-name: track.name,
-artist: track.artist.name,
-artwork: track.image.reverse()[0]["#text"],
-}))
+tracks = top_type === "artists"
+? (
+await imports.axios.get(
+`https://ws.audioscrobbler.com/2.0/?method=user.gettopartists&user=${user}&api_key=${token}&limit=${limit}&period=${period}&format=json`,
+{
+headers:{
+"User-Agent":"lowlighter/metrics",
+Accept:"application/json",
+},
+},
+)
+).data.topartists.artist.map(artist => ({
+name:artist.name,
+artist:`Play count: ${artist.playcount}`,
+artwork:artist.image.reverse()[0]["#text"],
+}))
+: (
+await imports.axios.get(
+`https://ws.audioscrobbler.com/2.0/?method=user.gettoptracks&user=${user}&api_key=${token}&limit=${limit}&period=${period}&format=json`,
+{
+headers:{
+"User-Agent":"lowlighter/metrics",
+Accept:"application/json",
+},
+},
+)
+).data.toptracks.track.map(track => ({
+name:track.name,
+artist:track.artist.name,
+artwork:track.image.reverse()[0]["#text"],
+}))
 }
 //Handle errors
 catch (error) {
@@ -526,4 +522,4 @@ function get_all_with_key(obj, key) {
 result.push(...get_all_with_key(obj[i], key))
 }
 return result
-}
+}
@@ -55,11 +55,11 @@ export default async function({login, q, imports, rest, graphql, data, account,
 //Save user data
 contribution.user = {
 commits,
-percentage:commits/contribution.history,
+percentage:commits / contribution.history,
 maintainer:maintainers.includes(login),
 get stars() {
-return this.maintainer ? stars : this.percentage*stars
-}
+return this.maintainer ? stars : this.percentage * stars
+},
 }
 console.debug(`metrics/compute/${login}/plugins > notable > indepth > successfully processed ${owner}/${repo}`)
 }
@@ -91,6 +91,7 @@ export default async function({login, q, imports, rest, graphql, data, account,
 }
 else
 aggregated.set(key, {name:key, handle, avatar, organization, stars, aggregated:1, ..._extras})
+
 }
 contributions = [...aggregated.values()]
 if (extras) {
@@ -100,7 +101,6 @@ export default async function({login, q, imports, rest, graphql, data, account,
 contributions = contributions.sort((a, b) => ((b.user?.percentage + b.user?.maintainer) || 0) - ((a.user?.percentage + a.user?.maintainer) || 0))
 }
 
-
 //Results
 return {contributions}
 }
@@ -3,10 +3,11 @@ export default async function({q, imports, data, account}, {enabled = false, tok
 //Plugin execution
 try {
 //Check if plugin is enabled and requirements are met
-if ((!enabled)||(!q.poopmap))
+if ((!enabled) || (!q.poopmap))
 return null
 
-if (!token) return {poops:[], days:7}
+if (!token)
+return {poops:[], days:7}
 
 const {days} = imports.metadata.plugins.poopmap.inputs({data, account, q})
 const {data:{poops}} = await imports.axios.get(`https://api.poopmap.net/api/v1/public_links/${token}`)
@@ -33,4 +34,4 @@ export default async function({q, imports, data, account}, {enabled = false, tok
 catch (error) {
 throw {error:{message:"An error occured", instance:error}}
 }
-}
+}
@@ -3,7 +3,7 @@ export default async function({login, q, imports, graphql, queries, data, accoun
 //Plugin execution
 try {
 //Check if plugin is enabled and requirements are met
-if ((!enabled)||(!q.repositories))
+if ((!enabled) || (!q.repositories))
 return null
 
 //Load inputs
@@ -35,4 +35,4 @@ export default async function({login, q, imports, graphql, queries, data, accoun
 catch (error) {
 throw {error:{message:"An error occured", instance:error}}
 }
-}
+}
@@ -3,7 +3,7 @@ export default async function({login, q, imports, data, graphql, queries, accoun
 //Plugin execution
 try {
 //Check if plugin is enabled and requirements are met
-if ((!enabled)||(!q.sponsors))
+if ((!enabled) || (!q.sponsors))
 return null
 
 //Load inputs
@@ -31,7 +31,7 @@ export default async function({login, graphql, data, imports, q, queries, accoun
 console.debug(`metrics/compute/${login}/plugins > stargazers > loaded ${dates.length} stargazers in total`)
 
 //Compute stargazers increments
-const days = 14 * (1 + data.large/2)
+const days = 14 * (1 + data.large / 2)
 const increments = {dates:Object.fromEntries([...new Array(days).fill(null).map((_, i) => [new Date(Date.now() - i * 24 * 60 * 60 * 1000).toISOString().slice(0, 10), 0]).reverse()]), max:NaN, min:NaN}
 dates
 .map(date => date.toISOString().slice(0, 10))
@@ -3,7 +3,7 @@ export default async function({login, q, imports, data, account}, {enabled = fal
 //Plugin execution
 try {
 //Check if plugin is enabled and requirements are met
-if ((!enabled)||(!q.starlists))
+if ((!enabled) || (!q.starlists))
 return null
 
 //Load inputs
@@ -19,17 +19,18 @@ export default async function({login, q, imports, data, account}, {enabled = fal
 console.debug(`metrics/compute/${login}/plugins > starlists > fetching lists`)
 await page.goto(`https://github.com/${login}?tab=stars`)
 let lists = (await page.evaluate(() => [...document.querySelectorAll("[href^='/stars/lowlighter/lists']")].map(element => ({
-link:element.href,
-name:element.querySelector("h3")?.innerText ?? "",
-description:element.querySelector("span")?.innerText ?? "",
-count:Number(element.querySelector("div")?.innerText.match(/(?<count>\d+)/)?.groups.count),
-repositories:[]
-}))))
+link:element.href,
+name:element.querySelector("h3")?.innerText ?? "",
+description:element.querySelector("span")?.innerText ?? "",
+count:Number(element.querySelector("div")?.innerText.match(/(?<count>\d+)/)?.groups.count),
+repositories:[],
+}))
+))
 const count = lists.length
 console.debug(`metrics/compute/${login}/plugins > starlists > found [${lists.map(({name}) => name)}]`)
 lists = lists
 .filter(({name}) => name)
-.filter(({name}) => (!only.length)||(only.includes(name.toLocaleLowerCase())))
+.filter(({name}) => (!only.length) || (only.includes(name.toLocaleLowerCase())))
 .filter(({name}) => !ignored.includes(name.toLocaleLowerCase()))
 .slice(0, limit)
 console.debug(`metrics/compute/${login}/plugins > starlists > extracted ${lists.length} lists`)
@@ -39,9 +40,10 @@ export default async function({login, q, imports, data, account}, {enabled = fal
 console.debug(`metrics/compute/${login}/plugins > starlists > fetching ${list.name}`)
 await page.goto(list.link)
 const repositories = await page.evaluate(() => [...document.querySelectorAll("#user-list-repositories > div")].map(element => ({
-name:element.querySelector("div:first-child")?.innerText.replace(" / ", "/") ?? "",
-description:element.querySelector(".py-1")?.innerText ?? ""
-})))
+name:element.querySelector("div:first-child")?.innerText.replace(" / ", "/") ?? "",
+description:element.querySelector(".py-1")?.innerText ?? "",
+}))
+)
 list.repositories.push(...repositories)
 if (_shuffle)
 list.repositories = imports.shuffle(list.repositories)
@@ -59,4 +61,4 @@ export default async function({login, q, imports, data, account}, {enabled = fal
 catch (error) {
 throw {error:{message:"An error occured", instance:error}}
 }
-}
+}
@@ -16,7 +16,8 @@ export default async function({login, imports, data, rest, q, account}, {enabled
 //Get views stats from repositories
 console.debug(`metrics/compute/${login}/plugins > traffic > querying api`)
 const views = {count:0, uniques:0}
-const response = [...await Promise.allSettled(repositories.map(({repo, owner}) => (skipped.includes(repo.toLocaleLowerCase())) || (skipped.includes(`${owner}/${repo}`.toLocaleLowerCase())) ? {} : rest.repos.getViews({owner, repo})))].filter(({status}) => status === "fulfilled").map(({value}) => value)
+const response = [...await Promise.allSettled(repositories.map(({repo, owner}) => (skipped.includes(repo.toLocaleLowerCase())) || (skipped.includes(`${owner}/${repo}`.toLocaleLowerCase())) ? {} : rest.repos.getViews({owner, repo})))].filter(({status}) => status === "fulfilled"
+).map(({value}) => value)
 
 //Compute views
 console.debug(`metrics/compute/${login}/plugins > traffic > computing stats`)