feat(plugins/languages): add plugin_languages_analysis_timeout to avoid long runs (#572) [skip ci]
@@ -1,140 +1,154 @@
 import linguist from "linguist-js"
 
 /**Indepth analyzer */
-export async function indepth({login, data, imports, repositories}, {skipped, categories}) {
+export async function indepth({login, data, imports, repositories}, {skipped, categories, timeout}) {
+  return new Promise(async (solve, reject) => {
+    //Timeout
+    if (Number.isFinite(timeout)) {
+      console.debug(`metrics/compute/${login}/plugins > languages > timeout set to ${timeout}m`)
+      setTimeout(() => reject(`Reached maximum execution time of ${timeout}m for analysis`), timeout * 60 * 1000)
+    }
+
     //Compute repositories stats from fetched repositories
     const results = {total:0, lines:{}, stats:{}, colors:{}, commits:0, files:0, missed:0}
     for (const repository of repositories) {
       //Skip repository if asked
       if ((skipped.includes(repository.name.toLocaleLowerCase())) || (skipped.includes(`${repository.owner.login}/${repository.name}`.toLocaleLowerCase()))) {
         console.debug(`metrics/compute/${login}/plugins > languages > skipped repository ${repository.owner.login}/${repository.name}`)
         continue
       }
 
       //Repository handle
       const repo = `${repository.owner.login}/${repository.name}`
       console.debug(`metrics/compute/${login}/plugins > languages > indepth > checking ${repo}`)
 
       //Temporary directory
       const path = imports.paths.join(imports.os.tmpdir(), `${data.user.databaseId}-${repo.replace(/[^\w]/g, "_")}`)
       console.debug(`metrics/compute/${login}/plugins > languages > indepth > cloning ${repo} to temp dir ${path}`)
 
       //Process
       try {
         //Git clone into temporary directory
         await imports.fs.rm(path, {recursive:true, force:true})
         await imports.fs.mkdir(path, {recursive:true})
         const git = await imports.git(path)
         await git.clone(`https://github.com/${repo}`, ".").status()
 
         //Analyze repository
         await analyze(arguments[0], {results, path, categories})
       }
       catch (error) {
         console.debug(`metrics/compute/${login}/plugins > languages > indepth > an error occured while processing ${repo}, skipping...`)
       }
       finally {
         //Cleaning
         console.debug(`metrics/compute/${login}/plugins > languages > indepth > cleaning temp dir ${path}`)
         await imports.fs.rm(path, {recursive:true, force:true})
       }
     }
-  return results
+    solve(results)
+  })
 }
 
 /**Recent languages activity */
-export async function recent({login, data, imports, rest, account}, {skipped = [], categories, days = 0, load = 0, tempdir = "recent"}) {
+export async function recent({login, data, imports, rest, account}, {skipped = [], categories, days = 0, load = 0, tempdir = "recent", timeout}) {
+  return new Promise(async (solve, reject) => {
+    //Timeout
+    if (Number.isFinite(timeout)) {
+      console.debug(`metrics/compute/${login}/plugins > languages > timeout set to ${timeout}m`)
+      setTimeout(() => reject(`Reached maximum execution time of ${timeout}m for analysis`), timeout * 60 * 1000)
+    }
+
     //Get user recent activity
     console.debug(`metrics/compute/${login}/plugins > languages > querying api`)
     const commits = [], pages = Math.ceil(load/100), results = {total:0, lines:{}, stats:{}, colors:{}, commits:0, files:0, missed:0, days}
     try {
       for (let page = 1; page <= pages; page++) {
         console.debug(`metrics/compute/${login}/plugins > languages > loading page ${page}`)
         commits.push(...(await rest.activity.listEventsForAuthenticatedUser({username:login, per_page:100, page})).data
           .filter(({type}) => type === "PushEvent")
           .filter(({actor}) => account === "organization" ? true : actor.login?.toLocaleLowerCase() === login.toLocaleLowerCase())
           .filter(({repo:{name:repo}}) => (!skipped.includes(repo.toLocaleLowerCase())) && (!skipped.includes(repo.toLocaleLowerCase().split("/").pop())))
           .filter(({created_at}) => new Date(created_at) > new Date(Date.now() - days * 24 * 60 * 60 * 1000))
         )
       }
     }
     catch {
       console.debug(`metrics/compute/${login}/plugins > languages > no more page to load`)
     }
     console.debug(`metrics/compute/${login}/plugins > languages > ${commits.length} commits loaded`)
     results.latest = Math.round((new Date().getTime() - new Date(commits.slice(-1).shift()?.created_at).getTime()) / (1000 * 60 * 60 * 24))
 
     //Retrieve edited files and filter edited lines (those starting with +/-) from patches
     console.debug(`metrics/compute/${login}/plugins > languages > loading patches`)
     console.debug(`metrics/compute/${login}/plugins > languages > commits authoring set to ${JSON.stringify(data.shared["commits.authoring"])}`)
     let patches = [
       ...await Promise.allSettled(
         commits
           .flatMap(({payload}) => payload.commits)
           .filter(({author}) => data.shared["commits.authoring"].filter(authoring => author?.email?.toLocaleLowerCase().includes(authoring)||author?.name?.toLocaleLowerCase().includes(authoring)).length)
           .map(commit => commit.url)
           .map(async commit => (await rest.request(commit)).data),
       )
     ]
       .filter(({status}) => status === "fulfilled")
       .map(({value}) => value)
       .filter(({parents}) => parents.length <= 1)
       .map(({files}) => files)
       .flatMap(files => files.map(file => ({name:imports.paths.basename(file.filename), directory:imports.paths.dirname(file.filename), patch:file.patch ?? "", repo:file.raw_url?.match(/(?<=^https:..github.com\/)(?<repo>.*)(?=\/raw)/)?.groups.repo ?? "_"})))
       .map(({name, directory, patch, repo}) => ({name, directory:`${repo.replace(/[/]/g, "@")}/${directory}`, patch:patch.split("\n").filter(line => /^[+]/.test(line)).map(line => line.substring(1)).join("\n")}))
 
     //Temporary directory
     const path = imports.paths.join(imports.os.tmpdir(), `${data.user.databaseId}-${tempdir}`)
     console.debug(`metrics/compute/${login}/plugins > languages > creating temp dir ${path} with ${patches.length} files`)
 
     //Process
     try {
       //Save patches in temporary directory matching respective repository and filename
       await imports.fs.rm(path, {recursive:true, force:true})
       await imports.fs.mkdir(path, {recursive:true})
       await Promise.all(patches.map(async ({name, directory, patch}) => {
         await imports.fs.mkdir(imports.paths.join(path, directory), {recursive:true})
         imports.fs.writeFile(imports.paths.join(path, directory, name), patch)
       }))
 
       //Process temporary repositories
       for (const directory of await imports.fs.readdir(path)) {
         //Pull gitattributes if possible
         for (const branch of ["main", "master"]) {
           const repo = directory.replace("@", "/")
           try {
             await imports.fs.writeFile(imports.paths.join(path, directory, ".gitattributes"), await imports.fetch(`https://raw.githubusercontent.com/${repo}/${branch}/.gitattributes`).then(response => response.text()).catch(() => ""))
             console.debug(`metrics/compute/${login}/plugins > languages > successfully fetched .gitattributes for ${repo}`)
             break
           }
           catch {
             console.debug(`metrics/compute/${login}/plugins > languages > cannot load .gitattributes on branch ${branch} for ${repo}`)
           }
         }
 
         //Create temporary git repository
         console.debug(`metrics/compute/${login}/plugins > languages > creating temp git repository for ${directory}`)
         const git = await imports.git(imports.paths.join(path, directory))
         await git.init().add(".").addConfig("user.name", data.shared["commits.authoring"]?.[0] ?? login).addConfig("user.email", "<>").commit("linguist").status()
 
         //Analyze repository
         await analyze(arguments[0], {results, path:imports.paths.join(path, directory), categories})
 
         //Since we reproduce a "partial repository" with a single commit, use number of commits retrieved instead
         results.commits = commits.length
       }
     }
     catch {
       console.debug(`metrics/compute/${login}/plugins > languages > an error occured while processing recently used languages`)
     }
     finally {
       //Cleaning
       console.debug(`metrics/compute/${login}/plugins > languages > cleaning temp dir ${path}`)
       await imports.fs.rm(path, {recursive:true, force:true})
     }
-  return results
+    solve(results)
+  })
 }
 
 /**Analyze a single repository */
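Both analyzers now wrap their existing bodies in a Promise and, when a finite timeout is configured, arm a setTimeout that rejects with a "Reached maximum execution time" message once the budget elapses. Because a Promise settles only once, an analysis that finishes in time still resolves with its results, while an overlong one is abandoned (the rejection does not cancel the clone or linguist work already running in the background). Below is a minimal standalone sketch of that pattern, using a hypothetical withTimeout helper and illustrative values rather than code from this commit.

//Hypothetical helper (not part of this commit): run an async task but reject after `timeout` minutes
function withTimeout(task, timeout) {
  return new Promise(async (solve, reject) => {
    //Arm a rejection timer only when a finite timeout is configured, same guard as the analyzers above
    const timer = Number.isFinite(timeout) ? setTimeout(() => reject(`Reached maximum execution time of ${timeout}m for analysis`), timeout * 60 * 1000) : null
    try {
      solve(await task())
    }
    catch (error) {
      reject(error)
    }
    finally {
      //Unlike the analyzers, clear the timer so this standalone example exits promptly
      if (timer)
        clearTimeout(timer)
    }
  })
}

//Usage: a fast task settles well before the 15 minute default of plugin_languages_analysis_timeout
withTimeout(async () => ({total:1, lines:{}}), 15)
  .then(results => console.debug("analysis completed", results))
  .catch(error => console.debug("analysis aborted:", error))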
@@ -17,7 +17,7 @@ export default async function({login, data, imports, q, rest, account}, {enabled
       }
 
       //Load inputs
-      let {ignored, skipped, colors, aliases, details, threshold, limit, indepth, sections, categories, "recent.categories":_recent_categories, "recent.load":_recent_load, "recent.days":_recent_days} = imports.metadata.plugins.languages.inputs({data, account, q})
+      let {ignored, skipped, colors, aliases, details, threshold, limit, indepth, "analysis.timeout":timeout, sections, categories, "recent.categories":_recent_categories, "recent.load":_recent_load, "recent.days":_recent_days} = imports.metadata.plugins.languages.inputs({data, account, q})
       threshold = (Number(threshold.replace(/%$/, "")) || 0) / 100
       skipped.push(...data.shared["repositories.skipped"])
       if (!limit)
@@ -60,18 +60,28 @@ export default async function({login, data, imports, q, rest, account}, {enabled
       if (extras) {
         //Recently used languages
         if ((sections.includes("recently-used"))&&(context.mode === "user")) {
+          try {
            console.debug(`metrics/compute/${login}/plugins > languages > using recent analyzer`)
-           languages["stats.recent"] = await recent_analyzer({login, data, imports, rest, account}, {skipped, categories:_recent_categories ?? categories, days:_recent_days, load:_recent_load})
+           languages["stats.recent"] = await recent_analyzer({login, data, imports, rest, account}, {skipped, categories:_recent_categories ?? categories, days:_recent_days, load:_recent_load, timeout})
            Object.assign(languages.colors, languages["stats.recent"].colors)
+          }
+          catch (error) {
+            console.debug(`metrics/compute/${login}/plugins > languages > ${error}`)
+          }
         }
 
         //Indepth mode
         if (indepth) {
+          try {
            console.debug(`metrics/compute/${login}/plugins > languages > switching to indepth mode (this may take some time)`)
            const existingColors = languages.colors
-           Object.assign(languages, await indepth_analyzer({login, data, imports, repositories}, {skipped, categories}))
+           Object.assign(languages, await indepth_analyzer({login, data, imports, repositories}, {skipped, categories, timeout}))
            Object.assign(languages.colors, existingColors)
            console.debug(`metrics/compute/${login}/plugins > languages > indepth analysis missed ${languages.missed} commits`)
+          }
+          catch (error) {
+            console.debug(`metrics/compute/${login}/plugins > languages > ${error}`)
+          }
         }
       }
 
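In the plugin entry point each analyzer call is now wrapped in try/catch, so a timeout rejection is only logged and the plugin falls back to the stats already computed by the default algorithm: Object.assign never runs on a rejected call, leaving the previous values in place. A small illustration of that fallback behaviour, with made-up values rather than code from this commit:

//Illustrative only: a rejecting analyzer leaves the previously computed languages untouched
const languages = {stats:{JavaScript:100}, colors:{}}
const indepth_analyzer = async () => {
  throw "Reached maximum execution time of 15m for analysis"
}
try {
  Object.assign(languages, await indepth_analyzer())
}
catch (error) {
  console.debug(`metrics/compute/octocat/plugins > languages > ${error}`)
}
console.debug(languages.stats) //{JavaScript: 100} (default results preserved)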
@@ -97,7 +97,18 @@ inputs:
     description: Indepth languages processing (see documentation before enabling)
     type: boolean
     default: false
 
+  # Analysis timeout (in minutes)
+  # In case of timeout, it'll automatically fallback to default algorithm
+  # Please be nice with GitHub Actions and disable `plugin_languages_indepth` if your account is not supported
+  # See documentation before enabling
+  plugin_languages_analysis_timeout:
+    description: Languages analysis timeout
+    type: number
+    default: 15
+    min: 1
+    max: 30
+
   # GitHub language categories to display
   plugin_languages_categories:
     description: Language categories to display
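With the new metadata entry, the option can be set from a workflow like any other plugin_languages input, bounded by the min/max declared above and defaulting to 15 minutes. A hypothetical workflow snippet (the action reference and secret name are assumptions, not part of this commit):

# Hypothetical usage sketch: cap languages analysis at 10 minutes
- uses: lowlighter/metrics@latest
  with:
    token: ${{ secrets.METRICS_TOKEN }}
    plugin_languages: yes
    plugin_languages_indepth: yes
    plugin_languages_analysis_timeout: 10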