mirror of https://github.com/CHOMPStation2/CHOMPStation2.git
synced 2025-12-11 18:53:06 +00:00
Manualbiome (#11216)
@@ -1,42 +1,42 @@
 import { parseChangelog } from "./changelogParser.js";

 const safeYml = (string) =>
   string.replace(/\\/g, "\\\\").replace(/"/g, '\\"').replace(/\n/g, "\\n");

 export function changelogToYml(changelog, login) {
   const author = changelog.author || login;
   const ymlLines = [];

   ymlLines.push(`author: "${safeYml(author)}"`);
   ymlLines.push(`delete-after: True`);
   ymlLines.push(`changes:`);

   for (const change of changelog.changes) {
     ymlLines.push(
-      ` - ${change.type.changelogKey}: "${safeYml(change.description)}"`
+      ` - ${change.type.changelogKey}: "${safeYml(change.description)}"`,
     );
   }

   return ymlLines.join("\n");
 }

 export async function processAutoChangelog({ github, context }) {
   const changelog = parseChangelog(context.payload.pull_request.body);
   if (!changelog || changelog.changes.length === 0) {
     console.log("no changelog found");
     return;
   }

   const yml = changelogToYml(
     changelog,
-    context.payload.pull_request.user.login
+    context.payload.pull_request.user.login,
   );

   github.rest.repos.createOrUpdateFileContents({
     owner: context.repo.owner,
     repo: context.repo.repo,
     path: `html/changelogs_ch/AutoChangeLog-pr-${context.payload.pull_request.number}.yml`, // CHOMPEdit
     message: `Automatic changelog for PR #${context.payload.pull_request.number} [ci skip]`,
     content: Buffer.from(yml).toString("base64"),
   });
 }
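A hedged sketch of what `changelogToYml` emits, with an invented input shaped like `parseChangelog`'s result (`"rscadd"` being the canonical key for the `add`/`adds` aliases defined in the config further down):

```js
// Invented input shaped like parseChangelog's output.
const example = {
  author: "DenverCoder9",
  changes: [
    { type: { changelogKey: "rscadd" }, description: 'Adds "new" stuff' },
  ],
};

// The second argument is only a fallback when the PR body names no author.
console.log(changelogToYml(example, "fallback-login"));
// author: "DenverCoder9"
// delete-after: True
// changes:
//  - rscadd: "Adds \"new\" stuff"
```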
@@ -3,19 +3,19 @@ import { changelogToYml } from "./autoChangelog.js";
 import { parseChangelog } from "./changelogParser.js";

 assert.equal(
   changelogToYml(
     parseChangelog(`
 My cool PR!

 :cl: DenverCoder9
 add: Adds new stuff
 add: Adds more stuff
 /:cl:
-`)
+`),
   ),
   `author: "DenverCoder9"
 delete-after: True
 changes:
  - rscadd: "Adds new stuff"
- - rscadd: "Adds more stuff"`
+ - rscadd: "Adds more stuff"`,
 );
@@ -8,106 +8,106 @@
  * doesn't exist.
  */
 export const CHANGELOG_ENTRIES = [
   [
     ["rscadd", "add", "adds"],
     {
       placeholders: [
         "Added new mechanics or gameplay changes",
         "Added more things",
       ],
     },
   ],
   [
     ["bugfix", "fix", "fixes"],
     {
       placeholders: ["fixed a few things"],
     },
   ],
   [
     ["rscdel", "del", "dels"],
     {
       placeholders: ["Removed old things"],
     },
   ],
   [
     ["qol", "tweak"],
     {
       placeholders: ["made something easier to use"],
     },
   ],
   [
     ["maptweak", "remap"],
     {
       placeholders: ["tweaked something in a map"],
     },
   ],
   [
     ["sound"],
     {
       placeholders: ["added/modified/removed audio or sound effects"],
     },
   ],
   [
     ["image"],
     {
       placeholders: ["added/modified/removed some icons or images"],
     },
   ],
   [
     ["spellcheck", "typo"],
     {
       placeholders: ["fixed a few typos"],
     },
   ],
   [
     ["balance"],
     {
       placeholders: ["rebalanced something"],
     },
   ],
   [
     ["code_imp", "code"],
     {
       placeholders: ["changed some code"],
     },
   ],
   [
     ["refactor"],
     {
       placeholders: ["refactored some code"],
     },
   ],
   [
     ["config"],
     {
       placeholders: ["changed some config setting"],
     },
   ],
   [
     ["admin"],
     {
       placeholders: ["messed with admin stuff"],
     },
   ],
   [
     ["server"],
     {
       placeholders: ["something server ops should know"],
     },
   ],
 ];

 // Valid changelog openers
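Each entry pairs a list of accepted prefixes with the placeholder strings from the PR template that should never reach a real changelog; the first alias in each list is the canonical key written to the `.yml`. A minimal sketch of the mapping this produces, mirroring the lookup the parser builds below:

```js
// Derive alias -> canonical-key pairs from CHANGELOG_ENTRIES.
const aliasToKey = {};
for (const [types] of CHANGELOG_ENTRIES) {
  for (const type of types) {
    aliasToKey[type] = types[0];
  }
}

console.log(aliasToKey["fixes"]); // "bugfix"
console.log(aliasToKey["tweak"]); // "qol"
```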
@@ -4,80 +4,80 @@ const REGEX_CHANGELOG_LINE = /^(\w+): (.+)$/;

 const CHANGELOG_KEYS_TO_ENTRY = {};
 for (const [types, entry] of changelogConfig.CHANGELOG_ENTRIES) {
   const entryWithChangelogKey = {
     ...entry,
     changelogKey: types[0],
   };

   for (const type of types) {
     CHANGELOG_KEYS_TO_ENTRY[type] = entryWithChangelogKey;
   }
 }

 function parseChangelogBody(lines, openTag) {
   const [changelogOpening] = lines.splice(0, 1);

-  const author =
-    changelogOpening.substring(openTag.length).trim() || undefined;
+  const author = changelogOpening.substring(openTag.length).trim() || undefined;

   const changelog = {
     author,
     changes: [],
   };

   for (const line of lines) {
     if (line.trim().length === 0) {
       continue;
     }

     for (const closeTag of changelogConfig.CHANGELOG_CLOSE_TAGS) {
       if (line.startsWith(closeTag)) {
         return changelog;
       }
     }

     const match = line.match(REGEX_CHANGELOG_LINE);
     if (match) {
       const [_, type, description] = match;

-      const entry = CHANGELOG_KEYS_TO_ENTRY[type] || CHANGELOG_KEYS_TO_ENTRY["rscadd"];
+      const entry =
+        CHANGELOG_KEYS_TO_ENTRY[type] || CHANGELOG_KEYS_TO_ENTRY["rscadd"];

       if (!entry || entry.placeholders.includes(description)) {
         continue;
       }

       if (entry) {
         changelog.changes.push({
           type: entry,
           description,
         });
       }
     } else {
       const lastChange = changelog.changes[changelog.changes.length - 1];
       if (lastChange) {
         lastChange.description += `\n${line}`;
       }
     }
   }

   return changelog;
 }

 export function parseChangelog(text) {
-  if(text == null) {
+  if (text == null) {
     return undefined;
   }
   const lines = text.split("\n").map((line) => line.trim());

   for (let index = 0; index < lines.length; index++) {
     const line = lines[index];

     for (const openTag of changelogConfig.CHANGELOG_OPEN_TAGS) {
       if (line.startsWith(openTag)) {
         return parseChangelogBody(lines.slice(index), openTag);
       }
     }
   }

   return undefined;
 }
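`parseChangelog` scans the PR body for the first line starting with an open tag, consumes lines until a close tag, and resolves each `prefix: description` line through the alias table. A hedged usage sketch — the `:cl:` and `/:cl:` tags are taken from the test above; the actual tag lists live in changelogConfig, outside this diff:

```js
const parsed = parseChangelog(`
Some PR description.

:cl: DenverCoder9
fix: Fixed a specific bug
add: Adds new stuff
/:cl:
`);

console.log(parsed.author); // "DenverCoder9"
console.log(parsed.changes.map((change) => change.type.changelogKey));
// ["bugfix", "rscadd"]
```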
@@ -27,8 +27,8 @@ assert.equal(multiLineChangelog.author, undefined);
 assert.equal(multiLineChangelog.changes.length, 1);
 assert.equal(multiLineChangelog.changes[0].type.changelogKey, "rscadd");
 assert.equal(
   multiLineChangelog.changes[0].description,
-  "Adds new stuff\nto the game"
+  "Adds new stuff\nto the game",
 );

 // Placeholders
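The placeholder tests that follow exercise the `entry.placeholders.includes(description)` check in the parser: a description that still exactly matches one of the PR-template placeholders is silently dropped. A small sketch, assuming the same `:cl:` tags as the test above:

```js
const withPlaceholder = parseChangelog(`
:cl:
fix: fixed a few things
fix: Fixed the chat window
/:cl:
`);

// "fixed a few things" is the bugfix placeholder, so only one change survives.
console.log(withPlaceholder.changes.length); // 1
```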
@@ -19,14 +19,14 @@ export async function removeGuideComments({ github, context }) {
   let newBody = context.payload.pull_request.body;

   if (!newBody) {
     console.log("PR body is empty, skipping...");
     return;
   }

   for (const comment of comments) {
     newBody = newBody.replace(
       new RegExp(`^\\s*${escapeRegex(comment)}\\s*`, "gm"),
-      "\n"
+      "\n",
     );
   }
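`removeGuideComments` strips known guide comments from the PR body (the `comments` list and `escapeRegex` are defined earlier in this file, outside the hunk), collapsing each comment plus its surrounding blank lines into a single newline. A self-contained sketch with an invented comment string and an inlined stand-in for `escapeRegex`:

```js
// Hypothetical guide comment; the real strings come from the comments list.
const comment = "<!-- Describe your changes here -->";
const body = `My PR\n\n${comment}\n\nDetails`;

// Stand-in for escapeRegex: escape regex metacharacters in a literal string.
const escaped = comment.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");

console.log(body.replace(new RegExp(`^\\s*${escaped}\\s*`, "gm"), "\n"));
// "My PR\n\nDetails"
```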
@@ -3,222 +3,224 @@ const TITLE_BOT_HEADER = "title: ";

 // Only check jobs that start with these.
 // Helps make sure we don't restart something like screenshot tests or linters, which are not known to be flaky.
-const CONSIDERED_JOBS = [
-  "Integration Tests",
-];
+const CONSIDERED_JOBS = ["Integration Tests"];

 async function getFailedJobsForRun(github, context, workflowRunId, runAttempt) {
   const {
     data: { jobs },
   } = await github.rest.actions.listJobsForWorkflowRunAttempt({
     owner: context.repo.owner,
     repo: context.repo.repo,
     run_id: workflowRunId,
     attempt_number: runAttempt,
   });

   return jobs
     .filter((job) => job.conclusion === "failure")
     .filter((job) =>
-      CONSIDERED_JOBS.some((title) => job.name.startsWith(title))
+      CONSIDERED_JOBS.some((title) => job.name.startsWith(title)),
     );
 }

 export async function rerunFlakyTests({ github, context }) {
   const failingJobs = await getFailedJobsForRun(
     github,
     context,
     context.payload.workflow_run.id,
-    context.payload.workflow_run.run_attempt
+    context.payload.workflow_run.run_attempt,
   );

   if (failingJobs.length > 1) {
     console.log("Multiple jobs failing. PROBABLY not flaky, not rerunning.");
     return;
   }

   if (failingJobs.length === 0) {
     throw new Error(
-      "rerunFlakyTests should not have run on a run with no failing jobs"
+      "rerunFlakyTests should not have run on a run with no failing jobs",
     );
   }

   github.rest.actions.reRunWorkflowFailedJobs({
     owner: context.repo.owner,
     repo: context.repo.repo,
     run_id: context.payload.workflow_run.id,
   });
 }

 // Tries its best to extract a useful error title and message for the given log
 export function extractDetails(log) {
   // Strip off timestamp
   const lines = log.split(/^[0-9.:T\-]*?Z /gm);

   const failureRegex = /^\t?FAILURE #(?<number>[0-9]+): (?<headline>.+)/;
   const groupRegex = /^##\[group\](?<group>.+)/;

   const failures = [];
   let lastGroup = "root";
   let loggingFailure;

   const newFailure = (failureMatch) => {
     const { headline } = failureMatch.groups;

     loggingFailure = {
       headline,
       group: lastGroup.replace("/datum/unit_test/", ""),
       details: [],
     };
   };

   for (const line of lines) {
     const groupMatch = line.match(groupRegex);
     if (groupMatch) {
       lastGroup = groupMatch.groups.group.trim();
       continue;
     }

     const failureMatch = line.match(failureRegex);

     if (loggingFailure === undefined) {
       if (!failureMatch) {
         continue;
       }

       newFailure(failureMatch);
     } else if (failureMatch || line.startsWith("##")) {
       failures.push(loggingFailure);
       loggingFailure = undefined;

       if (failureMatch) {
         newFailure(failureMatch);
       }
     } else {
       loggingFailure.details.push(line.trim());
     }
   }

   // We had no logged failures, there's not really anything we can do here
   if (failures.length === 0) {
     return {
       title: "Flaky test failure with no obvious source",
       failures,
     };
   }

   // We *could* create multiple failures for multiple groups.
   // This would be important if we had multiple flaky tests at the same time.
   // I'm choosing not to because it complicates this logic a bit, has the ability to go terribly wrong,
   // and also because there's something funny to me about that increasing the urgency of fixing
   // flaky tests. If it becomes a serious issue though, I would not mind this being fixed.
   const uniqueGroups = new Set(failures.map((failure) => failure.group));

   if (uniqueGroups.size > 1) {
     return {
       title: `Multiple flaky test failures in ${Array.from(uniqueGroups)
         .sort()
         .join(", ")}`,
       failures,
     };
   }

   const failGroup = failures[0].group;

   if (failures.length > 1) {
     return {
       title: `Multiple errors in flaky test ${failGroup}`,
       failures,
     };
   }

   const failure = failures[0];

   // Common patterns where we can always get a detailed title
   const runtimeMatch = failure.headline.match(/Runtime in .+?: (?<error>.+)/);
   if (runtimeMatch) {
     const runtime = runtimeMatch.groups.error.trim();

-    const invalidTimerMatch = runtime.match(/^Invalid timer:.+object:(?<object>[^[]+).*delegate:(?<proc>.+?), source:/);
+    const invalidTimerMatch = runtime.match(
+      /^Invalid timer:.+object:(?<object>[^[]+).*delegate:(?<proc>.+?), source:/,
+    );
     if (invalidTimerMatch) {
       return {
         title: `Flaky test ${failGroup}: Invalid timer: ${invalidTimerMatch.groups.proc.trim()} on ${invalidTimerMatch.groups.object.trim()}`,
         failures,
       };
     }

     return {
       title: `Flaky test ${failGroup}: ${runtime}`,
       failures,
     };
   }

-  const hardDelMatch = failure.headline.match(/^(?<object>\/[\w/]+) hard deleted .* times out of a total del count of/);
+  const hardDelMatch = failure.headline.match(
+    /^(?<object>\/[\w/]+) hard deleted .* times out of a total del count of/,
+  );
   if (hardDelMatch) {
     return {
       title: `Flaky hard delete: ${hardDelMatch.groups.object}`,
       failures,
     };
   }

   // Try to normalize the title and remove anything that might be variable
   const normalizedError = failure.headline.replace(/\s*at .+?:[0-9]+.*/g, ""); // "<message> at code.dm:123"

   return {
     title: `Flaky test ${failGroup}: ${normalizedError}`,
     failures,
   };
 }

 async function getExistingIssueId(graphql, context, title) {
   // Hope you never have more than 100 of these open!
   const {
     repository: {
       issues: { nodes: openFlakyTestIssues },
     },
   } = await graphql(
     `
       query ($owner: String!, $repo: String!, $label: String!) {
         repository(owner: $owner, name: $repo) {
           issues(
             labels: [$label]
             first: 100
             orderBy: { field: CREATED_AT, direction: DESC }
             states: [OPEN]
           ) {
             nodes {
               number
               title
               body
             }
           }
         }
       }
     `,
     {
       owner: context.repo.owner,
       repo: context.repo.repo,
       label: LABEL,
-    }
+    },
   );

   const exactTitle = openFlakyTestIssues.find((issue) => issue.title === title);
   if (exactTitle !== undefined) {
     return exactTitle.number;
   }

   const foundInBody = openFlakyTestIssues.find((issue) =>
-    issue.body.includes(`<!-- ${TITLE_BOT_HEADER}${exactTitle} -->`)
+    issue.body.includes(`<!-- ${TITLE_BOT_HEADER}${exactTitle} -->`),
   );
   if (foundInBody !== undefined) {
     return foundInBody.number;
   }

   return undefined;
 }

 function createBody({ title, failures }, runUrl) {
   return `
 <!-- This issue can be renamed, but do not change the next comment! -->
 <!-- title: ${title} -->
@@ -227,67 +229,67 @@ function createBody({ title, failures }, runUrl) {
 Failures:
 \`\`\`
 ${failures
   .map(
     (failure) =>
-      `${failure.group}: ${failure.headline}\n\t${failure.details.join("\n")}`
+      `${failure.group}: ${failure.headline}\n\t${failure.details.join("\n")}`,
   )
   .join("\n")}
 \`\`\`
 `.replace(/^\s*/gm, "");
 }

 export async function reportFlakyTests({ github, context }) {
   const failedJobsFromLastRun = await getFailedJobsForRun(
     github,
     context,
     context.payload.workflow_run.id,
-    context.payload.workflow_run.run_attempt - 1
+    context.payload.workflow_run.run_attempt - 1,
   );

   // This could one day be relaxed if we face serious enough flaky test problems, so we're going to loop anyway
   if (failedJobsFromLastRun.length !== 1) {
     console.log(
-      "Multiple jobs failing after retry, assuming maintainer rerun."
+      "Multiple jobs failing after retry, assuming maintainer rerun.",
     );

     return;
   }

   for (const job of failedJobsFromLastRun) {
     const { data: log } =
       await github.rest.actions.downloadJobLogsForWorkflowRun({
         owner: context.repo.owner,
         repo: context.repo.repo,
         job_id: job.id,
       });

     const details = extractDetails(log);

     const existingIssueId = await getExistingIssueId(
       github.graphql,
       context,
-      details.title
+      details.title,
     );

     if (existingIssueId !== undefined) {
       // Maybe in the future, if it's helpful, update the existing issue with new links
       console.log(`Existing issue found: #${existingIssueId}`);
       return;
     }

     await github.rest.issues.create({
       owner: context.repo.owner,
       repo: context.repo.repo,
       title: details.title,
       labels: [LABEL],
       body: createBody(
         details,
         `https://github.com/${context.repo.owner}/${
           context.repo.repo
         }/actions/runs/${context.payload.workflow_run.id}/attempts/${
           context.payload.workflow_run.run_attempt - 1
-        }`
+        }`,
       ),
     });
   }
 }
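`extractDetails` works against raw job logs: GitHub prefixes every line with an ISO-8601 timestamp (which the split regex strips), the unit tests emit `FAILURE #n:` lines, and `##[group]` markers name the current test. A sketch with an invented log that reproduces one of the titles asserted in the tests below:

```js
const log = [
  "2024-01-01T00:00:00.0000000Z ##[group]/datum/unit_test/monkey_business",
  "2024-01-01T00:00:01.0000000Z \tFAILURE #1: Cannot execute null.resolve().",
  "2024-01-01T00:00:02.0000000Z ##[endgroup]",
].join("\n");

// The ##[endgroup] marker flushes the in-progress failure into the list.
console.log(extractDetails(log).title);
// "Flaky test monkey_business: Cannot execute null.resolve()."
```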
@@ -3,43 +3,43 @@ import fs from "node:fs";
 import { extractDetails } from "./rerunFlakyTests.js";

 function extractDetailsFromPayload(filename) {
   return extractDetails(
     fs.readFileSync(`tests/flakyTestPayloads/${filename}.txt`, {
       encoding: "utf8",
-    })
+    }),
   );
 }

 const chatClient = extractDetailsFromPayload("chat_client");
 assert.equal(
   chatClient.title,
-  "Flaky hard delete: /datum/computer_file/program/chatclient"
+  "Flaky hard delete: /datum/computer_file/program/chatclient",
 );
 assert.equal(chatClient.failures.length, 1);

 const monkeyBusiness = extractDetailsFromPayload("monkey_business");
 assert.equal(
   monkeyBusiness.title,
-  "Flaky test monkey_business: Cannot execute null.resolve()."
+  "Flaky test monkey_business: Cannot execute null.resolve().",
 );
 assert.equal(monkeyBusiness.failures.length, 1);

 const shapeshift = extractDetailsFromPayload("shapeshift");
 assert.equal(
   shapeshift.title,
-  "Multiple errors in flaky test shapeshift_spell"
+  "Multiple errors in flaky test shapeshift_spell",
 );
 assert.equal(shapeshift.failures.length, 16);

 const multipleFailures = extractDetailsFromPayload("multiple_failures");
 assert.equal(
   multipleFailures.title,
-  "Multiple flaky test failures in more_shapeshift_spell, shapeshift_spell"
+  "Multiple flaky test failures in more_shapeshift_spell, shapeshift_spell",
 );
 assert.equal(multipleFailures.failures.length, 2);

 const invalidTimer = extractDetailsFromPayload("invalid_timer");
 assert.equal(
   invalidTimer.title,
-  "Flaky test monkey_business: Invalid timer: /datum/looping_sound/proc/start_sound_loop() on /datum/looping_sound/showering"
+  "Flaky test monkey_business: Invalid timer: /datum/looping_sound/proc/start_sound_loop() on /datum/looping_sound/showering",
 );