Further improved auto-changelogs (#7600)

Authored by Selis on 2024-01-21 11:53:14 +01:00; committed by GitHub
parent fc022033d5
commit e1b5803ce2
14 changed files with 776 additions and 116 deletions

View File

@@ -16,12 +16,10 @@ qol: made something easier to use
 balance: rebalanced something
 fix: fixed a few things
 remap: tweaked something in a map
-soundadd: added audio or sound effects
-sounddel: removed audio or sound effects
-imageadd: added some icons or images
-imagedel: removed some icons or images
+sound: added/modified/removed audio or sound effects
+image: added/modified/removed some icons or images
 spellcheck: fixed a few typos
-code_imp: changed some code
+code: changed some code
 refactor: refactored some code
 config: changed some config setting
 admin: messed with admin stuff
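
For illustration (not part of the diff): the consolidated sound/image prefixes above feed into the parser added later in this commit, which drops any line still matching its template placeholder. A minimal sketch, assuming it runs next to the new tools/pull_request_hooks/changelogParser.js:

import { parseChangelog } from "./changelogParser.js";

// Hypothetical PR body: one template line left untouched, one line actually edited.
const prBody = `
My sound and sprite PR
:cl:
sound: added/modified/removed audio or sound effects
image: redrew the toolbox sprites
/:cl:
`;

const changelog = parseChangelog(prBody);
// The untouched "sound:" line matches its placeholder and is ignored;
// the edited "image:" line is kept under its canonical key.
console.log(changelog.changes.map((c) => `${c.type.changelogKey}: ${c.description}`));
// => [ "image: redrew the toolbox sprites" ]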

View File

@@ -1,90 +1,24 @@
-name: Autochangelog
+# Creates an entry in html/changelogs automatically, to eventually be compiled by compile_changelogs
+name: Auto Changelog
 on:
   pull_request_target:
-    types: closed
+    types:
+      - closed
     branches:
       - master
-env:
-  BASENAME: "chompstation"
+permissions:
+  contents: write
 jobs:
-  autochangelog:
-    name: Autochangelog
-    runs-on: ubuntu-20.04
-    permissions:
-      contents: write # to push to branch
-      pull-requests: write # to create pull requests (repo-sync/pull-request)
-    concurrency:
-      group: autochangelog_group
-      cancel-in-progress: false
-    if: github.event.pull_request.merged == true && ( contains(github.event.pull_request.body, ':cl:') || contains(github.event.pull_request.body, '🆑') )
+  auto_changelog:
+    runs-on: ubuntu-latest
+    if: github.event.pull_request.merged == true
     steps:
-      - name: Clone
-        uses: /actions/checkout@v3
-      - name: Branch or checkout
-        run: |
-          git fetch origin
-          git checkout changelog_generation 2>/dev/null || git checkout -b changelog_generation
-      - name: Ensure +x on CI directory
-        run: |
-          chmod -R +x ./tools/ci
-          chmod -R +x ./tools/GenerateChangelog/prbody.txt
-      - uses: actions/setup-python@v5
-        with:
-          python-version: '3.7'
-      - name: Write PR body to prbody.txt
-        uses: "DamianReeves/write-file-action@master"
-        with:
-          path: ./tools/GenerateChangelog/prbody.txt
-          write-mode: overwrite
-          contents: '${{ github.event.pull_request.body }}'
-      - name: Generate Changelog
-        run: |
-          pip install pyyaml
-          python tools/GenerateChangelog/ss13_autochangelog_ch.py \
-            html/changelogs_ch \
-            ${{ github.event.pull_request.number }} \
-            '${{ github.event.pull_request.user.login }}' \
-            'tools/GenerateChangelog/prbody.txt'
-          python tools/GenerateChangelog/ss13_genchangelog_ch.py \
-            html/changelogs_ch
-      - name: Empty prbody.txt
-        uses: "DamianReeves/write-file-action@master"
-        with:
-          path: ./tools/GenerateChangelog/prbody.txt
-          write-mode: overwrite
-          contents: ' '
-      - name: Commit CL
-        run: |
-          git config --local user.email "action@github.com"
-          git config --local user.name "Changelog Generation"
-          git add .
-          git commit -m "Changelog Auto-Update (`date`) [ci skip]" -a || true
-          git push -u origin changelog_generation
-      - name: Check if PR exists
-        id: check
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: |
-          prs=$(gh pr list \
-            --repo "$GITHUB_REPOSITORY" \
-            --json baseRefName,headRefName \
-            --jq '
-              map(select(.baseRefName == "master" and .headRefName == "changelog_generation"))
-              | length
-            ')
-          if ((prs > 0)); then
-            echo "skip=true" >> "$GITHUB_OUTPUT"
-          fi
-      - name: Create Pull Request
-        if: '!steps.check.outputs.skip'
-        uses: repo-sync/pull-request@v2
-        with:
-          source_branch: "changelog_generation"
-          destination_branch: "master"
-          pr_title: "Automatic Changelog Generation"
-          pr_body: "This pull request updates the changelog"
-          pr_label: "Infrastructure"
-          pr_allow_empty: false
-          github_token: ${{ secrets.GITHUB_TOKEN }}
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Run auto changelog
+        uses: actions/github-script@v6
+        with:
+          script: |
+            const { processAutoChangelog } = await import('${{ github.workspace }}/tools/pull_request_hooks/autoChangelog.js')
+            await processAutoChangelog({ github, context })
+          github-token: ${{ secrets.COMFY_ORANGE_PAT || secrets.GITHUB_TOKEN }}

View File

@@ -0,0 +1,54 @@
name: Compile changelogs
on:
schedule:
- cron: "0 0 * * *"
workflow_dispatch:
jobs:
compile:
name: "Compile changelogs"
runs-on: ubuntu-22.04
steps:
- name: "Check for ACTION_ENABLER secret and pass true to output if it exists to be checked by later steps"
id: value_holder
env:
ENABLER_SECRET: ${{ secrets.ACTION_ENABLER }}
run: |
unset SECRET_EXISTS
if [ -n "$ENABLER_SECRET" ]; then SECRET_EXISTS=true ; fi
echo "ACTIONS_ENABLED=$SECRET_EXISTS" >> $GITHUB_OUTPUT
- name: "Setup python"
if: steps.value_holder.outputs.ACTIONS_ENABLED
uses: actions/setup-python@v1
with:
python-version: '3.x'
- name: "Install deps"
if: steps.value_holder.outputs.ACTIONS_ENABLED
run: |
python -m pip install --upgrade pip
python -m pip install pyyaml
sudo apt-get install dos2unix
- name: "Checkout"
if: steps.value_holder.outputs.ACTIONS_ENABLED
uses: actions/checkout@v3
with:
fetch-depth: 25
persist-credentials: false
- name: "Compile"
if: steps.value_holder.outputs.ACTIONS_ENABLED
run: |
python tools/ss13_genchangelog.py html/changelogs
- name: Commit
if: steps.value_holder.outputs.ACTIONS_ENABLED
run: |
git config --local user.email "action@github.com"
git config --local user.name "Changelogs"
git pull origin master
git add html/changelogs
git commit -m "Automatic changelog compile [ci skip]" -a || true
- name: "Push"
if: steps.value_holder.outputs.ACTIONS_ENABLED
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.COMFY_ORANGE_PAT || secrets.GITHUB_TOKEN }}

View File

@@ -8,28 +8,30 @@ import dateformat from 'dateformat';
 import yaml from 'js-yaml';

 const icons = {
-  bugfix: { icon: 'bug', color: 'green' },
-  wip: { icon: 'hammer', color: 'orange' },
-  qol: { icon: 'hand-holding-heart', color: 'green' },
-  soundadd: { icon: 'tg-sound-plus', color: 'green' },
-  sounddel: { icon: 'tg-sound-minus', color: 'red' },
   add: { icon: 'check-circle', color: 'green' },
+  admin: { icon: 'user-shield', color: 'purple' },
+  balance: { icon: 'balance-scale-right', color: 'yellow' },
+  bugfix: { icon: 'bug', color: 'green' },
+  code_imp: { icon: 'code', color: 'green' },
+  config: { icon: 'cogs', color: 'purple' },
   expansion: { icon: 'check-circle', color: 'green' },
-  rscadd: { icon: 'check-circle', color: 'green' },
-  rscdel: { icon: 'times-circle', color: 'red' },
+  experiment: { icon: 'radiation', color: 'yellow' },
+  image: { icon: 'image', color: 'green' },
   imageadd: { icon: 'tg-image-plus', color: 'green' },
   imagedel: { icon: 'tg-image-minus', color: 'red' },
-  spellcheck: { icon: 'spell-check', color: 'green' },
-  experiment: { icon: 'radiation', color: 'yellow' },
-  balance: { icon: 'balance-scale-right', color: 'yellow' },
-  code_imp: { icon: 'code', color: 'green' },
+  qol: { icon: 'hand-holding-heart', color: 'green' },
   refactor: { icon: 'tools', color: 'green' },
-  config: { icon: 'cogs', color: 'purple' },
-  admin: { icon: 'user-shield', color: 'purple' },
+  rscadd: { icon: 'check-circle', color: 'green' },
+  rscdel: { icon: 'times-circle', color: 'red' },
   server: { icon: 'server', color: 'purple' },
+  sound: { icon: 'volume-high', color: 'green' },
+  soundadd: { icon: 'tg-sound-plus', color: 'green' },
+  sounddel: { icon: 'tg-sound-minus', color: 'red' },
+  spellcheck: { icon: 'spell-check', color: 'green' },
   tgs: { icon: 'toolbox', color: 'purple' },
   tweak: { icon: 'wrench', color: 'green' },
   unknown: { icon: 'info-circle', color: 'label' },
+  wip: { icon: 'hammer', color: 'orange' },
 };

 export class Changelog extends Component {

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,42 @@
import { parseChangelog } from "./changelogParser.js";
const safeYml = (string) =>
string.replace(/\\/g, "\\\\").replace(/"/g, '\\"').replace(/\n/g, "\\n");
export function changelogToYml(changelog, login) {
const author = changelog.author || login;
const ymlLines = [];
ymlLines.push(`author: "${safeYml(author)}"`);
ymlLines.push(`delete-after: True`);
ymlLines.push(`changes:`);
for (const change of changelog.changes) {
ymlLines.push(
` - ${change.type.changelogKey}: "${safeYml(change.description)}"`
);
}
return ymlLines.join("\n");
}
export async function processAutoChangelog({ github, context }) {
const changelog = parseChangelog(context.payload.pull_request.body);
if (!changelog || changelog.changes.length === 0) {
console.log("no changelog found");
return;
}
const yml = changelogToYml(
changelog,
context.payload.pull_request.user.login
);
github.rest.repos.createOrUpdateFileContents({
owner: context.repo.owner,
repo: context.repo.repo,
path: `html/changelogs/AutoChangeLog-pr-${context.payload.pull_request.number}.yml`,
message: `Automatic changelog for PR #${context.payload.pull_request.number} [ci skip]`,
content: Buffer.from(yml).toString("base64"),
});
}
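
A local dry-run sketch, not part of the commit, of what the workflow's github-script step ends up doing: the Octokit client is stubbed so the file that would be written through the Contents API is only printed, and the repository/user names are invented:

import { processAutoChangelog } from "./autoChangelog.js";

// Stubbed client: records the createOrUpdateFileContents call instead of hitting the API.
const github = {
  rest: {
    repos: {
      createOrUpdateFileContents: async (params) => {
        console.log(params.path);
        console.log(Buffer.from(params.content, "base64").toString("utf8"));
      },
    },
  },
};

// Minimal slice of the pull_request_target event payload that the function reads.
const context = {
  repo: { owner: "example-org", repo: "example-repo" },
  payload: {
    pull_request: {
      number: 7600,
      user: { login: "Selis" },
      body: ":cl:\nfix: fixed the changelog bot eating entries\n/:cl:",
    },
  },
};

await processAutoChangelog({ github, context });
// Prints html/changelogs/AutoChangeLog-pr-7600.yml followed by the generated YML
// (author, delete-after, and a single bugfix entry).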

View File

@@ -0,0 +1,21 @@
import assert from "node:assert/strict";
import { changelogToYml } from "./autoChangelog.js";
import { parseChangelog } from "./changelogParser.js";
assert.equal(
changelogToYml(
parseChangelog(`
My cool PR!
:cl: DenverCoder9
add: Adds new stuff
add: Adds more stuff
/:cl:
`)
),
`author: "DenverCoder9"
delete-after: True
changes:
- rscadd: "Adds new stuff"
- rscadd: "Adds more stuff"`
);
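
One further illustrative check, not in the committed test, for the login fallback: when the :cl: line carries no name, changelogToYml falls back to the login argument. The assertions stay loose about the exact indentation of the generated change lines:

import assert from "node:assert/strict";
import { changelogToYml } from "./autoChangelog.js";
import { parseChangelog } from "./changelogParser.js";

// No name after :cl:, so the author comes from the second argument.
const fallbackYml = changelogToYml(
  parseChangelog(`
:cl:
fix: Fixes the thing that was broken
/:cl:
`),
  "DenverCoder9"
);
assert.ok(fallbackYml.startsWith(`author: "DenverCoder9"`));
assert.ok(fallbackYml.includes(`- bugfix: "Fixes the thing that was broken"`));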

View File

@@ -0,0 +1,120 @@
/**
* A map of changelog phrases to meta-information.
*
* The first entry in the list is used in the changelog YML file as the key when
* used, but other than that all entries are equivalent.
*
* placeholders - The default messages, if the changelog has this then we pretend it
* doesn't exist.
*/
export const CHANGELOG_ENTRIES = [
[
["rscadd", "add", "adds"],
{
placeholders: [
"Added new mechanics or gameplay changes",
"Added more things",
],
},
],
[
["bugfix", "fix", "fixes"],
{
placeholders: ["fixed a few things"],
},
],
[
["rscdel", "del", "dels"],
{
placeholders: ["Removed old things"],
},
],
[
["qol"],
{
placeholders: ["made something easier to use"],
},
],
[
["remap"],
{
placeholders: ["tweaked something in a map"],
},
],
[
["sound"],
{
placeholders: ["added/modified/removed audio or sound effects"],
},
],
[
["image"],
{
placeholders: ["added/modified/removed some icons or images"],
},
],
[
["spellcheck", "typo"],
{
placeholders: ["fixed a few typos"],
},
],
[
["balance"],
{
placeholders: ["rebalanced something"],
},
],
[
["code_imp", "code"],
{
placeholders: ["changed some code"],
},
],
[
["refactor"],
{
placeholders: ["refactored some code"],
},
],
[
["config"],
{
placeholders: ["changed some config setting"],
},
],
[
["admin"],
{
placeholders: ["messed with admin stuff"],
},
],
[
["server"],
{
placeholders: ["something server ops should know"],
},
],
];
// Valid changelog openers
export const CHANGELOG_OPEN_TAGS = [":cl:", "🆑"];
// Valid changelog closers
export const CHANGELOG_CLOSE_TAGS = ["/:cl:", "/ :cl:", ":/cl:", "/🆑", "/ 🆑"];
// Placeholder value for an author
export const CHANGELOG_AUTHOR_PLACEHOLDER_NAME = "optional name here";
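
A small sketch, illustrative only, of the contract described in the comment at the top of this file: the first alias of each entry is the canonical key written to the changelog YML, and the remaining aliases are accepted synonyms:

import { CHANGELOG_ENTRIES } from "./changelogConfig.js";

// Resolve a PR-body prefix to the canonical changelog key, or undefined if unknown.
const canonicalKey = (prefix) => {
  for (const [aliases] of CHANGELOG_ENTRIES) {
    if (aliases.includes(prefix)) {
      return aliases[0];
    }
  }
  return undefined;
};

console.log(canonicalKey("add")); // "rscadd"
console.log(canonicalKey("typo")); // "spellcheck"
console.log(canonicalKey("sound")); // "sound" (new consolidated prefix)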

View File

@@ -0,0 +1,80 @@
import * as changelogConfig from "./changelogConfig.js";
const REGEX_CHANGELOG_LINE = /^(\w+): (.+)$/;
const CHANGELOG_KEYS_TO_ENTRY = {};
for (const [types, entry] of changelogConfig.CHANGELOG_ENTRIES) {
const entryWithChangelogKey = {
...entry,
changelogKey: types[0],
};
for (const type of types) {
CHANGELOG_KEYS_TO_ENTRY[type] = entryWithChangelogKey;
}
}
function parseChangelogBody(lines, openTag) {
const [changelogOpening] = lines.splice(0, 1);
const author =
changelogOpening.substring(openTag.length).trim() || undefined;
const changelog = {
author,
changes: [],
};
for (const line of lines) {
if (line.trim().length === 0) {
continue;
}
for (const closeTag of changelogConfig.CHANGELOG_CLOSE_TAGS) {
if (line.startsWith(closeTag)) {
return changelog;
}
}
const match = line.match(REGEX_CHANGELOG_LINE);
if (match) {
const [_, type, description] = match;
const entry = CHANGELOG_KEYS_TO_ENTRY[type];
if (entry.placeholders.includes(description)) {
continue;
}
if (entry) {
changelog.changes.push({
type: entry,
description,
});
}
} else {
const lastChange = changelog.changes[changelog.changes.length - 1];
if (lastChange) {
lastChange.description += `\n${line}`;
}
}
}
return changelog;
}
export function parseChangelog(text) {
const lines = text.split("\n").map((line) => line.trim());
for (let index = 0; index < lines.length; index++) {
const line = lines[index];
for (const openTag of changelogConfig.CHANGELOG_OPEN_TAGS) {
if (line.startsWith(openTag)) {
return parseChangelogBody(lines.slice(index), openTag);
}
}
}
return undefined;
}

View File

@@ -0,0 +1,72 @@
import { strict as assert } from "node:assert";
import { parseChangelog } from "./changelogParser.js";
// Basic test
const basicChangelog = parseChangelog(`
My cool PR!
:cl: DenverCoder9
add: Adds new stuff
/:cl:
`);
assert.equal(basicChangelog.author, "DenverCoder9");
assert.equal(basicChangelog.changes.length, 1);
assert.equal(basicChangelog.changes[0].type.changelogKey, "rscadd");
assert.equal(basicChangelog.changes[0].description, "Adds new stuff");
// Multi-line test
const multiLineChangelog = parseChangelog(`
My cool PR!
:cl:
add: Adds new stuff
to the game
/:cl:
`);
assert.equal(multiLineChangelog.author, undefined);
assert.equal(multiLineChangelog.changes.length, 1);
assert.equal(multiLineChangelog.changes[0].type.changelogKey, "rscadd");
assert.equal(
multiLineChangelog.changes[0].description,
"Adds new stuff\nto the game"
);
// Placeholders
const placeholderChangelog = parseChangelog(`
My cool PR!
:cl:
add: Added new mechanics or gameplay changes
/:cl:
`);
assert.equal(placeholderChangelog.changes.length, 0);
// No changelog
const noChangelog = parseChangelog(`
My cool PR!
`);
assert.equal(noChangelog, undefined);
// No /:cl:
const noCloseChangelog = parseChangelog(`
My cool PR!
:cl:
add: Adds new stuff
`);
assert.equal(noCloseChangelog.changes.length, 1);
assert.equal(noCloseChangelog.changes[0].type.changelogKey, "rscadd");
assert.equal(noCloseChangelog.changes[0].description, "Adds new stuff");
// :cl: with arbitrary text
const arbitraryTextChangelog = parseChangelog(`
My cool PR!
:cl:
Adds new stuff
/:cl:
`);
assert.equal(arbitraryTextChangelog.changes.length, 0);

View File

@@ -0,0 +1,3 @@
{
"type": "module"
}

View File

@@ -0,0 +1,293 @@
const LABEL = "🤖 Flaky Test Report";
const TITLE_BOT_HEADER = "title: ";
// Only check jobs that start with these.
// Helps make sure we don't restart something like screenshot tests or linters, which are not known to be flaky.
const CONSIDERED_JOBS = [
"Integration Tests",
];
async function getFailedJobsForRun(github, context, workflowRunId, runAttempt) {
const {
data: { jobs },
} = await github.rest.actions.listJobsForWorkflowRunAttempt({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: workflowRunId,
attempt_number: runAttempt,
});
return jobs
.filter((job) => job.conclusion === "failure")
.filter((job) =>
CONSIDERED_JOBS.some((title) => job.name.startsWith(title))
);
}
export async function rerunFlakyTests({ github, context }) {
const failingJobs = await getFailedJobsForRun(
github,
context,
context.payload.workflow_run.id,
context.payload.workflow_run.run_attempt
);
if (failingJobs.length > 1) {
console.log("Multiple jobs failing. PROBABLY not flaky, not rerunning.");
return;
}
if (failingJobs.length === 0) {
throw new Error(
"rerunFlakyTests should not have run on a run with no failing jobs"
);
}
github.rest.actions.reRunWorkflowFailedJobs({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: context.payload.workflow_run.id,
});
}
// Tries its best to extract a useful error title and message for the given log
export function extractDetails(log) {
// Strip off timestamp
const lines = log.split(/^[0-9.:T\-]*?Z /gm);
const failureRegex = /^\t?FAILURE #(?<number>[0-9]+): (?<headline>.+)/;
const groupRegex = /^##\[group\](?<group>.+)/;
const failures = [];
let lastGroup = "root";
let loggingFailure;
const newFailure = (failureMatch) => {
const { headline } = failureMatch.groups;
loggingFailure = {
headline,
group: lastGroup.replace("/datum/unit_test/", ""),
details: [],
};
};
for (const line of lines) {
const groupMatch = line.match(groupRegex);
if (groupMatch) {
lastGroup = groupMatch.groups.group.trim();
continue;
}
const failureMatch = line.match(failureRegex);
if (loggingFailure === undefined) {
if (!failureMatch) {
continue;
}
newFailure(failureMatch);
} else if (failureMatch || line.startsWith("##")) {
failures.push(loggingFailure);
loggingFailure = undefined;
if (failureMatch) {
newFailure(failureMatch);
}
} else {
loggingFailure.details.push(line.trim());
}
}
// We had no logged failures, there's not really anything we can do here
if (failures.length === 0) {
return {
title: "Flaky test failure with no obvious source",
failures,
};
}
// We *could* create multiple failures for multiple groups.
// This would be important if we had multiple flaky tests at the same time.
// I'm choosing not to because it complicates this logic a bit, has the ability to go terribly wrong,
// and also because there's something funny to me about that increasing the urgency of fixing
// flaky tests. If it becomes a serious issue though, I would not mind this being fixed.
const uniqueGroups = new Set(failures.map((failure) => failure.group));
if (uniqueGroups.size > 1) {
return {
title: `Multiple flaky test failures in ${Array.from(uniqueGroups)
.sort()
.join(", ")}`,
failures,
};
}
const failGroup = failures[0].group;
if (failures.length > 1) {
return {
title: `Multiple errors in flaky test ${failGroup}`,
failures,
};
}
const failure = failures[0];
// Common patterns where we can always get a detailed title
const runtimeMatch = failure.headline.match(/Runtime in .+?: (?<error>.+)/);
if (runtimeMatch) {
const runtime = runtimeMatch.groups.error.trim();
const invalidTimerMatch = runtime.match(/^Invalid timer:.+object:(?<object>[^[]+).*delegate:(?<proc>.+?), source:/);
if (invalidTimerMatch) {
return {
title: `Flaky test ${failGroup}: Invalid timer: ${invalidTimerMatch.groups.proc.trim()} on ${invalidTimerMatch.groups.object.trim()}`,
failures,
};
}
return {
title: `Flaky test ${failGroup}: ${runtime}`,
failures,
};
}
const hardDelMatch = failure.headline.match(/^(?<object>\/[\w/]+) hard deleted .* times out of a total del count of/);
if (hardDelMatch) {
return {
title: `Flaky hard delete: ${hardDelMatch.groups.object}`,
failures,
};
}
// Try to normalize the title and remove anything that might be variable
const normalizedError = failure.headline.replace(/\s*at .+?:[0-9]+.*/g, ""); // "<message> at code.dm:123"
return {
title: `Flaky test ${failGroup}: ${normalizedError}`,
failures,
};
}
async function getExistingIssueId(graphql, context, title) {
// Hope you never have more than 100 of these open!
const {
repository: {
issues: { nodes: openFlakyTestIssues },
},
} = await graphql(
`
query ($owner: String!, $repo: String!, $label: String!) {
repository(owner: $owner, name: $repo) {
issues(
labels: [$label]
first: 100
orderBy: { field: CREATED_AT, direction: DESC }
states: [OPEN]
) {
nodes {
number
title
body
}
}
}
}
`,
{
owner: context.repo.owner,
repo: context.repo.repo,
label: LABEL,
}
);
const exactTitle = openFlakyTestIssues.find((issue) => issue.title === title);
if (exactTitle !== undefined) {
return exactTitle.number;
}
const foundInBody = openFlakyTestIssues.find((issue) =>
issue.body.includes(`<!-- ${TITLE_BOT_HEADER}${exactTitle} -->`)
);
if (foundInBody !== undefined) {
return foundInBody.number;
}
return undefined;
}
function createBody({ title, failures }, runUrl) {
return `
<!-- This issue can be renamed, but do not change the next comment! -->
<!-- title: ${title} -->
Flaky tests were detected in [this test run](${runUrl}). This means that there was a failure that was cleared when the tests were simply restarted.
Failures:
\`\`\`
${failures
.map(
(failure) =>
`${failure.group}: ${failure.headline}\n\t${failure.details.join("\n")}`
)
.join("\n")}
\`\`\`
`.replace(/^\s*/gm, "");
}
export async function reportFlakyTests({ github, context }) {
const failedJobsFromLastRun = await getFailedJobsForRun(
github,
context,
context.payload.workflow_run.id,
context.payload.workflow_run.run_attempt - 1
);
// This could one day be relaxed if we face serious enough flaky test problems, so we're going to loop anyway
if (failedJobsFromLastRun.length !== 1) {
console.log(
"Multiple jobs failing after retry, assuming maintainer rerun."
);
return;
}
for (const job of failedJobsFromLastRun) {
const { data: log } =
await github.rest.actions.downloadJobLogsForWorkflowRun({
owner: context.repo.owner,
repo: context.repo.repo,
job_id: job.id,
});
const details = extractDetails(log);
const existingIssueId = await getExistingIssueId(
github.graphql,
context,
details.title
);
if (existingIssueId !== undefined) {
// Maybe in the future, if it's helpful, update the existing issue with new links
console.log(`Existing issue found: #${existingIssueId}`);
return;
}
await github.rest.issues.create({
owner: context.repo.owner,
repo: context.repo.repo,
title: details.title,
labels: [LABEL],
body: createBody(
details,
`https://github.com/${context.repo.owner}/${
context.repo.repo
}/actions/runs/${context.payload.workflow_run.id}/attempts/${
context.payload.workflow_run.run_attempt - 1
}`
),
});
}
}
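
A synthetic sketch of extractDetails (the log content below is invented, not from a real run): timestamps are stripped, ##[group] headers supply the failing unit test's name, and FAILURE lines become the issue title:

import { extractDetails } from "./rerunFlakyTests.js";

// Fake Actions job log with the "<ISO timestamp>Z " prefix the function strips.
const log = [
  "2024-01-21T10:00:00.0000000Z ##[group]/datum/unit_test/monkey_business",
  "2024-01-21T10:00:01.0000000Z \tFAILURE #1: Runtime in monkey.dm: Cannot execute null.resolve().",
  "2024-01-21T10:00:02.0000000Z \tsome additional context from the test",
  "2024-01-21T10:00:03.0000000Z ##[endgroup]",
].join("\n");

const { title, failures } = extractDetails(log);
console.log(title); // "Flaky test monkey_business: Cannot execute null.resolve()."
console.log(failures[0].details); // [ "some additional context from the test" ]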

View File

@@ -0,0 +1,45 @@
import { strict as assert } from "node:assert";
import fs from "node:fs";
import { extractDetails } from "./rerunFlakyTests.js";
function extractDetailsFromPayload(filename) {
return extractDetails(
fs.readFileSync(`tests/flakyTestPayloads/${filename}.txt`, {
encoding: "utf8",
})
);
}
const chatClient = extractDetailsFromPayload("chat_client");
assert.equal(
chatClient.title,
"Flaky hard delete: /datum/computer_file/program/chatclient"
);
assert.equal(chatClient.failures.length, 1);
const monkeyBusiness = extractDetailsFromPayload("monkey_business");
assert.equal(
monkeyBusiness.title,
"Flaky test monkey_business: Cannot execute null.resolve()."
);
assert.equal(monkeyBusiness.failures.length, 1);
const shapeshift = extractDetailsFromPayload("shapeshift");
assert.equal(
shapeshift.title,
"Multiple errors in flaky test shapeshift_spell"
);
assert.equal(shapeshift.failures.length, 16);
const multipleFailures = extractDetailsFromPayload("multiple_failures");
assert.equal(
multipleFailures.title,
"Multiple flaky test failures in more_shapeshift_spell, shapeshift_spell"
);
assert.equal(multipleFailures.failures.length, 2);
const invalidTimer = extractDetailsFromPayload("invalid_timer");
assert.equal(
invalidTimer.title,
"Flaky test monkey_business: Invalid timer: /datum/looping_sound/proc/start_sound_loop() on /datum/looping_sound/showering"
);

View File

@@ -1,6 +1,6 @@
 '''
 Usage:
-  $ python ss13_genchangelog.py html/changelogs_ch/
+  $ python ss13_genchangelog.py html/changelogs/

 ss13_genchangelog.py - Generate changelog from YAML.
@@ -27,7 +27,7 @@ THE SOFTWARE.
 from __future__ import print_function

 import yaml, os, glob, sys, re, time, argparse
-from datetime import datetime, date
+from datetime import datetime, date, timedelta
 from time import time

 today = date.today()
@@ -61,7 +61,9 @@ validPrefixes = [
     'refactor',
     'config',
     'admin',
-    'server'
+    'server',
+    'sound',
+    'image',
 ]

 def dictToTuples(inp):
@@ -69,7 +71,6 @@ def dictToTuples(inp):
 old_changelog_cache = os.path.join(args.ymlDir, '.all_changelog.yml')
-failed_cache_read = True
 if os.path.isfile(old_changelog_cache):
     try:
         print('Reading old changelog cache...')
@@ -103,7 +104,6 @@ if os.path.isfile(old_changelog_cache):
print("Failed to read old changelog cache:") print("Failed to read old changelog cache:")
print(e, file=sys.stderr) print(e, file=sys.stderr)
errors = False
print('Reading changelogs...') print('Reading changelogs...')
for fileName in glob.glob(os.path.join(args.ymlDir, "*.yml")): for fileName in glob.glob(os.path.join(args.ymlDir, "*.yml")):
name, ext = os.path.splitext(os.path.basename(fileName)) name, ext = os.path.splitext(os.path.basename(fileName))
@@ -111,12 +111,10 @@ for fileName in glob.glob(os.path.join(args.ymlDir, "*.yml")):
     if name == 'example': continue
     fileName = os.path.abspath(fileName)
     formattedDate = today.strftime(fileDateFormat)
-    if not os.path.exists(archiveDir):
-        os.makedirs(archiveDir)
     monthFile = os.path.join(archiveDir, formattedDate + '.yml')
     print(' Reading {}...'.format(fileName))
     cl = {}
-    with open(fileName, 'r') as f:
+    with open(fileName, 'r',encoding='utf-8') as f:
         cl = yaml.load(f, Loader=yaml.SafeLoader)
     currentEntries = {}
     if os.path.exists(monthFile):
@@ -131,8 +129,8 @@ for fileName in glob.glob(os.path.join(args.ymlDir, "*.yml")):
             if change not in author_entries:
                 (change_type, _) = dictToTuples(change)[0]
                 if change_type not in validPrefixes:
-                    errors = True
                     print(' {0}: Invalid prefix {1}'.format(fileName, change_type), file=sys.stderr)
+                    sys.exit(1)
                 author_entries += [change]
                 new += 1
         currentEntries[today][cl['author']] = author_entries
@@ -144,8 +142,6 @@ for fileName in glob.glob(os.path.join(args.ymlDir, "*.yml")):
         print(' Deleting {0} (delete-after set)...'.format(fileName))
         os.remove(fileName)

-    with open(monthFile, 'w+', encoding='utf-8') as f:
+    with open(monthFile, 'w', encoding='utf-8') as f:
         yaml.dump(currentEntries, f, default_flow_style=False)
-
-if errors:
-    sys.exit(1)