Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 13 additions & 12 deletions .github/scripts/apply_branch_protection.sh
Original file line number Diff line number Diff line change
Expand Up @@ -21,18 +21,19 @@ cat >"$payload_file" <<JSON
{
"required_status_checks": {
"strict": true,
"contexts": [
"CI / test",
"CI / staticcheck",
"CI / openapi-contract",
"CI / docker-build",
"CI / helm-lint",
"CI / security-gosec",
"CI / security-govulncheck",
"CI / security-trivy",
"CI / security-sbom",
"CI / perf-smoke",
"CI / integration"
"checks": [
{"context": "test", "app_id": 15368},
{"context": "staticcheck", "app_id": 15368},
{"context": "openapi-contract", "app_id": 15368},
{"context": "config-lint", "app_id": 15368},
{"context": "docker-build", "app_id": 15368},
{"context": "helm-lint", "app_id": 15368},
{"context": "security-gosec", "app_id": 15368},
{"context": "security-govulncheck", "app_id": 15368},
{"context": "security-trivy", "app_id": 15368},
{"context": "security-sbom", "app_id": 15368},
{"context": "perf-smoke", "app_id": 15368},
{"context": "integration", "app_id": 15368}
]
},
"enforce_admins": true,
Expand Down
124 changes: 124 additions & 0 deletions .github/scripts/collect_flake_metrics.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,124 @@
#!/usr/bin/env bash
set -euo pipefail

# Abort with a diagnostic on stderr when a required executable is not on PATH.
require_bin() {
local tool="$1"
command -v "${tool}" >/dev/null 2>&1 && return 0
echo "Missing dependency: ${tool}" >&2
exit 1
}

require_bin gh
require_bin jq

# Mandatory environment: API token, owner/repo slug, and the triggering run ID.
[[ -n "${GH_TOKEN:-}" ]] || { echo "GH_TOKEN is required" >&2; exit 1; }
[[ -n "${REPO:-}" ]] || { echo "REPO is required (owner/repo)" >&2; exit 1; }
[[ -n "${RUN_ID:-}" ]] || { echo "RUN_ID is required" >&2; exit 1; }

# Tunables, each overridable via the environment.
output_dir="${OUTPUT_DIR:-flake-metrics}"
workflow_file="${WORKFLOW_FILE:-ci.yml}"
branch="${BRANCH:-main}"
trend_limit="${TREND_RUN_LIMIT:-30}"

mkdir -p "${output_dir}"

# Scratch files for API responses; the trap removes them on every exit path.
jobs_file="$(mktemp)"
runs_file="$(mktemp)"
trap 'rm -f "${jobs_file}" "${runs_file}"' EXIT

# Current run snapshot for integration/perf-smoke status and timing.
# NOTE(review): per_page=100 caps the job listing; runs with more jobs would be
# truncated — presumably acceptable since only two job names are extracted.
gh api "repos/${REPO}/actions/runs/${RUN_ID}/jobs?per_page=100" >"${jobs_file}"

# Emit ${output_dir}/current.json: run metadata plus only the "integration" and
# "perf-smoke" jobs, each with duration_seconds computed from the ISO-8601
# started_at/completed_at timestamps (null when either timestamp is missing).
# RUN_ID goes through --argjson, so it must be valid JSON (a bare number);
# jq exits non-zero otherwise and `set -e` aborts the script.
jq -n \
--arg generated_at "$(date -u +%Y-%m-%dT%H:%M:%SZ)" \
--arg repo "${REPO}" \
--argjson run_id "${RUN_ID}" \
--arg run_url "https://github.com/${REPO}/actions/runs/${RUN_ID}" \
--slurpfile jobs "${jobs_file}" \
'{
generated_at: $generated_at,
repository: $repo,
run_id: $run_id,
run_url: $run_url,
jobs: ($jobs[0].jobs
| map(select(.name == "integration" or .name == "perf-smoke")
| {
name: .name,
status: .status,
conclusion: .conclusion,
started_at: .started_at,
completed_at: .completed_at,
duration_seconds: (if (.started_at != null and .completed_at != null)
then ((.completed_at | fromdateiso8601) - (.started_at | fromdateiso8601))
else null
end)
}))
}' >"${output_dir}/current.json"

printf 'run_id,run_number,run_created_at,run_conclusion,run_url,job_name,job_conclusion,job_started_at,job_completed_at,duration_seconds\n' >"${output_dir}/trend.csv"

# Trend window over recent CI runs on main.
gh api "repos/${REPO}/actions/workflows/${workflow_file}/runs?branch=${branch}&per_page=${trend_limit}" >"${runs_file}"

# One scratch file, truncated each iteration. Previously a fresh mktemp file
# was allocated per run and removed manually at the bottom of the loop, which
# leaked the file whenever gh/jq failed mid-loop under `set -e` — the
# script-level EXIT trap does not know about it. Failure paths now remove it
# explicitly before exiting.
run_jobs_file="$(mktemp)"

while IFS=$'\t' read -r run_id run_number run_created_at run_conclusion run_url; do
if ! gh api "repos/${REPO}/actions/runs/${run_id}/jobs?per_page=100" >"${run_jobs_file}"; then
rm -f "${run_jobs_file}"
echo "Failed to fetch jobs for run ${run_id}" >&2
exit 1
fi

# One CSV row per tracked job; duration is empty when timestamps are missing.
jq -r \
--arg run_id "${run_id}" \
--arg run_number "${run_number}" \
--arg run_created_at "${run_created_at}" \
--arg run_conclusion "${run_conclusion}" \
--arg run_url "${run_url}" \
'.jobs[]
| select(.name == "integration" or .name == "perf-smoke")
| [
$run_id,
$run_number,
$run_created_at,
$run_conclusion,
$run_url,
.name,
(.conclusion // "unknown"),
(.started_at // ""),
(.completed_at // ""),
(if (.started_at != null and .completed_at != null)
then (((.completed_at | fromdateiso8601) - (.started_at | fromdateiso8601)) | tostring)
else ""
end)
]
| @csv' "${run_jobs_file}" >>"${output_dir}/trend.csv" || { rm -f "${run_jobs_file}"; exit 1; }
done < <(jq -r '.workflow_runs[] | [.id, .run_number, .created_at, .conclusion, .html_url] | @tsv' "${runs_file}")

rm -f "${run_jobs_file}"

# Write a manifest tying together the snapshot and trend artifacts above.
generated_at="$(date -u +%Y-%m-%dT%H:%M:%SZ)"

jq -n \
--arg generated_at "${generated_at}" \
--arg repo "${REPO}" \
--arg branch "${branch}" \
--arg workflow_file "${workflow_file}" \
--arg current_json "${output_dir}/current.json" \
--arg trend_csv "${output_dir}/trend.csv" \
--argjson run_id "${RUN_ID}" \
'{
generated_at: $generated_at,
repository: $repo,
branch: $branch,
source_run_id: $run_id,
workflow_file: $workflow_file,
artifacts: {
current: $current_json,
trend: $trend_csv
}
}' >"${output_dir}/manifest.json"

echo "Flake metrics written to ${output_dir}"
99 changes: 99 additions & 0 deletions .github/scripts/create_ci_failure_issue.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
#!/usr/bin/env bash
set -euo pipefail

# Abort with a diagnostic on stderr when a required executable is not on PATH.
require_bin() {
local tool="$1"
command -v "${tool}" >/dev/null 2>&1 && return 0
echo "Missing dependency: ${tool}" >&2
exit 1
}

require_bin gh
require_bin jq

# Mandatory environment: API token, owner/repo slug, and the failed run's ID.
[[ -n "${GH_TOKEN:-}" ]] || { echo "GH_TOKEN is required" >&2; exit 1; }
[[ -n "${REPO:-}" ]] || { echo "REPO is required (owner/repo)" >&2; exit 1; }
[[ -n "${RUN_ID:-}" ]] || { echo "RUN_ID is required" >&2; exit 1; }

# Scratch space: run/jobs JSON, the failed-job list, the issue body, and a
# directory for per-job logs — all removed on any exit path by the trap.
run_file="$(mktemp)"
jobs_file="$(mktemp)"
failed_file="$(mktemp)"
body_file="$(mktemp)"
work_dir="$(mktemp -d)"
trap 'rm -rf -- "${run_file}" "${jobs_file}" "${failed_file}" "${body_file}" "${work_dir}"' EXIT

gh api "repos/${REPO}/actions/runs/${RUN_ID}" >"${run_file}"
gh api "repos/${REPO}/actions/runs/${RUN_ID}/jobs?per_page=100" >"${jobs_file}"

# TSV of failed jobs: id <TAB> name <TAB> html_url.
jq -r '.jobs[] | select(.conclusion == "failure") | [.id, .name, .html_url] | @tsv' "${jobs_file}" >"${failed_file}"

# Nothing failed, nothing to report.
[[ -s "${failed_file}" ]] || { echo "No failed jobs found for run ${RUN_ID}; skipping issue creation"; exit 0; }

title="CI failure on main"
# Reuse an open report issue when one exists so repeated failures thread into
# a single place instead of spamming new issues.
existing_issue="$(gh issue list -R "${REPO}" --state open --search "\"${title}\" in:title" --json number --jq '.[0].number // empty')"

# Ensure label exists for triage routing.
gh label create ci-failure -R "${REPO}" --description "Automated CI failure reports on main" --color B60205 2>/dev/null || true

run_url="$(jq -r '.html_url' "${run_file}")"
head_sha="$(jq -r '.head_sha' "${run_file}")"
run_attempt="$(jq -r '.run_attempt' "${run_file}")"
created_at="$(jq -r '.created_at' "${run_file}")"

# Issue body header. Markdown backticks inside double quotes must be escaped
# as \` — the previous \\` emitted a literal backslash and then *started a
# command substitution*, corrupting the rendered body.
{
echo 'Automated CI failure report for a `main` push run.'
echo
echo "- Run: ${run_url}"
echo "- Run ID: ${RUN_ID}"
echo "- Attempt: ${run_attempt}"
echo "- Head SHA: \`${head_sha}\`"
echo "- Created At: ${created_at}"
echo
echo "## Failed Jobs"
while IFS=$'\t' read -r job_id job_name job_url; do
echo "- [${job_name}](${job_url}) (job id: ${job_id})"
done <"${failed_file}"
} >"${body_file}"

# Append a collapsible truncated log section per failed job.
while IFS=$'\t' read -r job_id job_name _job_url; do
log_file="${work_dir}/job-${job_id}.log"
# NOTE(review): `gh run view` rejects a run ID argument combined with --job;
# the job ID alone identifies the run, so the positional RUN_ID is dropped.
# Prefer only the failed steps' log; fall back to the full job log.
if ! gh run view -R "${REPO}" --job "${job_id}" --log-failed >"${log_file}" 2>/dev/null; then
gh run view -R "${REPO}" --job "${job_id}" --log >"${log_file}" 2>/dev/null || true
fi

{
echo
echo "<details><summary>${job_name} failed log (truncated)</summary>"
echo
# Fence lines must be literal ``` — the previous \\` escaping opened a stray
# command substitution that swallowed the lines between the fences.
echo '```text'
if [[ -s "${log_file}" ]]; then
# Cap at 220 lines and strip CR so the fence renders cleanly.
head -n 220 "${log_file}" | sed 's/\r$//'
else
echo "No log output available for job ${job_id}."
fi
echo '```'
echo
echo "</details>"
} >>"${body_file}"
done <"${failed_file}"

# Thread into the existing open report when present; otherwise open a new one.
if [[ -z "${existing_issue}" ]]; then
issue_url="$(gh issue create -R "${REPO}" --title "${title}" --body-file "${body_file}" --label ci-failure)"
echo "Created CI failure issue: ${issue_url}"
else
gh issue comment "${existing_issue}" -R "${REPO}" --body-file "${body_file}" >/dev/null
echo "Added CI failure details to existing issue #${existing_issue}"
fi
Loading