|
#!/usr/bin/env python3

"""Submit kOps AI conformance results to cncf/k8s-ai-conformance.

Usage:
    dev/tasks/submit-ai-conformance [--submit] <artifacts-url>

By default, runs in dry-run mode. Pass --submit to actually create the PR.

Examples:
    dev/tasks/submit-ai-conformance https://gcsweb.k8s.io/gcs/kubernetes-ci-logs/logs/e2e-kops-ai-conformance/2034963660111089664/artifacts/
    dev/tasks/submit-ai-conformance --submit https://gcsweb.k8s.io/gcs/kubernetes-ci-logs/logs/e2e-kops-ai-conformance/2034963660111089664/artifacts/
"""
| 14 | + |
| 15 | +import os |
| 16 | +import re |
| 17 | +import shutil |
| 18 | +import subprocess |
| 19 | +import sys |
| 20 | +import tempfile |
| 21 | +import urllib.request |
| 22 | + |
| 23 | +import yaml |
| 24 | + |
| 25 | + |
| 26 | +JOB_NAME = "e2e-kops-ai-conformance" |
| 27 | +GCS_BUCKET = "kubernetes-ci-logs" |
| 28 | +CONFORMANCE_REPO = "cncf/k8s-ai-conformance" |
| 29 | +KOPS_DIR_NAME = "kops" |
| 30 | + |
| 31 | + |
| 32 | +def run(cmd, **kwargs): |
| 33 | + """Run a command, printing it first.""" |
| 34 | + print(f"+ {' '.join(cmd)}") |
| 35 | + return subprocess.run(cmd, check=True, **kwargs) |
| 36 | + |
| 37 | + |
| 38 | +def capture(cmd, **kwargs): |
| 39 | + """Run a command and return its stdout.""" |
| 40 | + result = subprocess.run(cmd, capture_output=True, text=True, **kwargs) |
| 41 | + if result.returncode != 0: |
| 42 | + print(result.stdout, end="", file=sys.stderr) |
| 43 | + print(result.stderr, end="", file=sys.stderr) |
| 44 | + result.check_returncode() |
| 45 | + return result.stdout.strip() |
| 46 | + |
| 47 | + |
| 48 | +def gsutil_cp(src, dst): |
| 49 | + run(["gsutil", "-m", "cp", "-r", src, dst]) |
| 50 | + |
| 51 | + |
| 52 | +def parse_build_id(input_str): |
| 53 | + """Extract the build ID from a URL or raw ID.""" |
| 54 | + if re.fullmatch(r"\d+", input_str): |
| 55 | + return input_str |
| 56 | + m = re.search(rf"logs/{JOB_NAME}/(\d+)", input_str) |
| 57 | + if m: |
| 58 | + return m.group(1) |
| 59 | + print(f"ERROR: Cannot parse build ID from: {input_str}", file=sys.stderr) |
| 60 | + print("Provide either a build ID (e.g. 2034963660111089664) or a GCS URL.", file=sys.stderr) |
| 61 | + sys.exit(1) |
| 62 | + |
| 63 | + |
| 64 | +def download_artifacts(build_id, tmpdir): |
| 65 | + """Download ai-conformance.yaml and test evidence from GCS.""" |
| 66 | + gcs_prefix = f"gs://{GCS_BUCKET}/logs/{JOB_NAME}/{build_id}/artifacts" |
| 67 | + print(f"Downloading artifacts from {gcs_prefix}...") |
| 68 | + gsutil_cp(f"{gcs_prefix}/ai-conformance.yaml", f"{tmpdir}/ai-conformance.yaml") |
| 69 | + |
| 70 | + # List test evidence files and download only the .md ones. |
| 71 | + listing = capture(["gsutil", "ls", "-r", f"{gcs_prefix}/tests/"]) |
| 72 | + for line in listing.splitlines(): |
| 73 | + line = line.strip() |
| 74 | + if not line.endswith("/output.md"): |
| 75 | + continue |
| 76 | + # e.g. gs://.../artifacts/tests/TestFoo/output.md -> tests/TestFoo/output.md |
| 77 | + rel = line.split("/artifacts/", 1)[1] |
| 78 | + dest = os.path.join(tmpdir, rel) |
| 79 | + os.makedirs(os.path.dirname(dest), exist_ok=True) |
| 80 | + gsutil_cp(line, dest) |
| 81 | + |
| 82 | + |
| 83 | +def download_template(kube_minor, tmpdir): |
| 84 | + """Download the official conformance template for this k8s version.""" |
| 85 | + url = f"https://raw.githubusercontent.com/{CONFORMANCE_REPO}/main/docs/AIConformance-{kube_minor}.yaml" |
| 86 | + print(f"Downloading conformance template from {url}...") |
| 87 | + dest = os.path.join(tmpdir, "template.yaml") |
| 88 | + urllib.request.urlretrieve(url, dest) |
| 89 | + return dest |
| 90 | + |
| 91 | + |
| 92 | +def load_yaml(path): |
| 93 | + with open(path) as f: |
| 94 | + return yaml.safe_load(f) |
| 95 | + |
| 96 | + |
| 97 | +def build_product_yaml(template, results): |
| 98 | + """Merge our test results into the conformance template.""" |
| 99 | + # Build a lookup of our results by (category, id). |
| 100 | + results_lookup = {} |
| 101 | + for category, items in results.get("spec", {}).items(): |
| 102 | + for item in items: |
| 103 | + results_lookup[(category, item["id"])] = item |
| 104 | + |
| 105 | + # Merge metadata: start with template, overlay our results. |
| 106 | + metadata = template["metadata"].copy() |
| 107 | + metadata.update(results["metadata"]) |
| 108 | + |
| 109 | + # Fill in defaults for kOps. |
| 110 | + if not metadata.get("contactEmailAddress") or metadata["contactEmailAddress"].startswith("["): |
| 111 | + metadata["contactEmailAddress"] = "sig-cluster-lifecycle@kubernetes.io" |
| 112 | + if not metadata.get("k8sConformanceUrl") or metadata["k8sConformanceUrl"].startswith("["): |
| 113 | + kube_minor = metadata["kubernetesVersion"].lstrip("v").rsplit(".", 1)[0] |
| 114 | + metadata["k8sConformanceUrl"] = f"https://github.com/cncf/k8s-conformance/tree/master/v{kube_minor}/kops" |
| 115 | + |
| 116 | + # Build merged spec: template structure with our results filled in. |
| 117 | + spec = {} |
| 118 | + for category, template_items in template.get("spec", {}).items(): |
| 119 | + spec[category] = [] |
| 120 | + for tmpl_item in template_items: |
| 121 | + merged = { |
| 122 | + "id": tmpl_item["id"], |
| 123 | + "description": tmpl_item["description"], |
| 124 | + "level": tmpl_item["level"], |
| 125 | + } |
| 126 | + |
| 127 | + result = results_lookup.get((category, tmpl_item["id"])) |
| 128 | + if result: |
| 129 | + merged["status"] = result.get("status", "") |
| 130 | + # Convert evidence paths: prefer .md over .html for GitHub rendering. |
| 131 | + evidence = [] |
| 132 | + for e in result.get("evidence", []): |
| 133 | + if e.startswith("tests/"): |
| 134 | + evidence.append(e.replace("/output.html", "/output.md")) |
| 135 | + else: |
| 136 | + evidence.append(e) |
| 137 | + merged["evidence"] = evidence |
| 138 | + merged["notes"] = result.get("notes", "") |
| 139 | + else: |
| 140 | + # Not in our results. |
| 141 | + if tmpl_item["level"] == "SHOULD": |
| 142 | + merged["status"] = "N/A" |
| 143 | + merged["evidence"] = [] |
| 144 | + merged["notes"] = "Not applicable for kOps at this time." |
| 145 | + else: |
| 146 | + merged["status"] = "" |
| 147 | + merged["evidence"] = [] |
| 148 | + merged["notes"] = "" |
| 149 | + |
| 150 | + spec[category].append(merged) |
| 151 | + |
| 152 | + return {"metadata": metadata, "spec": spec} |
| 153 | + |
| 154 | + |
| 155 | +def write_product_yaml(data, path): |
| 156 | + """Write PRODUCT.yaml with the standard header.""" |
| 157 | + class Dumper(yaml.Dumper): |
| 158 | + pass |
| 159 | + |
| 160 | + def str_representer(dumper, s): |
| 161 | + if "\n" in s: |
| 162 | + return dumper.represent_scalar("tag:yaml.org,2002:str", s, style="|") |
| 163 | + return dumper.represent_scalar("tag:yaml.org,2002:str", s) |
| 164 | + |
| 165 | + Dumper.add_representer(str, str_representer) |
| 166 | + |
| 167 | + header = ( |
| 168 | + "# Kubernetes AI Conformance Checklist\n" |
| 169 | + "# Notes: This checklist is based on the Kubernetes AI Conformance document.\n" |
| 170 | + "# Participants should fill in the 'status', 'evidence', and 'notes' fields for each requirement.\n\n" |
| 171 | + ) |
| 172 | + with open(path, "w") as f: |
| 173 | + f.write(header) |
| 174 | + yaml.dump(data, f, Dumper=Dumper, default_flow_style=False, sort_keys=False, width=200) |
| 175 | + |
| 176 | + print(f"Wrote {path}") |
| 177 | + |
| 178 | + |
| 179 | +def copy_evidence(tmpdir, submit_dir): |
| 180 | + """Copy .md evidence files into the submission directory.""" |
| 181 | + tests_src = os.path.join(tmpdir, "tests") |
| 182 | + if not os.path.isdir(tests_src): |
| 183 | + return |
| 184 | + for root, _dirs, files in os.walk(tests_src): |
| 185 | + for fname in files: |
| 186 | + if fname == "output.md": |
| 187 | + src = os.path.join(root, fname) |
| 188 | + rel = os.path.relpath(src, tmpdir) # e.g. tests/TestFoo/output.md |
| 189 | + dst = os.path.join(submit_dir, rel) |
| 190 | + os.makedirs(os.path.dirname(dst), exist_ok=True) |
| 191 | + shutil.copy2(src, dst) |
| 192 | + |
| 193 | + |
| 194 | +def create_pr(tmpdir, submit_dir, kube_minor, kube_version, platform_version, build_id): |
| 195 | + """Clone the conformance repo, commit the submission, and open a PR.""" |
| 196 | + github_user = os.environ.get("GITHUB_USER", os.environ.get("USER", "")) |
| 197 | + if not github_user: |
| 198 | + print("ERROR: Set GITHUB_USER or USER environment variable.", file=sys.stderr) |
| 199 | + sys.exit(1) |
| 200 | + |
| 201 | + clone_dir = os.path.join(tmpdir, "k8s-ai-conformance") |
| 202 | + |
| 203 | + # Ensure we have a fork. |
| 204 | + try: |
| 205 | + capture(["gh", "repo", "view", f"{github_user}/k8s-ai-conformance"]) |
| 206 | + except subprocess.CalledProcessError: |
| 207 | + print(f"Forking {CONFORMANCE_REPO}...") |
| 208 | + run(["gh", "repo", "fork", CONFORMANCE_REPO, "--clone=false"]) |
| 209 | + |
| 210 | + run(["gh", "repo", "clone", f"{github_user}/k8s-ai-conformance", clone_dir, "--", "--depth=1"]) |
| 211 | + |
| 212 | + branch = f"kops-v{kube_minor}" |
| 213 | + run(["git", "remote", "add", "cncf", f"https://github.com/{CONFORMANCE_REPO}.git"], cwd=clone_dir) |
| 214 | + run(["git", "fetch", "cncf", "main", "--depth=1"], cwd=clone_dir) |
| 215 | + run(["git", "checkout", "-b", branch, "cncf/main"], cwd=clone_dir) |
| 216 | + |
| 217 | + # Copy submission into the clone. |
| 218 | + dest = os.path.join(clone_dir, f"v{kube_minor}", KOPS_DIR_NAME) |
| 219 | + if os.path.exists(dest): |
| 220 | + shutil.rmtree(dest) |
| 221 | + shutil.copytree(submit_dir, dest) |
| 222 | + |
| 223 | + # Commit. |
| 224 | + run(["git", "add", f"v{kube_minor}/{KOPS_DIR_NAME}/"], cwd=clone_dir) |
| 225 | + commit_msg = ( |
| 226 | + f"Add kOps AI Conformance results for v{kube_minor}\n\n" |
| 227 | + f"kOps version: {platform_version}\n" |
| 228 | + f"Kubernetes version: {kube_version}\n" |
| 229 | + f"Build: https://prow.k8s.io/view/gs/{GCS_BUCKET}/logs/{JOB_NAME}/{build_id}\n" |
| 230 | + ) |
| 231 | + run(["git", "commit", "-m", commit_msg], cwd=clone_dir) |
| 232 | + |
| 233 | + # Push. |
| 234 | + print(f"\nPushing to {github_user}/k8s-ai-conformance...") |
| 235 | + run(["git", "push", "-u", "origin", branch, "--force"], cwd=clone_dir) |
| 236 | + |
| 237 | + # Create PR. |
| 238 | + print("\nCreating pull request...") |
| 239 | + pr_body = ( |
| 240 | + f"## Conformance results for kOps v{kube_minor}\n\n" |
| 241 | + f"- **Platform**: kOps\n" |
| 242 | + f"- **Platform Version**: {platform_version}\n" |
| 243 | + f"- **Kubernetes Version**: {kube_version}\n" |
| 244 | + f"- **Vendor**: kOps Project\n\n" |
| 245 | + f"### Evidence\n\n" |
| 246 | + f"Test evidence is included directly in this PR as markdown files under " |
| 247 | + f"`v{kube_minor}/{KOPS_DIR_NAME}/tests/`.\n\n" |
| 248 | + f"The tests were run automatically by the " |
| 249 | + f"[e2e-kops-ai-conformance](https://prow.k8s.io/view/gs/{GCS_BUCKET}/logs/{JOB_NAME}/{build_id}) Prow job.\n\n" |
| 250 | + f"Full artifacts: https://gcsweb.k8s.io/gcs/{GCS_BUCKET}/logs/{JOB_NAME}/{build_id}/artifacts/\n" |
| 251 | + ) |
| 252 | + pr_url = capture([ |
| 253 | + "gh", "pr", "create", |
| 254 | + "--repo", CONFORMANCE_REPO, |
| 255 | + "--head", f"{github_user}:{branch}", |
| 256 | + "--title", f"Add kOps AI Conformance results for v{kube_minor}", |
| 257 | + "--body", pr_body, |
| 258 | + ], cwd=clone_dir) |
| 259 | + |
| 260 | + print(f"\nPull request created: {pr_url}") |
| 261 | + |
| 262 | + |
| 263 | +def main(): |
| 264 | + args = sys.argv[1:] |
| 265 | + dry_run = True |
| 266 | + if "--submit" in args: |
| 267 | + dry_run = False |
| 268 | + args.remove("--submit") |
| 269 | + |
| 270 | + if len(args) < 1: |
| 271 | + print(__doc__, file=sys.stderr) |
| 272 | + sys.exit(1) |
| 273 | + |
| 274 | + build_id = parse_build_id(args[0]) |
| 275 | + print(f"Build ID: {build_id}") |
| 276 | + if dry_run: |
| 277 | + print("DRY RUN: will build submission locally but not create a PR") |
| 278 | + |
| 279 | + tmpdir = tempfile.mkdtemp() |
| 280 | + try: |
| 281 | + # Download artifacts. |
| 282 | + download_artifacts(build_id, tmpdir) |
| 283 | + |
| 284 | + # Load our results. |
| 285 | + results = load_yaml(os.path.join(tmpdir, "ai-conformance.yaml")) |
| 286 | + kube_version = results["metadata"]["kubernetesVersion"] |
| 287 | + platform_version = results["metadata"]["platformVersion"] |
| 288 | + kube_minor = re.sub(r"^v", "", kube_version).rsplit(".", 1)[0] |
| 289 | + print(f"Kubernetes version: {kube_version} (minor: {kube_minor})") |
| 290 | + print(f"Platform version: {platform_version}") |
| 291 | + |
| 292 | + # Download and merge with template. |
| 293 | + template_path = download_template(kube_minor, tmpdir) |
| 294 | + template = load_yaml(template_path) |
| 295 | + product = build_product_yaml(template, results) |
| 296 | + |
| 297 | + # Prepare submission directory. |
| 298 | + submit_dir = os.path.join(tmpdir, "submission") |
| 299 | + os.makedirs(submit_dir) |
| 300 | + write_product_yaml(product, os.path.join(submit_dir, "PRODUCT.yaml")) |
| 301 | + copy_evidence(tmpdir, submit_dir) |
| 302 | + |
| 303 | + # Show what we're submitting. |
| 304 | + print("\nSubmission contents:") |
| 305 | + for root, _dirs, files in os.walk(submit_dir): |
| 306 | + for f in sorted(files): |
| 307 | + print(f" {os.path.relpath(os.path.join(root, f), submit_dir)}") |
| 308 | + |
| 309 | + if dry_run: |
| 310 | + print(f"\nDry run output is in {submit_dir}") |
| 311 | + print("PRODUCT.yaml:") |
| 312 | + with open(os.path.join(submit_dir, "PRODUCT.yaml")) as f: |
| 313 | + print(f.read()) |
| 314 | + return |
| 315 | + |
| 316 | + # Create the PR. |
| 317 | + create_pr(tmpdir, submit_dir, kube_minor, kube_version, platform_version, build_id) |
| 318 | + finally: |
| 319 | + if not dry_run: |
| 320 | + shutil.rmtree(tmpdir) |
| 321 | + |
| 322 | + |
| 323 | +if __name__ == "__main__": |
| 324 | + main() |
0 commit comments