-
-
Notifications
You must be signed in to change notification settings - Fork 62
fix(sbom): fetch last 10 releases for Dakota history #838
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -338,27 +338,37 @@ const STREAM_SPECS = [ | |
| * @param {object|null} existing Existing SBOM cache for incremental updates | ||
| */ | ||
| async function processLatestTagStream(spec, existing) { | ||
| const imageRef = `ghcr.io/${spec.org}/${spec.package}:latest`; | ||
| console.log(` ${spec.id}: processing :latest tag (${imageRef})`); | ||
|
|
||
| const dateStr = await getImageCreatedDate(imageRef); | ||
| const cacheKey = dateStr ? `latest-${dateStr}` : "latest-unknown"; | ||
| console.log(` ${spec.id}: cache key = ${cacheKey}`); | ||
|
|
||
| const existingEntry = existing?.streams?.[spec.id]?.releases?.[cacheKey]; | ||
| const hasVersions = existingEntry?.packageVersions != null; | ||
| const hasAllPackages = | ||
| existingEntry?.packageVersions?.allPackages != null && | ||
| Object.keys(existingEntry.packageVersions.allPackages).length > 0; | ||
| const isVerified = existingEntry?.attestation?.verified === true; | ||
| const isCacheHit = | ||
| !FORCE_REFRESH && hasVersions && hasAllPackages && isVerified; | ||
|
|
||
| // Seed releases from existing cache so history accumulates across nightly runs. | ||
| // Without this, every run discards all prior entries — leaving only today's key. | ||
| // Seed from existing cache — accumulates history across nightly runs. | ||
| const existingReleases = existing?.streams?.[spec.id]?.releases || {}; | ||
| const releases = { ...existingReleases }; | ||
|
|
||
| // Build the list of image refs to process: :latest plus the 10 most recent | ||
| // commit-SHA tags (each is a distinct tagged build pushed to GHCR). | ||
| const allTags = await fetchGhcrTags(spec.org, spec.package); | ||
| const commitTags = allTags | ||
| .filter((t) => /^[0-9a-f]{40}$/.test(t)) | ||
| .slice(-10); // last 10 = most recently pushed | ||
| const imageRefs = [ | ||
| `ghcr.io/${spec.org}/${spec.package}:latest`, | ||
| ...commitTags.map((t) => `ghcr.io/${spec.org}/${spec.package}:${t}`), | ||
| ]; | ||
|
|
||
| for (const imageRef of imageRefs) { | ||
| const dateStr = await getImageCreatedDate(imageRef); | ||
| const cacheKey = dateStr ? `latest-${dateStr}` : null; | ||
| if (!cacheKey) continue; | ||
|
Comment on lines
+357
to
+359
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Date-only cache keys can overwrite multiple same-day Dakota builds. Using the image's tag to disambiguate the key avoids that collision. 💡 Proposed fix:
- const dateStr = await getImageCreatedDate(imageRef);
- const cacheKey = dateStr ? `latest-${dateStr}` : null;
- if (!cacheKey) continue;
+ const dateStr = await getImageCreatedDate(imageRef);
+ const refTag = imageRef.split(":").pop() || "unknown";
+ let cacheKey = dateStr
+ ? `latest-${dateStr}`
+ : `latest-unknown-${refTag.slice(0, 12)}`;
+ if (releases[cacheKey] && releases[cacheKey].imageRef !== imageRef) {
+ cacheKey = `${cacheKey}-${refTag.slice(0, 12)}`;
+ }
Also applies to: lines 417-419.
🤖 Prompt for AI Agents |
||
|
|
||
| const existingEntry = releases[cacheKey]; | ||
| const hasVersions = existingEntry?.packageVersions != null; | ||
| const hasAllPackages = | ||
| existingEntry?.packageVersions?.allPackages != null && | ||
| Object.keys(existingEntry.packageVersions.allPackages).length > 0; | ||
| const isVerified = existingEntry?.attestation?.verified === true; | ||
| const isCacheHit = | ||
| !FORCE_REFRESH && hasVersions && hasAllPackages && isVerified; | ||
|
|
||
| console.log(` ${spec.id}: ${cacheKey}${isCacheHit ? " (cache hit)" : ""}`); | ||
|
|
||
| if (isCacheHit) { | ||
| releases[cacheKey] = existingEntry; | ||
| } else { | ||
|
|
@@ -405,14 +415,15 @@ async function processLatestTagStream(spec, existing) { | |
| } | ||
|
|
||
| releases[cacheKey] = { | ||
| tag: "latest", | ||
| tag: imageRef, | ||
| imageRef, | ||
| digest: null, | ||
| attestation, | ||
| packageVersions, | ||
| checkedAt: new Date().toISOString(), | ||
| }; | ||
| } | ||
| } // end for imageRefs | ||
|
|
||
| return { | ||
| id: spec.id, | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Latest-stream backfill logic is not reachable with the current stream specs.
`processLatestTagStream()` is only called when `spec.usesLatestTag` is truthy (Line 454), but the Dakota specs in this file are configured via `streamPrefix: "latest"` and don't set `usesLatestTag`. That makes the new multi-ref logic effectively dead for Dakota. 💡 Proposed fix
@@
  {
    id: "dakota-latest",
    @@
    streamPrefix: "latest",
+   usesLatestTag: true,
    keyRepo: "projectbluefin/dakota",
    keyless: true,
  },
  {
    id: "dakota-nvidia-latest",
    @@
    streamPrefix: "latest",
+   usesLatestTag: true,
    keyRepo: "projectbluefin/dakota",
    keyless: true,
  },

Also applies to: lines 454-456.
🤖 Prompt for AI Agents