diff --git a/.github/scripts/dependency_age.py b/.github/scripts/dependency_age.py
new file mode 100644
index 00000000000..49d80ed56a7
--- /dev/null
+++ b/.github/scripts/dependency_age.py
@@ -0,0 +1,569 @@
+#!/usr/bin/env python3
+
+import argparse
+import email.utils
+import json
+import os
+import re
+import sys
+import urllib.error
+import urllib.parse
+import urllib.request
+from dataclasses import dataclass
+from datetime import datetime, timedelta, timezone
+from pathlib import Path
+from typing import Any
+
+
+GRADLE_VERSIONS_URL = "https://services.gradle.org/versions/all"
+MAVEN_SEARCH_URL = "https://search.maven.org/solrsearch/select"
+DEFAULT_MIN_AGE_HOURS = 48
+GRADLE_PRERELEASE_PATTERN = re.compile(r"(?:^|[.\-])(rc|milestone)(?:[.\-\d]|$)", re.IGNORECASE)
+AKKA_REPOSITORY_URL = os.environ.get("DEPENDENCY_AGE_AKKA_REPOSITORY_URL", "https://repo.akka.io/maven")
+REPOSITORY_FALLBACKS: tuple[tuple[str, str], ...] = (
+ ("com.typesafe.akka", AKKA_REPOSITORY_URL),
+ ("io.akka", AKKA_REPOSITORY_URL),
+)
+
+
+@dataclass(frozen=True)
+class Candidate:
+ version: str
+ published_at: datetime
+
+
+# Entry point for GitHub Actions workflows
+# select-gradle: get newest Gradle release that is at least MIN_DEPENDENCY_AGE_HOURS hours old
+# select-maven: get newest Maven artifact release that is at least MIN_DEPENDENCY_AGE_HOURS hours old
+# validate-lockfiles: validate changed Gradle lockfile entries
+def parse_args() -> argparse.Namespace:
+ parser = argparse.ArgumentParser(description="Dependency age helpers for GitHub workflows.")
+ subparsers = parser.add_subparsers(dest="command", required=True)
+
+ gradle = subparsers.add_parser("select-gradle", help="Select the newest eligible Gradle release.")
+ add_common_selection_args(gradle)
+ gradle.add_argument("--versions-url", default=GRADLE_VERSIONS_URL)
+ gradle.add_argument("--versions-file")
+
+ maven = subparsers.add_parser("select-maven", help="Select the newest eligible Maven artifact release.")
+ add_common_selection_args(maven)
+ maven.add_argument("--group-id", required=True)
+ maven.add_argument("--artifact-id", required=True)
+ maven.add_argument("--search-url", default=MAVEN_SEARCH_URL)
+ maven.add_argument("--search-response-file")
+ maven.add_argument(
+ "--prerelease-pattern",
+ action="append",
+ default=[],
+ help="Case-insensitive regex fragment used to exclude prerelease versions.",
+ )
+
+ validate = subparsers.add_parser("validate-lockfiles", help="Validate changed Gradle lockfile entries.")
+ validate.add_argument("--baseline-dir", required=True)
+ validate.add_argument("--current-dir", default=".")
+ validate.add_argument("--metadata-file")
+ validate.add_argument("--search-url", default=MAVEN_SEARCH_URL)
+ validate.add_argument("--min-age-hours", type=int, default=default_min_age_hours())
+ validate.add_argument("--now")
+ validate.add_argument("--github-output", default=None)
+
+ return parser.parse_args()
+
+
+# add shared args used by select-gradle and select-maven
+def add_common_selection_args(parser: argparse.ArgumentParser) -> None:
+ parser.add_argument("--min-age-hours", type=int, default=default_min_age_hours())
+ parser.add_argument("--now")
+ parser.add_argument("--github-output", default=None)
+
+
+# get MIN_DEPENDENCY_AGE_HOURS from environment variable; default is 48 hours
+def default_min_age_hours() -> int:
+ try:
+ return int(os.environ.get("MIN_DEPENDENCY_AGE_HOURS", DEFAULT_MIN_AGE_HOURS))
+ except ValueError:
+ return DEFAULT_MIN_AGE_HOURS
+
+
+# return input as a datetime object; default to current UTC time
+def now_utc(raw: str | None) -> datetime:
+ if raw:
+ return parse_datetime(raw)
+ return datetime.now(timezone.utc)
+
+
+# now_utc helper to parse input as a datetime object; used for Gradle and Maven timestamps
+def parse_datetime(value: Any) -> datetime:
+ if isinstance(value, datetime):
+ return value.astimezone(timezone.utc)
+ if isinstance(value, (int, float)):
+ timestamp = float(value)
+ if timestamp > 10_000_000_000:
+ timestamp /= 1000.0
+ return datetime.fromtimestamp(timestamp, tz=timezone.utc)
+ if value is None:
+ raise ValueError("timestamp is required")
+
+ text = str(value).strip()
+ if not text:
+ raise ValueError("timestamp is empty")
+
+ # Gradle buildTime compact format: 20260423130000+0000
+ try:
+ return datetime.strptime(text, "%Y%m%d%H%M%S%z").astimezone(timezone.utc)
+ except ValueError:
+ pass
+
+ # ISO 8601: normalise Z and +HHMM → +HH:MM for fromisoformat
+ text = re.sub(r"([+-])(\d{2})(\d{2})$", r"\1\2:\3", text.replace("Z", "+00:00"))
+ return datetime.fromisoformat(text).astimezone(timezone.utc)
+
+
+# normalize datetime to YYYY-MM-DDTHH:MM:SSZ for GitHub Actions outputs
+def format_datetime(value: datetime) -> str:
+ return value.astimezone(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
+
+
+# emit key=value lines to stdout and GitHub Actions output file
+def emit_outputs(outputs: dict[str, Any], github_output: str | None) -> None:
+ lines = [f"{key}={'' if value is None else value}" for key, value in outputs.items()]
+ for line in lines:
+ print(line)
+ if github_output:
+ with open(github_output, "a", encoding="utf-8") as handle:
+ for line in lines:
+ handle.write(f"{line}\n")
+
+
+# load JSON from file or URL
+def load_json(file_path: str | None, url: str | None) -> Any:
+ if file_path:
+ text = Path(file_path).read_text(encoding="utf-8")
+        # Tolerate a leading UTF-8 BOM in local fixture files before parsing.
+        text = re.sub(r"(?s)\A\ufeff", "", text)
+        return json.loads(text)
+    if not url:
+        raise ValueError("Either a file path or a URL is required to load JSON.")
+    with urllib.request.urlopen(url, timeout=30) as response:
+        return json.loads(response.read().decode("utf-8"))
+
+
+# select latest Gradle release that is at least MIN_DEPENDENCY_AGE_HOURS hours old
+def select_gradle_release(args: argparse.Namespace) -> int:
+ cutoff = now_utc(args.now) - timedelta(hours=args.min_age_hours)
+ payload = load_json(args.versions_file, args.versions_url)
+ candidates: list[Candidate] = []
+ for entry in payload:
+ version = entry.get("version")
+ build_time = entry.get("buildTime")
+ if not version or not build_time:
+ continue
+ if any(bool(entry.get(flag)) for flag in ("snapshot", "nightly", "releaseNightly", "broken", "activeRc")):
+ continue
+ if entry.get("rcFor") or GRADLE_PRERELEASE_PATTERN.search(version):
+ continue
+ published_at = parse_datetime(build_time)
+ if published_at <= cutoff:
+ candidates.append(Candidate(version=version, published_at=published_at))
+
+ return emit_selection_result(
+ label="Gradle",
+ cutoff=cutoff,
+ github_output=args.github_output,
+ candidates=candidates,
+ not_found_reason=(
+ f"No eligible stable Gradle release is at least {args.min_age_hours} hours old."
+ ),
+ )
+
+
+# select latest Maven artifact release that is at least MIN_DEPENDENCY_AGE_HOURS hours old
+def select_maven_release(args: argparse.Namespace) -> int:
+ cutoff = now_utc(args.now) - timedelta(hours=args.min_age_hours)
+ pattern = combine_patterns(args.prerelease_pattern)
+ candidates: list[Candidate] = []
+ for document in load_maven_documents(
+ group_id=args.group_id,
+ artifact_id=args.artifact_id,
+ search_url=args.search_url,
+ response_file=args.search_response_file,
+ ):
+ version = document.get("v")
+ timestamp = document.get("timestamp")
+ if not version or timestamp is None:
+ continue
+ if pattern and pattern.search(version):
+ continue
+ published_at = parse_datetime(timestamp)
+ if published_at <= cutoff:
+ candidates.append(Candidate(version=version, published_at=published_at))
+
+ return emit_selection_result(
+ label=f"{args.group_id}:{args.artifact_id}",
+ cutoff=cutoff,
+ github_output=args.github_output,
+ candidates=candidates,
+ not_found_reason=(
+ f"No eligible stable release found for {args.group_id}:{args.artifact_id} "
+ f"that is at least {args.min_age_hours} hours old."
+ ),
+ )
+
+
+# combine prerelease patterns into a single regex pattern
+def combine_patterns(patterns: list[str]) -> re.Pattern[str] | None:
+ non_empty = [pattern for pattern in patterns if pattern]
+ if not non_empty:
+ return None
+ return re.compile("|".join(f"(?:{pattern})" for pattern in non_empty), re.IGNORECASE)
+
+
+# load all Maven Central versions for given group:artifact
+def load_maven_documents(
+ *,
+ group_id: str,
+ artifact_id: str,
+ search_url: str,
+ response_file: str | None,
+) -> list[dict[str, Any]]:
+ if response_file:
+ payload = load_json(response_file, None)
+ return list(payload.get("response", {}).get("docs", []))
+
+ docs: list[dict[str, Any]] = []
+ start = 0
+ rows = 200
+ total = None
+ while total is None or start < total:
+ query = urllib.parse.urlencode(
+ {
+ "q": f'g:"{group_id}" AND a:"{artifact_id}"',
+ "core": "gav",
+ "rows": rows,
+ "start": start,
+ "wt": "json",
+ "sort": "timestamp desc",
+ }
+ )
+ payload = load_json(None, f"{search_url}?{query}")
+ response = payload.get("response", {})
+ total = int(response.get("numFound", 0))
+ batch = list(response.get("docs", []))
+ docs.extend(batch)
+ if not batch:
+ break
+ start += len(batch)
+ return docs
+
+
+# split a version string into tagged segments for comparison: numeric parts sort as ints before
+# non-numeric parts, which sort as strings (e.g. "3.9.11" → ((0, 3), (0, 9), (0, 11)))
+def _version_sort_key(version: str) -> tuple:
+ parts = []
+ for segment in re.split(r"([.\-])", version):
+ if segment in {"", ".", "-"}:
+ continue
+ try:
+ parts.append((0, int(segment)))
+ except ValueError:
+ parts.append((1, segment))
+ return tuple(parts)
+
+
+# emit selection result to stdout and GitHub Actions output file for select-gradle and select-maven
+def emit_selection_result(
+ *,
+ label: str,
+ cutoff: datetime,
+ github_output: str | None,
+ candidates: list[Candidate],
+ not_found_reason: str,
+) -> int:
+ selected = max(candidates, key=lambda candidate: _version_sort_key(candidate.version), default=None)
+ outputs: dict[str, Any] = {
+ "cutoff_at": format_datetime(cutoff),
+ }
+ if not selected:
+ outputs.update(
+ {
+ "found": "false",
+ "version": "",
+ "published_at": "",
+ "reason": not_found_reason,
+ }
+ )
+ emit_outputs(outputs, github_output)
+ print(f"::error::{not_found_reason}")
+ return 1
+
+ outputs.update(
+ {
+ "found": "true",
+ "version": selected.version,
+ "published_at": format_datetime(selected.published_at),
+ "reason": "",
+ }
+ )
+ emit_outputs(outputs, github_output)
+ print(
+ f"Selected latest eligible stable version for {label}: "
+ f"{selected.version} (published {format_datetime(selected.published_at)}, cutoff {format_datetime(cutoff)})"
+ )
+ return 0
+
+
+# ensure every changed Gradle lockfile entry is at least MIN_DEPENDENCY_AGE_HOURS hours old
+def validate_lockfiles(args: argparse.Namespace) -> int:
+ cutoff = now_utc(args.now) - timedelta(hours=args.min_age_hours)
+ baseline_dir = Path(args.baseline_dir)
+ current_dir = Path(args.current_dir)
+ metadata = load_metadata_overrides(args.metadata_file)
+
+ changed = changed_lockfile_coordinates(baseline_dir=baseline_dir, current_dir=current_dir)
+ outputs = {
+ "cutoff_at": format_datetime(cutoff),
+ "validated_coordinates": len(changed),
+ "reverted_coordinates": 0,
+ }
+ if not changed:
+ emit_outputs(outputs, args.github_output)
+ print("No dependency version changes detected across Gradle lockfiles.")
+ return 0
+
+ changed_by_file: dict[str, list[str]] = {}
+ for relative_path, gav in changed:
+ changed_by_file.setdefault(relative_path, []).append(gav)
+
+ timestamp_cache: dict[str, tuple[datetime | None, str | None]] = {}
+ violations_by_file: dict[str, list[tuple[str, str]]] = {}
+ for relative_path, gavs in sorted(changed_by_file.items()):
+ for gav in gavs:
+ if gav not in timestamp_cache:
+ timestamp_cache[gav] = resolve_gav_timestamp(
+ gav=gav,
+ metadata=metadata,
+ search_url=args.search_url,
+ )
+ published_at, reason = timestamp_cache[gav]
+ if published_at is None:
+ violations_by_file.setdefault(relative_path, []).append(
+ (gav, reason or "Unable to determine publish timestamp.")
+ )
+ continue
+ if published_at > cutoff:
+ violations_by_file.setdefault(relative_path, []).append(
+ (
+ gav,
+ f"Published at {format_datetime(published_at)}, which is newer than cutoff "
+ f"{format_datetime(cutoff)}.",
+ )
+ )
+ continue
+ print(
+ f"Verified {gav} in {relative_path} "
+ f"(published {format_datetime(published_at)}, cutoff {format_datetime(cutoff)})"
+ )
+
+ if violations_by_file:
+ revert_lockfiles_to_baseline(
+ violations_by_file=violations_by_file,
+ baseline_dir=baseline_dir,
+ current_dir=current_dir,
+ )
+ outputs["reverted_coordinates"] = sum(len(entries) for entries in violations_by_file.values())
+ for path, entries in sorted(violations_by_file.items()):
+ for gav, message in entries:
+ print(f"::warning file={path}::{gav}: {message} Reverted lockfile to baseline.")
+
+ emit_outputs(outputs, args.github_output)
+ print(
+ f"Validated {len(changed)} changed dependency selections against cutoff {format_datetime(cutoff)}. "
+ f"{len(violations_by_file)} lockfiles reverted."
+ )
+ return 0
+
+
+# restore each violating lockfile to its baseline copy to keep the file internally consistent
+def revert_lockfiles_to_baseline(
+ *,
+ violations_by_file: dict[str, list[tuple[str, str]]],
+ baseline_dir: Path,
+ current_dir: Path,
+) -> None:
+ for relative_path in sorted(violations_by_file):
+ current_path = current_dir / relative_path
+ baseline_path = baseline_dir / relative_path
+ if baseline_path.exists():
+ current_path.write_text(baseline_path.read_text(encoding="utf-8"), encoding="utf-8")
+ print(f"Reverted lockfile {relative_path} to baseline.")
+ else:
+ current_path.unlink(missing_ok=True)
+ print(f"Removed new lockfile {relative_path} because it has no baseline copy to restore.")
+
+
+# load metadata overrides from file
+def load_metadata_overrides(path: str | None) -> dict[str, Any]:
+ if not path:
+ return {}
+ return load_json(path, None)
+
+
+# find publish time for given group:artifact:version
+def resolve_gav_timestamp(
+ *,
+ gav: str,
+ metadata: dict[str, Any],
+ search_url: str,
+) -> tuple[datetime | None, str | None]:
+ if gav in metadata:
+ override = metadata[gav]
+ return parse_metadata_override(gav, override)
+
+ group_id, artifact_id, version = gav.split(":", 2)
+ query = urllib.parse.urlencode(
+ {
+ "q": f'g:"{group_id}" AND a:"{artifact_id}" AND v:"{version}"',
+ "core": "gav",
+ "rows": 20,
+ "wt": "json",
+ }
+ )
+ try:
+ payload = load_json(None, f"{search_url}?{query}")
+ docs = payload.get("response", {}).get("docs", [])
+ except (urllib.error.HTTPError, urllib.error.URLError, TimeoutError, ValueError):
+ docs = []
+ for document in docs:
+ if document.get("v") != version:
+ continue
+ timestamp = document.get("timestamp")
+ if timestamp is None:
+ return None, "Maven Central search result did not include a publish timestamp."
+ return parse_datetime(timestamp), None
+
+ fallback_timestamp = resolve_repository_fallback_timestamp(
+ group_id=group_id,
+ artifact_id=artifact_id,
+ version=version,
+ )
+ if fallback_timestamp is not None:
+ return fallback_timestamp, None
+
+ return None, "No metadata was found for this coordinate in Maven Central search or configured repository fallbacks."
+
+
+def resolve_repository_fallback_timestamp(
+ *,
+ group_id: str,
+ artifact_id: str,
+ version: str,
+) -> datetime | None:
+ relative_dir = "/".join(group_id.split(".")) + f"/{artifact_id}/{version}"
+ candidate_files = [
+ f"{artifact_id}-{version}.pom",
+ f"{artifact_id}-{version}.module",
+ f"{artifact_id}-{version}.jar",
+ ]
+ for prefix, base_url in REPOSITORY_FALLBACKS:
+ if group_id != prefix and not group_id.startswith(f"{prefix}."):
+ continue
+ for filename in candidate_files:
+            timestamp = fetch_last_modified(f"{base_url.rstrip('/')}/{relative_dir}/{filename}")
+ if timestamp is not None:
+ return timestamp
+ return None
+
+
+def fetch_last_modified(url: str) -> datetime | None:
+ for method in ("HEAD", "GET"):
+ request = urllib.request.Request(url, method=method)
+ try:
+ with urllib.request.urlopen(request, timeout=30) as response:
+ header = response.headers.get("Last-Modified")
+ except urllib.error.HTTPError as error:
+ if error.code in {403, 404, 405}:
+ continue
+ return None
+ except urllib.error.URLError:
+ return None
+ if not header:
+ continue
+ try:
+ parsed = email.utils.parsedate_to_datetime(header)
+ except (TypeError, ValueError, IndexError):
+ continue
+ if parsed.tzinfo is None:
+ parsed = parsed.replace(tzinfo=timezone.utc)
+ return parsed.astimezone(timezone.utc)
+ return None
+
+
+# parse override format for given group:artifact:version
+def parse_metadata_override(gav: str, override: Any) -> tuple[datetime | None, str | None]:
+ if isinstance(override, dict):
+ if "reason" in override:
+ return None, str(override["reason"])
+ for key in ("timestamp", "published_at", "timestamp_ms"):
+ if key in override:
+ return parse_datetime(override[key]), None
+ return None, f"Metadata override for {gav} is missing a timestamp."
+ if isinstance(override, (int, float, str)):
+ return parse_datetime(override), None
+ return None, f"Unsupported metadata override format for {gav}."
+
+
+# compare baseline and current lockfiles to find changed coordinates
+def changed_lockfile_coordinates(*, baseline_dir: Path, current_dir: Path) -> list[tuple[str, str]]:
+ changed: list[tuple[str, str]] = []
+ baseline_lockfiles = collect_lockfiles(baseline_dir)
+ current_lockfiles = collect_lockfiles(current_dir)
+
+ for relative_path in sorted(set(baseline_lockfiles) | set(current_lockfiles)):
+ before = baseline_lockfiles.get(relative_path, set())
+ after = current_lockfiles.get(relative_path, set())
+ for gav in sorted(after - before):
+ changed.append((relative_path, gav))
+ return changed
+
+
+# parse_lockfile helper to read lockfile into group:artifact:version coordinates
+def read_lockfile_lines(path: Path) -> dict[str, str]:
+ """Maps group:artifact:version to the full lockfile line for a given file."""
+ lines: dict[str, str] = {}
+ for raw_line in path.read_text(encoding="utf-8").splitlines():
+ line = raw_line.strip()
+ if not line or line.startswith("#"):
+ continue
+ coordinate = line.split("=", 1)[0]
+ if coordinate.count(":") != 2:
+ continue
+ lines[coordinate] = line
+ return lines
+
+
+# recursively collect all gradle.lockfile paths from given root
+def collect_lockfiles(root: Path) -> dict[str, set[str]]:
+ lockfiles: dict[str, set[str]] = {}
+ if not root.exists():
+ return lockfiles
+ for path in root.rglob("gradle.lockfile"):
+ lockfiles[str(path.relative_to(root))] = parse_lockfile(path)
+ return lockfiles
+
+
+# parse lockfile into a set of group:artifact:version coordinates
+def parse_lockfile(path: Path) -> set[str]:
+ return set(read_lockfile_lines(path))
+
+
+def main() -> int:
+ args = parse_args()
+ if args.command == "select-gradle":
+ return select_gradle_release(args)
+ if args.command == "select-maven":
+ return select_maven_release(args)
+ if args.command == "validate-lockfiles":
+ return validate_lockfiles(args)
+ raise ValueError(f"Unsupported command: {args.command}")
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/.github/scripts/tests/fixtures/gradle-newest-too-new.json b/.github/scripts/tests/fixtures/gradle-newest-too-new.json
new file mode 100644
index 00000000000..3af6eb17acf
--- /dev/null
+++ b/.github/scripts/tests/fixtures/gradle-newest-too-new.json
@@ -0,0 +1,29 @@
+[
+ {
+ "version": "9.5-rc-1",
+ "buildTime": "20260420120000+0000",
+ "snapshot": false,
+ "nightly": false,
+ "releaseNightly": false,
+ "broken": false,
+ "activeRc": true
+ },
+ {
+ "version": "9.4.2",
+ "buildTime": "20260423130000+0000",
+ "snapshot": false,
+ "nightly": false,
+ "releaseNightly": false,
+ "broken": false,
+ "activeRc": false
+ },
+ {
+ "version": "9.4.1",
+ "buildTime": "20260422110000+0000",
+ "snapshot": false,
+ "nightly": false,
+ "releaseNightly": false,
+ "broken": false,
+ "activeRc": false
+ }
+]
diff --git a/.github/scripts/tests/fixtures/gradle-no-eligible.json b/.github/scripts/tests/fixtures/gradle-no-eligible.json
new file mode 100644
index 00000000000..f7e6b9c49e6
--- /dev/null
+++ b/.github/scripts/tests/fixtures/gradle-no-eligible.json
@@ -0,0 +1,20 @@
+[
+ {
+ "version": "9.5-rc-1",
+ "buildTime": "20260420120000+0000",
+ "snapshot": false,
+ "nightly": false,
+ "releaseNightly": false,
+ "broken": false,
+ "activeRc": true
+ },
+ {
+ "version": "9.4.2",
+ "buildTime": "20260423130000+0000",
+ "snapshot": false,
+ "nightly": false,
+ "releaseNightly": false,
+ "broken": false,
+ "activeRc": false
+ }
+]
diff --git a/.github/scripts/tests/fixtures/maven-newest-too-new.json b/.github/scripts/tests/fixtures/maven-newest-too-new.json
new file mode 100644
index 00000000000..7fb9fbf21e0
--- /dev/null
+++ b/.github/scripts/tests/fixtures/maven-newest-too-new.json
@@ -0,0 +1,18 @@
+{
+ "response": {
+ "docs": [
+ {
+ "v": "4.0.0",
+ "timestamp": "2026-04-23T13:00:00Z"
+ },
+ {
+ "v": "4.0.0-beta-3",
+ "timestamp": "2026-04-21T12:00:00Z"
+ },
+ {
+ "v": "3.9.8",
+ "timestamp": "2026-04-20T12:00:00Z"
+ }
+ ]
+ }
+}
diff --git a/.github/scripts/tests/fixtures/surefire-boundary.json b/.github/scripts/tests/fixtures/surefire-boundary.json
new file mode 100644
index 00000000000..d25abb31471
--- /dev/null
+++ b/.github/scripts/tests/fixtures/surefire-boundary.json
@@ -0,0 +1,14 @@
+{
+ "response": {
+ "docs": [
+ {
+ "v": "3.5.6",
+ "timestamp": "2026-04-23T13:00:00Z"
+ },
+ {
+ "v": "3.5.5",
+ "timestamp": "2026-04-22T12:00:00Z"
+ }
+ ]
+ }
+}
diff --git a/.github/scripts/tests/test_dependency_age.py b/.github/scripts/tests/test_dependency_age.py
new file mode 100644
index 00000000000..8c9736ea1e5
--- /dev/null
+++ b/.github/scripts/tests/test_dependency_age.py
@@ -0,0 +1,392 @@
+import json
+import os
+import re
+import shutil
+import subprocess
+import tempfile
+import unittest
+from pathlib import Path
+
+
+REPO_ROOT = Path(__file__).resolve().parents[3]
+SCRIPT = REPO_ROOT / ".github/scripts/dependency_age.py"
+FIXTURES = Path(__file__).resolve().parent / "fixtures"
+NOW = "2026-04-24T12:00:00Z"
+OUTPUT_PATTERN = re.compile(
+ r"^(cutoff_at|found|version|published_at|reason|validated_coordinates|reverted_coordinates)=(.*)$"
+)
+
+
+class DependencyAgeScriptTest(unittest.TestCase):
+ def run_script(self, *args: str, env: dict[str, str] | None = None) -> subprocess.CompletedProcess[str]:
+ process_env = os.environ.copy()
+ if env:
+ process_env.update(env)
+ return subprocess.run(
+ ["python3", str(SCRIPT), *args],
+ cwd=REPO_ROOT,
+ capture_output=True,
+ text=True,
+ check=False,
+ env=process_env,
+ )
+
+ def parse_outputs(self, stdout: str) -> dict[str, str]:
+ outputs: dict[str, str] = {}
+ for line in stdout.splitlines():
+ match = OUTPUT_PATTERN.match(line)
+ if match:
+ outputs[match.group(1)] = match.group(2)
+ return outputs
+
+ def test_selects_previous_gradle_release_when_newest_is_too_new(self) -> None:
+ result = self.run_script(
+ "select-gradle",
+ "--now",
+ NOW,
+ "--versions-file",
+ str(FIXTURES / "gradle-newest-too-new.json"),
+ )
+
+ self.assertEqual(result.returncode, 0, result.stderr)
+ outputs = self.parse_outputs(result.stdout)
+ self.assertEqual(outputs["version"], "9.4.1")
+ self.assertEqual(outputs["published_at"], "2026-04-22T11:00:00Z")
+ self.assertEqual(outputs["cutoff_at"], "2026-04-22T12:00:00Z")
+
+ def test_reports_when_no_eligible_gradle_release_exists(self) -> None:
+ result = self.run_script(
+ "select-gradle",
+ "--now",
+ NOW,
+ "--versions-file",
+ str(FIXTURES / "gradle-no-eligible.json"),
+ )
+
+ self.assertEqual(result.returncode, 1, result.stdout)
+ outputs = self.parse_outputs(result.stdout)
+ self.assertEqual(outputs["found"], "false")
+ self.assertIn("No eligible stable Gradle release", outputs["reason"])
+
+ def test_selects_previous_maven_release_when_newest_is_too_new(self) -> None:
+ result = self.run_script(
+ "select-maven",
+ "--now",
+ NOW,
+ "--group-id",
+ "org.apache.maven",
+ "--artifact-id",
+ "apache-maven",
+ "--search-response-file",
+ str(FIXTURES / "maven-newest-too-new.json"),
+ "--prerelease-pattern",
+ "alpha",
+ "--prerelease-pattern",
+ "beta",
+ "--prerelease-pattern",
+ "rc",
+ )
+
+ self.assertEqual(result.returncode, 0, result.stderr)
+ outputs = self.parse_outputs(result.stdout)
+ self.assertEqual(outputs["version"], "3.9.8")
+
+ def test_exact_48_hour_boundary_is_accepted(self) -> None:
+ result = self.run_script(
+ "select-maven",
+ "--now",
+ NOW,
+ "--group-id",
+ "org.apache.maven.plugins",
+ "--artifact-id",
+ "maven-surefire-plugin",
+ "--search-response-file",
+ str(FIXTURES / "surefire-boundary.json"),
+ "--prerelease-pattern",
+ "alpha",
+ "--prerelease-pattern",
+ "beta",
+ )
+
+ self.assertEqual(result.returncode, 0, result.stderr)
+ outputs = self.parse_outputs(result.stdout)
+ self.assertEqual(outputs["version"], "3.5.5")
+ self.assertEqual(outputs["published_at"], "2026-04-22T12:00:00Z")
+
+ def run_validate_lockfiles(
+ self,
+ *,
+ baseline: dict[str, str],
+ current: dict[str, str],
+ metadata: dict,
+ now: str = NOW,
+ search_url: str | None = None,
+ env: dict[str, str] | None = None,
+ ) -> tuple[subprocess.CompletedProcess[str], Path]:
+ """
+ Runs validate-lockfiles with in-memory lockfile content.
+
+ baseline / current map relative paths (e.g. "module/gradle.lockfile")
+ to the text content of that file. Returns the completed process and
+ the temp dir root so callers can read back modified files.
+ """
+ tmp = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmp, True)
+ baseline_dir = Path(tmp) / "before"
+ current_dir = Path(tmp) / "after"
+ metadata_file = Path(tmp) / "metadata.json"
+
+ for rel_path, content in baseline.items():
+ p = baseline_dir / rel_path
+ p.parent.mkdir(parents=True, exist_ok=True)
+ p.write_text(content, encoding="utf-8")
+
+ for rel_path, content in current.items():
+ p = current_dir / rel_path
+ p.parent.mkdir(parents=True, exist_ok=True)
+ p.write_text(content, encoding="utf-8")
+
+ metadata_file.write_text(json.dumps(metadata), encoding="utf-8")
+
+ args = [
+ "validate-lockfiles",
+ "--baseline-dir", str(baseline_dir),
+ "--current-dir", str(current_dir),
+ "--metadata-file", str(metadata_file),
+ "--now", now,
+ ]
+ if search_url:
+ args.extend(["--search-url", search_url])
+ result = self.run_script(*args, env=env)
+ return result, current_dir
+
+ def test_validates_changed_lockfiles_when_all_updates_are_old_enough(self) -> None:
+ lockfile = "module/gradle.lockfile"
+ baseline_content = "\n".join([
+ "# Gradle lockfile",
+ "com.example:lib-a:1.0.0=runtimeClasspath",
+ "com.example:lib-b:1.0.0=runtimeClasspath",
+ "",
+ ])
+ current_content = "\n".join([
+ "# Gradle lockfile",
+ "com.example:lib-a:1.1.0=runtimeClasspath", # valid upgrade
+ "com.example:lib-b:1.1.0=runtimeClasspath", # valid upgrade
+ "",
+ ])
+ metadata = {
+ "com.example:lib-a:1.1.0": "2026-04-20T12:00:00Z", # old enough
+ "com.example:lib-b:1.1.0": "2026-04-20T11:00:00Z", # old enough
+ }
+
+ result, current_dir = self.run_validate_lockfiles(
+ baseline={"module/gradle.lockfile": baseline_content},
+ current={"module/gradle.lockfile": current_content},
+ metadata=metadata,
+ )
+
+ self.assertEqual(result.returncode, 0, result.stderr)
+ outputs = self.parse_outputs(result.stdout)
+ self.assertEqual(outputs["validated_coordinates"], "2")
+ self.assertEqual(outputs["reverted_coordinates"], "0")
+ final = (current_dir / lockfile).read_text(encoding="utf-8")
+ self.assertEqual(final, current_content)
+
+ def test_reverts_lockfile_when_any_changed_dependency_is_too_new(self) -> None:
+ lockfile = "module/gradle.lockfile"
+ baseline_content = "\n".join([
+ "# Gradle lockfile",
+ "com.example:lib-a:1.0.0=runtimeClasspath",
+ "com.example:lib-b:1.0.0=runtimeClasspath",
+ "",
+ ])
+ current_content = "\n".join([
+ "# Gradle lockfile",
+ "com.example:lib-a:1.1.0=runtimeClasspath", # valid upgrade
+ "com.example:lib-b:2.0.0=runtimeClasspath", # too new
+ "",
+ ])
+ metadata = {
+ "com.example:lib-a:1.1.0": "2026-04-20T12:00:00Z", # old enough
+ "com.example:lib-b:2.0.0": "2026-04-24T11:00:00Z", # too new (after cutoff 2026-04-22T12:00:00Z)
+ }
+
+ result, current_dir = self.run_validate_lockfiles(
+ baseline={"module/gradle.lockfile": baseline_content},
+ current={"module/gradle.lockfile": current_content},
+ metadata=metadata,
+ )
+
+ self.assertEqual(result.returncode, 0, result.stderr)
+ final = (current_dir / lockfile).read_text(encoding="utf-8")
+ self.assertEqual(final, baseline_content)
+
+ def test_reverts_lockfile_when_multiple_versions_coexist_and_one_is_too_new(self) -> None:
+ lockfile = "module/gradle.lockfile"
+ baseline_content = "\n".join([
+ "# Gradle lockfile",
+ "com.typesafe:config:1.3.1=compileClasspath,testCompileClasspath",
+ "com.typesafe:config:1.4.4=runtimeClasspath,testRuntimeClasspath",
+ "",
+ ])
+ current_content = "\n".join([
+ "# Gradle lockfile",
+ "com.typesafe:config:1.3.1=compileClasspath,testCompileClasspath", # unchanged
+ "com.typesafe:config:1.5.0=runtimeClasspath,testRuntimeClasspath", # too new
+ "",
+ ])
+ metadata = {
+ "com.typesafe:config:1.5.0": "2026-04-24T11:00:00Z", # too new
+ }
+
+ result, current_dir = self.run_validate_lockfiles(
+ baseline={"module/gradle.lockfile": baseline_content},
+ current={"module/gradle.lockfile": current_content},
+ metadata=metadata,
+ )
+
+ self.assertEqual(result.returncode, 0, result.stderr)
+ final = (current_dir / lockfile).read_text(encoding="utf-8")
+ self.assertEqual(final, baseline_content)
+
+ def test_reverts_lockfile_when_same_group_artifact_has_multiple_invalid_updates(self) -> None:
+ lockfile = "module/gradle.lockfile"
+ baseline_content = "\n".join([
+ "# Gradle lockfile",
+ "com.example:lib:1.0.0=compileClasspath",
+ "com.example:lib:2.0.0=runtimeClasspath",
+ "",
+ ])
+ current_content = "\n".join([
+ "# Gradle lockfile",
+ "com.example:lib:1.1.0=compileClasspath", # too new, should revert to 1.0.0
+ "com.example:lib:2.1.0=runtimeClasspath", # too new, should revert to 2.0.0
+ "",
+ ])
+ metadata = {
+ "com.example:lib:1.1.0": "2026-04-24T11:00:00Z", # too new
+ "com.example:lib:2.1.0": "2026-04-24T11:00:00Z", # too new
+ }
+
+ result, current_dir = self.run_validate_lockfiles(
+ baseline={"module/gradle.lockfile": baseline_content},
+ current={"module/gradle.lockfile": current_content},
+ metadata=metadata,
+ )
+
+ self.assertEqual(result.returncode, 0, result.stderr)
+ final = (current_dir / lockfile).read_text(encoding="utf-8")
+ self.assertEqual(final, baseline_content)
+
+ def test_removes_brand_new_dependency_that_is_too_new(self) -> None:
+ lockfile = "module/gradle.lockfile"
+ baseline_content = "\n".join([
+ "# Gradle lockfile",
+ "com.example:existing:1.0.0=runtimeClasspath",
+ "",
+ ])
+ current_content = "\n".join([
+ "# Gradle lockfile",
+ "com.example:existing:1.0.0=runtimeClasspath",
+ "com.example:brand-new:1.0.0=runtimeClasspath", # new transitive dep, too new
+ "",
+ ])
+ metadata = {
+ "com.example:brand-new:1.0.0": "2026-04-24T11:00:00Z", # too new
+ }
+
+ result, current_dir = self.run_validate_lockfiles(
+ baseline={"module/gradle.lockfile": baseline_content},
+ current={"module/gradle.lockfile": current_content},
+ metadata=metadata,
+ )
+
+ self.assertEqual(result.returncode, 0, result.stderr)
+ final = (current_dir / lockfile).read_text(encoding="utf-8")
+ self.assertEqual(final, baseline_content)
+
+ def test_reverts_lockfile_when_valid_upgrade_introduces_too_new_brand_new_dependency(self) -> None:
+ lockfile = "module/gradle.lockfile"
+ baseline_content = "\n".join([
+ "# Gradle lockfile",
+ "com.example:existing:1.0.0=runtimeClasspath",
+ "",
+ ])
+ current_content = "\n".join([
+ "# Gradle lockfile",
+ "com.example:existing:1.1.0=runtimeClasspath", # old enough on its own
+ "com.example:brand-new:1.0.0=runtimeClasspath", # too new, no predecessor
+ "",
+ ])
+ metadata = {
+ "com.example:existing:1.1.0": "2026-04-20T11:00:00Z",
+ "com.example:brand-new:1.0.0": "2026-04-24T11:00:00Z",
+ }
+
+ result, current_dir = self.run_validate_lockfiles(
+ baseline={"module/gradle.lockfile": baseline_content},
+ current={"module/gradle.lockfile": current_content},
+ metadata=metadata,
+ )
+
+ self.assertEqual(result.returncode, 0, result.stderr)
+ final = (current_dir / lockfile).read_text(encoding="utf-8")
+ self.assertEqual(final, baseline_content)
+
+ def test_removes_new_lockfile_when_it_has_no_baseline_copy(self) -> None:
+ lockfile = "module/gradle.lockfile"
+ current_content = "\n".join([
+ "# Gradle lockfile",
+ "com.example:brand-new:1.0.0=runtimeClasspath",
+ "",
+ ])
+ metadata = {
+ "com.example:brand-new:1.0.0": "2026-04-24T11:00:00Z",
+ }
+
+ result, current_dir = self.run_validate_lockfiles(
+ baseline={},
+ current={"module/gradle.lockfile": current_content},
+ metadata=metadata,
+ )
+
+ self.assertEqual(result.returncode, 0, result.stderr)
+ self.assertFalse((current_dir / lockfile).exists())
+
+ def test_resolves_akka_publish_time_from_repository_fallback(self) -> None:
+ tmp = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, tmp, True)
+ repo_root = Path(tmp) / "maven"
+ artifact_path = repo_root / "com/typesafe/akka/akka-http_2.13/10.7.4/akka-http_2.13-10.7.4.pom"
+ artifact_path.parent.mkdir(parents=True, exist_ok=True)
+ artifact_path.write_text("", encoding="utf-8")
+        publish_timestamp = 1_776_681_000  # 2026-04-20T10:30:00Z, well before the 2026-04-22T12:00:00Z cutoff
+ os.utime(artifact_path, (publish_timestamp, publish_timestamp))
+ lockfile = "module/gradle.lockfile"
+ baseline_content = "\n".join([
+ "# Gradle lockfile",
+ "com.typesafe.akka:akka-http_2.13:10.6.0=latestDepTestRuntimeClasspath",
+ "",
+ ])
+ current_content = "\n".join([
+ "# Gradle lockfile",
+ "com.typesafe.akka:akka-http_2.13:10.7.4=latestDepTestRuntimeClasspath",
+ "",
+ ])
+
+ result, current_dir = self.run_validate_lockfiles(
+ baseline={"module/gradle.lockfile": baseline_content},
+ current={"module/gradle.lockfile": current_content},
+ metadata={},
+ search_url=(Path(tmp) / "missing-search-endpoint").as_uri(),
+ env={"DEPENDENCY_AGE_AKKA_REPOSITORY_URL": repo_root.as_uri()},
+ )
+
+ self.assertEqual(result.returncode, 0, result.stderr)
+ final = (current_dir / lockfile).read_text(encoding="utf-8")
+ self.assertEqual(final, current_content)
+ self.assertIn("Verified com.typesafe.akka:akka-http_2.13:10.7.4", result.stdout)
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/.github/workflows/update-gradle-dependencies.yaml b/.github/workflows/update-gradle-dependencies.yaml
index ef2ff1e301b..17b20cf5087 100644
--- a/.github/workflows/update-gradle-dependencies.yaml
+++ b/.github/workflows/update-gradle-dependencies.yaml
@@ -8,6 +8,8 @@ jobs:
update-gradle-dependencies:
runs-on: ubuntu-latest
name: Update Gradle dependencies
+ env:
+ MIN_DEPENDENCY_AGE_HOURS: 48
permissions:
contents: read
id-token: write # Required for OIDC token federation
@@ -41,6 +43,11 @@ jobs:
echo "core_branch=ci/update-gradle-dependencies-${DATE}" >> $GITHUB_OUTPUT
echo "instrumentation_branch=ci/update-gradle-dependencies-instrumentation-${DATE}" >> $GITHUB_OUTPUT
+ - name: Snapshot current Gradle lock files
+ run: |
+ mkdir -p /tmp/gradle-lockfiles-before
+ find . -name 'gradle.lockfile' -exec cp --parents {} /tmp/gradle-lockfiles-before/ \;
+
- name: Update Gradle dependencies
env:
ORG_GRADLE_PROJECT_akkaRepositoryToken: ${{ secrets.AKKA_REPO_TOKEN }}
@@ -49,6 +56,15 @@ jobs:
GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xms2G -Xmx3G'" \
./gradlew resolveAndLockAll --write-locks --parallel --stacktrace --no-daemon --max-workers=4
+ - name: Validate changed lock files meet dependency age policy
+ id: validate-lockfiles
+ run: |
+ python3 .github/scripts/dependency_age.py validate-lockfiles \
+ --baseline-dir /tmp/gradle-lockfiles-before \
+ --current-dir . \
+ --min-age-hours "${MIN_DEPENDENCY_AGE_HOURS}" \
+ --github-output "$GITHUB_OUTPUT"
+
- name: Save instrumentation lock files
run: |
mkdir -p /tmp/instrumentation-lockfiles
@@ -100,10 +116,11 @@ jobs:
# What Does This Do
This PR updates the Gradle dependency locks for common and product modules.
+ Dependency resolution was performed through delayed proxies, and changed dependencies were validated to be at least ${{ env.MIN_DEPENDENCY_AGE_HOURS }} hours old.
# Motivation
- Refresh Gradle dependencies to make sure to apply the latest version available when bumping dependencies.
+ Refresh Gradle dependencies while enforcing the ${{ env.MIN_DEPENDENCY_AGE_HOURS }}-hour dependency age policy for new lockfile selections.
# Contributor Checklist
@@ -160,10 +177,11 @@ jobs:
# What Does This Do
This PR updates the Gradle dependency locks for instrumentations and their tests.
+ Dependency resolution was performed through delayed proxies, and changed dependencies were validated to be at least ${{ env.MIN_DEPENDENCY_AGE_HOURS }} hours old.
# Motivation
- Refresh Gradle dependencies to make sure to test latest versions of dependencies within their supported versions.
+ Refresh Gradle dependencies while enforcing the ${{ env.MIN_DEPENDENCY_AGE_HOURS }}-hour dependency age policy for newly selected versions.
# Contributor Checklist
diff --git a/.github/workflows/update-smoke-test-latest-versions.yaml b/.github/workflows/update-smoke-test-latest-versions.yaml
index 2fd41117634..f3e19d58150 100644
--- a/.github/workflows/update-smoke-test-latest-versions.yaml
+++ b/.github/workflows/update-smoke-test-latest-versions.yaml
@@ -8,6 +8,8 @@ jobs:
update-smoke-test-latest-versions:
runs-on: ubuntu-latest
name: Update smoke test latest versions
+ env:
+ MIN_DEPENDENCY_AGE_HOURS: 48
permissions:
contents: read
id-token: write # Required for OIDC token federation
@@ -27,52 +29,35 @@ jobs:
DATE=$(date +'%Y%m%d')
echo "branch=ci/update-smoke-test-latest-versions-${DATE}" >> "$GITHUB_OUTPUT"
- - name: Fetch latest Gradle version
+ - name: Resolve latest eligible Gradle version
id: gradle
run: |
- VERSION=$(curl -sf https://services.gradle.org/versions/current | jq -r '.version')
- if [ -z "$VERSION" ] || [ "$VERSION" = "null" ]; then
- echo "::error::Failed to fetch latest Gradle version"
- exit 1
- fi
- echo "version=$VERSION" >> "$GITHUB_OUTPUT"
- echo "Latest Gradle version: $VERSION"
+ python3 .github/scripts/dependency_age.py select-gradle \
+ --min-age-hours "${MIN_DEPENDENCY_AGE_HOURS}" \
+ --github-output "$GITHUB_OUTPUT"
- - name: Fetch latest stable Maven version
+ - name: Resolve latest eligible stable Maven version
id: maven
run: |
- METADATA=$(curl -sf https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/maven-metadata.xml)
- # Get all versions, filter out alpha/beta/rc, take the latest
- VERSION=$(echo "$METADATA" \
-            | grep -o '<version>[^<]*</version>' \
- | sed 's/<[^>]*>//g' \
- | grep -v -E '(alpha|beta|rc)' \
- | sort -V \
- | tail -1)
- if [ -z "$VERSION" ]; then
- echo "::error::Failed to fetch latest stable Maven version"
- exit 1
- fi
- echo "version=$VERSION" >> "$GITHUB_OUTPUT"
- echo "Latest stable Maven version: $VERSION"
-
- - name: Fetch latest stable Maven Surefire version
+ python3 .github/scripts/dependency_age.py select-maven \
+ --group-id org.apache.maven \
+ --artifact-id apache-maven \
+ --prerelease-pattern alpha \
+ --prerelease-pattern beta \
+ --prerelease-pattern rc \
+ --min-age-hours "${MIN_DEPENDENCY_AGE_HOURS}" \
+ --github-output "$GITHUB_OUTPUT"
+
+ - name: Resolve latest eligible stable Maven Surefire version
id: surefire
run: |
- METADATA=$(curl -sf https://repo.maven.apache.org/maven2/org/apache/maven/plugins/maven-surefire-plugin/maven-metadata.xml)
- # Get all versions, filter out alpha/beta, take the latest
- VERSION=$(echo "$METADATA" \
- | grep -o '[^<]*' \
- | sed 's/<[^>]*>//g' \
- | grep -v -E '(alpha|beta)' \
- | sort -V \
- | tail -1)
- if [ -z "$VERSION" ]; then
- echo "::error::Failed to fetch latest stable Maven Surefire version"
- exit 1
- fi
- echo "version=$VERSION" >> "$GITHUB_OUTPUT"
- echo "Latest stable Maven Surefire version: $VERSION"
+ python3 .github/scripts/dependency_age.py select-maven \
+ --group-id org.apache.maven.plugins \
+ --artifact-id maven-surefire-plugin \
+ --prerelease-pattern alpha \
+ --prerelease-pattern beta \
+ --min-age-hours "${MIN_DEPENDENCY_AGE_HOURS}" \
+ --github-output "$GITHUB_OUTPUT"
- name: Update properties files
env:
@@ -80,19 +65,20 @@ jobs:
MAVEN_VERSION: ${{ steps.maven.outputs.version }}
SUREFIRE_VERSION: ${{ steps.surefire.outputs.version }}
run: |
- echo "Writing resolved versions to properties files:"
- echo " Gradle: ${GRADLE_VERSION}"
- echo " Maven: ${MAVEN_VERSION}"
- echo " Maven Surefire: ${SUREFIRE_VERSION}"
+ echo "Writing latest eligible stable versions (>=${MIN_DEPENDENCY_AGE_HOURS}h old) to properties files:"
+ echo " Gradle: ${GRADLE_VERSION} (published ${{ steps.gradle.outputs.published_at }})"
+ echo " Maven: ${MAVEN_VERSION} (published ${{ steps.maven.outputs.published_at }})"
+ echo " Maven Surefire: ${SUREFIRE_VERSION} (published ${{ steps.surefire.outputs.published_at }})"
+ echo " Eligibility cutoff: ${{ steps.gradle.outputs.cutoff_at }}"
printf '%s\n' \
- "# Pinned \"latest\" versions for CI Visibility Gradle smoke tests." \
+ "# Pinned latest eligible stable versions (>=${MIN_DEPENDENCY_AGE_HOURS}h old) for CI Visibility Gradle smoke tests." \
"# Updated automatically by the update-smoke-test-latest-versions workflow." \
"gradle.version=${GRADLE_VERSION}" \
> dd-smoke-tests/gradle/src/test/resources/latest-tool-versions.properties
printf '%s\n' \
- "# Pinned \"latest\" versions for CI Visibility Maven smoke tests." \
+ "# Pinned latest eligible stable versions (>=${MIN_DEPENDENCY_AGE_HOURS}h old) for CI Visibility Maven smoke tests." \
"# Updated automatically by the update-smoke-test-latest-versions workflow." \
"maven.version=${MAVEN_VERSION}" \
"maven-surefire.version=${SUREFIRE_VERSION}" \
@@ -102,7 +88,7 @@ jobs:
id: check-changes
run: |
if [[ -z "$(git status -s)" ]]; then
- echo "No changes detected — pinned versions are already up to date."
+ echo "No changes detected — pinned versions are already on the latest eligible stable releases."
echo "has_changes=false" >> "$GITHUB_OUTPUT"
else
echo "Changes detected in the following files:"
@@ -152,14 +138,17 @@ jobs:
--body "$(cat <<'EOF'
# What Does This Do
- This PR updates the pinned "latest" tool versions used by CI Visibility smoke tests:
- - Gradle: ${{ steps.gradle.outputs.version }}
- - Maven: ${{ steps.maven.outputs.version }}
- - Maven Surefire: ${{ steps.surefire.outputs.version }}
+ This PR updates the pinned latest eligible stable tool versions used by CI Visibility smoke tests.
+ Only releases at least ${{ env.MIN_DEPENDENCY_AGE_HOURS }} hours old are eligible.
+
+ - Gradle: ${{ steps.gradle.outputs.version }} (published ${{ steps.gradle.outputs.published_at }})
+ - Maven: ${{ steps.maven.outputs.version }} (published ${{ steps.maven.outputs.published_at }})
+ - Maven Surefire: ${{ steps.surefire.outputs.version }} (published ${{ steps.surefire.outputs.published_at }})
+ - Eligibility cutoff: ${{ steps.gradle.outputs.cutoff_at }}
# Motivation
- Keep smoke tests running against the latest stable versions of build tools.
+ Keep smoke tests running against the latest eligible stable versions of build tools without adopting releases in their first 48 hours.
# Contributor Checklist