diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml index d4c1d05a1..a649c8af1 100644 --- a/.github/workflows/linux.yml +++ b/.github/workflows/linux.yml @@ -178,6 +178,7 @@ jobs: --platform linux \ --labels "${STEPS_GET_LABELS_OUTPUTS_LABELS}" \ --max-shards 2 \ + --event ${{ github.event_name }} \ ${{ (steps.check-pythonbuild.outputs.changed == 'true' || github.ref == 'refs/heads/main') && '--force-crate-build' || '' }} \ --free-runners \ > matrix.json diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml index ab97c1cd0..519f3c6b5 100644 --- a/.github/workflows/macos.yml +++ b/.github/workflows/macos.yml @@ -92,7 +92,7 @@ jobs: - name: Generate build matrix id: set-matrix run: | -uv run ci-matrix.py --platform darwin --labels "${STEPS_GET_LABELS_OUTPUTS_LABELS}" ${{ (steps.check-pythonbuild.outputs.changed == 'true' || github.ref == 'refs/heads/main') && '--force-crate-build' || '' }} --free-runners > matrix.json +uv run ci-matrix.py --platform darwin --labels "${STEPS_GET_LABELS_OUTPUTS_LABELS}" --event ${{ github.event_name }} ${{ (steps.check-pythonbuild.outputs.changed == 'true' || github.ref == 'refs/heads/main') && '--force-crate-build' || '' }} --free-runners > matrix.json # Extract python-build matrix echo "matrix=$(jq -c '."python-build"' matrix.json)" >> $GITHUB_OUTPUT diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml index fbf633419..9cdb6ade5 100644 --- a/.github/workflows/windows.yml +++ b/.github/workflows/windows.yml @@ -92,7 +92,7 @@ jobs: - name: Generate build matrix id: set-matrix run: | -uv run ci-matrix.py --platform windows --labels "${STEPS_GET_LABELS_OUTPUTS_LABELS}" ${{ (steps.check-pythonbuild.outputs.changed == 'true' || github.ref == 'refs/heads/main') && '--force-crate-build' || '' }} --free-runners > matrix.json +uv run ci-matrix.py --platform windows --labels "${STEPS_GET_LABELS_OUTPUTS_LABELS}" --event ${{ github.event_name }} ${{ (steps.check-pythonbuild.outputs.changed == 'true' || github.ref == 'refs/heads/main') && '--force-crate-build' || '' }} --free-runners > matrix.json # Extract python-build matrix echo "matrix=$(jq -c '."python-build"' matrix.json)" >> $GITHUB_OUTPUT
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index c7e0193b5..4a9716cb3 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -8,28 +8,41 @@ Building distributions See the [documentation](https://gregoryszorc.com/docs/python-build-standalone/main/building.html) for instructions on building distributions locally. -CI labels -========= -By default, submitting a pull request triggers a complete build of all -distributions in CI, which can be time-consuming. +Pull request labels +=================== +By default, pull requests build a small subset of targets defined in +``ci-defaults.yaml`` under ``pull_request``. Pushes to ``main`` build the full +matrix from ``ci-targets.yaml``. -To conserve CI resources and reduce build times, you can limit the matrix of -distributions built by applying specific labels to your pull request. Only -distributions matching the specified labels will be built. +Pull request labels can be used to change what CI builds: -The following label prefixes can be used to customize the build matrix: +* ``platform:`` filters the selected targets by platform. +* ``arch:`` filters the selected targets by architecture. +* ``libc:`` filters the selected targets by libc. +* ``python:`` filters the selected Python versions. +* ``build:`` filters the selected build options by component. -* `platform` -* `python` -* `build` -* `arch` -* `libc` +The ``:all`` labels expand only their own dimension: -To bypass CI entirely for changes that do not affect the build (such as -documentation updates), use the `ci:skip` label. +* ``platform:all`` expands the selected platforms. +* ``arch:all`` expands the selected architectures.
+* ``libc:all`` expands the selected libc variants. +* ``python:all`` expands the selected Python versions. +* ``build:all`` expands the selected build options. -Please utilize these tags when appropriate for your changes to minimize CI -resource consumption. +Use ``ci:all-targets`` to build the full matrix from ``ci-targets.yaml``. + +Examples: + +* ``platform:linux`` builds only the Linux targets from ``ci-defaults.yaml``. +* ``python:3.13`` builds the default targets with Python 3.13. +* ``build:pgo`` builds the selected targets whose build options include ``pgo``. +* ``platform:linux,arch:all,libc:all,python:all,build:all`` builds the full + Linux matrix. + +To bypass CI entirely for changes that do not affect the build, use the +``ci:skip`` label. The ``documentation`` label is treated the same way. To run +a dry-run build matrix, use ``ci:dry-run``. Releases ======== diff --git a/ci-defaults.yaml b/ci-defaults.yaml new file mode 100644 index 000000000..56f612675 --- /dev/null +++ b/ci-defaults.yaml @@ -0,0 +1,28 @@ +# Describes the default targets that CI will build for different events. 
+ +pull_request: + python_version: "3.14" + + targets: + x86_64-pc-windows-msvc: + build_options: + - pgo + + aarch64-apple-darwin: + build_options: + - pgo+lto + + x86_64-unknown-linux-gnu: + build_options: + - pgo+lto + - freethreaded+pgo+lto + + x86_64-unknown-linux-musl: + build_options: + - lto + - lto+static + - freethreaded+lto + + armv7-unknown-linux-gnueabihf: + build_options: + - lto diff --git a/ci-matrix.py b/ci-matrix.py index e4b1f3ba8..247f84dd7 100644 --- a/ci-matrix.py +++ b/ci-matrix.py @@ -16,6 +16,7 @@ CI_TARGETS_YAML = "ci-targets.yaml" CI_RUNNERS_YAML = "ci-runners.yaml" +CI_DEFAULTS_YAML = "ci-defaults.yaml" CI_EXTRA_SKIP_LABELS = ["documentation"] CI_MATRIX_SIZE_LIMIT = 256 # The maximum size of a matrix in GitHub Actions @@ -40,7 +41,7 @@ def meets_conditional_version(version: str, min_version: str) -> bool: def parse_labels(labels: str | None) -> dict[str, set[str]]: - """Parse labels into a dict of category filters.""" + """Parse labels into a dict of category -> set of values.""" if not labels: return {} @@ -75,30 +76,157 @@ def parse_labels(labels: str | None) -> dict[str, set[str]]: return result -def should_include_entry(entry: dict[str, str], filters: dict[str, set[str]]) -> bool: - """Check if an entry satisfies the label filters.""" - if filters.get("directives") and "skip" in filters["directives"]: - return False +def get_all_build_options(ci_config: dict[str, Any], target_triple: str) -> list[str]: + """Get all build options (including conditional) for a target from ci-targets.yaml.""" + for platform_config in ci_config.values(): + if target_triple in platform_config: + config = platform_config[target_triple] + options = list(config["build_options"]) + for conditional in config.get("build_options_conditional", []): + options.extend(conditional["options"]) + return options + raise KeyError(f"Target triple {target_triple!r} not found in ci-targets.yaml") + + +def find_target_platform(ci_config: dict[str, Any], target_triple: str) 
-> str: + """Find which platform a target triple belongs to in ci-targets.yaml.""" + for platform, platform_config in ci_config.items(): + if target_triple in platform_config: + return platform + raise KeyError(f"Target triple {target_triple!r} not found in ci-targets.yaml") + + +def expand_default_triples( + ci_config: dict[str, Any], + pull_request_defaults: dict[str, Any], + labels: dict[str, set[str]], +) -> set[str]: + """Compute the set of allowed target triples for a pull request. + + Starts from the explicit defaults in ci-defaults.yaml. When a target + dimension has an :all label, that dimension is relaxed and additional + triples from ci-targets.yaml that match on the non-expanded dimensions + are included. + """ + default_triples = set(pull_request_defaults["targets"]) + + platform_labels = labels.get("platform", set()) + arch_labels = labels.get("arch", set()) + libc_labels = labels.get("libc", set()) + + platform_filters = platform_labels - {"all"} + arch_filters = arch_labels - {"all"} + libc_filters = libc_labels - {"all"} + + expand_platform = "all" in platform_labels or bool(platform_filters) + expand_arch = "all" in arch_labels or bool(arch_filters) + expand_libc = "all" in libc_labels or bool(libc_filters) + + if not (expand_platform or expand_arch or expand_libc): + return default_triples + + # Build reference tuples from the default triples. + default_attrs = [] + for triple in default_triples: + platform = find_target_platform(ci_config, triple) + config = ci_config[platform][triple] + default_attrs.append( + ( + platform, + config["arch"], + config.get("arch_variant"), + config.get("libc"), + ) + ) - if filters.get("platform") and entry["platform"] not in filters["platform"]: + # Include any triple whose non-expanded dimensions match a default. 
+ allowed = set(default_triples) + for platform, platform_config in ci_config.items(): + for triple, config in platform_config.items(): + for d_platform, d_arch, d_arch_variant, d_libc in default_attrs: + if platform_filters: + if platform not in platform_filters: + continue + elif "all" not in platform_labels and platform != d_platform: + continue + + if arch_filters: + if config["arch"] not in arch_filters: + continue + if config.get("arch_variant") != d_arch_variant: + continue + elif "all" not in arch_labels and ( + config["arch"] != d_arch + or config.get("arch_variant") != d_arch_variant + ): + continue + + if libc_filters: + if config.get("libc") not in libc_filters: + continue + elif "all" not in libc_labels and config.get("libc") != d_libc: + continue + allowed.add(triple) + break + + return allowed + + +def should_include_entry( + entry: dict[str, str], + labels: dict[str, set[str]], + pull_request_defaults: dict[str, Any] | None = None, + allowed_triples: set[str] | None = None, +) -> bool: + """Check if a matrix entry should be included. + + For pull requests, entries are restricted to the allowed target set + (computed by expand_default_triples), the default python version, and + the curated build options — unless overridden by labels. For pushes + (pull_request_defaults is None), only label filters apply. + """ + if pull_request_defaults is not None: + triple = entry["target_triple"] + default_targets = pull_request_defaults["targets"] + + # Target must be in the allowed set. + if allowed_triples is not None and triple not in allowed_triples: + return False + + # Python: restrict to default version unless python labels override. + if not labels.get("python"): + if entry["python"] != pull_request_defaults["python_version"]: + return False + + # Build options: restrict to curated defaults for default triples + # unless build labels override. Non-default triples (brought in by + # :all expansion) are unrestricted. 
+ if not labels.get("build") and triple in default_targets: + if entry["build_options"] not in default_targets[triple]["build_options"]: + return False + + # Label filters + platform_filters = labels.get("platform", set()) - {"all"} + if platform_filters and entry["platform"] not in platform_filters: return False - if filters.get("python") and entry["python"] not in filters["python"]: + python_filters = labels.get("python", set()) - {"all"} + if python_filters and entry["python"] not in python_filters: return False - if filters.get("arch") and entry["arch"] not in filters["arch"]: + arch_filters = labels.get("arch", set()) - {"all"} + if arch_filters and entry["arch"] not in arch_filters: return False - if ( - filters.get("libc") - and entry.get("libc") - and entry["libc"] not in filters["libc"] - ): + libc_filters = labels.get("libc", set()) - {"all"} + if libc_filters and entry.get("libc") and entry["libc"] not in libc_filters: return False - if filters.get("build"): - build_options = set(entry.get("build_options", "").split("+")) - if not all(f in build_options for f in filters["build"]): + build_filters = labels.get("build", set()) - {"all"} + if build_filters: + build_components = set(entry.get("build_options", "").split("+")) + required = {c for f in build_filters for c in f.split("+")} + if not required.issubset(build_components): return False return True @@ -106,17 +234,32 @@ def should_include_entry(entry: dict[str, str], filters: dict[str, set[str]]) -> def generate_docker_matrix_entries( runners: dict[str, Any], + python_entries: list[dict[str, str]], platform_filter: str | None = None, free_runners: bool = False, ) -> list[dict[str, str]]: - """Generate matrix entries for docker image builds.""" + """Generate matrix entries for Docker image builds.""" if platform_filter and platform_filter != "linux": return [] + needed_archs = { + runners[entry["runner"]]["arch"] + for entry in python_entries + if entry.get("platform") == "linux" + } + matrix_entries = 
[] for image in DOCKER_BUILD_IMAGES: + if image["arch"] not in needed_archs: + continue + # Find appropriate runner for Linux platform with the specified architecture runner = find_runner(runners, "linux", image["arch"], free_runners) entry = { "name": image["name"], @@ -203,14 +346,6 @@ def generate_python_build_matrix_entries( free_runners, ) - # Apply label filters if present - if label_filters: - matrix_entries = [ - entry - for entry in matrix_entries - if should_include_entry(entry, label_filters) - ] - return matrix_entries @@ -327,6 +462,50 @@ def add_python_build_entries_for_config( matrix_entries.append(entry) +def validate_pull_request_defaults( + ci_config: dict[str, Any], pull_request_defaults: dict[str, Any] +) -> None: + """Validate the pull_request defaults in ci-defaults.yaml.""" + all_triples = set() + for platform_config in ci_config.values(): + all_triples.update(platform_config.keys()) + + for triple in pull_request_defaults["targets"]: + if triple not in all_triples: + print( + f"error: target triple {triple!r} in {CI_DEFAULTS_YAML}:pull_request " + f"not found in {CI_TARGETS_YAML}", + file=sys.stderr, + ) + sys.exit(1) + + # Validate that each build option listed is valid for the target. + all_options = set(get_all_build_options(ci_config, triple)) + for option in pull_request_defaults["targets"][triple]["build_options"]: + if option not in all_options: + print( + f"error: build option {option!r} for {triple} in " + f"{CI_DEFAULTS_YAML}:pull_request not found in {CI_TARGETS_YAML} " + f"(valid: {sorted(all_options)})", + file=sys.stderr, + ) + sys.exit(1) + + # Validate that the default python version exists in ci-targets.yaml.
+ default_version = pull_request_defaults["python_version"] + for triple in pull_request_defaults["targets"]: + platform = find_target_platform(ci_config, triple) + ci_versions = ci_config[platform][triple]["python_versions"] + if default_version not in ci_versions: + print( + f"error: python version {default_version!r} in " + f"{CI_DEFAULTS_YAML}:pull_request not available for {triple} in " + f"{CI_TARGETS_YAML} (valid: {ci_versions})", + file=sys.stderr, + ) + sys.exit(1) + + def parse_args() -> argparse.Namespace: parser = argparse.ArgumentParser( description="Generate a JSON matrix for building distributions in CI" @@ -344,7 +523,12 @@ def parse_args() -> argparse.Namespace: ) parser.add_argument( "--labels", - help="Comma-separated list of labels to filter by (e.g., 'platform:darwin,python:3.13,build:debug'), all must match.", + help="Comma-separated list of labels to filter by (e.g., 'platform:linux,python:3.13')", + ) + parser.add_argument( + "--event", + choices=["pull_request", "push"], + help="The GitHub event type. 
When 'pull_request', uses ci-defaults.yaml for the default subset.", ) parser.add_argument( "--free-runners", @@ -370,11 +554,14 @@ def main() -> None: labels = parse_labels(args.labels) with open(CI_TARGETS_YAML) as f: - config = yaml.safe_load(f) + ci_config = yaml.safe_load(f) with open(CI_RUNNERS_YAML) as f: runners = yaml.safe_load(f) + with open(CI_DEFAULTS_YAML) as f: + ci_defaults = yaml.safe_load(f) or {} + # If only free runners are allowed, reduce to a subset if args.free_runners: runners = { @@ -383,16 +570,48 @@ def main() -> None: if runner_config.get("free") } + # Check for skip directive + if labels.get("directives") and "skip" in labels["directives"]: + # Emit empty matrices + result = {} + if args.matrix_type in ["python-build", "all"]: + if args.max_shards: + result["python-build"] = { + str(i): {"include": []} for i in range(args.max_shards) + } + else: + result["python-build"] = {"include": []} + if args.matrix_type in ["docker-build", "all"]: + result["docker-build"] = {"include": []} + if args.matrix_type in ["crate-build", "all"]: + result["crate-build"] = {"include": []} + print(json.dumps(result)) + return + + event_defaults = ci_defaults.get(args.event) if args.event else None + if "all-targets" in labels.get("directives", set()): + event_defaults = None + + allowed_triples = None + if event_defaults is not None: + validate_pull_request_defaults(ci_config, event_defaults) + allowed_triples = expand_default_triples(ci_config, event_defaults, labels) + result = {} - # Generate python build entries + # Generate all python build entries, then filter python_entries = generate_python_build_matrix_entries( - config, + ci_config, runners, args.platform, labels, args.free_runners, ) + python_entries = [ + entry + for entry in python_entries + if should_include_entry(entry, labels, event_defaults, allowed_triples) + ] # Output python-build matrix if requested if args.matrix_type in ["python-build", "all"]: @@ -420,18 +639,19 @@ def main() -> None: 
result["python-build"] = {"include": python_entries} # Generate docker-build matrix if requested - # Only include docker builds if there are Linux python builds + # Only include docker builds if there are Linux python builds. if args.matrix_type in ["docker-build", "all"]: - # Check if we have any Linux python builds + # Check if we have any Linux python builds. has_linux_builds = any( entry.get("platform") == "linux" for entry in python_entries ) - # If no platform filter or explicitly requesting docker-build only, include docker builds - # Otherwise, only include if there are Linux python builds + # If no platform filter or explicitly requesting docker-build only, include docker builds. + # Otherwise, only include if there are Linux python builds. if args.matrix_type == "docker-build" or has_linux_builds: docker_entries = generate_docker_matrix_entries( runners, + python_entries, args.platform, args.free_runners, ) @@ -442,7 +662,7 @@ def main() -> None: crate_entries = generate_crate_build_matrix_entries( python_entries, runners, - config, + ci_config, # Use the full target config so --force-crate-build adds all native crate builds. args.force_crate_build, args.platform, args.free_runners, diff --git a/cpython-unix/build-cpython.sh b/cpython-unix/build-cpython.sh index fdb49c588..377f214f9 100755 --- a/cpython-unix/build-cpython.sh +++ b/cpython-unix/build-cpython.sh @@ -154,12 +154,7 @@ else patch -p1 -i "${ROOT}/patch-macos-link-extension-modules.patch" fi -# Also on macOS, the `python` executable is linked against libraries defined by statically -# linked modules. But those libraries should only get linked into libpython, not the -# executable. This behavior is kinda suspect on all platforms, as it could be adding -# library dependencies that shouldn't need to be there. -# PYSTANDALONE: skip this patch.
# The macOS code for sniffing for _dyld_shared_cache_contains_path falls back on a # possibly inappropriate code path if a configure time check fails. This is not # appropriate for certain cross-compiling scenarios. See discussion at @@ -438,19 +441,12 @@ CONFIGURE_FLAGS=" # Build a libpython3.x.so, but statically link the interpreter against # libpython. -# -# For now skip this on macos, because it causes some linker failures. Note that -# this patch mildly conflicts with the macos-only patch-python-link-modules -# applied above, so you will need to resolve that conflict if you re-enable -# this for macos. -if [[ "${PYBUILD_PLATFORM}" != macos* ]]; then - if [ -n "${PYTHON_MEETS_MINIMUM_VERSION_3_12}" ]; then - patch -p1 -i "${ROOT}/patch-python-configure-add-enable-static-libpython-for-interpreter.patch" - else - patch -p1 -i "${ROOT}/patch-python-configure-add-enable-static-libpython-for-interpreter-${PYTHON_MAJMIN_VERSION}.patch" - fi - CONFIGURE_FLAGS="${CONFIGURE_FLAGS} --enable-static-libpython-for-interpreter" +if [ -n "${PYTHON_MEETS_MINIMUM_VERSION_3_12}" ]; then + patch -p1 -i "${ROOT}/patch-python-configure-add-enable-static-libpython-for-interpreter.patch" +else + patch -p1 -i "${ROOT}/patch-python-configure-add-enable-static-libpython-for-interpreter-${PYTHON_MAJMIN_VERSION}.patch" fi +CONFIGURE_FLAGS="${CONFIGURE_FLAGS} --enable-static-libpython-for-interpreter" if [ "${CC}" = "musl-clang" ]; then # In order to build the _blake2 extension module with SSE3+ instructions, we need @@ -631,12 +627,6 @@ if [[ -n "${PYTHON_MEETS_MINIMUM_VERSION_3_14}" && -n "${CROSS_COMPILING}" && "$ PROFILE_TASK="${PROFILE_TASK} --ignore test_strftime_y2k" fi -# On 3.14+ `test_json.test_recursion.TestCRecursion.test_highly_nested_objects_decoding` fails during -# PGO due to RecursionError not being raised as expected.
See https://github.com/python/cpython/issues/140125 -if [[ -n "${PYTHON_MEETS_MINIMUM_VERSION_3_14}" ]]; then - PROFILE_TASK="${PROFILE_TASK} --ignore test_json" -fi - # PGO optimized / BOLT instrumented binaries segfault in a test_bytes test. Skip it. if [[ -n "${PYTHON_MEETS_MINIMUM_VERSION_3_13}" && "${TARGET_TRIPLE}" == x86_64* ]]; then PROFILE_TASK="${PROFILE_TASK} --ignore test.test_bytes.BytesTest.test_from_format" @@ -1376,6 +1366,13 @@ if [ -d "${TOOLS_PATH}/deps/lib/tcl9" ]; then ) fi +# Prune the tk demos which are > 1 MB and not used +rm -rf "${ROOT}/out/python/install/lib/tk9.0/demos" + +# Prune the tcl/tk translations +rm -rf "${ROOT}/out/python/install/lib/tcl9.0/msgs" +rm -rf "${ROOT}/out/python/install/lib/tk9.0/msgs" + # Copy the terminfo database if present. if [ -d "${TOOLS_PATH}/deps/usr/share/terminfo" ]; then cp -av "${TOOLS_PATH}/deps/usr/share/terminfo" "${ROOT}/out/python/install/share/" diff --git a/cpython-unix/build-patchelf.sh b/cpython-unix/build-patchelf.sh index c21663303..1acf6c559 100755 --- a/cpython-unix/build-patchelf.sh +++ b/cpython-unix/build-patchelf.sh @@ -13,6 +13,37 @@ tar -xf "patchelf-${PATCHELF_VERSION}.tar.bz2" pushd patchelf-0.13.1.20211127.72b6d44 +# TODO: Drop this patch once patchelf is updated to 0.14.0 or newer, +# which includes native LoongArch64 support. 
+# See: https://github.com/astral-sh/python-build-standalone/issues/1106 +if [[ "${TARGET_TRIPLE}" = loongarch64* ]]; then + patch -p1 << 'EOF' +diff --git a/src/patchelf.cc b/src/patchelf.cc +index 2b7ec8b9..06f41c6f 100644 +--- a/src/patchelf.cc ++++ b/src/patchelf.cc +@@ -57,6 +57,10 @@ static int forcedPageSize = DEFAULT_PAGESIZE; + static int forcedPageSize = -1; + #endif + ++#ifndef EM_LOONGARCH ++#define EM_LOONGARCH 258 ++#endif ++ + using FileContents = std::shared_ptr>; + + #define ElfFileParams class Elf_Ehdr, class Elf_Phdr, class Elf_Shdr, class Elf_Addr, class Elf_Off, class Elf_Dyn, class Elf_Sym, class Elf_Verneed, class Elf_Versym +@@ -460,6 +464,7 @@ unsigned int ElfFile::getPageSize() const + case EM_PPC64: + case EM_AARCH64: + case EM_TILEGX: ++ case EM_LOONGARCH: + return 0x10000; + default: + return 0x1000; +EOF +fi + CC="${HOST_CC}" CXX="${HOST_CXX}" CFLAGS="${EXTRA_HOST_CFLAGS} -fPIC" CPPFLAGS="${EXTRA_HOST_CFLAGS} -fPIC" \ ./configure \ --build="${BUILD_TRIPLE}" \ diff --git a/cpython-unix/patch-jit-llvm-version-3.14.patch b/cpython-unix/patch-jit-llvm-version-3.14.patch index 48d5e63bd..a68bfadd2 100644 --- a/cpython-unix/patch-jit-llvm-version-3.14.patch +++ b/cpython-unix/patch-jit-llvm-version-3.14.patch @@ -1,12 +1,12 @@ diff --git a/Tools/jit/_llvm.py b/Tools/jit/_llvm.py --- a/Tools/jit/_llvm.py +++ b/Tools/jit/_llvm.py -@@ -8,7 +8,7 @@ - import subprocess - import typing +@@ -10,7 +10,7 @@ import typing + + import _targets -_LLVM_VERSION = 19 +_LLVM_VERSION = 22 - _LLVM_VERSION_PATTERN = re.compile(rf"version\s+{_LLVM_VERSION}\.\d+\.\d+\S*\s+") - - _P = typing.ParamSpec("_P") + _LLVM_VERSION_PATTERN = re.compile( + rf"(?platform diff --git a/cpython-unix/patch-python-link-modules-3.11.patch b/cpython-unix/patch-python-link-modules-3.11.patch deleted file mode 100644 index 8bc7aee63..000000000 --- a/cpython-unix/patch-python-link-modules-3.11.patch +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/Makefile.pre.in b/Makefile.pre.in -index 
b356f6293e..89fddd4d4e 100644 ---- a/Makefile.pre.in -+++ b/Makefile.pre.in -@@ -702,7 +702,7 @@ clinic: check-clean-src $(srcdir)/Modules/_blake2/blake2s_impl.c - - # Build the interpreter - $(BUILDPYTHON): Programs/python.o $(LINK_PYTHON_DEPS) -- $(LINKCC) $(PY_CORE_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/python.o $(LINK_PYTHON_OBJS) $(LIBS) $(MODLIBS) $(SYSLIBS) -+ $(LINKCC) $(PY_CORE_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/python.o $(LINK_PYTHON_OBJS) $(LIBS) $(SYSLIBS) - - platform: $(PYTHON_FOR_BUILD_DEPS) pybuilddir.txt - $(RUNSHARED) $(PYTHON_FOR_BUILD) -c 'import sys ; from sysconfig import get_platform ; print("%s-%d.%d" % (get_platform(), *sys.version_info[:2]))' >platform diff --git a/cpython-unix/patch-python-link-modules-3.15.patch b/cpython-unix/patch-python-link-modules-3.15.patch deleted file mode 100644 index 5225bb6f9..000000000 --- a/cpython-unix/patch-python-link-modules-3.15.patch +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/Makefile.pre.in b/Makefile.pre.in -index 120a6add385..4d8abc5256a 100644 ---- a/Makefile.pre.in -+++ b/Makefile.pre.in -@@ -990,7 +990,7 @@ clinic-tests: check-clean-src $(srcdir)/Lib/test/clinic.test.c - - # Build the interpreter - $(BUILDPYTHON): Programs/python.o $(LINK_PYTHON_DEPS) -- $(LINKCC) $(PY_CORE_EXE_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/python.o $(LINK_PYTHON_OBJS) $(LIBS) $(MODLIBS) $(SYSLIBS) -+ $(LINKCC) $(PY_CORE_EXE_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/python.o $(LINK_PYTHON_OBJS) $(LIBS) $(SYSLIBS) - - platform: $(PYTHON_FOR_BUILD_DEPS) pybuilddir.txt - $(RUNSHARED) $(PYTHON_FOR_BUILD) -c 'import sys ; from sysconfig import get_platform ; print("%s-%d.%d" % (get_platform(), *sys.version_info[:2]))' >platform diff --git a/cpython-unix/targets.yml b/cpython-unix/targets.yml index 588c2db79..d0db3e39e 100644 --- a/cpython-unix/targets.yml +++ b/cpython-unix/targets.yml @@ -267,6 +267,7 @@ loongarch64-unknown-linux-gnu: - '3.12' - '3.13' - '3.14' + - '3.15' docker_image_suffix: .cross-loongarch64 
host_cc: /usr/bin/x86_64-linux-gnu-gcc host_cxx: /usr/bin/x86_64-linux-gnu-g++ @@ -275,6 +276,7 @@ loongarch64-unknown-linux-gnu: target_ldflags: # Hardening - '-Wl,-z,noexecstack' + - '-Wl,-z,max-page-size=0x10000' needs: - autoconf - bdb diff --git a/pythonbuild/disttests/__init__.py b/pythonbuild/disttests/__init__.py index 7b92f43f2..5062a91d2 100644 --- a/pythonbuild/disttests/__init__.py +++ b/pythonbuild/disttests/__init__.py @@ -424,6 +424,42 @@ def assertPythonWorks(path: Path, argv0: Optional[str] = None): with self.subTest(msg="weird argv[0]"): assertPythonWorks(sys.executable, argv0="/dev/null") + @unittest.skipUnless(sys.platform == "linux", "Linux-specific prctl") + @unittest.skipIf( + "static" in os.environ["BUILD_OPTIONS"], + "cannot import libc on static builds", + ) + def test_nx_thread_creation(self): + "Test that thread creation works under e.g. systemd's MemoryDenyWriteExecute." + # Note that NX cannot be unset so this pollutes the current process, + # but if something else breaks under NX we probably want to know! 
+ import ctypes + import threading + + libc = ctypes.CDLL(None, use_errno=True) + # + PR_SET_MDWE = 65 + PR_GET_MDWE = 66 + PR_MDWE_REFUSE_EXEC_GAIN = 1 << 0 + PR_MDWE_NO_INHERIT = 1 << 1 + mdwe = libc.prctl(PR_GET_MDWE, 0, 0, 0, 0) + if mdwe < 0: + self.skipTest("prctl(PR_SET_MDWE) unsupported") + elif not (mdwe & PR_MDWE_REFUSE_EXEC_GAIN): + if ( + libc.prctl( + PR_SET_MDWE, PR_MDWE_REFUSE_EXEC_GAIN | PR_MDWE_NO_INHERIT, 0, 0, 0 + ) + != 0 + ): + self.fail("prctl(PR_SET_MDWE): " + os.strerror(ctypes.get_errno())) + + a = [] + t = threading.Thread(target=a.append, args=("Thread was here",)) + t.start() + t.join() + self.assertEqual(a, ["Thread was here"]) + if __name__ == "__main__": unittest.main() diff --git a/pythonbuild/downloads.py b/pythonbuild/downloads.py index 8a28180e8..92466247b 100644 --- a/pythonbuild/downloads.py +++ b/pythonbuild/downloads.py @@ -84,10 +84,10 @@ "python_tag": "cp313", }, "cpython-3.14": { - "url": "https://www.python.org/ftp/python/3.14.4/Python-3.14.4.tar.xz", - "size": 23855332, - "sha256": "d923c51303e38e249136fc1bdf3568d56ecb03214efdef48516176d3d7faaef8", - "version": "3.14.4", + "url": "https://www.python.org/ftp/python/3.14.5/Python-3.14.5rc1.tar.xz", + "size": 23894408, + "sha256": "67ee56f36fc22e5ada84d452430362e71081804c4f85c33dc5bf4206c27f973c", + "version": "3.14.5rc1", "licenses": ["Python-2.0", "CNRI-Python"], "license_file": "LICENSE.cpython.txt", "python_tag": "cp314", @@ -279,10 +279,10 @@ "version": "0.13.1", }, "pip": { - "url": "https://files.pythonhosted.org/packages/de/f0/c81e05b613866b76d2d1066490adf1a3dbc4ee9d9c839961c3fc8a6997af/pip-26.0.1-py3-none-any.whl", - "size": 1787723, - "sha256": "bdb1b08f4274833d62c1aa29e20907365a2ceb950410df15fc9521bad440122b", - "version": "26.0.1", + "url": "https://files.pythonhosted.org/packages/70/7a/be4bd8bcbb24ea475856dd68159d78b03b2bb53dae369f69c9606b8888f5/pip-26.1-py3-none-any.whl", + "size": 1812804, + "sha256": 
"4e8486d821d814b77319acb7b9e8bf5a4ee7590a643e7cb21029f209be8573c1", + "version": "26.1", }, "readline": { # Mirrored from https://ftp.gnu.org/gnu/readline/readline-8.2.tar.gz