diff --git a/etc/spack/defaults/windows/config.yaml b/etc/spack/defaults/windows/config.yaml
index 367bf831cff69f..de173f296559c8 100644
--- a/etc/spack/defaults/windows/config.yaml
+++ b/etc/spack/defaults/windows/config.yaml
@@ -3,3 +3,4 @@ config:
concretizer: clingo
build_stage::
- '$spack/.staging'
+ cmake_ext_build: true
\ No newline at end of file
diff --git a/etc/spack/defaults/windows/packages.yaml b/etc/spack/defaults/windows/packages.yaml
index 863cf7cf182cbd..c72ba8c0330fbd 100644
--- a/etc/spack/defaults/windows/packages.yaml
+++ b/etc/spack/defaults/windows/packages.yaml
@@ -19,3 +19,4 @@ packages:
- msvc
providers:
mpi: [msmpi]
+ gl: [wgl]
diff --git a/lib/spack/spack/build_systems/cmake.py b/lib/spack/spack/build_systems/cmake.py
index 06822c89a99ab9..d7eef9dadaa6ed 100644
--- a/lib/spack/spack/build_systems/cmake.py
+++ b/lib/spack/spack/build_systems/cmake.py
@@ -375,7 +375,12 @@ def build_dirname(self):
@property
def build_directory(self):
"""Full-path to the directory to use when building the package."""
- return os.path.join(self.pkg.stage.path, self.build_dirname)
+ stage_path = (
+ os.path.join(self.pkg.stage.path, self.build_dirname)
+ if not self.pkg.spec.dag_hash(7) in spack.stage.CMakeBuildStage.dispatch
+ else spack.stage.CMakeBuildStage.dispatch[self.pkg.spec.dag_hash(7)]
+ )
+ return stage_path
def cmake_args(self):
"""List of all the arguments that must be passed to cmake, except:
diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py
index 81cfebc19eac9d..085d9c0590c086 100644
--- a/lib/spack/spack/cmd/install.py
+++ b/lib/spack/spack/cmd/install.py
@@ -51,6 +51,7 @@ def install_kwargs_from_args(args):
"fail_fast": args.fail_fast,
"keep_prefix": args.keep_prefix,
"keep_stage": args.keep_stage,
+ "cmake_stage": args.cmake_external_stage,
"restage": not args.dont_restage,
"install_source": args.install_source,
"verbose": args.verbose or args.install_verbose,
@@ -114,7 +115,13 @@ def setup_parser(subparser):
action="store_true",
help="if a partial install is detected, don't delete prior state",
)
-
+ subparser.add_argument(
+ "--cmake-external-stage",
+ action="store",
+ dest="cmake_external_stage",
+        help="""Path to the root directory where CMake should build relevant
+        projects external to Spack's stage (no impact on non-Windows systems)""",
+ )
cache_group = subparser.add_mutually_exclusive_group()
cache_group.add_argument(
"--use-cache",
diff --git a/lib/spack/spack/compilers/msvc.py b/lib/spack/spack/compilers/msvc.py
index 7a8432ecfa0b17..4d448265d175b8 100644
--- a/lib/spack/spack/compilers/msvc.py
+++ b/lib/spack/spack/compilers/msvc.py
@@ -122,7 +122,11 @@ def platform_toolset_ver(self):
@property
def cl_version(self):
"""Cl toolset version"""
- return spack.compiler.get_compiler_version_output(self.cc)
+ return Version(re.search(Msvc.version_regex, spack.compiler.get_compiler_version_output(self.cc, "")).group(1))
+
+ @property
+ def vs_root(self):
+ return os.path.abspath(os.path.join(self.cc, "../../../../../../../.."))
def setup_custom_environment(self, pkg, env):
"""Set environment variables for MSVC using the
diff --git a/lib/spack/spack/installer.py b/lib/spack/spack/installer.py
index 1b9800054df32c..386724fb611741 100644
--- a/lib/spack/spack/installer.py
+++ b/lib/spack/spack/installer.py
@@ -45,6 +45,7 @@
from llnl.util.tty.log import log_output
import spack.binary_distribution as binary_distribution
+import spack.build_systems.cmake
import spack.compilers
import spack.error
import spack.hooks
@@ -588,7 +589,10 @@ def log(pkg):
# Check that we are trying to copy things that are
# in the stage tree (not arbitrary files)
abs_expr = os.path.realpath(glob_expr)
- if os.path.realpath(pkg.stage.path) not in abs_expr:
+ if os.path.realpath(pkg.stage.path) not in abs_expr and not (
+ isinstance(pkg.builder, spack.build_systems.cmake.CMakeBuilder)
+ and spack.config.get("config:cmake_ext_build")
+ ):
errors.write("[OUTSIDE SOURCE PATH]: {0}\n".format(glob_expr))
continue
# Now that we are sure that the path is within the correct
@@ -1880,6 +1884,8 @@ def __init__(self, pkg, install_args):
# whether to keep the build stage after installation
self.keep_stage = install_args.get("keep_stage", False)
+ self.cmake_build_stage = install_args.get("cmake_stage", "")
+
# whether to skip the patch phase
self.skip_patch = install_args.get("skip_patch", False)
@@ -1909,6 +1915,9 @@ def run(self):
self.timer.start("stage")
+ if self.cmake_build_stage:
+ self.pkg.cmake_stage_dir = self.cmake_build_stage
+
if not self.fake:
if not self.skip_patch:
self.pkg.do_patch()
@@ -2398,6 +2407,7 @@ def _add_default_args(self):
("package_use_cache", True),
("keep_prefix", False),
("keep_stage", False),
+ ("cmake_stage", ""),
("restage", False),
("skip_patch", False),
("tests", False),
diff --git a/lib/spack/spack/package_base.py b/lib/spack/spack/package_base.py
index 7c6a2d301b71f9..2e5fb4082045f8 100644
--- a/lib/spack/spack/package_base.py
+++ b/lib/spack/spack/package_base.py
@@ -57,7 +57,13 @@
from spack.filesystem_view import YamlFilesystemView
from spack.install_test import TestFailure, TestSuite
from spack.installer import InstallError, PackageInstaller
-from spack.stage import ResourceStage, Stage, StageComposite, stage_prefix
+from spack.stage import (
+ CMakeBuildStage,
+ ResourceStage,
+ Stage,
+ StageComposite,
+ stage_prefix,
+)
from spack.util.executable import ProcessError, which
from spack.util.package_hash import package_hash
from spack.util.prefix import Prefix
@@ -1038,6 +1044,13 @@ def _make_root_stage(self, fetcher):
)
return stage
+ def _make_cmake_build_stage(self):
+ root_stage_name = "{0}{1}-{2}-{3}".format(
+ stage_prefix, self.spec.name, self.spec.version, self.spec.dag_hash()
+ )
+ stage = CMakeBuildStage(self.spec.dag_hash(7), root_stage_name, root=self.cmake_stage_dir)
+ return stage
+
def _make_stage(self):
# If it's a dev package (not transitively), use a DIY stage object
dev_path_var = self.spec.variants.get("dev_path", None)
@@ -1059,6 +1072,17 @@ def _make_stage(self):
# Append the item to the composite
composite_stage.append(stage)
+ # if we're building a CMake package on Windows
+        # and the user set the requisite config option, set up a
+        # custom CMake build stage to relocate the cmake build dir;
+        # add it here to take advantage of stage cleanup
+ if (
+ is_windows
+ and self.spec.variants["build_system"].value == "cmake"
+ and spack.config.get("config:cmake_ext_build", False)
+ ):
+ composite_stage.append(self._make_cmake_build_stage())
+
return composite_stage
@property
@@ -1660,6 +1684,16 @@ def content_hash(self, content=None):
return b32_hash
+ @property
+ def cmake_stage_dir(self):
+ if not getattr(self, "_cmake_build_stage", False):
+ return ""
+ return self._cmake_build_stage
+
+ @cmake_stage_dir.setter
+ def cmake_stage_dir(self, val):
+ self._cmake_build_stage = val
+
@property
def cmake_prefix_paths(self):
return [self.prefix]
diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py
index f6ed3c1ef4eebe..094138dda1ddd5 100644
--- a/lib/spack/spack/stage.py
+++ b/lib/spack/spack/stage.py
@@ -14,6 +14,8 @@
import stat
import sys
import tempfile
+import time
+from pathlib import Path
from typing import Dict
import llnl.util.lang
@@ -28,6 +30,7 @@
partition_path,
remove_linked_tree,
)
+from llnl.util.symlink import symlink
import spack.caches
import spack.config
@@ -725,6 +728,191 @@ def _add_to_root_stage(self):
install(src, destination_path)
+class CMakeBuildStage:
+ """Interface abstracting a CMake build tree at a location outside
+ of a Spack stage directory but still managed by Spack.
+
+ CMake's build tree can be located arbitrarily on a filesystem independent
+ from a source directory. This class relocates that build directory out of the
+    stage and to a path under a user's home directory or to a location of the user's choosing,
+    set either by config or on the command line via the --cmake-external-stage argument
+    to the install command.
+    Interactions with the stage expecting an in-stage build tree will work as normal,
+    as this class serves to obfuscate the external stage and allow all stage behavior
+    to perform as normal.
+    The external build tree is given the same lifespan as its stage dir, is spun up when the stage
+    spins up, and destroyed when the stage is destroyed. After the build and installation are done,
+    the build tree is first relocated to what would be its proper place in the stage,
+    and a symlink is placed in place of the build tree pointing at the external directory.
+
+ Note: This is not, nor should it be, used on *nix platforms and is intended as a solution to
+ reduce file path lengths on Windows during compilation/linking. This class should be removed
+ when MSVC fully supports the LongPath feature on Windows.
+ """
+
+ dispatch: Dict[str, str] = {}
+
+ def __init__(self, hash, name, root=None, keep=False):
+ # Users can override external cmake build dir, default is %USERPROFILE%
+ # overrides can come from command line or config, command line will override all
+ self._hash = hash
+ self._path = Path(get_stage_root(), name)
+ self._remote_stage = None
+ self.keep = keep
+ if not root:
+ fallback_path = Path(os.environ["USERPROFILE"], ".sp-stage")
+ self._root = Path(spack.config.get("config:cmake_ext_build_stage_dir", fallback_path))
+
+ def __enter__(self):
+ self.create()
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.destroy()
+
+ def _establish_context_link(self):
+ symlink(str(self._remote_stage), str(self._path / ("spack-build-%s" % self._hash)))
+
+ def _remove_context_link(self):
+ Path(self._path, "spack-build-%s" % self._hash).unlink()
+
+ def _rebuild_remote_stage(self):
+ full_subdir = Path(CMakeBuildStage.dispatch[self._hash])
+ if not full_subdir.exists():
+ try:
+ full_subdir.mkdir()
+ except FileExistsError:
+ full_subdir = self._setup_remote_build_stage()
+ return full_subdir
+
+ def _setup_remote_build_stage(self):
+ # try to create root if it doesn't exist
+ self._root.mkdir(parents=True, exist_ok=True)
+ sub_dir = self._compute_next_open_subdir()
+ full_subdir = self._root / sub_dir
+ try:
+ full_subdir.mkdir()
+ except FileExistsError:
+ # another process must have created the same directory as us
+ # try again
+ return self._setup_remote_build_stage()
+ CMakeBuildStage.dispatch[self._hash] = str(full_subdir)
+ return full_subdir
+
+ def _compute_next_open_subdir(self, last=None):
+ def inc(c):
+ curr = ord(c[-1])
+ over = curr // 122
+ if over:
+ return (inc(c[:-1]) if len(c[:-1]) else "a") + "a"
+ return c[:-1] + chr(curr + 1)
+
+ sort_key = lambda x: (len(x.name), x.name)
+ current_ext_stages = list(Path(self._root).iterdir())
+ if not current_ext_stages:
+ # no currently extant external stages, start enumerating with 'a'
+ return "a"
+ last = sorted(current_ext_stages, key=sort_key)[-1]
+ return inc(last.name)
+
+ def _return_to_stage(self):
+ """Copy external build tree back to stage in normal CMake build dir location"""
+ dest = Path(self._path, "spack-build-%s" % self._hash)
+ install_tree(str(self._remote_stage), str(dest))
+
+ def _teardown_remote_stage(self):
+ """Destroy external build tree if not keep-stage
+ Otherwise this is kept as usual"""
+
+ def teardown(pth: Path):
+ for sub_item in pth.iterdir():
+ if sub_item.is_dir():
+ teardown(sub_item)
+ else:
+ sub_item.unlink()
+ pth.rmdir()
+
+ if self._remote_stage:
+ teardown(self._remote_stage)
+
+ def _reclaim_remote_stage(self):
+ # another Spack process or build has taken this directory
+ # the cmake build will not work from a different dir
+ # so wait until we can take it - try five times
+ # waiting a little longer each time.
+ # This will cause a hang but this should only be called if we're trying
+ # to rebuild a pre-existing stage, so we need to get the previous
+ # build dir or CMake will error
+ ii = 0
+ while self._remote_stage.exists() and ii < 5:
+ time.sleep(0.5)
+ ii += 1
+ if ii == 5:
+ raise StageError(
+ "Could not re-create external CMake stage, one exists for this package already"
+ )
+ self._remote_stage.mkdir()
+
+ def destroy(self):
+ # copy back to stage may fail in event of error, make sure we clean up the
+ # associated external build dir in that event unless we're keeping the
+ # parent stage on cleanup
+ # If remote stage is not set, we never created one, package is already
+ # installed and we should do nothing here
+ if self._remote_stage:
+ try:
+ self._remove_context_link()
+ self._return_to_stage()
+ finally:
+ self._teardown_remote_stage()
+
+ def restage(self):
+ if self._hash in CMakeBuildStage.dispatch:
+ self._remote_stage = Path(CMakeBuildStage.dispatch[self._hash])
+ if self._remote_stage.exists():
+ self._reclaim_remote_stage()
+ else:
+ self.create()
+
+ def create(self):
+ if not self.created:
+ try:
+ self._remote_stage = self._setup_remote_build_stage()
+ self._establish_context_link()
+ except Exception:
+ self._teardown_remote_stage()
+ raise
+
+ def steal_source(self, dest):
+ if not self._remote_stage:
+ self.create()
+
+ self._path = Path(dest)
+
+ @property
+ def created(self):
+ return bool(self._remote_stage) and self._remote_stage.exists()
+
+ @property
+ def managed_by_spack(self):
+ return True
+
+ def fetch(self, mirror_only=False, err_msg=None):
+ pass
+
+ def cache_local(self):
+ pass
+
+ def cache_mirror(self):
+ pass
+
+ def check(self):
+ pass
+
+ def expand_archive(self):
+ pass
+
+
class StageComposite(pattern.Composite):
"""Composite for Stage type objects. The first item in this composite is
considered to be the root package, and operations that return a value are
diff --git a/var/spack/repos/builtin/packages/adios2/package.py b/var/spack/repos/builtin/packages/adios2/package.py
index fa98a129c48129..86c3e659b0bef0 100644
--- a/var/spack/repos/builtin/packages/adios2/package.py
+++ b/var/spack/repos/builtin/packages/adios2/package.py
@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
+import sys
from spack.package import *
@@ -175,6 +176,45 @@ def setup_build_environment(self, env):
elif self.spec.satisfies("%fj +fortran"):
env.set("FFLAGS", "-Ccpp")
+ @property
+ def libs(self):
+ spec = self.spec
+ libs_to_seek = set()
+
+ if "@2.6:" in spec:
+ libs_to_seek.add("libadios2_core")
+ libs_to_seek.add("libadios2_c")
+ libs_to_seek.add("libadios2_cxx11")
+ if "+fortran" in spec:
+ libs_to_seek.add("libadios2_fortran")
+
+ if "+mpi" in spec:
+ libs_to_seek.add("libadios2_core_mpi")
+ libs_to_seek.add("libadios2_c_mpi")
+ libs_to_seek.add("libadios2_cxx11_mpi")
+ if "+fortran" in spec:
+ libs_to_seek.add("libadios2_fortran_mpi")
+
+ if "@2.7: +shared+hdf5" in spec and "@1.12:" in spec["hdf5"]:
+ libs_to_seek.add("libadios2_h5vol")
+
+ else:
+ libs_to_seek.add("libadios2")
+ if "+fortran" in spec:
+ libs_to_seek.add("libadios2_fortran")
+
+ return find_libraries(
+ list(libs_to_seek), root=self.spec.prefix, shared=("+shared" in spec), recursive=True
+ )
+
+ def setup_run_environment(self, env):
+ try:
+ all_libs = self.libs
+ idx = all_libs.basenames.index("libadios2_h5vol.so")
+ env.prepend_path("HDF5_PLUGIN_PATH", os.path.dirname(all_libs[idx]))
+ except ValueError:
+ pass
+
def cmake_args(self):
spec = self.spec
from_variant = self.define_from_variant
@@ -225,42 +265,3 @@ def cmake_args(self):
args.append("-DPython_EXECUTABLE:FILEPATH=%s" % spec["python"].command.path)
return args
-
- @property
- def libs(self):
- spec = self.spec
- libs_to_seek = set()
-
- if "@2.6:" in spec:
- libs_to_seek.add("libadios2_core")
- libs_to_seek.add("libadios2_c")
- libs_to_seek.add("libadios2_cxx11")
- if "+fortran" in spec:
- libs_to_seek.add("libadios2_fortran")
-
- if "+mpi" in spec:
- libs_to_seek.add("libadios2_core_mpi")
- libs_to_seek.add("libadios2_c_mpi")
- libs_to_seek.add("libadios2_cxx11_mpi")
- if "+fortran" in spec:
- libs_to_seek.add("libadios2_fortran_mpi")
-
- if "@2.7: +shared+hdf5" in spec and "@1.12:" in spec["hdf5"]:
- libs_to_seek.add("libadios2_h5vol")
-
- else:
- libs_to_seek.add("libadios2")
- if "+fortran" in spec:
- libs_to_seek.add("libadios2_fortran")
-
- return find_libraries(
- list(libs_to_seek), root=self.spec.prefix, shared=("+shared" in spec), recursive=True
- )
-
- def setup_run_environment(self, env):
- try:
- all_libs = self.libs
- idx = all_libs.basenames.index("libadios2_h5vol.so")
- env.prepend_path("HDF5_PLUGIN_PATH", os.path.dirname(all_libs[idx]))
- except ValueError:
- pass
diff --git a/var/spack/repos/builtin/packages/expat/package.py b/var/spack/repos/builtin/packages/expat/package.py
index 8f837e3d1d3d99..a3a3501f8ae864 100644
--- a/var/spack/repos/builtin/packages/expat/package.py
+++ b/var/spack/repos/builtin/packages/expat/package.py
@@ -8,7 +8,7 @@
from spack.package import *
-class Expat(AutotoolsPackage):
+class Expat(CMakePackage):
"""Expat is an XML parser library written in C."""
homepage = "https://libexpat.github.io/"
@@ -102,7 +102,7 @@ class Expat(AutotoolsPackage):
# `~libbsd`.
variant(
"libbsd",
- default=sys.platform != "darwin",
+ default=sys.platform != "darwin" and sys.platform != "win32",
description="Use libbsd (for high quality randomness)",
)
@@ -112,12 +112,19 @@ def url_for_version(self, version):
url = "https://github.com/libexpat/libexpat/releases/download/R_{0}/expat-{1}.tar.bz2"
return url.format(version.underscored, version.dotted)
- def configure_args(self):
- spec = self.spec
+ # def configure_args(self):
+ # spec = self.spec
+ # args = [
+ # "--without-docbook",
+ # "--enable-static",
+ # ]
+ # if "+libbsd" in spec and "@2.2.1:" in spec:
+ # args.append("--with-libbsd")
+ # return args
+
+ def cmake_args(self):
args = [
- "--without-docbook",
- "--enable-static",
+ self.define("EXPAT_BUILD_DOCS", False),
+ self.define_from_variant("EXPAT__WITH_LIBBSD", "libbsd"),
]
- if "+libbsd" in spec and "@2.2.1:" in spec:
- args.append("--with-libbsd")
return args
diff --git a/var/spack/repos/builtin/packages/libiconv/package.py b/var/spack/repos/builtin/packages/libiconv/package.py
index d0ebf01ad3c918..2052aef2c6cb92 100644
--- a/var/spack/repos/builtin/packages/libiconv/package.py
+++ b/var/spack/repos/builtin/packages/libiconv/package.py
@@ -29,7 +29,8 @@ class Libiconv(AutotoolsPackage, GNUMirrorPackage):
# We cannot set up a warning for gets(), since gets() is not part
# of C11 any more and thus might not exist.
patch("gets.patch", when="@1.14")
- provides("iconv")
+
+ conflicts("platform=windows")
conflicts("@1.14", when="%gcc@5:")
diff --git a/var/spack/repos/builtin/packages/libogg/package.py b/var/spack/repos/builtin/packages/libogg/package.py
index c0fad6bbf3cda5..6cce01aed01f09 100644
--- a/var/spack/repos/builtin/packages/libogg/package.py
+++ b/var/spack/repos/builtin/packages/libogg/package.py
@@ -3,10 +3,14 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import os
+import platform
+
+from spack.build_systems.generic import GenericBuilder
from spack.package import *
-class Libogg(AutotoolsPackage):
+class Libogg(CMakePackage, AutotoolsPackage, Package):
"""Ogg is a multimedia container format, and the native file and stream
format for the Xiph.org multimedia codecs."""
@@ -24,3 +28,35 @@ class Libogg(AutotoolsPackage):
sha256="0f4d289aecb3d5f7329d51f1a72ab10c04c336b25481a40d6d841120721be485",
when="@1.3.4 platform=darwin",
)
+ build_system(
+ conditional("cmake", when="@1.3.4:"),
+ conditional("generic", when="@1.3.2 platform=windows"),
+ "autotools",
+ default="autotools",
+ )
+
+
+class GenericBuilder(GenericBuilder):
+ phases = ["build", "install"]
+
+ def is_64bit(self):
+ return platform.machine().endswith("64")
+
+ def build(self, spec, prefix):
+ if spec.satisfies("%msvc"):
+ plat_tools = self.pkg.compiler.msvc_version
+ else:
+ raise RuntimeError("Package does not support non MSVC compilers on Windows")
+ ms_build_args = ["libogg_static.vcxproj", "/p:PlatformToolset=v%s" % plat_tools]
+ msbuild(*ms_build_args)
+
+ def install(self, spec, prefix):
+ mkdirp(prefix.include.ogg)
+ mkdirp(prefix.lib)
+ mkdirp(prefix.share)
+ install(
+ os.path.join(self.pkg.stage.source_path, "include", "ogg", "*.h"), prefix.include.ogg
+ )
+ plat_prefix = "x64" if self.is_64bit() else "x86"
+ install(os.path.join(plat_prefix, "Debug", "*.lib"), prefix.lib)
+ install_tree(os.path.join(self.pkg.stage.source_path, "doc"), prefix.share)
diff --git a/var/spack/repos/builtin/packages/libpng/package.py b/var/spack/repos/builtin/packages/libpng/package.py
index ddf5d757e3c7fa..9c548db1fb9195 100644
--- a/var/spack/repos/builtin/packages/libpng/package.py
+++ b/var/spack/repos/builtin/packages/libpng/package.py
@@ -4,9 +4,9 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
+from spack.build_systems.cmake import CMakeBuilder
-
-class Libpng(AutotoolsPackage):
+class Libpng(CMakePackage):
"""libpng is the official PNG reference library."""
homepage = "http://www.libpng.org/pub/png/libpng.html"
@@ -48,7 +48,11 @@ def configure_args(self):
args += self.enable_or_disable("libs")
return args
- def check(self):
- # Libpng has both 'check' and 'test' targets that are aliases.
- # Only need to run the tests once.
- make("check")
+class CMakeBuilder(CMakeBuilder):
+ def cmake_args(self):
+ return [
+ self.define("CMAKE_CXX_FLAGS", self.spec["zlib"].headers.include_flags),
+ self.define("ZLIB_ROOT", self.spec["zlib"].prefix),
+ self.define("PNG_SHARED", "shared" in self.spec.variants["libs"].value),
+ self.define("PNG_STATIC", "static" in self.spec.variants["libs"].value),
+ ]
diff --git a/var/spack/repos/builtin/packages/libtheora/libtheora-inc-external-ogg.patch b/var/spack/repos/builtin/packages/libtheora/libtheora-inc-external-ogg.patch
new file mode 100644
index 00000000000000..fd88480e7c4df2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libtheora/libtheora-inc-external-ogg.patch
@@ -0,0 +1,35 @@
+diff --git a/win32/VS2008/libogg.vsprops b/win32/VS2008/libogg.vsprops
+index 1355b50..8b3c5b8 100644
+--- a/win32/VS2008/libogg.vsprops
++++ b/win32/VS2008/libogg.vsprops
+@@ -6,11 +6,11 @@
+ >
+
+
+
++ CHECK_C_SOURCE_COMPILES("#include
+ #if !H5_HAVE_ZLIB_H
+ #error
+ #endif
diff --git a/var/spack/repos/builtin/packages/netcdf-c/netcdfc-cmake-config-import-mpi.patch b/var/spack/repos/builtin/packages/netcdf-c/netcdfc-cmake-config-import-mpi.patch
new file mode 100644
index 00000000000000..d4b581a90f2b71
--- /dev/null
+++ b/var/spack/repos/builtin/packages/netcdf-c/netcdfc-cmake-config-import-mpi.patch
@@ -0,0 +1,33 @@
+diff --git a/CMakeLists.txt b/CMakeLists.txt
+index ba66a6d4..f0041764 100644
+--- a/CMakeLists.txt
++++ b/CMakeLists.txt
+@@ -1418,6 +1418,7 @@ ENDIF()
+
+ # Enable Parallel IO with netCDF-4/HDF5 files using HDF5 parallel I/O.
+ SET(STATUS_PARALLEL "OFF")
++set(IMPORT_MPI "")
+ OPTION(ENABLE_PARALLEL4 "Build netCDF-4 with parallel IO" "${HDF5_PARALLEL}")
+ IF(ENABLE_PARALLEL4 AND ENABLE_HDF5)
+ IF(NOT HDF5_PARALLEL)
+@@ -1439,6 +1440,7 @@ IF(ENABLE_PARALLEL4 AND ENABLE_HDF5)
+ FILE(COPY "${netCDF_BINARY_DIR}/tmp/run_par_tests.sh"
+ DESTINATION ${netCDF_BINARY_DIR}/h5_test
+ FILE_PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE)
++ set(IMPORT_MPI "include(CMakeFindDependencyMacro)\nfind_dependency(mpi COMPONENTS C)")
+ ENDIF()
+ ENDIF()
+
+diff --git a/netCDFConfig.cmake.in b/netCDFConfig.cmake.in
+index 9d68eec5..dae2429e 100644
+--- a/netCDFConfig.cmake.in
++++ b/netCDFConfig.cmake.in
+@@ -14,6 +14,8 @@ set(netCDF_LIBRARIES netCDF::netcdf)
+ # include target information
+ include("${CMAKE_CURRENT_LIST_DIR}/netCDFTargets.cmake")
+
++@IMPORT_MPI@
++
+ # Compiling Options
+ #
+ set(netCDF_C_COMPILER "@CC_VERSION@")
diff --git a/var/spack/repos/builtin/packages/netcdf-c/netcdfc-hdf5-link-mpi.patch b/var/spack/repos/builtin/packages/netcdf-c/netcdfc-hdf5-link-mpi.patch
new file mode 100644
index 00000000000000..f86e59dd71c551
--- /dev/null
+++ b/var/spack/repos/builtin/packages/netcdf-c/netcdfc-hdf5-link-mpi.patch
@@ -0,0 +1,12 @@
+diff --git a/liblib/CMakeLists.txt b/liblib/CMakeLists.txt
+index aa3a842d..691902c2 100644
+--- a/liblib/CMakeLists.txt
++++ b/liblib/CMakeLists.txt
+@@ -50,6 +50,7 @@ ADD_LIBRARY(netcdf nc_initialize.c ${LARGS} )
+
+ IF(MPI_C_INCLUDE_PATH)
+ target_include_directories(netcdf PUBLIC ${MPI_C_INCLUDE_PATH})
++ target_link_libraries(netcdf MPI::MPI_C)
+ ENDIF(MPI_C_INCLUDE_PATH)
+
+ IF(MOD_NETCDF_NAME)
diff --git a/var/spack/repos/builtin/packages/netcdf-c/netcdfc-win-inc-mpi.patch b/var/spack/repos/builtin/packages/netcdf-c/netcdfc-win-inc-mpi.patch
new file mode 100644
index 00000000000000..b4c7d4b6a4ebe0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/netcdf-c/netcdfc-win-inc-mpi.patch
@@ -0,0 +1,14 @@
+diff --git a/plugins/CMakeLists.txt b/plugins/CMakeLists.txt
+index 65891d82..15567c8f 100644
+--- a/plugins/CMakeLists.txt
++++ b/plugins/CMakeLists.txt
+@@ -62,6 +62,9 @@ MACRO(buildplugin TARGET TARGETLIB)
+ set_target_properties(${TARGET} PROPERTIES LINK_FLAGS "/INCREMENTAL:NO /DEBUG /OPT:REF /OPT:ICF")
+ # Set file name & location
+ set_target_properties(${TARGET} PROPERTIES COMPILE_PDB_NAME ${TARGET} COMPILE_PDB_OUTPUT_DIR ${CMAKE_BINARY_DIR})
++ IF(MPI_C_INCLUDE_PATH)
++ target_include_directories(${TARGET} PRIVATE ${MPI_C_INCLUDE_PATH})
++ ENDIF(MPI_C_INCLUDE_PATH)
+ ENDIF()
+ ENDMACRO()
+
diff --git a/var/spack/repos/builtin/packages/netcdf-c/package.py b/var/spack/repos/builtin/packages/netcdf-c/package.py
index a63b21a6d65021..e90ddbb4a28809 100644
--- a/var/spack/repos/builtin/packages/netcdf-c/package.py
+++ b/var/spack/repos/builtin/packages/netcdf-c/package.py
@@ -5,10 +5,12 @@
import os
+from spack.build_systems.autotools import AutotoolsBuilder
+from spack.build_systems.cmake import CMakeBuilder
from spack.package import *
-class NetcdfC(AutotoolsPackage):
+class NetcdfC(CMakePackage, AutotoolsPackage):
"""NetCDF (network Common Data Form) is a set of software libraries and
machine-independent data formats that support the creation, access, and
sharing of array-oriented scientific data. This is the C distribution."""
@@ -62,6 +64,14 @@ class NetcdfC(AutotoolsPackage):
when="@4.7.2",
)
+ patch("4.8.1-win-hdf5-with-zlib.patch", when="@4.8.1: platform=windows")
+
+ patch("netcdfc-win-inc-mpi.patch", when="platform=windows")
+
+ patch("netcdfc-hdf5-link-mpi.patch", when="platform=windows")
+
+ patch("netcdfc-cmake-config-import-mpi.patch", when="platform=windows")
+
# See https://github.com/Unidata/netcdf-c/pull/1752
patch("4.7.3-spectrum-mpi-pnetcdf-detect.patch", when="@4.7.3:4.7.4 +parallel-netcdf")
@@ -86,11 +96,12 @@ class NetcdfC(AutotoolsPackage):
# description='Enable CDM Remote support')
# The patch for 4.7.0 touches configure.ac. See force_autoreconf below.
- depends_on("autoconf", type="build", when="@4.7.0,main")
- depends_on("automake", type="build", when="@4.7.0,main")
- depends_on("libtool", type="build", when="@4.7.0,main")
+ with when("build_system=autotools"):
+ depends_on("autoconf", type="build", when="@4.7.0,main")
+ depends_on("automake", type="build", when="@4.7.0,main")
+ depends_on("libtool", type="build", when="@4.7.0,main")
+ depends_on("m4", type="build")
- depends_on("m4", type="build")
depends_on("hdf~netcdf", when="+hdf4")
# curl 7.18.0 or later is required:
@@ -142,12 +153,63 @@ class NetcdfC(AutotoolsPackage):
filter_compiler_wrappers("nc-config", relative_root="bin")
+ build_system("cmake", "autotools", default="cmake")
+
+ def setup_run_environment(self, env):
+ if "+zstd" in self.spec:
+ env.append_path("HDF5_PLUGIN_PATH", self.prefix.plugins)
+
+ @property
+ def libs(self):
+ shared = "+shared" in self.spec
+ return find_libraries("libnetcdf", root=self.prefix, shared=shared, recursive=True)
+
+
+class BackupStep(object):
+ @property
+ def _nc_config_backup_dir(self):
+ return join_path(self.pkg.metadata_dir, "spack-nc-config")
+
+ @run_after("install")
+ def backup_nc_config(self):
+ # We expect this to be run before filter_compiler_wrappers:
+ nc_config_file = self.prefix.bin.join("nc-config")
+ if os.path.exists(nc_config_file):
+ mkdirp(self._nc_config_backup_dir)
+ install(nc_config_file, self._nc_config_backup_dir)
+
+
+class CMakeBuilder(CMakeBuilder, BackupStep):
+ def cmake_args(self):
+ base_cmake_args = [
+ self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
+ self.define("BUILD_UTILITIES", True),
+ self.define("ENABLE_NETCDF_4", True),
+ self.define_from_variant("ENABLE_DAP", "dap"),
+ self.define("CMAKE_INSTALL_PREFIX", self.prefix),
+ self.define_from_variant("ENABLE_HDF4", "hdf4"),
+ self.define("ENABLE_PARALLEL_TESTS", False),
+ ]
+ if "+parallel-netcdf" in self.spec:
+ base_cmake_args.append(self.define("ENABLE_PNETCDF", True))
+ return base_cmake_args
+
+
+class AutotoolsBuilder(AutotoolsBuilder, BackupStep):
+ def setup_dependent_build_environment(self, env, dependent_spec):
+ self.setup_run_environment(env)
+ # Some packages, e.g. ncview, refuse to build if the compiler path returned by nc-config
+ # differs from the path to the compiler that the package should be built with. Therefore,
+ # we have to shadow nc-config from self.prefix.bin, which references the real compiler,
+ # with a backed up version, which references Spack compiler wrapper.
+ if os.path.exists(self._nc_config_backup_dir):
+ env.prepend_path("PATH", self._nc_config_backup_dir)
+
@property
def force_autoreconf(self):
# The patch for 4.7.0 touches configure.ac.
return self.spec.satisfies("@4.7.0")
- @when("@4.6.3:")
def autoreconf(self, spec, prefix):
if not os.path.exists(self.configure_abs_path):
Executable("./bootstrap")()
@@ -265,36 +327,6 @@ def configure_args(self):
return config_args
- def setup_run_environment(self, env):
- if "+zstd" in self.spec:
- env.append_path("HDF5_PLUGIN_PATH", self.prefix.plugins)
-
- def setup_dependent_build_environment(self, env, dependent_spec):
- self.setup_run_environment(env)
- # Some packages, e.g. ncview, refuse to build if the compiler path returned by nc-config
- # differs from the path to the compiler that the package should be built with. Therefore,
- # we have to shadow nc-config from self.prefix.bin, which references the real compiler,
- # with a backed up version, which references Spack compiler wrapper.
- if os.path.exists(self._nc_config_backup_dir):
- env.prepend_path("PATH", self._nc_config_backup_dir)
-
- @run_after("install")
- def backup_nc_config(self):
- # We expect this to be run before filter_compiler_wrappers:
- nc_config_file = self.prefix.bin.join("nc-config")
- if os.path.exists(nc_config_file):
- mkdirp(self._nc_config_backup_dir)
- install(nc_config_file, self._nc_config_backup_dir)
-
def check(self):
# h5_test fails when run in parallel
make("check", parallel=False)
-
- @property
- def libs(self):
- shared = "+shared" in self.spec
- return find_libraries("libnetcdf", root=self.prefix, shared=shared, recursive=True)
-
- @property
- def _nc_config_backup_dir(self):
- return join_path(self.metadata_dir, "spack-nc-config")
diff --git a/var/spack/repos/builtin/packages/netcdf-cxx/package.py b/var/spack/repos/builtin/packages/netcdf-cxx/package.py
index 7095de6d4e9c0b..766cd5bdd055bc 100644
--- a/var/spack/repos/builtin/packages/netcdf-cxx/package.py
+++ b/var/spack/repos/builtin/packages/netcdf-cxx/package.py
@@ -6,7 +6,7 @@
from spack.package import *
-class NetcdfCxx(AutotoolsPackage):
+class NetcdfCxx(CMakePackage):
"""Deprecated C++ compatibility bindings for NetCDF.
These do NOT read or write NetCDF-4 files, and are no longer
maintained by Unidata. Developers should migrate to current
@@ -29,8 +29,8 @@ def libs(self):
shared = True
return find_libraries("libnetcdf_c++", root=self.prefix, shared=shared, recursive=True)
- def configure_args(self):
- args = []
+ def cmake_args(self):
+ args = [self.define_from_variant("USE_NETCDF4", "netcdf4")]
if "+netcdf4" in self.spec:
# There is no clear way to set this via configure, so set the flag
# explicitly
diff --git a/var/spack/repos/builtin/packages/opengl/package.py b/var/spack/repos/builtin/packages/opengl/package.py
index 21cc3b8cd7bf8e..a81b4a20fb3ba6 100644
--- a/var/spack/repos/builtin/packages/opengl/package.py
+++ b/var/spack/repos/builtin/packages/opengl/package.py
@@ -98,7 +98,7 @@ def gl_headers(self):
def gl_libs(self):
spec = self.spec
if "platform=windows" in spec:
- lib_name = "opengl32"
+ lib_name = "OpenGL32"
elif "platform=darwin" in spec:
lib_name = "libOpenGL"
else: # linux and cray
diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py
index 702f9b800c0a26..f3f34ea0b82147 100644
--- a/var/spack/repos/builtin/packages/paraview/package.py
+++ b/var/spack/repos/builtin/packages/paraview/package.py
@@ -208,7 +208,6 @@ class Paraview(CMakePackage, CudaPackage, ROCmPackage):
depends_on("netcdf-c")
depends_on("pegtl")
depends_on("protobuf@3.4:")
- depends_on("libxml2")
depends_on("lz4")
depends_on("xz")
depends_on("zlib")
@@ -220,6 +219,8 @@ class Paraview(CMakePackage, CudaPackage, ROCmPackage):
when="+rocm amdgpu_target={0}".format(target),
)
+ depends_on("libxml2")
+
# Older builds of pugi export their symbols differently,
# and pre-5.9 is unable to handle that.
depends_on("pugixml@:1.10", when="@:5.8")
@@ -501,13 +502,16 @@ def nvariant_bool(feature):
cmake_args.append("-DPARAVIEW_ENABLE_PYTHON:BOOL=OFF")
if "+mpi" in spec:
+ ext = ".exe" if "platform=windows" in spec else ""
+ # changes below are temp stopgap support for Windows and should NOT be allowed to merge
+ # until MSMPI vs use of compiler wrappers is resolved
cmake_args.extend(
[
"-DPARAVIEW_USE_MPI:BOOL=ON",
- "-DMPIEXEC:FILEPATH=%s/bin/mpiexec" % spec["mpi"].prefix,
- "-DMPI_CXX_COMPILER:PATH=%s" % spec["mpi"].mpicxx,
- "-DMPI_C_COMPILER:PATH=%s" % spec["mpi"].mpicc,
- "-DMPI_Fortran_COMPILER:PATH=%s" % spec["mpi"].mpifc,
+ # "-DMPIEXEC:FILEPATH=%s/bin/mpiexec%s" % (spec["mpi"].prefix, ext),
+ # "-DMPI_CXX_COMPILER:PATH=%s" % spec["mpi"].mpicxx,
+ # "-DMPI_C_COMPILER:PATH=%s" % spec["mpi"].mpicc,
+ # "-DMPI_Fortran_COMPILER:PATH=%s" % spec["mpi"].mpifc,
]
)
@@ -568,6 +572,13 @@ def nvariant_bool(feature):
"-DPARAVIEW_DO_UNIX_STYLE_INSTALLS:BOOL=ON",
]
)
+ if "platform=windows" in spec:
+ # Obviously we can't use x on Windows
+ cmake_args.extend(
+ [
+ "-DVTK_USE_X:BOOL=OFF"
+ ]
+ )
if "+kits" in spec:
if spec.satisfies("@5.0:5.6"):
diff --git a/var/spack/repos/builtin/packages/perl/package.py b/var/spack/repos/builtin/packages/perl/package.py
index 0e2e4a0202dd84..849925ed1ba51c 100644
--- a/var/spack/repos/builtin/packages/perl/package.py
+++ b/var/spack/repos/builtin/packages/perl/package.py
@@ -20,13 +20,15 @@
from llnl.util.lang import match_predicate
from llnl.util.symlink import symlink
+from spack.build_systems.autotools import AutotoolsBuilder
+from spack.build_systems.nmake import NMakeBuilder
from spack.operating_systems.mac_os import macos_version
from spack.package import *
is_windows = sys.platform == "win32"
-class Perl(Package): # Perl doesn't use Autotools, it should subclass Package
+class Perl(AutotoolsPackage, NMakePackage):
"""Perl 5 is a highly capable, feature-rich programming language with over
27 years of development."""
@@ -172,7 +174,7 @@ class Perl(Package): # Perl doesn't use Autotools, it should subclass Package
placement="cpanm",
)
- phases = ["configure", "build", "install"]
+ build_system(conditional("nmake", when="platform=windows"), "autotools", default="autotools")
def patch(self):
# https://github.com/Perl/perl5/issues/15544 long PATH(>1000 chars) fails a test
@@ -235,121 +237,88 @@ def do_stage(self, mirror_only=False):
perm = os.stat(filename).st_mode
os.chmod(filename, perm | 0o200)
- @property
- def nmake_arguments(self):
- args = []
- if self.spec.satisfies("%msvc"):
- args.append("CCTYPE=%s" % self.compiler.short_msvc_version)
- else:
- raise RuntimeError("Perl unsupported for non MSVC compilers on Windows")
- args.append("INST_TOP=%s" % self.prefix.replace("/", "\\"))
- args.append("INST_ARCH=\\$(ARCHNAME)")
- if self.spec.satisfies("~shared"):
- args.append("ALL_STATIC=%s" % "define")
- if self.spec.satisfies("~threads"):
- args.extend(["USE_MULTI=undef", "USE_ITHREADS=undef", "USE_IMP_SYS=undef"])
- if not self.is_64bit():
- args.append("WIN64=undef")
- return args
+ def setup_dependent_package(self, module, dependent_spec):
+ """Called before perl modules' install() methods.
+ In most cases, extensions will only need to have one line:
+ perl('Makefile.PL','INSTALL_BASE=%s' % self.prefix)
+ """
- def is_64bit(self):
- return platform.machine().endswith("64")
+ # If system perl is used through packages.yaml
+ # there cannot be extensions.
+ if dependent_spec.package.is_extension:
- def configure_args(self):
- spec = self.spec
- prefix = self.prefix
+ # perl extension builds can have a global perl
+ # executable function
+ module.perl = self.spec["perl"].command
- config_args = [
- "-des",
- "-Dprefix={0}".format(prefix),
- "-Dlocincpth=" + self.spec["gdbm"].prefix.include,
- "-Dloclibpth=" + self.spec["gdbm"].prefix.lib,
- ]
+ # Add variables for library directory
+ module.perl_lib_dir = dependent_spec.prefix.lib.perl5
- # Extensions are installed into their private tree via
- # `INSTALL_BASE`/`--install_base` (see [1]) which results in a
- # "predictable" installation tree that sadly does not match the
- # Perl core's @INC structure. This means that when activation
- # merges the extension into the extendee[2], the directory tree
- # containing the extensions is not on @INC and the extensions can
- # not be found.
- #
- # This bit prepends @INC with the directory that is used when
- # extensions are activated [3].
- #
- # [1] https://metacpan.org/pod/ExtUtils::MakeMaker#INSTALL_BASE
- # [2] via the activate method in the PackageBase class
- # [3] https://metacpan.org/pod/distribution/perl/INSTALL#APPLLIB_EXP
- config_args.append('-Accflags=-DAPPLLIB_EXP=\\"' + self.prefix.lib.perl5 + '\\"')
+ # Make the site packages directory for extensions,
+ # if it does not exist already.
+ mkdirp(module.perl_lib_dir)
- # Discussion of -fPIC for Intel at:
- # https://github.com/spack/spack/pull/3081 and
- # https://github.com/spack/spack/pull/4416
- if spec.satisfies("%intel"):
- config_args.append("-Accflags={0}".format(self.compiler.cc_pic_flag))
+ @contextmanager
+ def make_briefly_writable(self, path):
+ """Temporarily make a file writable, then reset"""
+ perm = os.stat(path).st_mode
+ os.chmod(path, perm | 0o200)
+ yield
+ os.chmod(path, perm)
- if "+shared" in spec:
- config_args.append("-Duseshrplib")
+ # ========================================================================
+ # Handle specifics of activating and deactivating perl modules.
+ # ========================================================================
- if "+threads" in spec:
- config_args.append("-Dusethreads")
+ def perl_ignore(self, ext_pkg, args):
+ """Add some ignore files to activate/deactivate args."""
+ ignore_arg = args.get("ignore", lambda f: False)
- # Development versions have an odd second component
- if spec.version[1] % 2 == 1:
- config_args.append("-Dusedevel")
+ # Many perl packages describe themselves in a perllocal.pod file,
+ # so the files conflict when multiple packages are activated.
+ # We could merge the perllocal.pod files in activated packages,
+ # but this is unnecessary for correct operation of perl.
+ # For simplicity, we simply ignore all perllocal.pod files:
+ patterns = [r"perllocal\.pod$"]
- return config_args
+ return match_predicate(ignore_arg, patterns)
- def configure(self, spec, prefix):
- if is_windows:
- return
- configure = Executable("./Configure")
- configure(*self.configure_args())
+ @property
+ def command(self):
+ """Returns the Perl command, which may vary depending on the version
+ of Perl. In general, Perl comes with a ``perl`` command. However,
+ development releases have a ``perlX.Y.Z`` command.
- def build(self, spec, prefix):
- if is_windows:
- pass
+ Returns:
+ Executable: the Perl command
+ """
+ for ver in ("", self.spec.version):
+ ext = ""
+ if is_windows:
+ ext = ".exe"
+ path = os.path.join(self.prefix.bin, "{0}{1}{2}".format(self.spec.name, ver, ext))
+ if os.path.exists(path):
+ return Executable(path)
else:
- make()
+ msg = "Unable to locate {0} command in {1}"
+ raise RuntimeError(msg.format(self.spec.name, self.prefix.bin))
- @run_after("build")
- @on_package_attributes(run_tests=True)
- def build_test(self):
- if is_windows:
- win32_dir = os.path.join(self.stage.source_path, "win32")
- with working_dir(win32_dir):
- nmake("test", ignore_quotes=True)
- else:
- make("test")
+ def test(self):
+ """Smoke tests"""
+ exe = self.spec["perl"].command.name
- def install(self, spec, prefix):
- if is_windows:
- win32_dir = os.path.join(self.stage.source_path, "win32")
- with working_dir(win32_dir):
- nmake("install", *self.nmake_arguments, ignore_quotes=True)
- else:
- make("install")
+ reason = "test: checking version is {0}".format(self.spec.version)
+ self.run_test(
+ exe, "--version", ["perl", str(self.spec.version)], installed=True, purpose=reason
+ )
- @run_after("install")
- def symlink_windows(self):
- if not is_windows:
- return
- win_install_path = os.path.join(self.prefix.bin, "MSWin32")
- if self.is_64bit():
- win_install_path += "-x64"
- else:
- win_install_path += "-x86"
- if self.spec.satisfies("+threads"):
- win_install_path += "-multi-thread"
- else:
- win_install_path += "-perlio"
+ reason = "test: ensuring perl runs"
+ msg = "Hello, World!"
+ options = ["-e", 'use warnings; use strict;\nprint("%s\n");' % msg]
+ self.run_test(exe, options, msg, installed=True, purpose=reason)
- for f in os.listdir(os.path.join(self.prefix.bin, win_install_path)):
- lnk_path = os.path.join(self.prefix.bin, f)
- src_path = os.path.join(win_install_path, f)
- if not os.path.exists(lnk_path):
- symlink(src_path, lnk_path)
+class RunAfter(object):
@run_after("install")
def install_cpanm(self):
spec = self.spec
@@ -365,68 +334,6 @@ def install_cpanm(self):
maker()
maker("install")
- def _setup_dependent_env(self, env, dependent_spec, deptype):
- """Set PATH and PERL5LIB to include the extension and
- any other perl extensions it depends on,
- assuming they were installed with INSTALL_BASE defined."""
- perl_lib_dirs = []
- for d in dependent_spec.traverse(deptype=deptype):
- if d.package.extends(self.spec):
- perl_lib_dirs.append(d.prefix.lib.perl5)
- if perl_lib_dirs:
- perl_lib_path = ":".join(perl_lib_dirs)
- env.prepend_path("PERL5LIB", perl_lib_path)
- if is_windows:
- env.append_path("PATH", self.prefix.bin)
-
- def setup_dependent_build_environment(self, env, dependent_spec):
- self._setup_dependent_env(env, dependent_spec, deptype=("build", "run", "test"))
-
- def setup_dependent_run_environment(self, env, dependent_spec):
- self._setup_dependent_env(env, dependent_spec, deptype=("run",))
-
- def setup_dependent_package(self, module, dependent_spec):
- """Called before perl modules' install() methods.
- In most cases, extensions will only need to have one line:
- perl('Makefile.PL','INSTALL_BASE=%s' % self.prefix)
- """
-
- # If system perl is used through packages.yaml
- # there cannot be extensions.
- if dependent_spec.package.is_extension:
-
- # perl extension builds can have a global perl
- # executable function
- module.perl = self.spec["perl"].command
-
- # Add variables for library directory
- module.perl_lib_dir = dependent_spec.prefix.lib.perl5
-
- # Make the site packages directory for extensions,
- # if it does not exist already.
- mkdirp(module.perl_lib_dir)
-
- def setup_build_environment(self, env):
- if is_windows:
- env.append_path("PATH", self.prefix.bin)
- return
-
- spec = self.spec
-
- if spec.satisfies("@:5.34 platform=darwin") and macos_version() >= Version("10.16"):
- # Older perl versions reject MACOSX_DEPLOYMENT_TARGET=11 or higher
- # as "unexpected"; override the environment variable set by spack's
- # platforms.darwin .
- env.set("MACOSX_DEPLOYMENT_TARGET", "10.16")
-
- # This is how we tell perl the locations of bzip and zlib.
- env.set("BUILD_BZIP2", 0)
- env.set("BZIP2_INCLUDE", spec["bzip2"].prefix.include)
- env.set("BZIP2_LIB", spec["bzip2"].libs.directories[0])
- env.set("BUILD_ZLIB", 0)
- env.set("ZLIB_INCLUDE", spec["zlib"].prefix.include)
- env.set("ZLIB_LIB", spec["zlib"].libs.directories[0])
-
@run_after("install")
def filter_config_dot_pm(self):
"""Run after install so that Config.pm records the compiler that Spack
@@ -467,61 +374,170 @@ def filter_config_dot_pm(self):
substitute = "ccflags='%s " % " ".join(self.spec.compiler_flags["cflags"])
filter_file(match, substitute, config_heavy, **kwargs)
- @contextmanager
- def make_briefly_writable(self, path):
- """Temporarily make a file writable, then reset"""
- perm = os.stat(path).st_mode
- os.chmod(path, perm | 0o200)
- yield
- os.chmod(path, perm)
+ @run_after("build")
+ @on_package_attributes(run_tests=True)
+ def build_test(self):
+ if is_windows:
+ with working_dir(self.build_directory):
+ nmake("test", ignore_quotes=True)
+ else:
+ make("test")
- # ========================================================================
- # Handle specifics of activating and deactivating perl modules.
- # ========================================================================
- def perl_ignore(self, ext_pkg, args):
- """Add some ignore files to activate/deactivate args."""
- ignore_arg = args.get("ignore", lambda f: False)
+class Setup(object):
+ def setup_dependent_build_environment(self, env, dependent_spec):
+ self._setup_dependent_env(env, dependent_spec, deptype=("build", "run", "test"))
- # Many perl packages describe themselves in a perllocal.pod file,
- # so the files conflict when multiple packages are activated.
- # We could merge the perllocal.pod files in activated packages,
- # but this is unnecessary for correct operation of perl.
- # For simplicity, we simply ignore all perllocal.pod files:
- patterns = [r"perllocal\.pod$"]
+ def setup_dependent_run_environment(self, env, dependent_spec):
+ self._setup_dependent_env(env, dependent_spec, deptype=("run",))
- return match_predicate(ignore_arg, patterns)
+
+class AutotoolsBuilder(AutotoolsBuilder, RunAfter, Setup):
+ def setup_build_environment(self, env):
+ spec = self.spec
+
+ if spec.satisfies("@:5.34 platform=darwin") and macos_version() >= Version("10.16"):
+ # Older perl versions reject MACOSX_DEPLOYMENT_TARGET=11 or higher
+ # as "unexpected"; override the environment variable set by spack's
+ # platforms.darwin .
+ env.set("MACOSX_DEPLOYMENT_TARGET", "10.16")
+
+ # This is how we tell perl the locations of bzip and zlib.
+ env.set("BUILD_BZIP2", 0)
+ env.set("BZIP2_INCLUDE", spec["bzip2"].prefix.include)
+ env.set("BZIP2_LIB", spec["bzip2"].libs.directories[0])
+ env.set("BUILD_ZLIB", 0)
+ env.set("ZLIB_INCLUDE", spec["zlib"].prefix.include)
+ env.set("ZLIB_LIB", spec["zlib"].libs.directories[0])
+
+    def _setup_dependent_env(self, env, dependent_spec, deptype):
+ """Set PATH and PERL5LIB to include the extension and
+ any other perl extensions it depends on,
+ assuming they were installed with INSTALL_BASE defined."""
+ perl_lib_dirs = []
+ for d in dependent_spec.traverse(deptype=deptype):
+ if d.package.extends(self.spec):
+ perl_lib_dirs.append(d.prefix.lib.perl5)
+ if perl_lib_dirs:
+ perl_lib_path = ":".join(perl_lib_dirs)
+ env.prepend_path("PERL5LIB", perl_lib_path)
+
+ def configure_args(self):
+ spec = self.spec
+ prefix = self.prefix
+
+ config_args = [
+ "-des",
+ "-Dprefix={0}".format(prefix),
+ "-Dlocincpth=" + self.spec["gdbm"].prefix.include,
+ "-Dloclibpth=" + self.spec["gdbm"].prefix.lib,
+ ]
+
+ # Extensions are installed into their private tree via
+ # `INSTALL_BASE`/`--install_base` (see [1]) which results in a
+ # "predictable" installation tree that sadly does not match the
+ # Perl core's @INC structure. This means that when activation
+ # merges the extension into the extendee[2], the directory tree
+ # containing the extensions is not on @INC and the extensions can
+ # not be found.
+ #
+ # This bit prepends @INC with the directory that is used when
+ # extensions are activated [3].
+ #
+ # [1] https://metacpan.org/pod/ExtUtils::MakeMaker#INSTALL_BASE
+ # [2] via the activate method in the PackageBase class
+ # [3] https://metacpan.org/pod/distribution/perl/INSTALL#APPLLIB_EXP
+ config_args.append('-Accflags=-DAPPLLIB_EXP=\\"' + self.prefix.lib.perl5 + '\\"')
+
+ # Discussion of -fPIC for Intel at:
+ # https://github.com/spack/spack/pull/3081 and
+ # https://github.com/spack/spack/pull/4416
+ if spec.satisfies("%intel"):
+ config_args.append("-Accflags={0}".format(self.compiler.cc_pic_flag))
+
+ if "+shared" in spec:
+ config_args.append("-Duseshrplib")
+
+ if "+threads" in spec:
+ config_args.append("-Dusethreads")
+
+ # Development versions have an odd second component
+ if spec.version[1] % 2 == 1:
+ config_args.append("-Dusedevel")
+
+ return config_args
+
+
+class NMakeBuilder(NMakeBuilder, RunAfter, Setup):
+    build_targets = ["install"]
+
+ def setup_build_environment(self, env):
+ env.append_path("PATH", self.prefix.bin)
+
+ def _setup_dependent_env(self, env, dependent_spec, deptype):
+ """Set PATH and PERL5LIB to include the extension and
+ any other perl extensions it depends on,
+ assuming they were installed with INSTALL_BASE defined."""
+ perl_lib_dirs = []
+ for d in dependent_spec.traverse(deptype=deptype):
+ if d.package.extends(self.spec):
+ perl_lib_dirs.append(d.prefix.lib.perl5)
+ if perl_lib_dirs:
+ perl_lib_path = ":".join(perl_lib_dirs)
+ env.prepend_path("PERL5LIB", perl_lib_path)
+
+ env.append_path("PATH", self.prefix.bin)
@property
- def command(self):
- """Returns the Perl command, which may vary depending on the version
- of Perl. In general, Perl comes with a ``perl`` command. However,
- development releases have a ``perlX.Y.Z`` command.
+ def ignore_quotes(self):
+ return True
- Returns:
- Executable: the Perl command
- """
- for ver in ("", self.spec.version):
- ext = ""
- if is_windows:
- ext = ".exe"
- path = os.path.join(self.prefix.bin, "{0}{1}{2}".format(self.spec.name, ver, ext))
- if os.path.exists(path):
- return Executable(path)
+ @property
+ def build_directory(self):
+ return os.path.join(super(NMakeBuilder, self).build_directory, "win32")
+
+ def nmake_args(self):
+ args = []
+ if self.spec.satisfies("%msvc"):
+ args.append("CCTYPE=%s" % self.pkg.compiler.short_msvc_version)
else:
- msg = "Unable to locate {0} command in {1}"
- raise RuntimeError(msg.format(self.spec.name, self.prefix.bin))
+ raise RuntimeError("Perl unsupported for non MSVC compilers on Windows")
+ args.append("INST_TOP=%s" % self.prefix.replace("/", "\\"))
+ args.append("INST_ARCH=\\$(ARCHNAME)")
+ if self.spec.satisfies("~shared"):
+ args.append("ALL_STATIC=%s" % "define")
+ if self.spec.satisfies("~threads"):
+ args.extend(["USE_MULTI=undef", "USE_ITHREADS=undef", "USE_IMP_SYS=undef"])
+ if not self.is_64bit():
+ args.append("WIN64=undef")
+ return args
- def test(self):
- """Smoke tests"""
- exe = self.spec["perl"].command.name
+ def is_64bit(self):
+ return platform.machine().endswith("64")
- reason = "test: checking version is {0}".format(self.spec.version)
- self.run_test(
- exe, "--version", ["perl", str(self.spec.version)], installed=True, purpose=reason
- )
+ def install(self, pkg, spec, prefix):
+ """Perl's build command will install the project. The install target
+ runs the build job regardless of whether or not the project has already been built.
+ So rather than run the build twice, we install during build and
+ just skip the install phase."""
+ return
- reason = "test: ensuring perl runs"
- msg = "Hello, World!"
- options = ["-e", 'use warnings; use strict;\nprint("%s\n");' % msg]
- self.run_test(exe, options, msg, installed=True, purpose=reason)
+ @run_after("install")
+ def symlink_windows(self):
+ if not is_windows:
+ return
+ win_install_path = os.path.join(self.prefix.bin, "MSWin32")
+ if self.is_64bit():
+ win_install_path += "-x64"
+ else:
+ win_install_path += "-x86"
+ if self.spec.satisfies("+threads"):
+ win_install_path += "-multi-thread"
+ else:
+ win_install_path += "-perlio"
+
+ for f in os.listdir(os.path.join(self.prefix.bin, win_install_path)):
+ lnk_path = os.path.join(self.prefix.bin, f)
+ src_path = os.path.join(win_install_path, f)
+ if not os.path.exists(lnk_path):
+ symlink(src_path, lnk_path)
diff --git a/var/spack/repos/builtin/packages/proj/package.py b/var/spack/repos/builtin/packages/proj/package.py
index 6e39dacf58ca5a..752676e1c933ad 100644
--- a/var/spack/repos/builtin/packages/proj/package.py
+++ b/var/spack/repos/builtin/packages/proj/package.py
@@ -3,10 +3,11 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+from spack.build_systems import autotools, cmake
from spack.package import *
-class Proj(AutotoolsPackage):
+class Proj(CMakePackage):
"""PROJ is a generic coordinate transformation software, that transforms
geospatial coordinates from one coordinate reference system (CRS) to
another. This includes cartographic projections as well as geodetic
@@ -83,12 +84,45 @@ class Proj(AutotoolsPackage):
)
# https://proj.org/install.html#build-requirements
- depends_on("pkgconfig@0.9.0:", type="build", when="@6:")
depends_on("googletest", when="@6:")
depends_on("sqlite@3.11:", when="@6:")
depends_on("libtiff@4.0:", when="@7:+tiff")
depends_on("curl@7.29.0:", when="@7:+curl")
+ depends_on("pkgconfig@0.9.0:", type="build", when="@6: build_system=autotools")
+ depends_on("cmake@2.6.0:", type="build", when="build_system=cmake")
+ build_system("autotools", conditional("cmake", when="@5.0.0:"), default="cmake")
+
+ def setup_run_environment(self, env):
+ # PROJ_LIB doesn't need to be set. However, it may be set by conda.
+ # If an incompatible version of PROJ is found in PROJ_LIB, it can
+ # cause the package to fail at run-time. See the following for details:
+ # * https://proj.org/usage/environmentvars.html
+ # * https://rasterio.readthedocs.io/en/latest/faq.html
+ env.set("PROJ_LIB", self.prefix.share.proj)
+
+
+class Setup:
+ def setup_dependent_build_environment(self, env, dependent_spec):
+ self.pkg.setup_run_environment(env)
+
+ def setup_dependent_run_environment(self, env, dependent_spec):
+ self.pkg.setup_run_environment(env)
+
+
+class CMakeBuilder(cmake.CMakeBuilder, Setup):
+ def cmake_args(self):
+ args = [
+ self.define("PROJ_LIB", join_path(self.stage.source_path, "nad")),
+ self.define_from_variant("ENABLE_TIFF", "tiff"),
+ self.define_from_variant("ENABLE_CURL", "curl"),
+ ]
+ if self.spec.satisfies("@6:"):
+ args.append(self.define("USE_EXTERNAL_GTEST", True))
+ return args
+
+
+class AutotoolsBuilder(autotools.AutotoolsBuilder, Setup):
def configure_args(self):
args = ["PROJ_LIB={0}".format(join_path(self.stage.source_path, "nad"))]
@@ -107,17 +141,3 @@ def configure_args(self):
args.append("--without-curl")
return args
-
- def setup_run_environment(self, env):
- # PROJ_LIB doesn't need to be set. However, it may be set by conda.
- # If an incompatible version of PROJ is found in PROJ_LIB, it can
- # cause the package to fail at run-time. See the following for details:
- # * https://proj.org/usage/environmentvars.html
- # * https://rasterio.readthedocs.io/en/latest/faq.html
- env.set("PROJ_LIB", self.prefix.share.proj)
-
- def setup_dependent_build_environment(self, env, dependent_spec):
- self.setup_run_environment(env)
-
- def setup_dependent_run_environment(self, env, dependent_spec):
- self.setup_run_environment(env)
diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py
index 419e5b8c7b4530..572a1718bf17f0 100644
--- a/var/spack/repos/builtin/packages/python/package.py
+++ b/var/spack/repos/builtin/packages/python/package.py
@@ -11,7 +11,6 @@
import subprocess
import sys
from shutil import copy
-from typing import Dict, List
import llnl.util.tty as tty
from llnl.util.filesystem import (
@@ -21,6 +20,8 @@
from llnl.util.lang import dedupe
from spack.build_environment import dso_suffix, stat_suffix
+from spack.build_systems.autotools import AutotoolsBuilder
+from spack.build_systems.generic import GenericBuilder
from spack.package import *
from spack.util.environment import is_system_path
from spack.util.prefix import Prefix
@@ -28,7 +29,7 @@
is_windows = sys.platform == "win32"
-class Python(Package):
+class Python(AutotoolsPackage, Package):
"""The Python programming language."""
homepage = "https://www.python.org/"
@@ -39,19 +40,10 @@ class Python(Package):
maintainers("adamjstewart", "skosukhin", "scheibelp", "pradyunsg")
- phases = ["configure", "build", "install"]
- #: phase
- install_targets = ["install"]
- build_targets: List[str] = []
version("3.11.1", sha256="baed518e26b337d4d8105679caf68c5c32630d702614fc174e98cb95c46bdfa4")
version("3.11.0", sha256="64424e96e2457abbac899b90f9530985b51eef2905951febd935f0e73414caeb")
- version(
- "3.10.8",
- sha256="f400c3fb394b8bef1292f6dc1292c5fadc3533039a5bc0c3e885f3e16738029a",
- preferred=True,
- )
version("3.10.7", sha256="1b2e4e2df697c52d36731666979e648beeda5941d0f95740aafbf4163e5cc126")
version("3.10.6", sha256="848cb06a5caa85da5c45bd7a9221bb821e33fc2bdcba088c127c58fad44e6343")
version("3.10.5", sha256="18f57182a2de3b0be76dfc39fdcfd28156bb6dd23e5f08696f7492e9e3d0bf2d")
@@ -227,6 +219,8 @@ class Python(Package):
)
conflicts("%nvhpc")
+ conflicts("~dbm", when="platform=windows")
+
# Used to cache various attributes that are expensive to compute
_config_vars: Dict[str, Dict[str, str]] = {}
@@ -235,6 +229,8 @@ class Python(Package):
executables = [r"^python[\d.]*[mw]?$"]
+ build_system(conditional("generic", when="platform=windows"), "autotools", default="autotools")
+
@classmethod
def determine_version(cls, exe):
# Newer versions of Python support `--version`,
@@ -324,30 +320,6 @@ def patch(self):
r"^(.*)setup\.py(.*)((build)|(install))(.*)$", r"\1setup.py\2 --no-user-cfg \3\6"
)
- def setup_build_environment(self, env):
- spec = self.spec
-
- # TODO: Python has incomplete support for Python modules with mixed
- # C/C++ source, and patches are required to enable building for these
- # modules. All Python versions without a viable patch are installed
- # with a warning message about this potentially erroneous behavior.
- if not spec.satisfies("@3.7.2:"):
- tty.warn(
- (
- 'Python v{0} does not have the C++ "distutils" patch; '
- "errors may occur when installing Python modules w/ "
- "mixed C/C++ source files."
- ).format(self.version)
- )
-
- env.unset("PYTHONPATH")
- env.unset("PYTHONHOME")
-
- # avoid build error on fugaku
- if spec.satisfies("@3.10.0 arch=linux-rhel8-a64fx"):
- if spec.satisfies("%gcc") or spec.satisfies("%fj"):
- env.unset("LC_ALL")
-
def flag_handler(self, name, flags):
# python 3.8 requires -fwrapv when compiled with intel
if self.spec.satisfies("@3.8: %intel"):
@@ -364,469 +336,159 @@ def flag_handler(self, name, flags):
# allow flags to be passed through compiler wrapper
return (flags, None, None)
- @property
- def plat_arch(self):
- """
- String referencing platform architecture
- filtered through Python's Windows build file
- architecture support map
-
- Note: This function really only makes
- sense to use on Windows, could be overridden to
- cross compile however.
- """
-
- arch_map = {"AMD64": "x64", "x86": "Win32", "IA64": "Win32", "EM64T": "Win32"}
- arch = platform.machine()
- if arch in arch_map:
- arch = arch_map[arch]
- return arch
+ # ========================================================================
+ # Set up environment to make install easy for python extensions.
+ # ========================================================================
@property
- def win_build_params(self):
- """
- Arguments must be passed to the Python build batch script
- in order to configure it to spec and system.
- A number of these toggle optional MSBuild Projects
- directly corresponding to the python support of the same
- name.
- """
- args = []
- args.append("-p %s" % self.plat_arch)
- if self.spec.satisfies("+debug"):
- args.append("-d")
- if self.spec.satisfies("~ctypes"):
- args.append("--no-ctypes")
- if self.spec.satisfies("~ssl"):
- args.append("--no-ssl")
- if self.spec.satisfies("~tkinter"):
- args.append("--no-tkinter")
- return args
+ def command(self):
+ """Returns the Python command, which may vary depending
+ on the version of Python and how it was installed.
- def win_installer(self, prefix):
- """
- Python on Windows does not export an install target
- so we must handcraft one here. This structure
- directly mimics the install tree of the Python
- Installer on Windows.
+ In general, Python 2 comes with ``python`` and ``python2`` commands,
+ while Python 3 only comes with a ``python3`` command. However, some
+ package managers will symlink ``python`` to ``python3``, while others
+ may contain ``python3.6``, ``python3.5``, and ``python3.4`` in the
+ same directory.
- Parameters:
- prefix (str): Install prefix for package
+ Returns:
+ Executable: the Python command
"""
- proj_root = self.stage.source_path
- pcbuild_root = os.path.join(proj_root, "PCbuild")
- build_root = os.path.join(pcbuild_root, platform.machine().lower())
- include_dir = os.path.join(proj_root, "Include")
- copy_tree(include_dir, prefix.include)
- doc_dir = os.path.join(proj_root, "Doc")
- copy_tree(doc_dir, prefix.Doc)
- tools_dir = os.path.join(proj_root, "Tools")
- copy_tree(tools_dir, prefix.Tools)
- lib_dir = os.path.join(proj_root, "Lib")
- copy_tree(lib_dir, prefix.Lib)
- pyconfig = os.path.join(proj_root, "PC", "pyconfig.h")
- copy(pyconfig, prefix.include)
- shared_libraries = []
- shared_libraries.extend(glob.glob("%s\\*.exe" % build_root))
- shared_libraries.extend(glob.glob("%s\\*.dll" % build_root))
- shared_libraries.extend(glob.glob("%s\\*.pyd" % build_root))
- os.makedirs(prefix.DLLs)
- for lib in shared_libraries:
- file_name = os.path.basename(lib)
- if (
- file_name.endswith(".exe")
- or (file_name.endswith(".dll") and "python" in file_name)
- or "vcruntime" in file_name
- ):
- copy(lib, prefix)
+ # We need to be careful here. If the user is using an externally
+ # installed python, several different commands could be located
+ # in the same directory. Be as specific as possible. Search for:
+ #
+ # * python3.6
+ # * python3
+ # * python
+ #
+ # in that order if using python@3.6.5, for example.
+ version = self.spec.version
+ for ver in [version.up_to(2), version.up_to(1), ""]:
+ if not is_windows:
+ path = os.path.join(self.prefix.bin, "python{0}".format(ver))
else:
- copy(lib, prefix.DLLs)
- static_libraries = glob.glob("%s\\*.lib")
- for lib in static_libraries:
- copy(lib, prefix.libs)
-
- def configure_args(self):
- spec = self.spec
- config_args = []
- cflags = []
-
- # setup.py needs to be able to read the CPPFLAGS and LDFLAGS
- # as it scans for the library and headers to build
- link_deps = spec.dependencies(deptype="link")
-
- if link_deps:
- # Header files are often included assuming they reside in a
- # subdirectory of prefix.include, e.g. #include ,
- # which is why we don't use HeaderList here. The header files of
- # libffi reside in prefix.lib but the configure script of Python
- # finds them using pkg-config.
- cppflags = " ".join("-I" + spec[dep.name].prefix.include for dep in link_deps)
-
- # Currently, the only way to get SpecBuildInterface wrappers of the
- # dependencies (which we need to get their 'libs') is to get them
- # using spec.__getitem__.
- ldflags = " ".join(spec[dep.name].libs.search_flags for dep in link_deps)
-
- config_args.extend(["CPPFLAGS=" + cppflags, "LDFLAGS=" + ldflags])
+ path = os.path.join(self.prefix, "python{0}.exe".format(ver))
+ if os.path.exists(path):
+ return Executable(path)
- if "+optimizations" in spec:
- config_args.append("--enable-optimizations")
- # Prefer thin LTO for faster compilation times.
- if "@3.11.0: %clang@3.9:" in spec or "@3.11.0: %apple-clang@8:" in spec:
- config_args.append("--with-lto=thin")
- else:
- config_args.append("--with-lto")
- config_args.append("--with-computed-gotos")
+ else:
+ msg = "Unable to locate {0} command in {1}"
+ raise RuntimeError(msg.format(self.name, self.prefix.bin))
- if spec.satisfies("@3.7 %intel", strict=True):
- config_args.append("--with-icc={0}".format(spack_cc))
+ def print_string(self, string):
+ """Returns the appropriate print string depending on the
+ version of Python.
- if "+debug" in spec:
- config_args.append("--with-pydebug")
- else:
- config_args.append("--without-pydebug")
+ Examples:
- if "+shared" in spec:
- config_args.append("--enable-shared")
- else:
- config_args.append("--disable-shared")
+ * Python 2
config_args.append("--without-ensurepip")
- if "+pic" in spec:
- cflags.append(self.compiler.cc_pic_flag)
+ * Python 3
if "+ssl" in spec:
config_args.append("--with-openssl={0}".format(spec["openssl"].prefix))
+ .. code-block:: python
- if "+dbm" in spec:
- # Default order is ndbm:gdbm:bdb
- config_args.append("--with-dbmliborder=gdbm")
- else:
- config_args.append("--with-dbmliborder=")
- if "+pyexpat" in spec:
- config_args.append("--with-system-expat")
+ >>> self.print_string('sys.prefix')
+ 'print(sys.prefix)'
+ """
+ if self.spec.satisfies("@:2"):
+ return "print {0}".format(string)
else:
- config_args.append("--without-system-expat")
+ return "print({0})".format(string)
- if "+ctypes" in spec:
- config_args.append("--with-system-ffi")
- else:
- config_args.append("--without-system-ffi")
+ @property
+ def config_vars(self):
+ """Return a set of variable definitions associated with a Python installation.
- if "+tkinter" in spec:
- config_args.extend(
- [
- "--with-tcltk-includes=-I{0} -I{1}".format(
- spec["tcl"].prefix.include, spec["tk"].prefix.include
- ),
- "--with-tcltk-libs={0} {1}".format(
- spec["tcl"].libs.ld_flags, spec["tk"].libs.ld_flags
- ),
- ]
- )
+ Wrapper around various ``sysconfig`` functions. To see these variables on the
+ command line, run:
+
+ .. code-block:: console
# https://docs.python.org/3.8/library/sqlite3.html#f1
if spec.satisfies("+sqlite3 ^sqlite+dynamic_extensions"):
config_args.append("--enable-loadable-sqlite-extensions")
- if spec.satisfies("%oneapi"):
- cflags.append("-fp-model=strict")
+ Returns:
+ dict: variable definitions
+ """
+ cmd = """
+import json
+from sysconfig import (
+ get_config_vars,
+ get_config_h_filename,
+ get_makefile_filename,
+ get_paths,
+)
- if cflags:
- config_args.append("CFLAGS={0}".format(" ".join(cflags)))
+config = get_config_vars()
+config['config_h_filename'] = get_config_h_filename()
+config['makefile_filename'] = get_makefile_filename()
+config.update(get_paths())
- return config_args
+%s
+""" % self.print_string(
+ "json.dumps(config)"
+ )
- def configure(self, spec, prefix):
- """Runs configure with the arguments specified in
- :meth:`~spack.build_systems.autotools.AutotoolsPackage.configure_args`
- and an appropriately set prefix.
- """
- with working_dir(self.stage.source_path, create=True):
+ dag_hash = self.spec.dag_hash()
+ lib_prefix = "lib" if not is_windows else ""
+ if dag_hash not in self._config_vars:
+ # Default config vars
+ version = self.version.up_to(2)
if is_windows:
+ version = str(version).split(".")[0]
+ config = {
+ # get_config_vars
+ "BINDIR": self.prefix.bin,
+ "CC": "cc",
+ "CONFINCLUDEPY": self.prefix.include.join("python{}").format(version),
+ "CXX": "c++",
+ "INCLUDEPY": self.prefix.include.join("python{}").format(version),
+ "LIBDEST": self.prefix.lib.join("python{}").format(version),
+ "LIBDIR": self.prefix.lib,
+ "LDLIBRARY": "{}python{}.{}".format(lib_prefix, version, dso_suffix),
+ "LIBRARY": "{}python{}.{}".format(lib_prefix, version, stat_suffix),
+ "LDSHARED": "cc",
+ "LDCXXSHARED": "c++",
+ "PYTHONFRAMEWORKPREFIX": "/System/Library/Frameworks",
+ "base": self.prefix,
+ "installed_base": self.prefix,
+ "installed_platbase": self.prefix,
+ "platbase": self.prefix,
+ "prefix": self.prefix,
+ # get_config_h_filename
+ "config_h_filename": self.prefix.include.join("python{}")
+ .join("pyconfig.h")
+ .format(version),
+ # get_makefile_filename
+ "makefile_filename": self.prefix.lib.join("python{0}")
+ .join("config-{0}-{1}")
+ .Makefile.format(version, sys.platform),
+ # get_paths
+ "data": self.prefix,
+ "include": self.prefix.include.join("python{}".format(version)),
+ "platinclude": self.prefix.include64.join("python{}".format(version)),
+ "platlib": self.prefix.lib64.join("python{}".format(version)).join(
+ "site-packages"
+ ),
+ "platstdlib": self.prefix.lib64.join("python{}".format(version)),
+ "purelib": self.prefix.lib.join("python{}".format(version)).join("site-packages"),
+ "scripts": self.prefix.bin,
+ "stdlib": self.prefix.lib.join("python{}".format(version)),
+ }
+
+ try:
+ config.update(json.loads(self.command("-c", cmd, output=str)))
+ except (ProcessError, RuntimeError):
pass
- else:
- options = getattr(self, "configure_flag_args", [])
- options += ["--prefix={0}".format(prefix)]
- options += self.configure_args()
- configure(*options)
-
- def build(self, spec, prefix):
- """Makes the build targets specified by
- :py:attr:``~.AutotoolsPackage.build_targets``
- """
- # Windows builds use a batch script to drive
- # configure and build in one step
- with working_dir(self.stage.source_path):
- if is_windows:
- pcbuild_root = os.path.join(self.stage.source_path, "PCbuild")
- builder_cmd = os.path.join(pcbuild_root, "build.bat")
- try:
- subprocess.check_output( # novermin
- " ".join([builder_cmd] + self.win_build_params), stderr=subprocess.STDOUT
- )
- except subprocess.CalledProcessError as e:
- raise ProcessError(
- "Process exited with status %d" % e.returncode,
- long_message=e.output.decode("utf-8"),
- )
- else:
- # See https://autotools.io/automake/silent.html
- params = ["V=1"]
- params += self.build_targets
- make(*params)
-
- def install(self, spec, prefix):
- """Makes the install targets specified by
- :py:attr:``~.AutotoolsPackage.install_targets``
- """
- with working_dir(self.stage.source_path):
- if is_windows:
- self.win_installer(prefix)
- else:
- make(*self.install_targets)
-
- @run_after("install")
- def filter_compilers(self):
- """Run after install to tell the configuration files and Makefiles
- to use the compilers that Spack built the package with.
-
- If this isn't done, they'll have CC and CXX set to Spack's generic
- cc and c++. We want them to be bound to whatever compiler
- they were built with."""
- if is_windows:
- return
- kwargs = {"ignore_absent": True, "backup": False, "string": True}
-
- filenames = [self.get_sysconfigdata_name(), self.config_vars["makefile_filename"]]
-
- filter_file(spack_cc, self.compiler.cc, *filenames, **kwargs)
- if spack_cxx and self.compiler.cxx:
- filter_file(spack_cxx, self.compiler.cxx, *filenames, **kwargs)
-
- @run_after("install")
- def symlink(self):
- if is_windows:
- return
- spec = self.spec
- prefix = self.prefix
-
- if spec.satisfies("+pythoncmd"):
- os.symlink(os.path.join(prefix.bin, "python3"), os.path.join(prefix.bin, "python"))
- os.symlink(
- os.path.join(prefix.bin, "python3-config"),
- os.path.join(prefix.bin, "python-config"),
- )
-
- @run_after("install")
- def install_python_gdb(self):
- # https://devguide.python.org/gdb/
- src = os.path.join("Tools", "gdb", "libpython.py")
- if os.path.exists(src):
- install(src, self.command.path + "-gdb.py")
-
- @run_after("install")
- @on_package_attributes(run_tests=True)
- def import_tests(self):
- """Test that basic Python functionality works."""
-
- spec = self.spec
-
- with working_dir("spack-test", create=True):
- # Ensure that readline module works
- if "+readline" in spec:
- self.command("-c", "import readline")
-
- # Ensure that ssl module works
- if "+ssl" in spec:
- self.command("-c", "import ssl")
- self.command("-c", "import hashlib")
-
- # Ensure that sqlite3 module works
- if "+sqlite3" in spec:
- self.command("-c", "import sqlite3")
-
- # Ensure that dbm module works
- if "+dbm" in spec:
- self.command("-c", "import dbm")
-
- # Ensure that nis module works
- if "+nis" in spec:
- self.command("-c", "import nis")
-
- # Ensure that zlib module works
- if "+zlib" in spec:
- self.command("-c", "import zlib")
-
- # Ensure that bz2 module works
- if "+bz2" in spec:
- self.command("-c", "import bz2")
-
- # Ensure that lzma module works
- if "+lzma" in spec:
- self.command("-c", "import lzma")
-
- # Ensure that pyexpat module works
- if "+pyexpat" in spec:
- self.command("-c", "import xml.parsers.expat")
- self.command("-c", "import xml.etree.ElementTree")
-
- # Ensure that ctypes module works
- if "+ctypes" in spec:
- self.command("-c", "import ctypes")
-
- # Ensure that tkinter module works
- # https://wiki.python.org/moin/TkInter
- if "+tkinter" in spec:
- # Only works if ForwardX11Trusted is enabled, i.e. `ssh -Y`
- if "DISPLAY" in env:
- self.command("-c", "import tkinter; tkinter._test()")
- else:
- self.command("-c", "import tkinter")
-
- # Ensure that uuid module works
- if "+uuid" in spec:
- self.command("-c", "import uuid")
-
- # Ensure that tix module works
- if "+tix" in spec:
- self.command("-c", "import tkinter.tix")
-
- # Ensure that crypt module works
- if "+crypt" in spec:
- self.command("-c", "import crypt")
-
- # ========================================================================
- # Set up environment to make install easy for python extensions.
- # ========================================================================
-
- @property
- def command(self):
- """Returns the Python command, which may vary depending
- on the version of Python and how it was installed.
-
- In general, Python 3 only comes with a ``python3`` command. However, some
- package managers will symlink ``python`` to ``python3``, while others
- may contain ``python3.11``, ``python3.10``, and ``python3.9`` in the
- same directory.
-
- Returns:
- Executable: the Python command
- """
- # We need to be careful here. If the user is using an externally
- # installed python, several different commands could be located
- # in the same directory. Be as specific as possible. Search for:
- #
- # * python3.11
- # * python3
- # * python
- #
- # in that order if using python@3.11.0, for example.
- version = self.spec.version
- for ver in [version.up_to(2), version.up_to(1), ""]:
- if not is_windows:
- path = os.path.join(self.prefix.bin, "python{0}".format(ver))
- else:
- path = os.path.join(self.prefix, "python{0}.exe".format(ver))
- if os.path.exists(path):
- return Executable(path)
-
- else:
- # Give a last try at rhel8 platform python
- if self.spec.external and self.prefix == "/usr" and self.spec.satisfies("os=rhel8"):
- path = os.path.join(self.prefix, "libexec", "platform-python")
- if os.path.exists(path):
- return Executable(path)
-
- msg = "Unable to locate {0} command in {1}"
- raise RuntimeError(msg.format(self.name, self.prefix.bin))
-
- @property
- def config_vars(self):
- """Return a set of variable definitions associated with a Python installation.
-
- Wrapper around various ``sysconfig`` functions. To see these variables on the
- command line, run:
-
- .. code-block:: console
-
- $ python -m sysconfig
-
- Returns:
- dict: variable definitions
- """
- cmd = """
-import json
-from sysconfig import (
- get_config_vars,
- get_config_h_filename,
- get_makefile_filename,
- get_paths,
-)
-
-config = get_config_vars()
-config['config_h_filename'] = get_config_h_filename()
-config['makefile_filename'] = get_makefile_filename()
-config.update(get_paths())
-
-print(json.dumps(config))
-"""
-
- dag_hash = self.spec.dag_hash()
- lib_prefix = "lib" if not is_windows else ""
- if dag_hash not in self._config_vars:
- # Default config vars
- version = self.version.up_to(2)
- if is_windows:
- version = str(version).split(".")[0]
- config = {
- # get_config_vars
- "BINDIR": self.prefix.bin,
- "CC": "cc",
- "CONFINCLUDEPY": self.prefix.include.join("python{}").format(version),
- "CXX": "c++",
- "INCLUDEPY": self.prefix.include.join("python{}").format(version),
- "LIBDEST": self.prefix.lib.join("python{}").format(version),
- "LIBDIR": self.prefix.lib,
- "LDLIBRARY": "{}python{}.{}".format(lib_prefix, version, dso_suffix),
- "LIBRARY": "{}python{}.{}".format(lib_prefix, version, stat_suffix),
- "LDSHARED": "cc",
- "LDCXXSHARED": "c++",
- "PYTHONFRAMEWORKPREFIX": "/System/Library/Frameworks",
- "base": self.prefix,
- "installed_base": self.prefix,
- "installed_platbase": self.prefix,
- "platbase": self.prefix,
- "prefix": self.prefix,
- # get_config_h_filename
- "config_h_filename": self.prefix.include.join("python{}")
- .join("pyconfig.h")
- .format(version),
- # get_makefile_filename
- "makefile_filename": self.prefix.lib.join("python{0}")
- .join("config-{0}-{1}")
- .Makefile.format(version, sys.platform),
- # get_paths
- "data": self.prefix,
- "include": self.prefix.include.join("python{}".format(version)),
- "platinclude": self.prefix.include64.join("python{}".format(version)),
- "platlib": self.prefix.lib64.join("python{}".format(version)).join(
- "site-packages"
- ),
- "platstdlib": self.prefix.lib64.join("python{}".format(version)),
- "purelib": self.prefix.lib.join("python{}".format(version)).join("site-packages"),
- "scripts": self.prefix.bin,
- "stdlib": self.prefix.lib.join("python{}".format(version)),
- }
-
- try:
- config.update(json.loads(self.command("-c", cmd, output=str)))
- except (ProcessError, RuntimeError):
- pass
- self._config_vars[dag_hash] = config
- return self._config_vars[dag_hash]
+ self._config_vars[dag_hash] = config
+ return self._config_vars[dag_hash]
def get_sysconfigdata_name(self):
"""Return the full path name of the sysconfigdata file."""
@@ -1054,12 +716,11 @@ def setup_dependent_build_environment(self, env, dependent_spec):
extension and any other python extensions it depends on.
"""
# Ensure the current Python is first in the PATH
- path = os.path.dirname(self.command.path)
+ path = os.path.dirname(self.pkg.command.path)
if not is_system_path(path):
env.prepend_path("PATH", path)
# Add installation prefix to PYTHONPATH, needed to run import tests
- prefixes = set()
if dependent_spec.package.extends(self.spec):
prefixes.add(dependent_spec.prefix)
@@ -1078,7 +739,7 @@ def setup_dependent_build_environment(self, env, dependent_spec):
for prefix in prefixes:
# Packages may be installed in platform-specific or platform-independent
# site-packages directories
- for directory in {self.platlib, self.purelib}:
+ for directory in {self.pkg.platlib, self.pkg.purelib}:
env.prepend_path("PYTHONPATH", os.path.join(prefix, directory))
# We need to make sure that the extensions are compiled and linked with
@@ -1102,8 +763,8 @@ def setup_dependent_build_environment(self, env, dependent_spec):
for compile_var, link_var in [("CC", "LDSHARED"), ("CXX", "LDCXXSHARED")]:
# First, we get the values from the sysconfigdata:
- config_compile = self.config_vars[compile_var]
- config_link = self.config_vars[link_var]
+ config_compile = self.pkg.config_vars[compile_var]
+ config_link = self.pkg.config_vars[link_var]
# The dependent environment will have the compilation command set to
# the following:
@@ -1137,82 +798,349 @@ def setup_dependent_build_environment(self, env, dependent_spec):
if config_link != new_link and not is_windows:
env.set(link_var, new_link)
- def setup_dependent_run_environment(self, env, dependent_spec):
- """Set PYTHONPATH to include the site-packages directory for the
- extension and any other python extensions it depends on.
+
+class RunAfter(object):
+ @run_after("install")
+ def filter_compilers(self):
+ """Run after install to tell the configuration files and Makefiles
+ to use the compilers that Spack built the package with.
+
+ If this isn't done, they'll have CC and CXX set to Spack's generic
+ cc and c++. We want them to be bound to whatever compiler
+ they were built with."""
+ if is_windows:
+ return
+ kwargs = {"ignore_absent": True, "backup": False, "string": True}
+
+ filenames = [self.pkg.get_sysconfigdata_name(), self.pkg.config_vars["makefile_filename"]]
+
+ filter_file(spack_cc, self.pkg.compiler.cc, *filenames, **kwargs)
+ if spack_cxx and self.pkg.compiler.cxx:
+ filter_file(spack_cxx, self.pkg.compiler.cxx, *filenames, **kwargs)
+
+ @run_after("install")
+ def symlink(self):
+ if is_windows:
+ return
+ spec = self.pkg.spec
+ prefix = self.pkg.prefix
+
+ # TODO:
+ # On OpenSuse 13, python uses /lib64/python2.7/lib-dynload/*.so
+ # instead of /lib/python2.7/lib-dynload/*.so. Oddly enough the
+ # result is that Python can not find modules like cPickle. A workaround
+ # for now is to symlink to `lib`:
+ src = os.path.join(prefix.lib64, "python{0}".format(self.pkg.version.up_to(2)), "lib-dynload")
+ dst = os.path.join(prefix.lib, "python{0}".format(self.pkg.version.up_to(2)), "lib-dynload")
+ if os.path.isdir(src) and not os.path.isdir(dst):
+ mkdirp(dst)
+ for f in os.listdir(src):
+ os.symlink(os.path.join(src, f), os.path.join(dst, f))
+
+ if spec.satisfies("@3:") and spec.satisfies("+pythoncmd"):
+ os.symlink(os.path.join(prefix.bin, "python3"), os.path.join(prefix.bin, "python"))
+ os.symlink(
+ os.path.join(prefix.bin, "python3-config"),
+ os.path.join(prefix.bin, "python-config"),
+ )
+
+ @run_after("install")
+ def install_python_gdb(self):
+ # https://devguide.python.org/gdb/
+ src = os.path.join("Tools", "gdb", "libpython.py")
+ if os.path.exists(src):
+ install(src, self.pkg.command.path + "-gdb.py")
+
+ @run_after("install")
+ @on_package_attributes(run_tests=True)
+ def import_tests(self):
+ """Test that basic Python functionality works."""
+
+ spec = self.pkg.spec
+
+ with working_dir("spack-test", create=True):
+ # Ensure that readline module works
+ if "+readline" in spec:
+ self.pkg.command("-c", "import readline")
+
+ # Ensure that ssl module works
+ if "+ssl" in spec:
+ self.pkg.command("-c", "import ssl")
+ self.pkg.command("-c", "import hashlib")
+
+ # Ensure that sqlite3 module works
+ if "+sqlite3" in spec:
+ self.pkg.command("-c", "import sqlite3")
+
+ # Ensure that dbm module works
+ if "+dbm" in spec:
+ self.pkg.command("-c", "import dbm")
+
+ # Ensure that nis module works
+ if "+nis" in spec:
+ self.pkg.command("-c", "import nis")
+
+ # Ensure that zlib module works
+ if "+zlib" in spec:
+ self.pkg.command("-c", "import zlib")
+
+ # Ensure that bz2 module works
+ if "+bz2" in spec:
+ self.pkg.command("-c", "import bz2")
+
+ # Ensure that lzma module works
+ if spec.satisfies("@3.3:"):
+ if "+lzma" in spec:
+ self.pkg.command("-c", "import lzma")
+
+ # Ensure that pyexpat module works
+ if "+pyexpat" in spec:
+ self.pkg.command("-c", "import xml.parsers.expat")
+ self.pkg.command("-c", "import xml.etree.ElementTree")
+
+ # Ensure that ctypes module works
+ if "+ctypes" in spec:
+ self.pkg.command("-c", "import ctypes")
+
+ # Ensure that tkinter module works
+ # https://wiki.python.org/moin/TkInter
+ if "+tkinter" in spec:
+ # Only works if ForwardX11Trusted is enabled, i.e. `ssh -Y`
+ if "DISPLAY" in env:
+ if spec.satisfies("@3:"):
+ self.pkg.command("-c", "import tkinter; tkinter._test()")
+ else:
+ self.pkg.command("-c", "import Tkinter; Tkinter._test()")
+ else:
+ if spec.satisfies("@3:"):
+ self.pkg.command("-c", "import tkinter")
+ else:
+ self.pkg.command("-c", "import Tkinter")
+
+ # Ensure that uuid module works
+ if "+uuid" in spec:
+ self.pkg.command("-c", "import uuid")
+
+ # Ensure that tix module works
+ if "+tix" in spec:
+ if spec.satisfies("@3:"):
+ self.pkg.command("-c", "import tkinter.tix")
+ else:
+ self.pkg.command("-c", "import Tix")
+
+
+class AutotoolsBuilder(AutotoolsBuilder, RunAfter):
+ def configure_args(self):
+ spec = self.spec
+ config_args = []
+ cflags = []
+
+ # setup.py needs to be able to read the CPPFLAGS and LDFLAGS
+ # as it scans for the library and headers to build
+ link_deps = spec.dependencies(deptype="link")
+
+ if link_deps:
+ # Header files are often included assuming they reside in a
+ # subdirectory of prefix.include, e.g. #include ,
+ # which is why we don't use HeaderList here. The header files of
+ # libffi reside in prefix.lib but the configure script of Python
+ # finds them using pkg-config.
+ cppflags = " ".join("-I" + spec[dep.name].prefix.include for dep in link_deps)
+
+ # Currently, the only way to get SpecBuildInterface wrappers of the
+ # dependencies (which we need to get their 'libs') is to get them
+ # using spec.__getitem__.
+ ldflags = " ".join(spec[dep.name].libs.search_flags for dep in link_deps)
+
+ config_args.extend(["CPPFLAGS=" + cppflags, "LDFLAGS=" + ldflags])
+
+ # https://docs.python.org/3/whatsnew/3.7.html#build-changes
+ if spec.satisfies("@:3.6"):
+ config_args.append("--with-threads")
+
+ if spec.satisfies("@2.7.13:2.8,3.5.3:", strict=True) and "+optimizations" in spec:
+ config_args.append("--enable-optimizations")
+ config_args.append("--with-lto")
+ config_args.append("--with-computed-gotos")
+
+ if spec.satisfies("%gcc platform=darwin"):
+ config_args.append("--disable-toolbox-glue")
+
+ if spec.satisfies("%intel", strict=True) and spec.satisfies(
+ "@2.7.12:2.8,3.5.2:3.7", strict=True
+ ):
+ config_args.append("--with-icc={0}".format(spack_cc))
+
+ if "+debug" in spec:
+ config_args.append("--with-pydebug")
+ else:
+ config_args.append("--without-pydebug")
+
+ if "+shared" in spec:
+ config_args.append("--enable-shared")
+ else:
+ config_args.append("--disable-shared")
+
+ if "+ucs4" in spec:
+ if spec.satisfies("@:2.7"):
+ config_args.append("--enable-unicode=ucs4")
+ elif spec.satisfies("@3.0:3.2"):
+ config_args.append("--with-wide-unicode")
+ elif spec.satisfies("@3.3:"):
+ # https://docs.python.org/3.3/whatsnew/3.3.html#functionality
+ raise ValueError("+ucs4 variant not compatible with Python 3.3 and beyond")
+
+ if spec.satisfies("@2.7.9:2,3.4:"):
+ config_args.append("--without-ensurepip")
+
+ if "+pic" in spec:
+ cflags.append(self.compiler.cc_pic_flag)
+
+ if "+ssl" in spec:
+ if spec.satisfies("@3.7:"):
+ config_args.append("--with-openssl={0}".format(spec["openssl"].prefix))
+
+ if "+dbm" in spec:
+ # Default order is ndbm:gdbm:bdb
+ config_args.append("--with-dbmliborder=gdbm")
+ else:
+ config_args.append("--with-dbmliborder=")
+
+ if "+pyexpat" in spec:
+ config_args.append("--with-system-expat")
+ else:
+ config_args.append("--without-system-expat")
+
+ if "+ctypes" in spec:
+ config_args.append("--with-system-ffi")
+ else:
+ config_args.append("--without-system-ffi")
+
+ if "+tkinter" in spec:
+ config_args.extend(
+ [
+ "--with-tcltk-includes=-I{0} -I{1}".format(
+ spec["tcl"].prefix.include, spec["tk"].prefix.include
+ ),
+ "--with-tcltk-libs={0} {1}".format(
+ spec["tcl"].libs.ld_flags, spec["tk"].libs.ld_flags
+ ),
+ ]
+ )
+
+ # https://docs.python.org/3.8/library/sqlite3.html#f1
+ if spec.satisfies("@3.2: +sqlite3 ^sqlite+dynamic_extensions"):
+ config_args.append("--enable-loadable-sqlite-extensions")
+
+ if spec.satisfies("%oneapi"):
+ cflags.append("-fp-model=strict")
+
+ if cflags:
+ config_args.append("CFLAGS={0}".format(" ".join(cflags)))
+
+ return config_args
+
+
+class GenericBuilder(GenericBuilder, RunAfter):
+ phases = ("build", "install")
+
+ @property
+ def plat_arch(self):
+ """
+ String referencing platform architecture
+ filtered through Python's Windows build file
+ architecture support map
+
+ Note: This function really only makes
+ sense to use on Windows, could be overridden to
+ cross compile however.
+ """
+
+ arch_map = {"AMD64": "x64", "x86": "Win32", "IA64": "Win32", "EM64T": "Win32"}
+ arch = platform.machine()
+ if arch in arch_map:
+ arch = arch_map[arch]
+ return arch
+
+ @property
+ def win_build_params(self):
"""
- for d in dependent_spec.traverse(deptype=("run"), root=True):
- if d.package.extends(self.spec):
- # Packages may be installed in platform-specific or platform-independent
- # site-packages directories
- for directory in {self.platlib, self.purelib}:
- env.prepend_path("PYTHONPATH", os.path.join(d.prefix, directory))
-
- def setup_dependent_package(self, module, dependent_spec):
- """Called before python modules' install() methods."""
-
- module.python = self.command
-
- module.python_include = join_path(dependent_spec.prefix, self.include)
- module.python_platlib = join_path(dependent_spec.prefix, self.platlib)
- module.python_purelib = join_path(dependent_spec.prefix, self.purelib)
-
- # Make the site packages directory for extensions
- if dependent_spec.package.is_extension:
- mkdirp(module.python_platlib)
- mkdirp(module.python_purelib)
-
- def add_files_to_view(self, view, merge_map, skip_if_exists=True):
- bin_dir = self.spec.prefix.bin if sys.platform != "win32" else self.spec.prefix
- for src, dst in merge_map.items():
- if not path_contains_subdirectory(src, bin_dir):
- view.link(src, dst, spec=self.spec)
- elif not os.path.islink(src):
- copy(src, dst)
- if is_nonsymlink_exe_with_shebang(src):
- filter_file(
- self.spec.prefix,
- os.path.abspath(view.get_projection_for_spec(self.spec)),
- dst,
- backup=False,
+ Arguments must be passed to the Python build batch script
+ in order to configure it to spec and system.
+ A number of these toggle optional MSBuild Projects
+ directly corresponding to the python support of the same
+ name.
+ """
+ args = []
+ args.append("-p %s" % self.plat_arch)
+ if self.spec.satisfies("+debug"):
+ args.append("-d")
+ if self.spec.satisfies("~ctypes"):
+ args.append("--no-ctypes")
+ if self.spec.satisfies("~ssl"):
+ args.append("--no-ssl")
+ if self.spec.satisfies("~tkinter"):
+ args.append("--no-tkinter")
+ return args
+
+ def build(self, pkg, spec, prefix):
+ """Makes the build targets specified by
+ :py:attr:``~.AutotoolsPackage.build_targets``
+ """
+ # Windows builds use a batch script to drive
+ # configure and build in one step
+ with working_dir(self.stage.source_path):
+ if is_windows:
+ pcbuild_root = os.path.join(self.stage.source_path, "PCbuild")
+ builder_cmd = os.path.join(pcbuild_root, "build.bat")
+ try:
+ subprocess.check_output( # novermin
+ " ".join([builder_cmd] + self.win_build_params), stderr=subprocess.STDOUT
)
+ except subprocess.CalledProcessError as e:
+ raise ProcessError(
+ "Process exited with status %d" % e.returncode,
+ long_message=e.output.decode("utf-8"),
+ )
+
+ def install(self, pkg, spec, prefix):
+ """
+ Python on Windows does not export an install target
+ so we must handcraft one here. This structure
+ directly mimics the install tree of the Python
+ Installer on Windows.
+
+ Parameters:
+ prefix (str): Install prefix for package
+ """
+ proj_root = self.stage.source_path
+ pcbuild_root = os.path.join(proj_root, "PCbuild")
+ build_root = os.path.join(pcbuild_root, platform.machine().lower())
+ include_dir = os.path.join(proj_root, "Include")
+ copy_tree(include_dir, prefix.include)
+ doc_dir = os.path.join(proj_root, "Doc")
+ copy_tree(doc_dir, prefix.Doc)
+ tools_dir = os.path.join(proj_root, "Tools")
+ copy_tree(tools_dir, prefix.Tools)
+ lib_dir = os.path.join(proj_root, "Lib")
+ copy_tree(lib_dir, prefix.Lib)
+ pyconfig = os.path.join(proj_root, "PC", "pyconfig.h")
+ copy(pyconfig, prefix.include)
+ shared_libraries = []
+ shared_libraries.extend(glob.glob("%s\\*.exe" % build_root))
+ shared_libraries.extend(glob.glob("%s\\*.dll" % build_root))
+ shared_libraries.extend(glob.glob("%s\\*.pyd" % build_root))
+ os.makedirs(prefix.DLLs)
+ for lib in shared_libraries:
+ file_name = os.path.basename(lib)
+ if (
+ file_name.endswith(".exe")
+ or (file_name.endswith(".dll") and "python" in file_name)
+ or "vcruntime" in file_name
+ ):
+ copy(lib, prefix)
else:
- # orig_link_target = os.path.realpath(src) is insufficient when
- # the spack install tree is located at a symlink or a
- # descendent of a symlink. What we need here is the real
- # relative path from the python prefix to src
- # TODO: generalize this logic in the link_tree object
- # add a method to resolve a link relative to the link_tree
- # object root.
- realpath_src = os.path.realpath(src)
- realpath_prefix = os.path.realpath(self.spec.prefix)
- realpath_rel = os.path.relpath(realpath_src, realpath_prefix)
- orig_link_target = os.path.join(self.spec.prefix, realpath_rel)
-
- new_link_target = os.path.abspath(merge_map[orig_link_target])
- view.link(new_link_target, dst, spec=self.spec)
-
- def remove_files_from_view(self, view, merge_map):
- bin_dir = self.spec.prefix.bin if not is_windows else self.spec.prefix
- for src, dst in merge_map.items():
- if not path_contains_subdirectory(src, bin_dir):
- view.remove_file(src, dst)
- else:
- os.remove(dst)
-
- def test(self):
- # do not use self.command because we are also testing the run env
- exe = self.spec["python"].command.name
-
- # test hello world
- msg = "hello world!"
- reason = "test: running {0}".format(msg)
- options = ["-c", 'print("{0}")'.format(msg)]
- self.run_test(exe, options=options, expected=[msg], installed=True, purpose=reason)
-
- # checks import works and executable comes from the spec prefix
- reason = "test: checking import and executable"
- options = ["-c", "import sys; print(sys.executable)"]
- self.run_test(
- exe, options=options, expected=[self.spec.prefix], installed=True, purpose=reason
- )
+ copy(lib, prefix.DLLs)
+        static_libraries = glob.glob("%s\\*.lib" % build_root)
+ for lib in static_libraries:
+ copy(lib, prefix.libs)
diff --git a/var/spack/repos/builtin/packages/scons/package.py b/var/spack/repos/builtin/packages/scons/package.py
index 6f8e6174ef178a..d7affc860f97f3 100644
--- a/var/spack/repos/builtin/packages/scons/package.py
+++ b/var/spack/repos/builtin/packages/scons/package.py
@@ -61,5 +61,5 @@ def setup_dependent_run_environment(self, env, dependent_spec):
def setup_dependent_package(self, module, dspec):
if is_windows:
module.scons = Executable(self.spec.prefix.Scripts.scons)
- else:
+ else:
module.scons = Executable(self.spec.prefix.bin.scons)
diff --git a/var/spack/repos/builtin/packages/wgl/package.py b/var/spack/repos/builtin/packages/wgl/package.py
index bbad4bf987195e..32894141957225 100644
--- a/var/spack/repos/builtin/packages/wgl/package.py
+++ b/var/spack/repos/builtin/packages/wgl/package.py
@@ -34,6 +34,7 @@ class Wgl(Package):
version("10.0.14393")
version("10.0.10586")
version("10.0.26639")
+ version("10.0.20348")
# As per https://github.com/spack/spack/pull/31748 this provisory version represents
# an arbitrary openGL version designed for maximum compatibility with calling packages
@@ -48,13 +49,15 @@ class Wgl(Package):
# needed to use OpenGL are found in the SDK (GL/gl.h)
# Dep is needed to consolidate sdk version to locate header files for
# version of SDK being used
- depends_on("win-sdk@10.0.19041", when="@10.0.19041")
- depends_on("win-sdk@10.0.18362", when="@10.0.18362")
- depends_on("win-sdk@10.0.17763", when="@10.0.17763")
- depends_on("win-sdk@10.0.17134", when="@10.0.17134")
- depends_on("win-sdk@10.0.16299", when="@10.0.16299")
- depends_on("win-sdk@10.0.15063", when="@10.0.15063")
- depends_on("win-sdk@10.0.14393", when="@10.0.14393")
+ # Generic depends to capture handling for external versions
+ depends_on("win-sdk", type=("build", "run"))
+ depends_on("win-sdk@10.0.19041", when="@10.0.19041", type=("build", "run"))
+ depends_on("win-sdk@10.0.18362", when="@10.0.18362", type=("build", "run"))
+ depends_on("win-sdk@10.0.17763", when="@10.0.17763", type=("build", "run"))
+ depends_on("win-sdk@10.0.17134", when="@10.0.17134", type=("build", "run"))
+ depends_on("win-sdk@10.0.16299", when="@10.0.16299", type=("build", "run"))
+ depends_on("win-sdk@10.0.15063", when="@10.0.15063", type=("build", "run"))
+ depends_on("win-sdk@10.0.14393", when="@10.0.14393", type=("build", "run"))
# WGL has no meaning on other platforms, should not be able to spec
for plat in ["linux", "darwin", "cray"]:
@@ -80,11 +83,11 @@ def determine_variants(cls, libs, ver_str):
# As noted above, the headers neccesary to include
@property
def headers(self):
- return find_headers("GL/gl.h", root=self.spec["win-sdk"].prefix.includes, recursive=True)
+ return find_headers("GL", root=os.path.join(self.prefix.Include, str(self.version)+".0"), recursive=True)
@property
def libs(self):
- return find_libraries("opengl32", shared=False, root=self.prefix, recursive=True)
+ return find_libraries("opengl32", shared=False, root=os.path.join(self.prefix.Lib, str(self.version)+".0", "um", self.spec.variants["plat"].value), recursive=True)
def install(self, spec, prefix):
raise RuntimeError(
diff --git a/var/spack/repos/builtin/packages/winlibiconv/package.py b/var/spack/repos/builtin/packages/winlibiconv/package.py
new file mode 100644
index 00000000000000..7b4cb6b1d36ebb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/winlibiconv/package.py
@@ -0,0 +1,47 @@
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import glob
+import os
+
+from spack.build_systems.generic import GenericBuilder
+from spack.package import *
+
+
+class Winlibiconv(Package, SourceforgePackage):
+ """Windows GNUWIN32 project port of GNU libiconv provides an
+ implementation of the iconv() function
+ and the iconv program for character set conversion."""
+
+ homepage = "https://gnuwin32.sourceforge.net/packages/libiconv.htm"
+ sourceforge_mirror_path = "gnuwin32/files/libiconv-1.8-src.zip"
+
+ version("1.8", sha256="1da58752373f8234744f246e38d8fdc1ad70c7593da5a229305fbae63b36f334")
+ version("1.7", sha256="cb548cba97e2d1f667d2ccd7b9929094d5b50aba26864b970551552afbf5743d")
+ version("1.6.1", sha256="69c63e2208af97c8795d3ffe81a384712f1d5fe6f2d33e59d9bbb291c8902752")
+ version("1.6", sha256="c2023bdd9225f9b794085645de0f717634ed411962981e87c83444cb2491af2c")
+
+ provides("iconv")
+
+ for plat in ["linux", "darwin", "cray"]:
+ conflicts(plat)
+
+
+class NMakeBuilder(GenericBuilder):
+ def build(self, pkg, spec, prefix):
+ file_root = glob.glob(os.path.join(self.stage.source_path, "src", "libiconv-*"))[0]
+ with working_dir(file_root):
+ nmake("-f", "%s\\Makefile.msvc" % file_root)
+
+ def install(self, pkg, spec, prefix):
+ file_root = glob.glob(os.path.join(self.stage.source_path, "src", "libiconv-*"))[0]
+ with working_dir(file_root):
+ file_root = glob.glob(os.path.join("src", "libiconv-*"))[0]
+ nmake(
+ "-f",
+ "%s\\Makefile.msvc" % file_root,
+ "install",
+ "PREFIX=%s" % prefix,
+ )
diff --git a/var/spack/repos/builtin/packages/xz/package.py b/var/spack/repos/builtin/packages/xz/package.py
index 5dd075a2b444ea..9a55a07b542b42 100644
--- a/var/spack/repos/builtin/packages/xz/package.py
+++ b/var/spack/repos/builtin/packages/xz/package.py
@@ -3,12 +3,18 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import glob
+import os
+import platform
import re
+import sys
+from spack.build_systems.autotools import AutotoolsBuilder
+from spack.build_systems.msbuild import MSBuildBuilder
from spack.package import *
-class Xz(AutotoolsPackage, SourceforgePackage):
+class Xz(MSBuildPackage, AutotoolsPackage, SourceforgePackage):
"""XZ Utils is free general-purpose data compression software with
high compression ratio. XZ Utils were written for POSIX-like systems,
but also work on some not-so-POSIX systems. XZ Utils are the successor
@@ -44,9 +50,11 @@ class Xz(AutotoolsPackage, SourceforgePackage):
# xz-5.2.7/src/liblzma/common/common.h:56 uses attribute __symver__ instead of
# __asm__(.symver) for newer GCC releases.
conflicts("%intel", when="@5.2.7", msg="icc does not support attribute __symver__")
+ conflicts("platform=windows", when="+pic") # no pic on Windows
+ # up to and including 5.2.3 (per the "@:5.2.3" constraint below), the build system is MinGW-only — not currently supported by Spack; confirm whether 5.2.3 itself should be excluded
+ conflicts("platform=windows", when="@:5.2.3")
- def configure_args(self):
- return self.enable_or_disable("libs")
+ build_system(conditional("msbuild", when="platform=windows"), "autotools", default="autotools")
def flag_handler(self, name, flags):
if name == "cflags" and "+pic" in self.spec:
@@ -63,7 +71,66 @@ def determine_version(cls, exe):
match = re.search(r"xz \(XZ Utils\) (\S+)", output)
return match.group(1) if match else None
+
+class RunAfter(object):
@run_after("install")
def darwin_fix(self):
if self.spec.satisfies("platform=darwin"):
fix_darwin_install_name(self.prefix.lib)
+
+
+class AutotoolsBuilder(AutotoolsBuilder):
+ def configure_args(self):
+ return self.enable_or_disable("libs")
+
+
+class MSBuildBuilder(MSBuildBuilder):
+ @property
+ def build_directory(self):
+ def get_file_string_number(f):
+ s = re.findall(r"\d+$", f)
+ return (int(s[0]) if s else -1, f)
+ win_dir = os.path.join(super().build_directory, "windows")
+ compiler_dirs = []
+ with working_dir(win_dir):
+ for obj in os.scandir():
+ if obj.is_dir():
+ compiler_dirs.append(obj.name)
+ newest_compiler = max(compiler_dirs, key=get_file_string_number)
+ return os.path.join(win_dir, newest_compiler)
+
+ @property
+ def toolchain_version(self):
+ return "v" + self.pkg.compiler.platform_toolset_ver
+
+ def is_64bit(self):
+ return platform.machine().endswith("64")
+
+ def msbuild_args(self):
+ plat = "x64" if self.is_64bit() else "x86"
+ if self.pkg.spec.satisfies("libs=shared,static"):
+ f = "xz_win.sln"
+ elif self.pkg.spec.satisfies("libs=shared"):
+ f = "liblzma_dll.vcxproj"
+ else:
+ f = "liblzma.vcxproj"
+ return [self.define("Configuration", "Release"), self.define("Platform", plat), f]
+
+ def install(self, pkg, spec, prefix):
+ with working_dir(self.build_directory):
+ # Ensure the lib directory exists before installing into it
+ if not os.path.isdir(prefix.lib):
+ mkdirp(prefix.lib)
+ libs_to_find = []
+ if self.pkg.spec.satisfies("libs=shared,static"):
+ libs_to_find.extend(["*.dll", "*.lib"])
+ elif self.pkg.spec.satisfies("libs=shared"):
+ libs_to_find.append("*.dll")
+ else:
+ libs_to_find.append("*.lib")
+ for lib in libs_to_find:
+ libs_to_install = glob.glob(os.path.join(self.build_directory, "**", lib), recursive=True)
+ for l in libs_to_install:
+ install(l, prefix.lib)
+ with working_dir(pkg.stage.source_path):
+ install_tree(os.path.join("src", "liblzma", "api"), prefix.include)
diff --git a/var/spack/repos/builtin/packages/zstd/package.py b/var/spack/repos/builtin/packages/zstd/package.py
index d795ae80c9f94d..c1da951456559e 100644
--- a/var/spack/repos/builtin/packages/zstd/package.py
+++ b/var/spack/repos/builtin/packages/zstd/package.py
@@ -3,10 +3,13 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import os
+from spack.build_systems.cmake import CMakeBuilder
+from spack.build_systems.makefile import MakefileBuilder
from spack.package import *
-class Zstd(MakefilePackage):
+class Zstd(CMakePackage, MakefilePackage):
"""Zstandard, or zstd as short version, is a fast lossless compression
algorithm, targeting real-time compression scenarios at zlib-level and
better compression ratios."""
@@ -55,6 +58,28 @@ class Zstd(MakefilePackage):
# (last tested: nvhpc@22.3)
conflicts("+programs %nvhpc")
+ build_system("cmake", "makefile", default="cmake")
+
+
+class CMakeBuilder(CMakeBuilder):
+ @property
+ def root_cmakelists_dir(self):
+ return os.path.join(super().root_cmakelists_dir, "build", "cmake")
+
+ def cmake_args(self):
+ spec = self.spec
+ args = []
+ args.append(self.define_from_variant("ZSTD_BUILD_PROGRAMS", "programs"))
+ if "compression=zlib" in spec:
+ args.append(self.define("ZSTD_ZLIB_SUPPORT", True))
+ if "compression=lzma" in spec:
+ args.append(self.define("ZSTD_LZMA_SUPPORT", True))
+ if "compression=lz4" in spec:
+ args.append(self.define("ZSTD_LZ4_SUPPORT", True))
+ return args
+
+
+class MakefileBuilder(MakefileBuilder):
def build(self, spec, prefix):
pass
@@ -64,7 +89,6 @@ def install(self, spec, prefix):
# Tested %nvhpc@22.3. No support for -MP
if "%nvhpc" in self.spec:
args.append("DEPFLAGS=-MT $@ -MMD -MF")
-
# library targets
lib_args = ["-C", "lib"] + args + ["install-pc", "install-includes"]
if "libs=shared" in spec:
@@ -74,7 +98,6 @@ def install(self, spec, prefix):
# install the library
make(*lib_args)
-
# install the programs
if "+programs" in spec:
programs_args = ["-C", "programs"] + args
@@ -87,4 +110,4 @@ def install(self, spec, prefix):
if "compression=lz4" not in spec:
programs_args.append("HAVE_LZ4=0")
programs_args.append("install")
- make(*programs_args)
+ make(*programs_args)
\ No newline at end of file