diff --git a/conda-recipe/build.sh b/conda-recipe/build.sh index 056ae06b013..3f055d86f61 100644 --- a/conda-recipe/build.sh +++ b/conda-recipe/build.sh @@ -15,9 +15,35 @@ # limitations under the License. #=============================================================================== -export TBBROOT=$PREFIX export DPL_ROOT=$PREFIX +# Use isolated TBBROOT staging so we don't create files under $PREFIX that may +# accidentally end up in output packages. +# conda-forge tbb-devel may expose only versioned SONAMEs (libtbb.so.<version>, +# libtbbmalloc.so.<version>, etc.) while oneDAL Make expects unversioned names in +# TBBROOT/lib prerequisites. Mirror the full libtbb* tree into a staging dir, +# adding unversioned symlinks where missing. +export TBBROOT="$SRC_DIR/__tbbroot" +mkdir -p "$TBBROOT/lib" + +# Symlink TBB headers (TBBROOT/include -> $PREFIX/include) +# Remove stale dir/symlink first so link target is exactly $TBBROOT/include. +rm -rf "$TBBROOT/include" +ln -s "$PREFIX/include" "$TBBROOT/include" + +# Mirror all libtbb* shared objects and create unversioned symlinks if absent +for versioned in "$PREFIX/lib"/libtbb*.so.*; do + [ -e "$versioned" ] || continue + libname=$(basename "$versioned") + # Link versioned file into staging dir + ln -sfn "$versioned" "$TBBROOT/lib/$libname" + # Derive unversioned name: libtbbmalloc.so.2.6 -> libtbbmalloc.so + unversioned="${libname%%\.so\.*}.so" + if [ ! 
-e "$TBBROOT/lib/$unversioned" ]; then + ln -sfn "$versioned" "$TBBROOT/lib/$unversioned" + fi +done + # default flags set by conda-build create problems with oneDAL build system unset CFLAGS LDFLAGS CXXFLAGS # CONDA_CXX_COMPILER is set by the conda recipe diff --git a/conda-recipe/meta.yaml b/conda-recipe/meta.yaml index f0b411ce828..38173389fad 100644 --- a/conda-recipe/meta.yaml +++ b/conda-recipe/meta.yaml @@ -34,8 +34,12 @@ build: number: {{ buildnumber }} string: h{{ git_hash }}_{{ buildnumber }}_{{ cxx_compiler }} include_recipe: False - ignore_run_exports_from: - - dpcpp_impl_linux-64 # [linux64 or win] + # Note: ignore_run_exports_from is NOT set globally here. + # It is set per-output: dal (CPU) ignores dpcpp_impl run_exports so it + # does not pull GPU/SYCL runtime. dal-gpu does NOT ignore them, so it + # automatically inherits the compatible intel-sycl-rt pin declared by + # dpcpp_impl_linux-64 via run_exports, keeping SYCL runtime in sync + # with the compiler used at build time. script_env: # All build targets are used by default and AVX2 only for CI. # Override REQCPU to reduce manual build time if needed: `REQCPU=avx512 conda build .` @@ -62,6 +66,11 @@ requirements: outputs: - name: dal + build: + # Suppress dpcpp_impl run_exports for the CPU-only package: + # dal must not pull GPU/SYCL runtime (intel-sycl-rt). 
+ ignore_run_exports_from: + - dpcpp_impl_linux-64 # [linux64 or win] script: pack.sh # [linux] requirements: run: @@ -69,12 +78,35 @@ outputs: test: commands: - test -f $PREFIX/lib/libonedal_core.so.{{ major_binary_version }} # [linux] - - test -f $PREFIX/lib/libonedal_dpc.so.{{ major_binary_version }} # [linux] - test -f $PREFIX/lib/libonedal_thread.so.{{ major_binary_version }} # [linux] about: - summary: oneAPI Data Analytics Library (oneDAL) runtime libraries + summary: oneAPI Data Analytics Library (oneDAL) CPU runtime libraries + + - name: dal-gpu + build: + skip: true # [cxx_compiler != "dpcpp" or not linux64] + # Do NOT set ignore_run_exports_from here: dpcpp_impl_linux-64 declares + # run_exports with a compatible intel-sycl-rt pin. dal-gpu inherits it + # automatically — no manual pin needed. + script: pack.sh # [linux] + requirements: + run: + - {{ pin_subpackage('dal', exact=True) }} + # intel-sycl-rt is intentionally absent from the explicit run: list. + # It is injected automatically via run_exports from dpcpp_impl_linux-64, + # ensuring the SYCL runtime version matches the compiler used to build. 
+ - mkl-dpcpp # [linux64] + test: + commands: + - test -f $PREFIX/lib/libonedal_dpc.so.{{ major_binary_version }} # [linux] + - test -f $PREFIX/lib/libonedal_parameters_dpc.so.{{ major_binary_version }} # [linux] + about: + summary: oneAPI Data Analytics Library (oneDAL) GPU/DPC++ runtime library - name: dal-include + build: + ignore_run_exports_from: + - dpcpp_impl_linux-64 # [linux64 or win] script: pack.sh # [linux] test: commands: @@ -83,6 +115,9 @@ outputs: summary: Headers for building against oneAPI Data Analytics Library (oneDAL) - name: dal-static + build: + ignore_run_exports_from: + - dpcpp_impl_linux-64 # [linux64 or win] script: pack.sh # [linux] requirements: run: @@ -95,6 +130,9 @@ outputs: summary: Static libraries for oneAPI Data Analytics Library (oneDAL) - name: dal-devel + build: + ignore_run_exports_from: + - dpcpp_impl_linux-64 # [linux64 or win] script: pack.sh # [linux] requirements: run: @@ -108,7 +146,8 @@ outputs: - dal-static =={{ version }} - mkl-devel - mkl-static - - mkl-devel-dpcpp # [ cxx_compiler == "dpcpp" ] + - mkl-devel-dpcpp # [cxx_compiler == "dpcpp" and linux64] + - {{ pin_subpackage('dal-gpu', exact=True) }} # [cxx_compiler == "dpcpp" and linux64] source_files: - examples - data diff --git a/conda-recipe/pack.sh b/conda-recipe/pack.sh index ac7b10a1d96..8d62791160e 100644 --- a/conda-recipe/pack.sh +++ b/conda-recipe/pack.sh @@ -24,9 +24,25 @@ if [ "$PKG_NAME" = "dal-devel" ]; then cp -r "env" "$PREFIX/" cp -r "lib/cmake" "$PREFIX/lib/" cp -r "lib/pkgconfig" "$PREFIX/lib/" + # Install example datasets under $CONDA_PREFIX/share/oneDAL/data for dal-devel tests. + # Prefer canonical source location ($SRC_DIR/data); keep release-layout fallbacks. 
+ if [ -d "$SRC_DIR/data" ]; then + mkdir -p "$PREFIX/share/oneDAL/data" + cp -f "$SRC_DIR/data/"*.csv "$PREFIX/share/oneDAL/data/" 2>/dev/null || true + elif [ -d "$SRC_DIR/examples/oneapi/data" ]; then + mkdir -p "$PREFIX/share/oneDAL/data" + cp -f "$SRC_DIR/examples/oneapi/data/"*.csv "$PREFIX/share/oneDAL/data/" 2>/dev/null || true + elif [ -d data ]; then + mkdir -p "$PREFIX/share/oneDAL/data" + cp -f data/*.csv "$PREFIX/share/oneDAL/data/" 2>/dev/null || true + fi # set up links necessary for proper works of pkg-config, cmake and env. script mkdir -p "$PREFIX/lib/intel64" for lib in lib/intel64/libonedal*.so*; do + # Keep dal-devel CPU-oriented: do not create intel64 links for DPC runtime libs. + case "$lib" in + *"_dpc"*) continue ;; + esac if [ -f "$lib" ]; then libname=$(basename "$lib") ln -sf "../$libname" "$PREFIX/lib/intel64/$libname" @@ -40,9 +56,22 @@ if [ "$PKG_NAME" = "dal-include" ]; then mkdir -p "$PREFIX/include" cp -r include/* "$PREFIX/include/" fi -# copy libraries +# copy CPU runtime libraries (excludes all DPC++ runtime libs, moved to dal-gpu) if [ "$PKG_NAME" = "dal" ]; then - find lib/intel64 -name "libonedal*.so*" -exec cp -P {} "$PREFIX/lib/" \; + find lib/intel64 -name "libonedal*.so*" ! -name "libonedal*_dpc*" -exec cp -P {} "$PREFIX/lib/" \; +fi +# copy GPU/DPC++ runtime libraries +if [ "$PKG_NAME" = "dal-gpu" ]; then + find lib/intel64 -name "libonedal*_dpc*.so*" -exec cp -P {} "$PREFIX/lib/" \; + + # Keep CMake config lookup compatible: oneDALConfig.cmake searches lib/. + # Create lib/intel64 links to dal-gpu runtime libs (same layout as release tree). 
+ mkdir -p "$PREFIX/lib/intel64" + for lib in "$PREFIX"/lib/libonedal*_dpc*.so*; do + [ -e "$lib" ] || continue + libname=$(basename "$lib") + ln -sf "../$libname" "$PREFIX/lib/intel64/$libname" + done fi if [ "$PKG_NAME" = "dal-static" ]; then find lib/intel64 -name "libonedal*.a" -exec cp {} "$PREFIX/lib/" \; diff --git a/conda-recipe/test-devel.sh b/conda-recipe/test-devel.sh old mode 100644 new mode 100755 index 67d16b0af82..929fd81a41f --- a/conda-recipe/test-devel.sh +++ b/conda-recipe/test-devel.sh @@ -1,4 +1,5 @@ #!/usr/bin/env sh +set -eu #=============================================================================== # Copyright contributors to the oneDAL project # @@ -18,13 +19,14 @@ test -f $CONDA_PREFIX/lib/pkgconfig/dal-dynamic-threading-host.pc test -f $CONDA_PREFIX/lib/pkgconfig/dal-static-threading-host.pc -source $CONDA_PREFIX/env/vars.sh +. "$CONDA_PREFIX/env/vars.sh" run_examples() { - local interface_name=$1 - local linking_type=$2 + interface_name=$1 + linking_type=$2 + extra_cmake_args=${3:-""} - if [ "$linking_type" == "dynamic" ]; then + if [ "$linking_type" = "dynamic" ]; then library_postfix="so" else library_postfix="a" @@ -32,7 +34,16 @@ run_examples() { ( cd examples/$interface_name/cpp - mkdir build_$linking_type + # Ensure oneapi example datasets are reachable for get_data_path(). + # Preferred layout is ../data relative to this directory. + if [ ! -d ../data ] && [ -d "$CONDA_PREFIX/share/oneDAL/data" ]; then + ln -sf "$CONDA_PREFIX/share/oneDAL/data" ../data + fi + if [ -d ../data ]; then + rm -rf data && ln -s ../data data + fi + rm -rf build_$linking_type + mkdir -p build_$linking_type ( cd build_$linking_type @@ -41,25 +52,125 @@ run_examples() { # Note: MKL cmake config is required for static build only and # doesn't work with conda-forge distribution of tbb-devel. # Thus, tbb is set to "found" manually. 
- if [ "$linking_type" == "static" ]; then + if [ "$linking_type" = "static" ]; then cmake_args="$cmake_args -DTBB_tbb_FOUND=YES" fi + cmake_args="$cmake_args $extra_cmake_args" + # shellcheck disable=SC2086 cmake .. $cmake_args make -j$(nproc) ) - for example in _cmake_results/intel_intel64_$library_postfix/*; do + examples_pattern="_cmake_results/intel_intel64_$library_postfix/*" + set -- $examples_pattern + if [ "$1" = "$examples_pattern" ]; then + echo "ERROR: no built examples found for $interface_name-$linking_type" + return 1 + fi + + for example in "$@"; do + echo "================" + echo "Running example: $interface_name-$linking_type-$(basename "$example")" + echo "================" + "$example" + done + ) +} + +run_dpc_examples() { + # DPC++ examples are validated only in dynamic mode. + # We do not produce static DPC++ artifacts in conda packaging. + linking_type="dynamic" + library_postfix="so" + + ( + cd examples/oneapi/dpc + # Keep data path layout consistent with oneapi/cpp examples. + if [ ! -d ../data ] && [ -d "$CONDA_PREFIX/share/oneDAL/data" ]; then + ln -sf "$CONDA_PREFIX/share/oneDAL/data" ../data + fi + if [ -d ../data ]; then + rm -rf data && ln -s ../data data + fi + rm -rf build_$linking_type + mkdir -p build_$linking_type + + ( + cd build_$linking_type + + cmake_args="-DONEDAL_LINK=$linking_type" + # Note: MKL cmake config (incl. SYCL variant) requires TBB to be findable at configure time. + # conda-forge tbb-devel does not ship TBBConfig.cmake, so we set TBB as found manually + # (same approach as for the static CPU build above). + cmake_args="$cmake_args -DTBB_tbb_FOUND=YES" + + # shellcheck disable=SC2086 + cmake .. 
$cmake_args + make -j$(nproc) + ) + + examples_pattern="_cmake_results/intel_intel64_$library_postfix/*" + set -- $examples_pattern + if [ "$1" = "$examples_pattern" ]; then + echo "ERROR: no built oneapi/dpc examples found" + return 1 + fi + + for example in "$@"; do echo "================" - echo "Running example: $interface_name-$linking_type-$(basename $example)" + echo "Running example: oneapi-dpc-$linking_type-$(basename "$example")" echo "================" - $example + "$example" done ) } +# ============================================================ +# CPU-only tests: oneapi/cpp + daal (no GPU lib required) +# These must pass even without dal-gpu installed. +# ============================================================ +echo "========================================" +echo "Running CPU examples: oneapi/cpp dynamic" +echo "========================================" run_examples oneapi dynamic + +echo "========================================" +echo "Running CPU examples: oneapi/cpp static" +echo "========================================" run_examples oneapi static + +echo "========================================" +echo "Running CPU examples: daal/cpp dynamic" +echo "========================================" run_examples daal dynamic + +echo "========================================" +echo "Running CPU examples: daal/cpp static" +echo "========================================" run_examples daal static + +# ============================================================ +# GPU/DPC++ tests: oneapi/dpc (requires dal-gpu / libonedal_dpc.so) +# Skipped if GPU library is not installed. 
+# ============================================================ +has_dpc=$(find "$CONDA_PREFIX/lib" -maxdepth 1 -name "libonedal_dpc.so*" | head -1) +has_dpc_params=$(find "$CONDA_PREFIX/lib" -maxdepth 1 -name "libonedal_parameters_dpc.so*" | head -1) +if [ -n "$has_dpc" ] && [ -n "$has_dpc_params" ]; then + # Workaround: MKL cmake config requires unversioned libtbb.so for tbb_thread threading. + # conda-forge tbb-devel may only provide versioned soname (libtbb.so.<version>). + if [ ! -f "$CONDA_PREFIX/lib/libtbb.so" ]; then + tbb_so=$(find "$CONDA_PREFIX/lib" -maxdepth 1 -name "libtbb.so.*" | head -1) + [ -n "$tbb_so" ] && ln -sf "$(basename "$tbb_so")" "$CONDA_PREFIX/lib/libtbb.so" + fi + echo "========================================" + echo "Running GPU/DPC++ examples: oneapi/dpc dynamic" + echo "========================================" + run_dpc_examples +else + echo "========================================" + echo "Skipping GPU/DPC++ examples: dal-gpu is not fully installed" + echo "========================================" +fi + # TODO: add testing for samples