diff --git a/pkgs/development/compilers/llvm/16/llvm/default.nix b/pkgs/development/compilers/llvm/16/llvm/default.nix
index 604ad63492db..63fe28277fd8 100644
--- a/pkgs/development/compilers/llvm/16/llvm/default.nix
+++ b/pkgs/development/compilers/llvm/16/llvm/default.nix
@@ -165,21 +165,19 @@ in
     # and thus fails under the sandbox:
     substituteInPlace unittests/TargetParser/Host.cpp \
       --replace '/usr/bin/sw_vers' "${(builtins.toString darwin.DarwinTools) + "/bin/sw_vers" }"
-  '' + optionalString (stdenv.isDarwin && stdenv.hostPlatform.isx86) ''
+
     # This test tries to call the intrinsics `@llvm.roundeven.f32` and
     # `@llvm.roundeven.f64` which seem to (incorrectly?) lower to `roundevenf`
-    # and `roundeven` on x86_64 macOS.
+    # and `roundeven` on macOS.
     #
     # However these functions are glibc specific so the test fails:
     #   - https://www.gnu.org/software/gnulib/manual/html_node/roundevenf.html
     #   - https://www.gnu.org/software/gnulib/manual/html_node/roundeven.html
     #
-    # TODO(@rrbutani): this seems to run fine on `aarch64-darwin`, why does it
-    # pass there?
     substituteInPlace test/ExecutionEngine/Interpreter/intrinsics.ll \
       --replace "%roundeven32 = call float @llvm.roundeven.f32(float 0.000000e+00)" "" \
       --replace "%roundeven64 = call double @llvm.roundeven.f64(double 0.000000e+00)" ""
-
+  '' + optionalString (stdenv.isDarwin && stdenv.hostPlatform.isx86) ''
     # This test fails on darwin x86_64 because `sw_vers` reports a different
     # macOS version than what LLVM finds by reading
     # `/System/Library/CoreServices/SystemVersion.plist` (which is passed into