From e76d8eb04655fa9da27b6450edeebf3480418049 Mon Sep 17 00:00:00 2001
From: Nekotekina <nekotekina@gmail.com>
Date: Thu, 18 Feb 2021 19:38:22 +0300
Subject: [PATCH] Fix -Wstrict-aliasing (gcc)

---
 rpcs3/cmake_modules/ConfigureCompiler.cmake |  1 -
 rpcs3/util/atomic.hpp                       | 45 +++++++++++++++------
 2 files changed, 33 insertions(+), 13 deletions(-)

diff --git a/rpcs3/cmake_modules/ConfigureCompiler.cmake b/rpcs3/cmake_modules/ConfigureCompiler.cmake
index 814db9f9de..75c3f054bc 100644
--- a/rpcs3/cmake_modules/ConfigureCompiler.cmake
+++ b/rpcs3/cmake_modules/ConfigureCompiler.cmake
@@ -49,7 +49,6 @@ else()
 		add_compile_options(-Wno-delete-non-virtual-dtor)
 		add_compile_options(-Wno-unused-command-line-argument)
 	elseif(CMAKE_COMPILER_IS_GNUCXX)
-		add_compile_options(-Wno-strict-aliasing)
 		add_compile_options(-Wno-class-memaccess)
 	endif()
 
diff --git a/rpcs3/util/atomic.hpp b/rpcs3/util/atomic.hpp
index 81034bb7b6..001e2577e7 100644
--- a/rpcs3/util/atomic.hpp
+++ b/rpcs3/util/atomic.hpp
@@ -348,29 +348,48 @@ struct atomic_storage
 	static constexpr int s_hle_rel = __ATOMIC_SEQ_CST;
 #endif
 
+// clang often thinks atomics are misaligned, GCC doesn't like reinterpret_cast for breaking strict aliasing
+#ifdef __clang__
+#define MAYBE_CAST(...) (reinterpret_cast<type*>(__VA_ARGS__))
+#else
+#define MAYBE_CAST(...) (__VA_ARGS__)
+#endif
+
 	static inline bool compare_exchange(T& dest, T& comp, T exch)
 	{
-		return __atomic_compare_exchange(reinterpret_cast<type*>(&dest), reinterpret_cast<type*>(&comp), reinterpret_cast<type*>(&exch), false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
+		return __atomic_compare_exchange(MAYBE_CAST(&dest), MAYBE_CAST(&comp), MAYBE_CAST(&exch), false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
 	}
 
 	static inline bool compare_exchange_hle_acq(T& dest, T& comp, T exch)
 	{
 		static_assert(sizeof(T) == 4 || sizeof(T) == 8);
-		return __atomic_compare_exchange(reinterpret_cast<type*>(&dest), reinterpret_cast<type*>(&comp), reinterpret_cast<type*>(&exch), false, s_hle_ack, s_hle_ack);
+		return __atomic_compare_exchange(MAYBE_CAST(&dest), MAYBE_CAST(&comp), MAYBE_CAST(&exch), false, s_hle_ack, s_hle_ack);
 	}
 
 	static inline T load(const T& dest)
 	{
-		T result;
-		__atomic_load(reinterpret_cast<const type*>(&dest), reinterpret_cast<type*>(&result), __ATOMIC_SEQ_CST);
+#ifdef __clang__
+		type result;
+		__atomic_load(reinterpret_cast<const type*>(&dest), MAYBE_CAST(&result), __ATOMIC_SEQ_CST);
+		return std::bit_cast<T>(result);
+#else
+		alignas(sizeof(T)) T result;
+		__atomic_load(&dest, &result, __ATOMIC_SEQ_CST);
 		return result;
+#endif
 	}
 
 	static inline T observe(const T& dest)
 	{
-		T result;
-		__atomic_load(reinterpret_cast<const type*>(&dest), reinterpret_cast<type*>(&result), __ATOMIC_RELAXED);
+#ifdef __clang__
+		type result;
+		__atomic_load(reinterpret_cast<const type*>(&dest), MAYBE_CAST(&result), __ATOMIC_RELAXED);
+		return std::bit_cast<T>(result);
+#else
+		alignas(sizeof(T)) T result;
+		__atomic_load(&dest, &result, __ATOMIC_RELAXED);
 		return result;
+#endif
 	}
 
 	static inline void store(T& dest, T value)
@@ -380,13 +399,13 @@ struct atomic_storage
 
 	static inline void release(T& dest, T value)
 	{
-		__atomic_store(reinterpret_cast<type*>(&dest), reinterpret_cast<type*>(&value), __ATOMIC_RELEASE);
+		__atomic_store(MAYBE_CAST(&dest), MAYBE_CAST(&value), __ATOMIC_RELEASE);
 	}
 
 	static inline T exchange(T& dest, T value)
 	{
-		T result;
-		__atomic_exchange(reinterpret_cast<type*>(&dest), reinterpret_cast<type*>(&value), reinterpret_cast<type*>(&result), __ATOMIC_SEQ_CST);
+		alignas(sizeof(T)) T result;
+		__atomic_exchange(MAYBE_CAST(&dest), MAYBE_CAST(&value), MAYBE_CAST(&result), __ATOMIC_SEQ_CST);
 		return result;
 	}
 
@@ -479,6 +498,7 @@ struct atomic_storage
 	{
 		return atomic_storage<T>::fetch_xor(dest, value) ^ value;
 	}
+#undef MAYBE_CAST
 #endif
 
 /* Third part: fallbacks, may be hidden by subsequent atomic_storage<> specializations */
@@ -974,7 +994,7 @@ struct atomic_storage<T, 16> : atomic_storage<T, 0>
 #else
 	static inline T load(const T& dest)
 	{
-		T r;
+		alignas(16) T r;
 #ifdef __AVX__
 		__asm__ volatile("vmovdqa %1, %0;" : "=x" (r) : "m" (dest) : "memory");
 #else
@@ -1047,10 +1067,11 @@ struct atomic_storage<T, 16> : atomic_storage<T, 0>
 
 	static inline void release(T& dest, T value)
 	{
+		u128 val = std::bit_cast<u128>(value);
 #ifdef __AVX__
-		__asm__ volatile("vmovdqa %0, %1;" :: "x" (reinterpret_cast<u128&>(value)), "m" (dest) : "memory");
+		__asm__ volatile("vmovdqa %0, %1;" :: "x" (val), "m" (dest) : "memory");
 #else
-		__asm__ volatile("movdqa %0, %1;" :: "x" (reinterpret_cast<u128&>(value)), "m" (dest) : "memory");
+		__asm__ volatile("movdqa %0, %1;" :: "x" (val), "m" (dest) : "memory");
 #endif
 	}
 #endif