From 82b08188ba74290095ffafce1c659cedee6e3605 Mon Sep 17 00:00:00 2001 From: root Date: Wed, 18 Sep 2024 22:39:28 +0800 Subject: [PATCH] add sw8a patch --- boost-1.78-Add-sw64-8A-architecture.patch | 796 ++++++++++++++++++++++ boost.spec | 6 +- 2 files changed, 801 insertions(+), 1 deletion(-) create mode 100644 boost-1.78-Add-sw64-8A-architecture.patch diff --git a/boost-1.78-Add-sw64-8A-architecture.patch b/boost-1.78-Add-sw64-8A-architecture.patch new file mode 100644 index 0000000..a4e6bea --- /dev/null +++ b/boost-1.78-Add-sw64-8A-architecture.patch @@ -0,0 +1,796 @@ +diff -uNar boost_1_78_0.org/boost/atomic/detail/core_arch_ops_gcc_sw_64.hpp boost_1_78_0.sw/boost/atomic/detail/core_arch_ops_gcc_sw_64.hpp +--- boost_1_78_0.org/boost/atomic/detail/core_arch_ops_gcc_sw_64.hpp 2024-09-12 18:52:33.860583265 +0800 ++++ boost_1_78_0.sw/boost/atomic/detail/core_arch_ops_gcc_sw_64.hpp 2024-09-12 18:55:37.915586499 +0800 +@@ -115,18 +115,15 @@ + static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT + { + storage_type original, tmp; +- storage_type tmp1, tmp2; ++ storage_type tmp1; + fence_before(order); + __asm__ __volatile__ + ( + "1:\n\t" +- "ldi %2,%4\n\t" +- "ldi %3,1\n\t" +- "mov %5, %1\n\t" ++ "ldi %2,%3\n\t" ++ "mov %4, %1\n\t" + "lldw %0, 0(%2)\n\t" +- "wr_f %3\n\t" + "lstw %1, 0(%2)\n\t" +- "rd_f %1\n\t" + "beq %1, 2f\n\t" + + ".subsection 2\n\t" +@@ -134,11 +131,10 @@ + ".previous\n\t" + + : "=&r" (original), // %0 +- "=&r" (tmp), // %1 +- "=&r" (tmp1), // %2 +- "=&r" (tmp2) // %3 +- : "m" (storage), // %4 +- "r" (v) // %5 ++ "=&r" (tmp), // %1 ++ "=&r" (tmp1) // %2 ++ : "m" (storage), // %3 ++ "r" (v) // %4 + : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC + ); + fence_after(order); +@@ -158,11 +154,9 @@ + "ldi %4,%6\n\t" + "lldw %2, 0(%4)\n\t" // current = *(&storage) + "cmpeq %2, %0, %5\n\t" // success = current == expected +- "wr_f %5\n\t" // success = current == expected ++ "beq %5, 2f\n\t" // if (success == 0) goto end + "mov %2, %0\n\t" // expected = current + "lstw %1, 0(%4)\n\t" // storage = desired; desired = store succeeded +- "rd_f %1\n\t" // storage = desired; desired = store succeeded +- "beq %5, 2f\n\t" // if (success == 0) goto end + "mov %1, %3\n\t" // success = desired + "2:\n\t" + : "+r" (expected), // %0 +@@ -195,11 +189,9 @@ + "mov %7, %1\n\t" // tmp = desired + "lldw %2, 0(%4)\n\t" // current = *(&storage) + "cmpeq %2, %0, %5\n\t" // success = current == expected +- "wr_f %5\n\t" // success = current == expected ++ "beq %5, 2f\n\t" // if (success == 0) goto end + "mov %2, %0\n\t" // expected = current + "lstw %1, 0(%4)\n\t" // storage = tmp; tmp = store succeeded +- "rd_f %1\n\t" // storage = tmp; tmp = store succeeded +- "beq %5, 2f\n\t" // if (success == 0) goto end + "beq %1, 3f\n\t" // if (tmp == 0) goto retry + "mov %1, %3\n\t" // success = tmp + "2:\n\t" +@@ -228,18 +220,15 @@ + static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT + { + storage_type original, modified; +- storage_type tmp1, tmp2; ++ storage_type tmp1; + fence_before(order); + __asm__ __volatile__ + ( + "1:\n\t" +- "ldi %2,%4\n\t" +- "ldi %3,1\n\t" ++ "ldi %2,%3\n\t" + "lldw %0, 0(%2)\n\t" +- "wr_f %3\n\t" +- "addw %0, %5, %1\n\t" ++ "addw %0, %4, %1\n\t" + "lstw %1, 0(%2)\n\t" +- "rd_f %1\n\t" + "beq %1, 2f\n\t" + + ".subsection 2\n\t" +@@ -247,11 +236,10 @@ + ".previous\n\t" + + : "=&r" (original), // %0 +- "=&r" (modified), // %1 +- "=&r" (tmp1), 
// %2 +- "=&r" (tmp2) // %3 +- : "m" (storage), // %4 +- "r" (v) // %5 ++ "=&r" (modified), // %1 ++ "=&r" (tmp1) // %2 ++ : "m" (storage), // %3 ++ "r" (v) // %4 + : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC + ); + fence_after(order); +@@ -261,18 +249,15 @@ + static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT + { + storage_type original, modified; +- storage_type tmp1, tmp2; ++ storage_type tmp1; + fence_before(order); + __asm__ __volatile__ + ( + "1:\n\t" +- "ldi %2,%4\n\t" +- "ldi %3,1\n\t" ++ "ldi %2,%3\n\t" + "lldw %0, 0(%2)\n\t" +- "wr_f %3\n\t" +- "subw %0, %5, %1\n\t" ++ "subw %0, %4, %1\n\t" + "lstw %1, 0(%2)\n\t" +- "rd_f %1\n\t" + "beq %1, 2f\n\t" + + ".subsection 2\n\t" +@@ -280,11 +265,10 @@ + ".previous\n\t" + + : "=&r" (original), // %0 +- "=&r" (modified), // %1 +- "=&r" (tmp1), // %2 +- "=&r" (tmp2) // %3 +- : "m" (storage), // %4 +- "r" (v) // %5 ++ "=&r" (modified), // %1 ++ "=&r" (tmp1) // %2 ++ : "m" (storage), // %3 ++ "r" (v) // %4 + : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC + ); + fence_after(order); +@@ -294,18 +278,15 @@ + static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT + { + storage_type original, modified; +- storage_type tmp1,tmp2; ++ storage_type tmp1; + fence_before(order); + __asm__ __volatile__ + ( + "1:\n\t" +- "ldi %2,%4\n\t" +- "ldi %3,1\n\t" ++ "ldi %2,%3\n\t" + "lldw %0, 0(%2)\n\t" +- "wr_f %3\n\t" +- "and %0, %5, %1\n\t" ++ "and %0, %4, %1\n\t" + "lstw %1, 0(%2)\n\t" +- "rd_f %1\n\t" + "beq %1, 2f\n\t" + + ".subsection 2\n\t" +@@ -313,11 +294,10 @@ + ".previous\n\t" + + : "=&r" (original), // %0 +- "=&r" (modified), // %1 +- "=&r" (tmp1), // %2 +- "=&r" (tmp2) // %3 +- : "m" (storage), // %4 +- "r" (v) // %5 ++ "=&r" (modified), // %1 ++ "=&r" (tmp1) // %2 ++ : "m" (storage), // %3 ++ "r" (v) // %4 + : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC + ); + fence_after(order); +@@ -327,18 +307,15 @@ + static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT + { + storage_type original, modified; +- storage_type tmp1,tmp2; ++ storage_type tmp1; + fence_before(order); + __asm__ __volatile__ + ( + "1:\n" +- "ldi %2,%4\n" +- "ldi %3,1\n" ++ "ldi %2,%3\n" + "lldw %0, 0(%2)\n" +- "wr_f %3 
\n" +- "bis %0, %5, %1\n" ++ "bis %0, %4, %1\n" + "lstw %1, 0(%2)\n" +- "rd_f %1 \n" + "beq %1, 2f\n" + + ".subsection 2\n\t" +@@ -347,10 +324,9 @@ + + : "=&r" (original), // %0 + "=&r" (modified), // %1 +- "=&r" (tmp1), // %2 +- "=&r" (tmp2) // %3 +- : "m" (storage), // %4 +- "r" (v) // %5 ++ "=&r" (tmp1) // %2 ++ : "m" (storage), // %3 ++ "r" (v) // %4 + : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC + ); + fence_after(order); +@@ -360,18 +336,15 @@ + static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT + { + storage_type original, modified; +- storage_type tmp1, tmp2; ++ storage_type tmp1; + fence_before(order); + __asm__ __volatile__ + ( + "1:\n" +- "ldi %2,%4\n" +- "ldi %3,1\n" ++ "ldi %2,%3\n" + "lldw %0, 0(%2)\n" +- "wr_f %3 \n" +- "xor %0, %5, %1\n" ++ "xor %0, %4, %1\n" + "lstw %1, 0(%2)\n" +- "rd_f %1 \n" + "beq %1, 2f\n" + + ".subsection 2\n\t" +@@ -380,10 +353,9 @@ + + : "=&r" (original), // %0 + "=&r" (modified), // %1 +- "=&r" (tmp1), // %2 +- "=&r" (tmp2) // %3 +- : "m" (storage), // %4 +- "r" (v) // %5 ++ "=&r" (tmp1) // %2 ++ : "m" (storage), // %3 ++ "r" (v) // %4 + : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC + ); + fence_after(order); +@@ -412,19 +384,16 @@ + static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT + { + storage_type original, modified; +- storage_type tmp1, tmp2; ++ storage_type tmp1; + base_type::fence_before(order); + __asm__ __volatile__ + ( + "1:\n" +- "ldi %2,%4\n" +- "ldi %3,1\n" ++ "ldi %2,%3\n" + "lldw %0, 0(%2)\n" +- "wr_f %3 \n" +- "addw %0, %5, %1\n" ++ "addw %0, %4, %1\n" + "zapnot %1, #1, %1\n" + "lstw %1, 0(%2)\n" +- "rd_f %1 \n" + "beq %1, 2f\n" + + ".subsection 2\n\t" +@@ -433,10 +402,9 @@ + + : "=&r" (original), // %0 + "=&r" (modified), // %1 +- "=&r" (tmp1), // %2 +- "=&r" (tmp2) // %3 +- : "m" (storage), // %4 +- "r" (v) // %5 ++ "=&r" (tmp1) // %2 ++ : "m" (storage), // %3 ++ "r" (v) // %4 + : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC + ); + base_type::fence_after(order); +@@ -446,19 +414,16 @@ + static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT + { + storage_type original, modified; +- storage_type tmp1, tmp2; ++ storage_type tmp1; + base_type::fence_before(order); + __asm__ __volatile__ + ( + "1:\n" +- "ldi %2,%4\n" +- "ldi %3,1\n" ++ "ldi %2,%3\n" + "lldw %0, 0(%2)\n" +- "wr_f %3 \n" +- "subw %0, %5, %1\n" ++ "subw %0, %4, %1\n" + "zapnot %1, #1, %1\n" + "lstw %1, 0(%2)\n" +- "rd_f %1 \n" + "beq %1, 2f\n" + + ".subsection 2\n\t" +@@ -467,10 +432,9 @@ + + : "=&r" (original), // %0 + "=&r" (modified), // %1 +- "=&r" (tmp1), // %2 +- "=&r" (tmp2) // %3 +- : "m" (storage), // %4 +- "r" (v) // %5 ++ "=&r" (tmp1) // %2 ++ : "m" (storage), // %3 ++ "r" (v) // %4 + : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC + ); + base_type::fence_after(order); +@@ -488,19 +452,16 @@ + static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT + { + storage_type original, modified; +- storage_type tmp1,tmp2; ++ storage_type tmp1; + base_type::fence_before(order); + __asm__ __volatile__ + ( + "1:\n" +- "ldi %2,%4\n" +- "ldi %3,1\n" ++ "ldi %2,%3\n" + "lldw %0, 0(%2)\n" +- "wr_f %3 \n" +- "addw %0, %5, %1\n" ++ "addw %0, %4, %1\n" + "sextb %1, %1\n" + "lstw %1, 0(%2)\n" +- "rd_f %1 \n" + "beq %1, 2f\n" + + ".subsection 2\n\t" +@@ -509,10 +470,9 @@ + + : "=&r" (original), // %0 + "=&r" 
(modified), // %1 +- "=&r" (tmp1), // %2 +- "=&r" (tmp2) // %3 +- : "m" (storage), // %4 +- "r" (v) // %5 ++ "=&r" (tmp1) // %2 ++ : "m" (storage), // %3 ++ "r" (v) // %4 + : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC + ); + base_type::fence_after(order); +@@ -522,19 +482,16 @@ + static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT + { + storage_type original, modified; +- storage_type tmp1,tmp2; ++ storage_type tmp1; + base_type::fence_before(order); + __asm__ __volatile__ + ( + "1:\n" +- "ldi %2,%4\n" +- "ldi %3,1\n" ++ "ldi %2,%3\n" + "lldw %0, 0(%2)\n" +- "wr_f %3 \n" +- "subw %0, %5, %1\n" ++ "subw %0, %4, %1\n" + "sextb %1, %1\n" + "lstw %1, 0(%2)\n" +- "rd_f %1 \n" + "beq %1, 2f\n" + + ".subsection 2\n\t" +@@ -543,10 +500,9 @@ + + : "=&r" (original), // %0 + "=&r" (modified), // %1 +- "=&r" (tmp1), // %2 +- "=&r" (tmp2) // %3 +- : "m" (storage), // %4 +- "r" (v) // %5 ++ "=&r" (tmp1) // %2 ++ : "m" (storage), // %3 ++ "r" (v) // %4 + : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC + ); + base_type::fence_after(order); +@@ -565,19 +521,16 @@ + static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT + { + storage_type original, modified; +- storage_type tmp1,tmp2; ++ storage_type tmp1; + base_type::fence_before(order); + __asm__ __volatile__ + ( + "1:\n" +- "ldi %2,%4\n" +- "ldi %3\n" ++ "ldi %2,%3\n" + "lldw %0, 0(%2)\n" +- "wr_f %3 \n" +- "addw %0, %5, %1\n" ++ "addw %0, %4, %1\n" + "zapnot %1, #3, %1\n" + "lstw %1, 0(%2)\n" +- "rd_f %1 \n" + "beq %1, 2f\n" + + ".subsection 2\n\t" +@@ -586,10 +539,9 @@ + + : "=&r" (original), // %0 + "=&r" (modified), // %1 +- "=&r" (tmp1), // %2 +- "=&r" (tmp2) // %3 +- : "m" (storage), // %4 +- "r" (v) // %5 ++ "=&r" (tmp1) // %2 ++ : "m" (storage), // %3 ++ "r" (v) // %4 + : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC + ); + base_type::fence_after(order); +@@ -599,19 +551,16 @@ + static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT + { + storage_type original, modified; +- storage_type tmp1,tmp2; ++ storage_type tmp1; + base_type::fence_before(order); + __asm__ __volatile__ + ( + "1:\n" +- "ldi %2,%4\n" +- "ldi %3,1\n" ++ "ldi %2,%3\n" + "lldw %0, 0(%2)\n" +- "wr_f %3 \n" +- "subw %0, %5, %1\n" ++ "subw %0, %4, %1\n" + "zapnot %1, #3, %1\n" + "lstw %1, %2\n" +- "rd_f %1 \n" + "beq %1, 2f\n" + + ".subsection 2\n\t" +@@ -620,10 +569,9 @@ + + : "=&r" (original), // %0 + "=&r" (modified), // %1 +- "=&r" (tmp1), // %2 +- "=&r" (tmp2) // %3 +- : "m" (storage), // %4 +- "r" (v) // %5 ++ "=&r" (tmp1) // %2 ++ : "m" (storage), // %3 ++ "r" (v) // %4 + : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC + ); + base_type::fence_after(order); +@@ -641,18 +589,15 @@ + static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT + { + storage_type original, modified; +- storage_type tmp1,tmp2; ++ storage_type tmp1; + base_type::fence_before(order); + __asm__ __volatile__ + ( +- "ldi %2,%4\n" +- "ldi %3,1\n" ++ "ldi %2,%3\n" + "lldw %0, 0(%2)\n" +- "wr_f %3 \n" +- "addw %0, %5, %1\n" ++ "addw %0, %4, %1\n" + "sexth %1, %1\n" + "lstw %1, 0(%2)\n" +- "rd_f %1 \n" + "beq %1, 2f\n" + + ".subsection 2\n\t" +@@ -661,10 +606,9 @@ + + : "=&r" (original), // %0 + "=&r" (modified), // %1 +- "=&r" (tmp1), // %2 +- "=&r" (tmp2) // %3 +- : "m" (storage), // %4 +- "r" (v) // %5 ++ "=&r" (tmp1) // %2 ++ : "m" (storage), // 
%3 ++ "r" (v) // %4 + : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC + ); + base_type::fence_after(order); +@@ -674,19 +618,16 @@ + static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT + { + storage_type original, modified; +- storage_type tmp1,tmp2; ++ storage_type tmp1; + base_type::fence_before(order); + __asm__ __volatile__ + ( + "1:\n" +- "ldi %2,%4\n" +- "ldi %3,1\n" ++ "ldi %2,%3\n" + "lldw %0, 0(%2)\n" +- "wr_f %3 \n" +- "subw %0, %5, %1\n" ++ "subw %0, %4, %1\n" + "sexth %1, %1\n" + "lstw %1, 0(%2)\n" +- "rd_f %1 \n" + "beq %1, 2f\n" + + ".subsection 2\n\t" +@@ -695,10 +636,9 @@ + + : "=&r" (original), // %0 + "=&r" (modified), // %1 +- "=&r" (tmp1), // %2 +- "=&r" (tmp2) // %3 +- : "m" (storage), // %4 +- "r" (v) // %5 ++ "=&r" (tmp1) // %2 ++ : "m" (storage), // %3 ++ "r" (v) // %4 + : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC + ); + base_type::fence_after(order); +@@ -735,18 +675,15 @@ + static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT + { + storage_type original, tmp; +- storage_type tmp1,tmp2; ++ storage_type tmp1; + fence_before(order); + __asm__ __volatile__ + ( + "1:\n" +- "ldi %2,%4\n" +- "ldi %3,1\n" +- "mov %5, %1\n" ++ "ldi %2,%3\n" ++ "mov %4, %1\n" + "lldl %0, 0(%2)\n" +- "wr_f %3 \n" + "lstl %1, 0(%2)\n" +- "rd_f %1 \n" + "beq %1, 2f\n" + + ".subsection 2\n\t" +@@ -754,11 +691,10 @@ + ".previous\n\t" + + : "=&r" (original), // %0 +- "=&r" (tmp), // %1 +- "=&r" (tmp1), // %2 +- "=&r" (tmp2) // %3 +- : "m" (storage), // %4 +- "r" (v) // %5 ++ "=&r" (tmp), // %1 ++ "=&r" (tmp1) // %2 ++ : "m" (storage), // %3 ++ "r" (v) // %4 + : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC + ); + fence_after(order); +@@ -776,13 +712,11 @@ + ( + "1:\n" + "ldi %4,%6\n" +- "lldl %2, 0(%4)\n" // current = *(&storage) ++ "lldl %2, 0(%4)\n" // current = *(&storage) + "cmpeq %2, %0, %5\n" // success = current == expected +- "wr_f %5 \n" +- "mov %2, %0\n" // expected = current +- "lstl %1, 0(%4)\n" // storage = desired; desired = store succeeded +- "rd_f %1 \n" + "beq %5, 2f\n" // if (success == 0) goto end ++ "mov %2, %0\n" // expected = current ++ "lstl %1, 0(%4)\n" // storage = desired; desired = store succeeded + "mov %1, %3\n" // success = desired + "2:\n\t" + : "+r" (expected), // %0 +@@ -815,11 +749,9 @@ + "mov %7, %1\n" // tmp = desired + "lldl %2, 0(%4)\n" // current = *(&storage) + "cmpeq %2, %0, %5\n" // success = current == expected +- "wr_f %5 \n" ++ "beq %5, 2f\n" // if (success == 0) goto end + "mov %2, %0\n" // expected = current + "lstl %1, 0(%4)\n" // storage = tmp; tmp = store succeeded +- "rd_f %1 \n" +- "beq %5, 2f\n" // if (success == 0) goto end + "beq %1, 3f\n" // if (tmp == 0) goto retry + "mov %1, %3\n" // success = tmp + "2:\n\t" +@@ -848,18 +780,15 @@ + static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT + { + storage_type original, modified; +- storage_type tmp1, tmp2; ++ storage_type tmp1; + fence_before(order); + __asm__ __volatile__ + ( + "1:\n" +- "ldi %2,%4\n" +- "ldi %3,1\n" ++ "ldi %2,%3\n" + "lldl %0, 0(%2)\n" +- "wr_f %3 \n" +- "addl %0, %5, %1\n" ++ "addl %0, %4, %1\n" + "lstl %1, 0(%2)\n" +- "rd_f %1 \n" + "beq %1, 2f\n" + + ".subsection 2\n\t" +@@ -868,10 +797,9 @@ + + : "=&r" (original), // %0 + "=&r" (modified), // %1 +- "=&r" (tmp1), // %2 +- "=&r" (tmp2) // %3 +- : "m" (storage), // %4 +- "r" (v) // %5 ++ "=&r" (tmp1) // %2 ++ : "m" 
(storage), // %3 ++ "r" (v) // %4 + : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC + ); + fence_after(order); +@@ -881,18 +809,15 @@ + static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT + { + storage_type original, modified; +- storage_type tmp1,tmp2; ++ storage_type tmp1; + fence_before(order); + __asm__ __volatile__ + ( + "1:\n" +- "ldi %2,%4\n" +- "ldi %3,1\n" ++ "ldi %2,%3\n" + "lldl %0, 0(%2)\n" +- "wr_f %3 \n" +- "subl %0, %5, %1\n" ++ "subl %0, %4, %1\n" + "lstl %1, 0(%2)\n" +- "rd_f %1 \n" + "beq %1, 2f\n" + + ".subsection 2\n\t" +@@ -901,10 +826,9 @@ + + : "=&r" (original), // %0 + "=&r" (modified), // %1 +- "=&r" (tmp1), // %2 +- "=&r" (tmp2) // %3 +- : "m" (storage), // %4 +- "r" (v) // %5 ++ "=&r" (tmp1) // %2 ++ : "m" (storage), // %3 ++ "r" (v) // %4 + : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC + ); + fence_after(order); +@@ -914,18 +838,15 @@ + static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT + { + storage_type original, modified; +- storage_type tmp1,tmp2; ++ storage_type tmp1; + fence_before(order); + __asm__ __volatile__ + ( + "1:\n" +- "ldi %2,%4\n" +- "ldi %3,1\n" ++ "ldi %2,%3\n" + "lldl %0, 0(%2)\n" +- "wr_f %3 \n" +- "and %0, %5, %1\n" ++ "and %0, %4, %1\n" + "lstl %1, 0(%2)\n" +- "rd_f %1 \n" + "beq %1, 2f\n" + + ".subsection 2\n\t" +@@ -934,10 +855,9 @@ + + : "=&r" (original), // %0 + "=&r" (modified), // %1 +- "=&r" (tmp1), // %2 +- "=&r" (tmp2) // %3 +- : "m" (storage), // %4 +- "r" (v) // %5 ++ "=&r" (tmp1) // %2 ++ : "m" (storage), // %3 ++ "r" (v) // %4 + : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC + ); + fence_after(order); +@@ -947,18 +867,15 @@ + static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT + { + storage_type original, modified; +- storage_type tmp1,tmp2; ++ storage_type tmp1; + fence_before(order); + __asm__ __volatile__ + ( + "1:\n" +- "ldi %2,%4\n" +- "ldi %3,1\n" ++ "ldi %2,%3\n" + "lldl %0, 0(%2)\n" +- "wr_f %3 \n" +- "bis %0, %5, %1\n" ++ "bis %0, %4, %1\n" + "lstl %1, 0(%2)\n" +- "rd_f %1 \n" + "beq %1, 2f\n" + + ".subsection 2\n\t" +@@ -967,10 +884,9 @@ + + : "=&r" (original), // %0 + "=&r" (modified), // %1 +- "=&r" (tmp1), // %2 +- "=&r" (tmp2) // %3 +- : "m" (storage), // %4 +- "r" (v) // %5 ++ "=&r" (tmp1) // %2 ++ : "m" (storage), // %3 ++ "r" (v) // %4 + : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC + ); + fence_after(order); +@@ -980,18 +896,15 @@ + static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT + { + storage_type original, modified; +- storage_type tmp1,tmp2; ++ storage_type tmp1; + fence_before(order); + __asm__ __volatile__ + ( + "1:\n" +- "ldi %2,%4\n" +- "ldi %3,1\n" ++ "ldi %2,%3\n" + "lldl %0, 0(%2)\n" +- "wr_f %3 \n" +- "xor %0, %5, %1\n" ++ "xor %0, %4, %1\n" + "lstl %1, 0(%2)\n" +- "rd_f %1 \n" + "beq %1, 2f\n" + + ".subsection 2\n\t" +@@ -1000,10 +913,9 @@ + + : "=&r" (original), // %0 + "=&r" (modified), // %1 +- "=&r" (tmp1), // %2 +- "=&r" (tmp2) // %3 +- : "m" (storage), // %4 +- "r" (v) // %5 ++ "=&r" (tmp1) // %2 ++ : "m" (storage), // %3 ++ "r" (v) // %4 + : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC + ); + fence_after(order); diff --git a/boost.spec b/boost.spec index 2ebf6a3..44a6d57 100644 --- a/boost.spec +++ b/boost.spec @@ -2,7 +2,7 @@ Name: boost Version: 1.78.0 -Release: 18 +Release: 19 Summary: The free peer-reviewed portable C++ 
source libraries License: Boost Software License 1.0 URL: http://www.boost.org @@ -31,6 +31,7 @@ Patch9001: boost_1_78_0-sw.patch Patch9002: boost-1.78-graph-Skip-performance-test.patch Patch9003: boost-1.78-icl-Avoid-gcc-bugs.patch Patch9004: boost-1.78-add-boost-context-support-for-loongarch64.patch +Patch9005: boost-1.78-Add-sw64-8A-architecture.patch Requires: %{name}-atomic%{?_isa} = %{version}-%{release} Requires: %{name}-chrono%{?_isa} = %{version}-%{release} @@ -465,6 +466,9 @@ find libs -name example -exec cp {} boost-example --parents -r \; %{_libdir}/*.a %changelog +* Fri Sep 13 2024 wuzx - 1.78.0-19 +- add sw64-8A support + * Tue Jun 18 2024 hefq343 - 1.78.0-18 - Init support for ppc64le -- Gitee