1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2010-2014 Intel Corporation
5 #ifndef _RTE_SPINLOCK_X86_64_H_
6 #define _RTE_SPINLOCK_X86_64_H_
12 #include "generic/rte_spinlock.h"
14 #include "rte_cpuflags.h"
15 #include "rte_branch_prediction.h"
16 #include "rte_common.h"
17 #include "rte_pause.h"
19 #define RTE_RTM_MAX_RETRIES (10)
20 #define RTE_XABORT_LOCK_BUSY (0xff)
22 #ifndef RTE_FORCE_INTRINSICS
/*
 * Take the spinlock (inline-asm path, compiled only when
 * RTE_FORCE_INTRINSICS is disabled — see the #ifndef above).
 * The "xchg" atomically swaps the lock value into sl->locked (xchg
 * against memory carries an implicit LOCK prefix on x86), and
 * "cmpl $0, %[locked]" is the read-only spin test on the lock word.
 * NOTE(review): this chunk elides several body lines (storage-class
 * qualifiers, the lock_val local, asm labels/branches and the
 * clobber list) — confirm against the complete header.
 */
24 rte_spinlock_lock(rte_spinlock_t *sl)
29 "xchg %[locked], %[lv]\n"
34 "cmpl $0, %[locked]\n"
38 : [locked] "=m" (sl->locked), [lv] "=q" (lock_val)
/*
 * Release the lock: a single atomic "xchg" stores the unlock value
 * into sl->locked. The "[ulv]" input constraint ties the input to
 * the same register as the [ulv] output operand.
 * NOTE(review): the unlock_val local and the surrounding asm
 * statement lines are elided from this view — confirm against the
 * complete header.
 */
44 rte_spinlock_unlock (rte_spinlock_t *sl)
48 "xchg %[locked], %[ulv]\n"
49 : [locked] "=m" (sl->locked), [ulv] "=q" (unlock_val)
50 : "[ulv]" (unlock_val)
/*
 * Single-shot lock attempt: one atomic "xchg" of lockval with
 * sl->locked, no spin loop. Success is derived from the swapped-out
 * value.
 * NOTE(review): the lockval local, return statement and clobber
 * list are elided from this view — confirm against the complete
 * header.
 */
55 rte_spinlock_trylock (rte_spinlock_t *sl)
60 "xchg %[locked], %[lockval]"
61 : [locked] "=m" (sl->locked), [lockval] "=q" (lockval)
62 : "[lockval]" (lockval)
/* Cached CPU capability flag: non-zero when Intel RTM (restricted
 * transactional memory) is available. Defined and initialized
 * elsewhere — only declared here. */
69 extern uint8_t rte_rtm_supported;
/*
 * Report hardware transactional-memory support to callers; simply
 * forwards the cached rte_rtm_supported flag.
 */
71 static inline int rte_tm_supported(void)
73 return rte_rtm_supported;
/*
 * Attempt to elide the lock with an RTM transaction.
 * Bails out immediately when rte_rtm_supported is clear; otherwise
 * retries up to RTE_RTM_MAX_RETRIES transactions:
 *  - on RTE_XBEGIN_STARTED, aborts with RTE_XABORT_LOCK_BUSY when
 *    the lock word is already held, so the abort handler below can
 *    distinguish "lock busy" from other aborts;
 *  - an explicit abort whose code is RTE_XABORT_LOCK_BUSY keeps
 *    retrying (the holder may release soon);
 *  - an abort without RTE_XABORT_RETRY set gives up at once.
 * NOTE(review): the in-transaction lock test, the success return,
 * and the loop-exit statements are elided from this view — confirm
 * against the complete header.
 */
77 rte_try_tm(volatile int *lock)
79 if (!rte_rtm_supported)
82 int retries = RTE_RTM_MAX_RETRIES;
84 while (likely(retries--)) {
86 unsigned int status = rte_xbegin();
88 if (likely(RTE_XBEGIN_STARTED == status)) {
90 rte_xabort(RTE_XABORT_LOCK_BUSY);
97 if ((status & RTE_XABORT_EXPLICIT) &&
98 (RTE_XABORT_CODE(status) == RTE_XABORT_LOCK_BUSY))
101 if ((status & RTE_XABORT_RETRY) == 0) /* do not retry */
/*
 * Lock with transactional elision: when rte_try_tm() opens a
 * transaction the lock is elided; otherwise fall back to the plain
 * spinning acquire.
 * NOTE(review): the early "return" taken on the TM-success branch
 * is elided from this view — without it the fallback would always
 * run; confirm against the complete header.
 */
108 rte_spinlock_lock_tm(rte_spinlock_t *sl)
110 if (likely(rte_try_tm(&sl->locked)))
113 rte_spinlock_lock(sl); /* fall-back */
/*
 * Non-blocking lock with transactional elision: report success when
 * rte_try_tm() starts a transaction, otherwise fall back to the
 * single-shot rte_spinlock_trylock().
 * NOTE(review): the "return 1" (or equivalent) on the TM-success
 * branch is elided from this view — confirm against the complete
 * header.
 */
117 rte_spinlock_trylock_tm(rte_spinlock_t *sl)
119 if (likely(rte_try_tm(&sl->locked)))
122 return rte_spinlock_trylock(sl);
/*
 * Release counterpart of the *_tm acquire paths: if sl->locked is
 * set the lock was really taken (fallback path), so release it;
 * the other branch — presumably committing the still-open
 * transaction via rte_xend() — is elided from this view.
 * NOTE(review): confirm the else branch against the complete header.
 */
126 rte_spinlock_unlock_tm(rte_spinlock_t *sl)
128 if (unlikely(sl->locked))
129 rte_spinlock_unlock(sl);
/*
 * Recursive-lock variant with transactional elision: elide via
 * rte_try_tm() on the inner lock word, else fall back to the plain
 * recursive acquire.
 * NOTE(review): the early "return" on the TM-success branch is
 * elided from this view — confirm against the complete header.
 */
135 rte_spinlock_recursive_lock_tm(rte_spinlock_recursive_t *slr)
137 if (likely(rte_try_tm(&slr->sl.locked)))
140 rte_spinlock_recursive_lock(slr); /* fall-back */
/*
 * Recursive-unlock counterpart: if the inner lock word is set the
 * lock was really taken (fallback path), so do a real recursive
 * unlock; the transaction-commit branch (presumably rte_xend()) is
 * elided from this view.
 * NOTE(review): confirm the else branch against the complete header.
 */
144 rte_spinlock_recursive_unlock_tm(rte_spinlock_recursive_t *slr)
146 if (unlikely(slr->sl.locked))
147 rte_spinlock_recursive_unlock(slr);
/*
 * Non-blocking recursive lock with transactional elision: report
 * success when rte_try_tm() starts a transaction on the inner lock
 * word, else fall back to the single-shot recursive trylock.
 * NOTE(review): the "return 1" (or equivalent) on the TM-success
 * branch is elided from this view — confirm against the complete
 * header.
 */
153 rte_spinlock_recursive_trylock_tm(rte_spinlock_recursive_t *slr)
155 if (likely(rte_try_tm(&slr->sl.locked)))
158 return rte_spinlock_recursive_trylock(slr);
166 #endif /* _RTE_SPINLOCK_X86_64_H_ */