1 /*-
2  *   BSD LICENSE
3  *
4  * Copyright (c) 2016-2017 Solarflare Communications Inc.
5  * All rights reserved.
6  *
7  * This software was jointly developed between OKTET Labs (under contract
8  * for Solarflare) and Solarflare Communications, Inc.
9  *
10  * Redistribution and use in source and binary forms, with or without
11  * modification, are permitted provided that the following conditions are met:
12  *
13  * 1. Redistributions of source code must retain the above copyright notice,
14  *    this list of conditions and the following disclaimer.
15  * 2. Redistributions in binary form must reproduce the above copyright notice,
16  *    this list of conditions and the following disclaimer in the documentation
17  *    and/or other materials provided with the distribution.
18  *
19  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
20  * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
21  * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22  * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
23  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
24  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
25  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
26  * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
27  * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
28  * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
29  * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30  */
31
32 #ifndef _SFC_COMMON_EFSYS_H
33 #define _SFC_COMMON_EFSYS_H
34
35 #include <stdbool.h>
36
37 #include <rte_spinlock.h>
38 #include <rte_byteorder.h>
39 #include <rte_debug.h>
40 #include <rte_memzone.h>
41 #include <rte_memory.h>
42 #include <rte_memcpy.h>
43 #include <rte_cycles.h>
44 #include <rte_prefetch.h>
45 #include <rte_common.h>
46 #include <rte_malloc.h>
47 #include <rte_log.h>
48 #include <rte_io.h>
49
50 #include "sfc_debug.h"
51
52 #ifdef __cplusplus
53 extern "C" {
54 #endif
55
56 #define EFSYS_HAS_UINT64 1
57 #define EFSYS_USE_UINT64 1
58 #define EFSYS_HAS_SSE2_M128 1
59
60 #if RTE_BYTE_ORDER == RTE_BIG_ENDIAN
61 #define EFSYS_IS_BIG_ENDIAN 1
62 #define EFSYS_IS_LITTLE_ENDIAN 0
63 #elif RTE_BYTE_ORDER == RTE_LITTLE_ENDIAN
64 #define EFSYS_IS_BIG_ENDIAN 0
65 #define EFSYS_IS_LITTLE_ENDIAN 1
66 #else
67 #error "Cannot determine system endianness"
68 #endif
69 #include "efx_types.h"
70
71
72 #ifndef _NOTE
73 #define _NOTE(s)
74 #endif
75
76 typedef bool boolean_t;
77
78 #ifndef B_FALSE
79 #define B_FALSE false
80 #endif
81 #ifndef B_TRUE
82 #define B_TRUE  true
83 #endif
84
85 /*
86  * RTE_MAX() and RTE_MIN() cannot be used since a braced-group within
87  * an expression is allowed only inside a function, but MAX() is used
88  * as the number of elements in an array.
89  */
90 #ifndef MAX
91 #define MAX(v1, v2)     ((v1) > (v2) ? (v1) : (v2))
92 #endif
93 #ifndef MIN
94 #define MIN(v1, v2)     ((v1) < (v2) ? (v1) : (v2))
95 #endif
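/*
 * Illustrative sketch (not part of the driver): MAX() must expand to an
 * integer constant expression because it may be used as an array dimension
 * at file scope, e.g. with made-up sizes:
 *
 *	static uint8_t example_buf[MAX(16, 32)];
 *
 * RTE_MAX() expands to a braced-group ({ ... }), which is valid only inside
 * a function body, so it cannot be used in such a context.
 */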
96
97 /* DPDK does provide alignment macros, but a proper correspondence would
98  * have to be established here in order to re-use them.
99  */
100 #ifndef IS_P2ALIGNED
101 #define IS_P2ALIGNED(v, a)      ((((uintptr_t)(v)) & ((uintptr_t)(a) - 1)) == 0)
102 #endif
103
104 #ifndef P2ROUNDUP
105 #define P2ROUNDUP(x, align)     (-(-(x) & -(align)))
106 #endif
107
108 #ifndef P2ALIGN
109 #define P2ALIGN(_x, _a)         ((_x) & -(_a))
110 #endif
111
112 #ifndef ISP2
113 #define ISP2(x)                 rte_is_power_of_2(x)
114 #endif
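/*
 * Illustrative sketch (not part of the driver): behaviour of the
 * power-of-two helpers above for small constants (the alignment argument
 * must be a power of two):
 *
 *	IS_P2ALIGNED(24, 8)	nonzero (24 is a multiple of 8)
 *	IS_P2ALIGNED(10, 8)	0
 *	P2ROUNDUP(10, 8)	16 (round up to the next multiple of 8)
 *	P2ALIGN(10, 8)		8  (round down to a multiple of 8)
 *	ISP2(64)		true; ISP2(24) is false
 */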
115
116 #define ENOTACTIVE      ENOTCONN
117
118 static inline void
119 prefetch_read_many(const volatile void *addr)
120 {
121         rte_prefetch0(addr);
122 }
123
124 static inline void
125 prefetch_read_once(const volatile void *addr)
126 {
127         rte_prefetch_non_temporal(addr);
128 }
129
130 /* Modifiers (SAL annotations) used for Windows builds; defined empty here */
131 #define __in
132 #define __in_opt
133 #define __in_ecount(_n)
134 #define __in_ecount_opt(_n)
135 #define __in_bcount(_n)
136 #define __in_bcount_opt(_n)
137
138 #define __out
139 #define __out_opt
140 #define __out_ecount(_n)
141 #define __out_ecount_opt(_n)
142 #define __out_bcount(_n)
143 #define __out_bcount_opt(_n)
144 #define __out_bcount_part(_n, _l)
145 #define __out_bcount_part_opt(_n, _l)
146
147 #define __deref_out
148
149 #define __inout
150 #define __inout_opt
151 #define __inout_ecount(_n)
152 #define __inout_ecount_opt(_n)
153 #define __inout_bcount(_n)
154 #define __inout_bcount_opt(_n)
155 #define __inout_bcount_full_opt(_n)
156
157 #define __deref_out_bcount_opt(n)
158
159 #define __checkReturn
160 #define __success(_x)
161
162 #define __drv_when(_p, _c)
163
164 /* Code inclusion options */
165
166
167 #define EFSYS_OPT_NAMES 1
168
169 /* Disable SFN5xxx/SFN6xxx since they require specific support in the PMD */
170 #define EFSYS_OPT_SIENA 0
171 /* Enable SFN7xxx support */
172 #define EFSYS_OPT_HUNTINGTON 1
173 /* Enable SFN8xxx support */
174 #define EFSYS_OPT_MEDFORD 1
175 #ifdef RTE_LIBRTE_SFC_EFX_DEBUG
176 #define EFSYS_OPT_CHECK_REG 1
177 #else
178 #define EFSYS_OPT_CHECK_REG 0
179 #endif
180
181 /* MCDI is required for SFN7xxx and SFN8xxx */
182 #define EFSYS_OPT_MCDI 1
183 #define EFSYS_OPT_MCDI_LOGGING 1
184 #define EFSYS_OPT_MCDI_PROXY_AUTH 1
185
186 #define EFSYS_OPT_MAC_STATS 1
187
188 #define EFSYS_OPT_LOOPBACK 0
189
190 #define EFSYS_OPT_MON_MCDI 0
191 #define EFSYS_OPT_MON_STATS 0
192
193 #define EFSYS_OPT_PHY_STATS 0
194 #define EFSYS_OPT_BIST 0
195 #define EFSYS_OPT_PHY_LED_CONTROL 0
196 #define EFSYS_OPT_PHY_FLAGS 0
197
198 #define EFSYS_OPT_VPD 0
199 #define EFSYS_OPT_NVRAM 0
200 #define EFSYS_OPT_BOOTCFG 0
201
202 #define EFSYS_OPT_DIAG 0
203 #define EFSYS_OPT_RX_SCALE 1
204 #define EFSYS_OPT_QSTATS 0
205 /* Filter support is required for SFN7xxx and SFN8xxx */
206 #define EFSYS_OPT_FILTER 1
207 #define EFSYS_OPT_RX_SCATTER 0
208
209 #define EFSYS_OPT_EV_PREFETCH 0
210
211 #define EFSYS_OPT_DECODE_INTR_FATAL 0
212
213 #define EFSYS_OPT_LICENSING 0
214
215 #define EFSYS_OPT_ALLOW_UNCONFIGURED_NIC 0
216
217 #define EFSYS_OPT_RX_PACKED_STREAM 0
218
219 /* ID */
220
221 typedef struct __efsys_identifier_s efsys_identifier_t;
222
223
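/*
 * Probe points (DTrace-style on other platforms) have no counterpart here,
 * so the EFSYS_PROBE* macros below are compiled out to empty statements.
 */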
224 #define EFSYS_PROBE(_name)                                              \
225         do { } while (0)
226
227 #define EFSYS_PROBE1(_name, _type1, _arg1)                              \
228         do { } while (0)
229
230 #define EFSYS_PROBE2(_name, _type1, _arg1, _type2, _arg2)               \
231         do { } while (0)
232
233 #define EFSYS_PROBE3(_name, _type1, _arg1, _type2, _arg2,               \
234                      _type3, _arg3)                                     \
235         do { } while (0)
236
237 #define EFSYS_PROBE4(_name, _type1, _arg1, _type2, _arg2,               \
238                      _type3, _arg3, _type4, _arg4)                      \
239         do { } while (0)
240
241 #define EFSYS_PROBE5(_name, _type1, _arg1, _type2, _arg2,               \
242                      _type3, _arg3, _type4, _arg4, _type5, _arg5)       \
243         do { } while (0)
244
245 #define EFSYS_PROBE6(_name, _type1, _arg1, _type2, _arg2,               \
246                      _type3, _arg3, _type4, _arg4, _type5, _arg5,       \
247                      _type6, _arg6)                                     \
248         do { } while (0)
249
250 #define EFSYS_PROBE7(_name, _type1, _arg1, _type2, _arg2,               \
251                      _type3, _arg3, _type4, _arg4, _type5, _arg5,       \
252                      _type6, _arg6, _type7, _arg7)                      \
253         do { } while (0)
254
255
256 /* DMA */
257
258 typedef rte_iova_t efsys_dma_addr_t;
259
260 typedef struct efsys_mem_s {
261         const struct rte_memzone        *esm_mz;
262         /*
263          * Ideally this would have the volatile qualifier to denote that
264          * the memory may be updated by someone else. However, that causes
265          * qualifier-discard warnings when the pointer or a derivative of
266          * it is passed to memset() or rte_mov16().
267          * So, skip the qualifier here, but make sure that it is added
268          * in the access macros below.
269          */
270         void                            *esm_base;
271         efsys_dma_addr_t                esm_addr;
272 } efsys_mem_t;
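/*
 * Illustrative sketch (not part of the driver) of how an efsys_mem_t is
 * assumed to be filled in from a DPDK memzone; the memzone name, size and
 * alignment below are made up and error handling is omitted:
 *
 *	const struct rte_memzone *mz;
 *	efsys_mem_t esm;
 *
 *	mz = rte_memzone_reserve_aligned("example", 4096, SOCKET_ID_ANY,
 *					 0, 4096);
 *	esm.esm_mz = mz;
 *	esm.esm_base = mz->addr;
 *	esm.esm_addr = mz->iova;
 */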
273
274
275 #define EFSYS_MEM_ZERO(_esmp, _size)                                    \
276         do {                                                            \
277                 (void)memset((void *)(_esmp)->esm_base, 0, (_size));    \
278                                                                         \
279                 _NOTE(CONSTANTCONDITION);                               \
280         } while (B_FALSE)
281
282 #define EFSYS_MEM_READD(_esmp, _offset, _edp)                           \
283         do {                                                            \
284                 volatile uint8_t  *_base = (_esmp)->esm_base;           \
285                 volatile uint32_t *_addr;                               \
286                                                                         \
287                 _NOTE(CONSTANTCONDITION);                               \
288                 SFC_ASSERT(IS_P2ALIGNED(_offset, sizeof(efx_dword_t))); \
289                                                                         \
290                 _addr = (volatile uint32_t *)(_base + (_offset));       \
291                 (_edp)->ed_u32[0] = _addr[0];                           \
292                                                                         \
293                 EFSYS_PROBE2(mem_readl, unsigned int, (_offset),        \
294                                          uint32_t, (_edp)->ed_u32[0]);  \
295                                                                         \
296                 _NOTE(CONSTANTCONDITION);                               \
297         } while (B_FALSE)
298
299 #define EFSYS_MEM_READQ(_esmp, _offset, _eqp)                           \
300         do {                                                            \
301                 volatile uint8_t  *_base = (_esmp)->esm_base;           \
302                 volatile uint64_t *_addr;                               \
303                                                                         \
304                 _NOTE(CONSTANTCONDITION);                               \
305                 SFC_ASSERT(IS_P2ALIGNED(_offset, sizeof(efx_qword_t))); \
306                                                                         \
307                 _addr = (volatile uint64_t *)(_base + (_offset));       \
308                 (_eqp)->eq_u64[0] = _addr[0];                           \
309                                                                         \
310                 EFSYS_PROBE3(mem_readq, unsigned int, (_offset),        \
311                                          uint32_t, (_eqp)->eq_u32[1],   \
312                                          uint32_t, (_eqp)->eq_u32[0]);  \
313                                                                         \
314                 _NOTE(CONSTANTCONDITION);                               \
315         } while (B_FALSE)
316
317 #define EFSYS_MEM_READO(_esmp, _offset, _eop)                           \
318         do {                                                            \
319                 volatile uint8_t *_base = (_esmp)->esm_base;            \
320                 volatile __m128i *_addr;                                \
321                                                                         \
322                 _NOTE(CONSTANTCONDITION);                               \
323                 SFC_ASSERT(IS_P2ALIGNED(_offset, sizeof(efx_oword_t))); \
324                                                                         \
325                 _addr = (volatile __m128i *)(_base + (_offset));        \
326                 (_eop)->eo_u128[0] = _addr[0];                          \
327                                                                         \
328                 EFSYS_PROBE5(mem_reado, unsigned int, (_offset),        \
329                                          uint32_t, (_eop)->eo_u32[3],   \
330                                          uint32_t, (_eop)->eo_u32[2],   \
331                                          uint32_t, (_eop)->eo_u32[1],   \
332                                          uint32_t, (_eop)->eo_u32[0]);  \
333                                                                         \
334                 _NOTE(CONSTANTCONDITION);                               \
335         } while (B_FALSE)
336
337
338 #define EFSYS_MEM_WRITED(_esmp, _offset, _edp)                          \
339         do {                                                            \
340                 volatile uint8_t  *_base = (_esmp)->esm_base;           \
341                 volatile uint32_t *_addr;                               \
342                                                                         \
343                 _NOTE(CONSTANTCONDITION);                               \
344                 SFC_ASSERT(IS_P2ALIGNED(_offset, sizeof(efx_dword_t))); \
345                                                                         \
346                 EFSYS_PROBE2(mem_writed, unsigned int, (_offset),       \
347                                          uint32_t, (_edp)->ed_u32[0]);  \
348                                                                         \
349                 _addr = (volatile uint32_t *)(_base + (_offset));       \
350                 _addr[0] = (_edp)->ed_u32[0];                           \
351                                                                         \
352                 _NOTE(CONSTANTCONDITION);                               \
353         } while (B_FALSE)
354
355 #define EFSYS_MEM_WRITEQ(_esmp, _offset, _eqp)                          \
356         do {                                                            \
357                 volatile uint8_t  *_base = (_esmp)->esm_base;           \
358                 volatile uint64_t *_addr;                               \
359                                                                         \
360                 _NOTE(CONSTANTCONDITION);                               \
361                 SFC_ASSERT(IS_P2ALIGNED(_offset, sizeof(efx_qword_t))); \
362                                                                         \
363                 EFSYS_PROBE3(mem_writeq, unsigned int, (_offset),       \
364                                          uint32_t, (_eqp)->eq_u32[1],   \
365                                          uint32_t, (_eqp)->eq_u32[0]);  \
366                                                                         \
367                 _addr = (volatile uint64_t *)(_base + (_offset));       \
368                 _addr[0] = (_eqp)->eq_u64[0];                           \
369                                                                         \
370                 _NOTE(CONSTANTCONDITION);                               \
371         } while (B_FALSE)
372
373 #define EFSYS_MEM_WRITEO(_esmp, _offset, _eop)                          \
374         do {                                                            \
375                 volatile uint8_t *_base = (_esmp)->esm_base;            \
376                 volatile __m128i *_addr;                                \
377                                                                         \
378                 _NOTE(CONSTANTCONDITION);                               \
379                 SFC_ASSERT(IS_P2ALIGNED(_offset, sizeof(efx_oword_t))); \
380                                                                         \
381                                                                         \
382                 EFSYS_PROBE5(mem_writeo, unsigned int, (_offset),       \
383                                          uint32_t, (_eop)->eo_u32[3],   \
384                                          uint32_t, (_eop)->eo_u32[2],   \
385                                          uint32_t, (_eop)->eo_u32[1],   \
386                                          uint32_t, (_eop)->eo_u32[0]);  \
387                                                                         \
388                 _addr = (volatile __m128i *)(_base + (_offset));        \
389                 _addr[0] = (_eop)->eo_u128[0];                          \
390                                                                         \
391                 _NOTE(CONSTANTCONDITION);                               \
392         } while (B_FALSE)
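/*
 * Illustrative sketch (not part of the driver): writing and reading back a
 * dword of DMA-mapped memory through the macros above; "esm" is an already
 * initialised efsys_mem_t and the offset is arbitrary:
 *
 *	efx_dword_t dw;
 *
 *	dw.ed_u32[0] = rte_cpu_to_le_32(0xdeadbeef);
 *	EFSYS_MEM_WRITED(&esm, 64, &dw);
 *	EFSYS_MEM_READD(&esm, 64, &dw);
 */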
393
394
395 #define EFSYS_MEM_ADDR(_esmp)                                           \
396         ((_esmp)->esm_addr)
397
398 #define EFSYS_MEM_IS_NULL(_esmp)                                        \
399         ((_esmp)->esm_base == NULL)
400
401 #define EFSYS_MEM_PREFETCH(_esmp, _offset)                              \
402         do {                                                            \
403                 volatile uint8_t *_base = (_esmp)->esm_base;            \
404                                                                         \
405                 rte_prefetch0(_base + (_offset));                       \
406         } while (0)
407
408
409 /* BAR */
410
411 typedef struct efsys_bar_s {
412         rte_spinlock_t          esb_lock;
413         int                     esb_rid;
414         struct rte_pci_device   *esb_dev;
415         /*
416          * Ideally this would have the volatile qualifier to denote that
417          * the memory may be updated by someone else. However, that causes
418          * qualifier-discard warnings when the pointer or a derivative of
419          * it is passed to memset() or rte_mov16().
420          * So, skip the qualifier here, but make sure that it is added
421          * in the access macros below.
422          */
423         void                    *esb_base;
424 } efsys_bar_t;
425
426 #define SFC_BAR_LOCK_INIT(_esbp, _ifname)                               \
427         do {                                                            \
428                 rte_spinlock_init(&(_esbp)->esb_lock);                  \
429                 _NOTE(CONSTANTCONDITION);                               \
430         } while (B_FALSE)
431 #define SFC_BAR_LOCK_DESTROY(_esbp)     ((void)0)
432 #define SFC_BAR_LOCK(_esbp)             rte_spinlock_lock(&(_esbp)->esb_lock)
433 #define SFC_BAR_UNLOCK(_esbp)           rte_spinlock_unlock(&(_esbp)->esb_lock)
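/*
 * Illustrative sketch (not part of the driver) of how an efsys_bar_t is
 * assumed to be filled in from a probed PCI device; "pci_dev" and the BAR
 * index are hypothetical:
 *
 *	efsys_bar_t esb;
 *	int bar = 2;
 *
 *	SFC_BAR_LOCK_INIT(&esb, "example");
 *	esb.esb_rid = bar;
 *	esb.esb_dev = pci_dev;
 *	esb.esb_base = pci_dev->mem_resource[bar].addr;
 */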
434
435 #define EFSYS_BAR_READD(_esbp, _offset, _edp, _lock)                    \
436         do {                                                            \
437                 volatile uint8_t  *_base = (_esbp)->esb_base;           \
438                 volatile uint32_t *_addr;                               \
439                                                                         \
440                 _NOTE(CONSTANTCONDITION);                               \
441                 SFC_ASSERT(IS_P2ALIGNED(_offset, sizeof(efx_dword_t))); \
442                 _NOTE(CONSTANTCONDITION);                               \
443                 if (_lock)                                              \
444                         SFC_BAR_LOCK(_esbp);                            \
445                                                                         \
446                 _addr = (volatile uint32_t *)(_base + (_offset));       \
447                 rte_rmb();                                              \
448                 (_edp)->ed_u32[0] = rte_read32_relaxed(_addr);          \
449                                                                         \
450                 EFSYS_PROBE2(bar_readd, unsigned int, (_offset),        \
451                                          uint32_t, (_edp)->ed_u32[0]);  \
452                                                                         \
453                 _NOTE(CONSTANTCONDITION);                               \
454                 if (_lock)                                              \
455                         SFC_BAR_UNLOCK(_esbp);                          \
456                 _NOTE(CONSTANTCONDITION);                               \
457         } while (B_FALSE)
458
459 #define EFSYS_BAR_READQ(_esbp, _offset, _eqp)                           \
460         do {                                                            \
461                 volatile uint8_t  *_base = (_esbp)->esb_base;           \
462                 volatile uint64_t *_addr;                               \
463                                                                         \
464                 _NOTE(CONSTANTCONDITION);                               \
465                 SFC_ASSERT(IS_P2ALIGNED(_offset, sizeof(efx_qword_t))); \
466                                                                         \
467                 SFC_BAR_LOCK(_esbp);                                    \
468                                                                         \
469                 _addr = (volatile uint64_t *)(_base + (_offset));       \
470                 rte_rmb();                                              \
471                 (_eqp)->eq_u64[0] = rte_read64_relaxed(_addr);          \
472                                                                         \
473                 EFSYS_PROBE3(bar_readq, unsigned int, (_offset),        \
474                                          uint32_t, (_eqp)->eq_u32[1],   \
475                                          uint32_t, (_eqp)->eq_u32[0]);  \
476                                                                         \
477                 SFC_BAR_UNLOCK(_esbp);                                  \
478                 _NOTE(CONSTANTCONDITION);                               \
479         } while (B_FALSE)
480
481 #define EFSYS_BAR_READO(_esbp, _offset, _eop, _lock)                    \
482         do {                                                            \
483                 volatile uint8_t *_base = (_esbp)->esb_base;            \
484                 volatile __m128i *_addr;                                \
485                                                                         \
486                 _NOTE(CONSTANTCONDITION);                               \
487                 SFC_ASSERT(IS_P2ALIGNED(_offset, sizeof(efx_oword_t))); \
488                                                                         \
489                 _NOTE(CONSTANTCONDITION);                               \
490                 if (_lock)                                              \
491                         SFC_BAR_LOCK(_esbp);                            \
492                                                                         \
493                 _addr = (volatile __m128i *)(_base + (_offset));        \
494                 rte_rmb();                                              \
495                 /* There is no rte_read128_relaxed() yet */             \
496                 (_eop)->eo_u128[0] = _addr[0];                          \
497                                                                         \
498                 EFSYS_PROBE5(bar_reado, unsigned int, (_offset),        \
499                                          uint32_t, (_eop)->eo_u32[3],   \
500                                          uint32_t, (_eop)->eo_u32[2],   \
501                                          uint32_t, (_eop)->eo_u32[1],   \
502                                          uint32_t, (_eop)->eo_u32[0]);  \
503                                                                         \
504                 _NOTE(CONSTANTCONDITION);                               \
505                 if (_lock)                                              \
506                         SFC_BAR_UNLOCK(_esbp);                          \
507                 _NOTE(CONSTANTCONDITION);                               \
508         } while (B_FALSE)
509
510
511 #define EFSYS_BAR_WRITED(_esbp, _offset, _edp, _lock)                   \
512         do {                                                            \
513                 volatile uint8_t  *_base = (_esbp)->esb_base;           \
514                 volatile uint32_t *_addr;                               \
515                                                                         \
516                 _NOTE(CONSTANTCONDITION);                               \
517                 SFC_ASSERT(IS_P2ALIGNED(_offset, sizeof(efx_dword_t))); \
518                                                                         \
519                 _NOTE(CONSTANTCONDITION);                               \
520                 if (_lock)                                              \
521                         SFC_BAR_LOCK(_esbp);                            \
522                                                                         \
523                 EFSYS_PROBE2(bar_writed, unsigned int, (_offset),       \
524                                          uint32_t, (_edp)->ed_u32[0]);  \
525                                                                         \
526                 _addr = (volatile uint32_t *)(_base + (_offset));       \
527                 rte_write32_relaxed((_edp)->ed_u32[0], _addr);          \
528                 rte_wmb();                                              \
529                                                                         \
530                 _NOTE(CONSTANTCONDITION);                               \
531                 if (_lock)                                              \
532                         SFC_BAR_UNLOCK(_esbp);                          \
533                 _NOTE(CONSTANTCONDITION);                               \
534         } while (B_FALSE)
535
536 #define EFSYS_BAR_WRITEQ(_esbp, _offset, _eqp)                          \
537         do {                                                            \
538                 volatile uint8_t  *_base = (_esbp)->esb_base;           \
539                 volatile uint64_t *_addr;                               \
540                                                                         \
541                 _NOTE(CONSTANTCONDITION);                               \
542                 SFC_ASSERT(IS_P2ALIGNED(_offset, sizeof(efx_qword_t))); \
543                                                                         \
544                 SFC_BAR_LOCK(_esbp);                                    \
545                                                                         \
546                 EFSYS_PROBE3(bar_writeq, unsigned int, (_offset),       \
547                                          uint32_t, (_eqp)->eq_u32[1],   \
548                                          uint32_t, (_eqp)->eq_u32[0]);  \
549                                                                         \
550                 _addr = (volatile uint64_t *)(_base + (_offset));       \
551                 rte_write64_relaxed((_eqp)->eq_u64[0], _addr);          \
552                 rte_wmb();                                              \
553                                                                         \
554                 SFC_BAR_UNLOCK(_esbp);                                  \
555                 _NOTE(CONSTANTCONDITION);                               \
556         } while (B_FALSE)
557
558 /*
559  * Guarantees 64-bit aligned 64-bit writes to a write-combined BAR mapping
560  * (required by PIO hardware).
561  *
562  * Neither VFIO, nor UIO, nor NIC UIO (on FreeBSD) supports write-combined
563  * memory mapped to user space, so just abort if it is used.
564  */
565 #define EFSYS_BAR_WC_WRITEQ(_esbp, _offset, _eqp)                       \
566         do {                                                            \
567                 rte_panic("Write-combined BAR access not supported");   \
568         } while (B_FALSE)
569
570 #define EFSYS_BAR_WRITEO(_esbp, _offset, _eop, _lock)                   \
571         do {                                                            \
572                 volatile uint8_t *_base = (_esbp)->esb_base;            \
573                 volatile __m128i *_addr;                                \
574                                                                         \
575                 _NOTE(CONSTANTCONDITION);                               \
576                 SFC_ASSERT(IS_P2ALIGNED(_offset, sizeof(efx_oword_t))); \
577                                                                         \
578                 _NOTE(CONSTANTCONDITION);                               \
579                 if (_lock)                                              \
580                         SFC_BAR_LOCK(_esbp);                            \
581                                                                         \
582                 EFSYS_PROBE5(bar_writeo, unsigned int, (_offset),       \
583                                          uint32_t, (_eop)->eo_u32[3],   \
584                                          uint32_t, (_eop)->eo_u32[2],   \
585                                          uint32_t, (_eop)->eo_u32[1],   \
586                                          uint32_t, (_eop)->eo_u32[0]);  \
587                                                                         \
588                 _addr = (volatile __m128i *)(_base + (_offset));        \
589                 /* There is no rte_write128_relaxed() yet */            \
590                 _addr[0] = (_eop)->eo_u128[0];                          \
591                 rte_wmb();                                              \
592                                                                         \
593                 _NOTE(CONSTANTCONDITION);                               \
594                 if (_lock)                                              \
595                         SFC_BAR_UNLOCK(_esbp);                          \
596                 _NOTE(CONSTANTCONDITION);                               \
597         } while (B_FALSE)
598
599 /* Use the standard octo-word write for doorbell writes */
600 #define EFSYS_BAR_DOORBELL_WRITEO(_esbp, _offset, _eop)                 \
601         do {                                                            \
602                 EFSYS_BAR_WRITEO((_esbp), (_offset), (_eop), B_FALSE);  \
603                 _NOTE(CONSTANTCONDITION);                               \
604         } while (B_FALSE)
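/*
 * Illustrative sketch (not part of the driver): ringing a queue doorbell
 * by writing an oword; "esb" is an initialised efsys_bar_t and the register
 * offset is a made-up constant:
 *
 *	efx_oword_t oword;
 *
 *	memset(&oword, 0, sizeof(oword));	placeholder contents
 *	EFSYS_BAR_DOORBELL_WRITEO(&esb, 0x420, &oword);
 */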
605
606 /* SPIN */
607
608 #define EFSYS_SPIN(_us)                                                 \
609         do {                                                            \
610                 rte_delay_us(_us);                                      \
611                 _NOTE(CONSTANTCONDITION);                               \
612         } while (B_FALSE)
613
614 #define EFSYS_SLEEP EFSYS_SPIN
615
616 /* BARRIERS */
617
618 #define EFSYS_MEM_READ_BARRIER()        rte_rmb()
619 #define EFSYS_PIO_WRITE_BARRIER()       rte_io_wmb()
620
621 /* DMA SYNC */
622
623 /*
624  * DPDK does not provide any DMA syncing API, and no PMD shows any
625  * trace of explicit DMA syncing.
626  * DMA mappings are assumed to be coherent.
627  */
628
629 #define EFSYS_DMA_SYNC_FOR_KERNEL(_esmp, _offset, _size)        ((void)0)
630
631 /* Just avoid store reordering by the CPU and (implicitly) by the compiler */
632 #define EFSYS_DMA_SYNC_FOR_DEVICE(_esmp, _offset, _size)        rte_wmb()
633
634 /* TIMESTAMP */
635
636 typedef uint64_t efsys_timestamp_t;
637
638 #define EFSYS_TIMESTAMP(_usp)                                           \
639         do {                                                            \
640                 *(_usp) = rte_get_timer_cycles() * 1000000 /            \
641                         rte_get_timer_hz();                             \
642                 _NOTE(CONSTANTCONDITION);                               \
643         } while (B_FALSE)
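/*
 * Note: the expression above converts timer cycles to microseconds
 * (cycles * 10^6 / frequency), measured from an arbitrary origin.
 */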
644
645 /* KMEM */
646
647 #define EFSYS_KMEM_ALLOC(_esip, _size, _p)                              \
648         do {                                                            \
649                 (_esip) = (_esip);                                      \
650                 (_p) = rte_zmalloc("sfc", (_size), 0);                  \
651                 _NOTE(CONSTANTCONDITION);                               \
652         } while (B_FALSE)
653
654 #define EFSYS_KMEM_FREE(_esip, _size, _p)                               \
655         do {                                                            \
656                 (void)(_esip);                                          \
657                 (void)(_size);                                          \
658                 rte_free((_p));                                         \
659                 _NOTE(CONSTANTCONDITION);                               \
660         } while (B_FALSE)
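/*
 * Illustrative sketch (not part of the driver): the KMEM helpers pair up
 * like malloc()/free(); "enp" stands for a hypothetical efsys_identifier_t
 * pointer, which this implementation does not actually use:
 *
 *	void *p;
 *
 *	EFSYS_KMEM_ALLOC(enp, 256, p);
 *	if (p != NULL)
 *		EFSYS_KMEM_FREE(enp, 256, p);
 */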
661
662 /* LOCK */
663
664 typedef rte_spinlock_t efsys_lock_t;
665
666 #define SFC_EFSYS_LOCK_INIT(_eslp, _ifname, _label)     \
667         rte_spinlock_init((_eslp))
668 #define SFC_EFSYS_LOCK_DESTROY(_eslp) ((void)0)
669 #define SFC_EFSYS_LOCK(_eslp)                           \
670         rte_spinlock_lock((_eslp))
671 #define SFC_EFSYS_UNLOCK(_eslp)                         \
672         rte_spinlock_unlock((_eslp))
673 #define SFC_EFSYS_LOCK_ASSERT_OWNED(_eslp)              \
674         SFC_ASSERT(rte_spinlock_is_locked((_eslp)))
675
676 typedef int efsys_lock_state_t;
677
678 #define EFSYS_LOCK_MAGIC        0x000010c4
679
680 #define EFSYS_LOCK(_lockp, _state)                              \
681         do {                                                    \
682                 SFC_EFSYS_LOCK(_lockp);                         \
683                 (_state) = EFSYS_LOCK_MAGIC;                    \
684                 _NOTE(CONSTANTCONDITION);                       \
685         } while (B_FALSE)
686
687 #define EFSYS_UNLOCK(_lockp, _state)                            \
688         do {                                                    \
689                 SFC_ASSERT((_state) == EFSYS_LOCK_MAGIC);       \
690                 SFC_EFSYS_UNLOCK(_lockp);                       \
691                 _NOTE(CONSTANTCONDITION);                       \
692         } while (B_FALSE)
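/*
 * Illustrative sketch (not part of the driver): how libefx is expected to
 * take and release a lock; the lock object and labels are hypothetical:
 *
 *	efsys_lock_t lock;
 *	efsys_lock_state_t state;
 *
 *	SFC_EFSYS_LOCK_INIT(&lock, "example", "example");
 *	EFSYS_LOCK(&lock, state);
 *	(critical section goes here)
 *	EFSYS_UNLOCK(&lock, state);
 */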
693
694 /* STAT */
695
696 typedef uint64_t        efsys_stat_t;
697
698 #define EFSYS_STAT_INCR(_knp, _delta)                           \
699         do {                                                    \
700                 *(_knp) += (_delta);                            \
701                 _NOTE(CONSTANTCONDITION);                       \
702         } while (B_FALSE)
703
704 #define EFSYS_STAT_DECR(_knp, _delta)                           \
705         do {                                                    \
706                 *(_knp) -= (_delta);                            \
707                 _NOTE(CONSTANTCONDITION);                       \
708         } while (B_FALSE)
709
710 #define EFSYS_STAT_SET(_knp, _val)                              \
711         do {                                                    \
712                 *(_knp) = (_val);                               \
713                 _NOTE(CONSTANTCONDITION);                       \
714         } while (B_FALSE)
715
716 #define EFSYS_STAT_SET_QWORD(_knp, _valp)                       \
717         do {                                                    \
718                 *(_knp) = rte_le_to_cpu_64((_valp)->eq_u64[0]); \
719                 _NOTE(CONSTANTCONDITION);                       \
720         } while (B_FALSE)
721
722 #define EFSYS_STAT_SET_DWORD(_knp, _valp)                       \
723         do {                                                    \
724                 *(_knp) = rte_le_to_cpu_32((_valp)->ed_u32[0]); \
725                 _NOTE(CONSTANTCONDITION);                       \
726         } while (B_FALSE)
727
728 #define EFSYS_STAT_INCR_QWORD(_knp, _valp)                              \
729         do {                                                            \
730                 *(_knp) += rte_le_to_cpu_64((_valp)->eq_u64[0]);        \
731                 _NOTE(CONSTANTCONDITION);                               \
732         } while (B_FALSE)
733
734 #define EFSYS_STAT_SUBR_QWORD(_knp, _valp)                              \
735         do {                                                            \
736                 *(_knp) -= rte_le_to_cpu_64((_valp)->eq_u64[0]);        \
737                 _NOTE(CONSTANTCONDITION);                               \
738         } while (B_FALSE)
739
740 /* ERR */
741
742 #if EFSYS_OPT_DECODE_INTR_FATAL
743 #define EFSYS_ERR(_esip, _code, _dword0, _dword1)                       \
744         do {                                                            \
745                 (void)(_esip);                                          \
746                 RTE_LOG(ERR, PMD, "FATAL ERROR #%u (0x%08x%08x)\n",     \
747                         (_code), (_dword0), (_dword1));                 \
748                 _NOTE(CONSTANTCONDITION);                               \
749         } while (B_FALSE)
750 #endif
751
752 /* ASSERT */
753
754 /* RTE_VERIFY() from DPDK mishandles expressions containing the % operator
755  * (its stringified expression lands in a format string), so re-implement it here
756  */
757 #ifdef RTE_LIBRTE_SFC_EFX_DEBUG
758 #define EFSYS_ASSERT(_exp)                                              \
759         do {                                                            \
760                 if (unlikely(!(_exp)))                                  \
761                         rte_panic("line %d\tassert \"%s\" failed\n",    \
762                                   __LINE__, (#_exp));                   \
763         } while (0)
764 #else
765 #define EFSYS_ASSERT(_exp)              (void)(_exp)
766 #endif
767
768 #define EFSYS_ASSERT3(_x, _op, _y, _t)  EFSYS_ASSERT((_t)(_x) _op (_t)(_y))
769
770 #define EFSYS_ASSERT3U(_x, _op, _y)     EFSYS_ASSERT3(_x, _op, _y, uint64_t)
771 #define EFSYS_ASSERT3S(_x, _op, _y)     EFSYS_ASSERT3(_x, _op, _y, int64_t)
772 #define EFSYS_ASSERT3P(_x, _op, _y)     EFSYS_ASSERT3(_x, _op, _y, uintptr_t)
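/*
 * Illustrative sketch (not part of the driver): unlike RTE_VERIFY(), these
 * asserts are safe for expressions containing the % operator; "len" is a
 * hypothetical variable:
 *
 *	EFSYS_ASSERT3U(len % sizeof(efx_qword_t), ==, 0);
 */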
773
774 /* ROTATE */
775
776 #define EFSYS_HAS_ROTL_DWORD    0
777
778 #ifdef __cplusplus
779 }
780 #endif
781
782 #endif  /* _SFC_COMMON_EFSYS_H */