crypto: A more memory-efficient layout of the frame element struct 25/31425/2
Author:     Neale Ranns <neale@graphiant.com>
AuthorDate: Wed, 24 Feb 2021 09:18:53 +0000 (09:18 +0000)
Commit:     Damjan Marion <dmarion@me.com>
CommitDate: Wed, 24 Feb 2021 14:21:33 +0000 (14:21 +0000)
Type: improvement

Also:
  - state as an enum so my GDB life is easier
  - typo: s/indice/indices/

Signed-off-by: Neale Ranns <neale@graphiant.com>
Change-Id: I3320f5ef1ccd7d042071ef336488a41adfad7463
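
For context on where the saving comes from, the sketch below (illustrative only, not part of the patch) compares the old and new field ordering of vnet_crypto_async_frame_elt_t, with plain <stdint.h> types standing in for VPP's u8/u32/i16 typedefs and an unsigned bitfield standing in for the enum-typed status field. On a typical LP64 target (x86-64, AArch64) the old ordering works out to 56 bytes, while the new ordering packs into the asserted 5 * sizeof (u64) = 40 bytes.

/* Sketch only, not VPP code. */
#include <stdint.h>
#include <stdio.h>

/* old ordering: the lone 8-bit status field sits in front of 4-byte and
 * 2-byte scalars, and three 8-byte pointers follow them, so the compiler
 * pads before the pointer group and again after the trailing flags byte */
typedef struct
{
  unsigned int status : 8;
  uint32_t key_index;
  int16_t crypto_start_offset;
  int16_t integ_start_offset;
  uint32_t crypto_total_length;
  uint16_t integ_length_adj;
  uint8_t *iv;
  union
  {
    uint8_t *digest;
    uint8_t *tag;
  };
  uint8_t *aad;
  uint8_t flags;
} elt_old_t;

/* new ordering: widest members (pointers) first, then the 32-bit, 16-bit
 * and 8-bit members in decreasing size, so the fields pack without holes */
typedef struct
{
  uint8_t *iv;
  union
  {
    uint8_t *digest;
    uint8_t *tag;
  };
  uint8_t *aad;
  uint32_t key_index;
  uint32_t crypto_total_length;
  int16_t crypto_start_offset;
  int16_t integ_start_offset;
  uint16_t integ_length_adj;
  unsigned int status : 8;
  uint8_t flags;
} elt_new_t;

int
main (void)
{
  /* prints 56 vs. 40 on an LP64 x86-64/AArch64 build */
  printf ("old: %zu  new: %zu\n", sizeof (elt_old_t), sizeof (elt_new_t));
  return 0;
}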

src/vnet/crypto/crypto.h
src/vnet/crypto/node.c

diff --git a/src/vnet/crypto/crypto.h b/src/vnet/crypto/crypto.h
index 529c70a..eeb1204 100644
@@ -300,13 +300,6 @@ typedef struct
 
 typedef struct
 {
-  vnet_crypto_op_status_t status:8;
-  u32 key_index;
-  i16 crypto_start_offset;     /* first buffer offset */
-  i16 integ_start_offset;
-  u32 crypto_total_length;
-  /* adj total_length for integ, e.g.4 bytes for IPSec ESN */
-  u16 integ_length_adj;
   u8 *iv;
   union
   {
@@ -314,18 +307,33 @@ typedef struct
     u8 *tag;
   };
   u8 *aad;
+  u32 key_index;
+  u32 crypto_total_length;
+  i16 crypto_start_offset; /* first buffer offset */
+  i16 integ_start_offset;
+  /* adj total_length for integ, e.g.4 bytes for IPSec ESN */
+  u16 integ_length_adj;
+  vnet_crypto_op_status_t status : 8;
   u8 flags; /**< share same VNET_CRYPTO_OP_FLAG_* values */
 } vnet_crypto_async_frame_elt_t;
 
+/* Assert the size so the compiler will warn us when it changes */
+STATIC_ASSERT_SIZEOF (vnet_crypto_async_frame_elt_t, 5 * sizeof (u64));
+
+typedef enum vnet_crypto_async_frame_state_t_
+{
+  VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED,
+  /* frame waiting to be processed */
+  VNET_CRYPTO_FRAME_STATE_PENDING,
+  VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS,
+  VNET_CRYPTO_FRAME_STATE_SUCCESS,
+  VNET_CRYPTO_FRAME_STATE_ELT_ERROR
+} __clib_packed vnet_crypto_async_frame_state_t;
+
 typedef struct
 {
   CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
-#define VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED 0
-#define VNET_CRYPTO_FRAME_STATE_PENDING 1      /* frame waiting to be processed */
-#define VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS 2
-#define VNET_CRYPTO_FRAME_STATE_SUCCESS 3
-#define VNET_CRYPTO_FRAME_STATE_ELT_ERROR 4
-  u8 state;
+  vnet_crypto_async_frame_state_t state;
   vnet_crypto_async_op_id_t op:8;
   u16 n_elts;
   vnet_crypto_async_frame_elt_t elts[VNET_CRYPTO_FRAME_SIZE];
@@ -339,7 +347,7 @@ typedef struct
   CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
   vnet_crypto_async_frame_t *frames[VNET_CRYPTO_ASYNC_OP_N_IDS];
   vnet_crypto_async_frame_t *frame_pool;
-  u32 *buffer_indice;
+  u32 *buffer_indices;
   u16 *nexts;
 } vnet_crypto_thread_t;
 
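Two guard rails back the enum conversion above. A small standalone sketch (GCC/Clang C, not VPP code; __clib_packed is assumed to expand to __attribute__ ((packed)) as in vppinfra) shows the idea: the packed attribute keeps the enum 1 byte wide, so replacing the old 'u8 state' with a real enum type does not grow the frame struct, while GDB can now print the state symbolically instead of as a raw number, which is the "GDB life" point in the commit message.

/* Sketch only, not VPP code. */
typedef enum frame_state_t_
{
  FRAME_STATE_NOT_PROCESSED,
  FRAME_STATE_PENDING, /* frame waiting to be processed */
  FRAME_STATE_WORK_IN_PROGRESS,
  FRAME_STATE_SUCCESS,
  FRAME_STATE_ELT_ERROR
} __attribute__ ((packed)) frame_state_t;

/* equivalent of the STATIC_ASSERT_SIZEOF added above: fail the build,
 * not the packet path, if the type ever grows past 1 byte */
_Static_assert (sizeof (frame_state_t) == 1, "frame state must stay 1 byte");
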
diff --git a/src/vnet/crypto/node.c b/src/vnet/crypto/node.c
index 63ed95e..e1186f4 100644
@@ -87,9 +87,9 @@ crypto_dequeue_frame (vlib_main_t * vm, vlib_node_runtime_t * node,
     {
       if (cf)
        {
-         vec_validate (ct->buffer_indice, n_cache + cf->n_elts);
+         vec_validate (ct->buffer_indices, n_cache + cf->n_elts);
          vec_validate (ct->nexts, n_cache + cf->n_elts);
-         clib_memcpy_fast (ct->buffer_indice + n_cache, cf->buffer_indices,
+         clib_memcpy_fast (ct->buffer_indices + n_cache, cf->buffer_indices,
                            sizeof (u32) * cf->n_elts);
          if (cf->state == VNET_CRYPTO_FRAME_STATE_SUCCESS)
            {
@@ -114,7 +114,7 @@ crypto_dequeue_frame (vlib_main_t * vm, vlib_node_runtime_t * node,
          n_cache += cf->n_elts;
          if (n_cache >= VLIB_FRAME_SIZE)
            {
-             vlib_buffer_enqueue_to_next (vm, node, ct->buffer_indice,
+             vlib_buffer_enqueue_to_next (vm, node, ct->buffer_indices,
                                           ct->nexts, n_cache);
              n_cache = 0;
            }
@@ -167,7 +167,7 @@ VLIB_NODE_FN (crypto_dispatch_node) (vlib_main_t * vm,
   }
   /* *INDENT-ON* */
   if (n_cache)
-    vlib_buffer_enqueue_to_next (vm, node, ct->buffer_indice, ct->nexts,
+    vlib_buffer_enqueue_to_next (vm, node, ct->buffer_indices, ct->nexts,
                                 n_cache);
 
   return n_dispatched;
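
The node.c hunks above only rename buffer_indice to buffer_indices, but they sit in the middle of the dequeue-and-flush pattern the crypto dispatch node uses: completed buffer indices and next-node indices are copied into per-thread vectors and only handed to vlib_buffer_enqueue_to_next () once a full dispatch frame has accumulated, or once at the end of the node run. The standalone sketch below (hypothetical names, a plain function standing in for vlib_buffer_enqueue_to_next (), fixed arrays in place of VPP vectors) illustrates that pattern.

/* Sketch only, not VPP code. */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define FRAME_SIZE 256 /* stand-in for VLIB_FRAME_SIZE */
#define MAX_ELTS 64    /* stand-in for the per-frame element count */

typedef struct
{
  /* per-thread caches, mirroring ct->buffer_indices and ct->nexts */
  uint32_t buffer_indices[FRAME_SIZE + MAX_ELTS];
  uint16_t nexts[FRAME_SIZE + MAX_ELTS];
  uint32_t n_cache;
} thread_cache_t;

/* stand-in for vlib_buffer_enqueue_to_next () */
static void
flush (thread_cache_t *tc)
{
  printf ("dispatching %u buffers\n", tc->n_cache);
  tc->n_cache = 0;
}

/* copy one completed frame's buffer/next indices into the cache and
 * flush once at least a full dispatch frame has accumulated */
static void
cache_frame (thread_cache_t *tc, const uint32_t *bi, const uint16_t *next,
	     uint32_t n_elts)
{
  memcpy (tc->buffer_indices + tc->n_cache, bi, n_elts * sizeof (bi[0]));
  memcpy (tc->nexts + tc->n_cache, next, n_elts * sizeof (next[0]));
  tc->n_cache += n_elts;
  if (tc->n_cache >= FRAME_SIZE)
    flush (tc);
}

int
main (void)
{
  thread_cache_t tc = { 0 };
  uint32_t bi[MAX_ELTS];
  uint16_t next[MAX_ELTS];

  for (uint32_t i = 0; i < MAX_ELTS; i++)
    {
      bi[i] = i;
      next[i] = 0;
    }

  for (int f = 0; f < 5; f++) /* five completed frames of 64 elements */
    cache_frame (&tc, bi, next, MAX_ELTS);
  if (tc.n_cache)             /* final partial flush at end of node run */
    flush (&tc);
  return 0;
}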