+/* encrypt and authenticate a batch of GCM ops using this thread's EVP context */
+static_always_inline u32
+openssl_ops_enc_gcm (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops,
+                     const EVP_CIPHER * cipher)
+{
+  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
+                                                     vm->thread_index);
+  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
+  u32 i;
+  for (i = 0; i < n_ops; i++)
+    {
+      vnet_crypto_op_t *op = ops[i];
+      u32 nonce[3];
+      int len;
+
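+      /* fill op->iv with 8 fresh random bytes when the framework asks us
+         to initialize the IV */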
+      if (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV)
+        RAND_bytes (op->iv, 8);
+
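+      /* assemble the 96-bit GCM nonce: 4-byte salt followed by the
+         8-byte per-op IV */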
+      nonce[0] = op->salt;
+      clib_memcpy_fast (nonce + 1, op->iv, 8);
+
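+      /* the cipher must be selected first so the IV length can be
+         programmed to 12 bytes; the second init loads the key and nonce */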
+      EVP_EncryptInit_ex (ctx, cipher, 0, 0, 0);
+      EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_IVLEN, 12, NULL);
+      EVP_EncryptInit_ex (ctx, 0, 0, op->key, (u8 *) nonce);
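+      /* a NULL output buffer makes EVP_EncryptUpdate absorb the data
+         as AAD instead of producing ciphertext */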
+      if (op->aad_len)
+        EVP_EncryptUpdate (ctx, NULL, &len, op->aad, op->aad_len);
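+      /* encrypt the payload, finalize (writes no extra ciphertext for
+         GCM but must precede GET_TAG), then read back the auth tag */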
+      EVP_EncryptUpdate (ctx, op->dst, &len, op->src, op->len);
+      EVP_EncryptFinal_ex (ctx, op->dst + len, &len);
+      EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_GET_TAG, op->tag_len, op->tag);
+      op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
+    }
+  return n_ops;
+}
+
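+/* decrypt and verify a batch of GCM ops; returns the number of ops whose
+   authentication tag verified successfully */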
+static_always_inline u32
+openssl_ops_dec_gcm (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops,
+                     const EVP_CIPHER * cipher)
+{
+  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
+                                                     vm->thread_index);
+  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
+  u32 i, n_fail = 0;
+  for (i = 0; i < n_ops; i++)
+    {
+      vnet_crypto_op_t *op = ops[i];
+      int len;
+
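+      /* select the cipher, program the expected IV length, then load
+         the key and IV */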
+      EVP_DecryptInit_ex (ctx, cipher, 0, 0, 0);
+      EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_IVLEN, op->iv_len, 0);
+      EVP_DecryptInit_ex (ctx, 0, 0, op->key, op->iv);
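+      /* a NULL (0) output buffer feeds the data in as AAD */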
+      if (op->aad_len)
+        EVP_DecryptUpdate (ctx, 0, &len, op->aad, op->aad_len);
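+      /* decrypt the payload, then install the expected tag so the
+         finalize step below can verify it */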
+      EVP_DecryptUpdate (ctx, op->dst, &len, op->src, op->len);
+      EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_TAG, op->tag_len, op->tag);
+
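+      /* EVP_DecryptFinal_ex returns > 0 only when the computed tag
+         matches the expected one */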
+      if (EVP_DecryptFinal_ex (ctx, op->dst + len, &len) > 0)
+        op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
+      else
+        {
+          n_fail++;
+          op->status = VNET_CRYPTO_OP_STATUS_FAIL_DECRYPT;
+        }
+    }
+  return n_ops - n_fail;
+}
+