quic: update to quicly v0.1.4
[vpp.git] src/plugins/tlspicotls/pico_vpp_crypto.c
index e497e5e..3d28d50 100644
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2020 Intel and/or its affiliates.
+ * Copyright (c) 2021 Intel and/or its affiliates.
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at:
 #include <picotls/openssl.h>
 #include <picotls.h>
 
-#include "pico_vpp_crypto.h"
+#include <tlspicotls/pico_vpp_crypto.h>
+#include <tlspicotls/tls_picotls.h>
 
 typedef void (*ptls_vpp_do_transform_fn) (ptls_cipher_context_t *, void *,
                                          const void *, size_t);
 
 vnet_crypto_main_t *cm = &crypto_main;
+extern picotls_main_t picotls_main;
 
 struct cipher_context_t
 {
   ptls_cipher_context_t super;
   vnet_crypto_op_t op;
+  vnet_crypto_op_id_t id;
   u32 key_index;
 };
 
 struct vpp_aead_context_t
 {
   ptls_aead_context_t super;
+  EVP_CIPHER_CTX *evp_ctx;
+  uint8_t static_iv[PTLS_MAX_IV_SIZE];
   vnet_crypto_op_t op;
+  u32 key_index;
+  vnet_crypto_op_id_t id;
   vnet_crypto_op_chunk_t chunks[2];
   vnet_crypto_alg_t alg;
-  u32 key_index;
   u32 chunk_index;
+  uint8_t iv[PTLS_MAX_IV_SIZE];
 };
 
 static void
@@ -47,23 +54,7 @@ ptls_vpp_crypto_cipher_do_init (ptls_cipher_context_t * _ctx, const void *iv)
 {
   struct cipher_context_t *ctx = (struct cipher_context_t *) _ctx;
 
-  vnet_crypto_op_id_t id;
-  if (!strcmp (ctx->super.algo->name, "AES128-CTR"))
-    {
-      id = VNET_CRYPTO_OP_AES_128_CTR_ENC;
-    }
-  else if (!strcmp (ctx->super.algo->name, "AES256-CTR"))
-    {
-      id = VNET_CRYPTO_OP_AES_256_CTR_ENC;
-    }
-  else
-    {
-      TLS_DBG (1, "%s, Invalid crypto cipher : ", __FUNCTION__,
-              _ctx->algo->name);
-      assert (0);
-    }
-
-  vnet_crypto_op_init (&ctx->op, id);
+  vnet_crypto_op_init (&ctx->op, ctx->id);
   ctx->op.iv = (u8 *) iv;
   ctx->op.key_index = ctx->key_index;
 }
@@ -105,10 +96,14 @@ ptls_vpp_crypto_cipher_setup_crypto (ptls_cipher_context_t * _ctx, int is_enc,
   if (!strcmp (ctx->super.algo->name, "AES128-CTR"))
     {
       algo = VNET_CRYPTO_ALG_AES_128_CTR;
+      ctx->id = is_enc ? VNET_CRYPTO_OP_AES_128_CTR_ENC :
+                        VNET_CRYPTO_OP_AES_128_CTR_DEC;
     }
   else if (!strcmp (ctx->super.algo->name, "AES256-CTR"))
     {
       algo = VNET_CRYPTO_ALG_AES_256_CTR;
+      ctx->id = is_enc ? VNET_CRYPTO_OP_AES_256_CTR_ENC :
+                        VNET_CRYPTO_OP_AES_256_CTR_DEC;
     }
   else
     {
@@ -117,44 +112,53 @@ ptls_vpp_crypto_cipher_setup_crypto (ptls_cipher_context_t * _ctx, int is_enc,
       assert (0);
     }
 
+  clib_rwlock_writer_lock (&picotls_main.crypto_keys_rw_lock);
   ctx->key_index = vnet_crypto_key_add (vm, algo,
                                        (u8 *) key, _ctx->algo->key_size);
+  clib_rwlock_writer_unlock (&picotls_main.crypto_keys_rw_lock);
 
   return 0;
 }
 
 size_t
-ptls_vpp_crypto_aead_decrypt (ptls_aead_context_t * _ctx, void *_output,
-                             const void *input, size_t inlen, const void *iv,
+ptls_vpp_crypto_aead_decrypt (ptls_aead_context_t *_ctx, void *_output,
+                             const void *input, size_t inlen, uint64_t seq,
                              const void *aad, size_t aadlen)
 {
   vlib_main_t *vm = vlib_get_main ();
   struct vpp_aead_context_t *ctx = (struct vpp_aead_context_t *) _ctx;
   int tag_size = ctx->super.algo->tag_size;
 
-  ctx->op.dst = _output;
-  ctx->op.src = (void *) input;
-  ctx->op.len = inlen - tag_size;;
-  ctx->op.iv = (void *) iv;
-  ctx->op.aad = (void *) aad;
+  vnet_crypto_op_init (&ctx->op, ctx->id);
+  ctx->op.aad = (u8 *) aad;
   ctx->op.aad_len = aadlen;
-  ctx->op.tag = (void *) input + inlen - tag_size;
+  ctx->op.iv = ctx->iv;
+  ptls_aead__build_iv (ctx->super.algo, ctx->op.iv, ctx->static_iv, seq);
+  ctx->op.src = (u8 *) input;
+  ctx->op.dst = _output;
+  ctx->op.key_index = ctx->key_index;
+  ctx->op.len = inlen - tag_size;
   ctx->op.tag_len = tag_size;
+  ctx->op.tag = ctx->op.src + ctx->op.len;
 
   vnet_crypto_process_ops (vm, &(ctx->op), 1);
   assert (ctx->op.status == VNET_CRYPTO_OP_STATUS_COMPLETED);
 
-  return inlen - tag_size;
+  return ctx->op.len;
 }
 
 static void
-ptls_vpp_crypto_aead_encrypt_init (ptls_aead_context_t * _ctx, const void *iv,
+ptls_vpp_crypto_aead_encrypt_init (ptls_aead_context_t *_ctx, uint64_t seq,
                                   const void *aad, size_t aadlen)
 {
   struct vpp_aead_context_t *ctx = (struct vpp_aead_context_t *) _ctx;
-  ctx->op.iv = (void *) iv;
+
+  vnet_crypto_op_init (&ctx->op, ctx->id);
   ctx->op.aad = (void *) aad;
   ctx->op.aad_len = aadlen;
+  ctx->op.iv = ctx->iv;
+  ptls_aead__build_iv (ctx->super.algo, ctx->op.iv, ctx->static_iv, seq);
+  ctx->op.key_index = ctx->key_index;
   ctx->op.n_chunks = 2;
   ctx->op.chunk_index = 0;
 
@@ -193,35 +197,32 @@ ptls_vpp_crypto_aead_encrypt_final (ptls_aead_context_t * _ctx, void *_output)
 static void
 ptls_vpp_crypto_aead_dispose_crypto (ptls_aead_context_t * _ctx)
 {
-  /* Do nothing */
-}
+  vlib_main_t *vm = vlib_get_main ();
+  struct vpp_aead_context_t *ctx = (struct vpp_aead_context_t *) _ctx;
 
+  clib_rwlock_writer_lock (&picotls_main.crypto_keys_rw_lock);
+  vnet_crypto_key_del (vm, ctx->key_index);
+  clib_rwlock_writer_unlock (&picotls_main.crypto_keys_rw_lock);
+}
 
 static int
-ptls_vpp_crypto_aead_setup_crypto (ptls_aead_context_t * _ctx, int is_enc,
-                                  const void *key, vnet_crypto_alg_t alg)
+ptls_vpp_crypto_aead_setup_crypto (ptls_aead_context_t *_ctx, int is_enc,
+                                  const void *key, const void *iv,
+                                  vnet_crypto_alg_t alg)
 {
   struct vlib_main_t *vm = vlib_get_main ();
   struct vpp_aead_context_t *ctx = (struct vpp_aead_context_t *) _ctx;
   u16 key_len = ctx->super.algo->key_size;
 
-  memset (&(ctx->op), 0, sizeof (vnet_crypto_op_t));
-
   if (alg == VNET_CRYPTO_ALG_AES_128_GCM)
     {
-      if (is_enc)
-       vnet_crypto_op_init (&(ctx->op), VNET_CRYPTO_OP_AES_128_GCM_ENC);
-      else
-       vnet_crypto_op_init (&(ctx->op), VNET_CRYPTO_OP_AES_128_GCM_DEC);
+      ctx->id = is_enc ? VNET_CRYPTO_OP_AES_128_GCM_ENC :
+                        VNET_CRYPTO_OP_AES_128_GCM_DEC;
     }
   else if (alg == VNET_CRYPTO_ALG_AES_256_GCM)
     {
-      if (is_enc)
-       {
-         vnet_crypto_op_init (&(ctx->op), VNET_CRYPTO_OP_AES_256_GCM_ENC);
-       }
-      else
-       vnet_crypto_op_init (&(ctx->op), VNET_CRYPTO_OP_AES_256_GCM_DEC);
+      ctx->id = is_enc ? VNET_CRYPTO_OP_AES_256_GCM_ENC :
+                        VNET_CRYPTO_OP_AES_256_GCM_DEC;
     }
   else
     {
@@ -231,15 +232,23 @@ ptls_vpp_crypto_aead_setup_crypto (ptls_aead_context_t * _ctx, int is_enc,
     }
 
   ctx->alg = alg;
-
-  ctx->op.key_index =
-    vnet_crypto_key_add (vm, ctx->alg, (void *) key, key_len);
   ctx->chunk_index = 0;
+  clib_memcpy (ctx->static_iv, iv, ctx->super.algo->iv_size);
+
+  clib_rwlock_writer_lock (&picotls_main.crypto_keys_rw_lock);
+  ctx->key_index = vnet_crypto_key_add (vm, alg, (void *) key, key_len);
+  clib_rwlock_writer_unlock (&picotls_main.crypto_keys_rw_lock);
 
-  ctx->super.do_decrypt = ptls_vpp_crypto_aead_decrypt;
-  ctx->super.do_encrypt_init = ptls_vpp_crypto_aead_encrypt_init;
-  ctx->super.do_encrypt_update = ptls_vpp_crypto_aead_encrypt_update;
-  ctx->super.do_encrypt_final = ptls_vpp_crypto_aead_encrypt_final;
+  if (is_enc)
+    {
+      ctx->super.do_encrypt_init = ptls_vpp_crypto_aead_encrypt_init;
+      ctx->super.do_encrypt_update = ptls_vpp_crypto_aead_encrypt_update;
+      ctx->super.do_encrypt_final = ptls_vpp_crypto_aead_encrypt_final;
+    }
+  else
+    {
+      ctx->super.do_decrypt = ptls_vpp_crypto_aead_decrypt;
+    }
   ctx->super.dispose_crypto = ptls_vpp_crypto_aead_dispose_crypto;
 
   return 0;
@@ -262,52 +271,70 @@ ptls_vpp_crypto_aes256ctr_setup_crypto (ptls_cipher_context_t * ctx,
 }
 
 static int
-ptls_vpp_crypto_aead_aes128gcm_setup_crypto (ptls_aead_context_t * ctx,
-                                            int is_enc, const void *key)
+ptls_vpp_crypto_aead_aes128gcm_setup_crypto (ptls_aead_context_t *ctx,
+                                            int is_enc, const void *key,
+                                            const void *iv)
 {
-  return ptls_vpp_crypto_aead_setup_crypto (ctx, is_enc, key,
+  return ptls_vpp_crypto_aead_setup_crypto (ctx, is_enc, key, iv,
                                            VNET_CRYPTO_ALG_AES_128_GCM);
 }
 
 static int
-ptls_vpp_crypto_aead_aes256gcm_setup_crypto (ptls_aead_context_t * ctx,
-                                            int is_enc, const void *key)
+ptls_vpp_crypto_aead_aes256gcm_setup_crypto (ptls_aead_context_t *ctx,
+                                            int is_enc, const void *key,
+                                            const void *iv)
 {
-  return ptls_vpp_crypto_aead_setup_crypto (ctx, is_enc, key,
+  return ptls_vpp_crypto_aead_setup_crypto (ctx, is_enc, key, iv,
                                            VNET_CRYPTO_ALG_AES_256_GCM);
 }
 
-ptls_cipher_algorithm_t ptls_vpp_crypto_aes128ctr = { "AES128-CTR",
+ptls_cipher_algorithm_t ptls_vpp_crypto_aes128ctr = {
+  "AES128-CTR",
   PTLS_AES128_KEY_SIZE,
-  1, PTLS_AES_IV_SIZE,
+  1,
+  PTLS_AES_IV_SIZE,
   sizeof (struct vpp_aead_context_t),
   ptls_vpp_crypto_aes128ctr_setup_crypto
 };
 
-ptls_cipher_algorithm_t ptls_vpp_crypto_aes256ctr = { "AES256-CTR",
+ptls_cipher_algorithm_t ptls_vpp_crypto_aes256ctr = {
+  "AES256-CTR",
   PTLS_AES256_KEY_SIZE,
-  1 /* block size */ ,
+  1 /* block size */,
   PTLS_AES_IV_SIZE,
   sizeof (struct vpp_aead_context_t),
   ptls_vpp_crypto_aes256ctr_setup_crypto
 };
 
-ptls_aead_algorithm_t ptls_vpp_crypto_aes128gcm = { "AES128-GCM",
+#define PTLS_X86_CACHE_LINE_ALIGN_BITS 6
+ptls_aead_algorithm_t ptls_vpp_crypto_aes128gcm = {
+  "AES128-GCM",
+  PTLS_AESGCM_CONFIDENTIALITY_LIMIT,
+  PTLS_AESGCM_INTEGRITY_LIMIT,
   &ptls_vpp_crypto_aes128ctr,
   NULL,
   PTLS_AES128_KEY_SIZE,
   PTLS_AESGCM_IV_SIZE,
   PTLS_AESGCM_TAG_SIZE,
+  { PTLS_TLS12_AESGCM_FIXED_IV_SIZE, PTLS_TLS12_AESGCM_RECORD_IV_SIZE },
+  1,
+  PTLS_X86_CACHE_LINE_ALIGN_BITS,
   sizeof (struct vpp_aead_context_t),
   ptls_vpp_crypto_aead_aes128gcm_setup_crypto
 };
 
-ptls_aead_algorithm_t ptls_vpp_crypto_aes256gcm = { "AES256-GCM",
+ptls_aead_algorithm_t ptls_vpp_crypto_aes256gcm = {
+  "AES256-GCM",
+  PTLS_AESGCM_CONFIDENTIALITY_LIMIT,
+  PTLS_AESGCM_INTEGRITY_LIMIT,
   &ptls_vpp_crypto_aes256ctr,
   NULL,
   PTLS_AES256_KEY_SIZE,
   PTLS_AESGCM_IV_SIZE,
   PTLS_AESGCM_TAG_SIZE,
+  { PTLS_TLS12_AESGCM_FIXED_IV_SIZE, PTLS_TLS12_AESGCM_RECORD_IV_SIZE },
+  1,
+  PTLS_X86_CACHE_LINE_ALIGN_BITS,
   sizeof (struct vpp_aead_context_t),
   ptls_vpp_crypto_aead_aes256gcm_setup_crypto
 };
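
Note on the API change this patch tracks: with quicly v0.1.4, the picotls AEAD interface passes the static IV to the AEAD setup callback and a 64-bit record sequence number (instead of a per-call IV) to do_encrypt_init/do_decrypt; the per-record nonce is then rebuilt on each call via ptls_aead__build_iv(), which combines the big-endian sequence number with the trailing bytes of the static IV (the TLS 1.3 / QUIC nonce construction), as seen in the decrypt and encrypt_init hunks above. A minimal, self-contained sketch of that derivation, using a hypothetical helper name rather than the picotls implementation:

#include <stdint.h>
#include <stddef.h>
#include <string.h>

/* Sketch: per-record AEAD nonce = static IV XOR (sequence number encoded
 * big-endian and left-padded with zeros to the IV size). */
static void
build_aead_nonce (uint8_t *nonce, const uint8_t *static_iv, size_t iv_size,
                  uint64_t seq)
{
  memcpy (nonce, static_iv, iv_size);
  for (size_t i = 0; i < 8 && i < iv_size; i++)
    nonce[iv_size - 1 - i] ^= (uint8_t) (seq >> (8 * i));
}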