1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
2 * Copyright(c) 2015-2018 Intel Corporation
5 #include <openssl/sha.h> /* Needed to calculate pre-compute values */
6 #include <openssl/aes.h> /* Needed to calculate pre-compute values */
7 #include <openssl/md5.h> /* Needed to calculate pre-compute values */
8 #include <openssl/evp.h> /* Needed for bpi runt block processing */
10 #include <rte_memcpy.h>
11 #include <rte_common.h>
12 #include <rte_spinlock.h>
13 #include <rte_byteorder.h>
15 #include <rte_malloc.h>
16 #include <rte_crypto_sym.h>
19 #include "qat_sym_session.h"
20 #include "qat_sym_pmd.h"
22 /** Frees a context previously created
23 * Depends on openssl libcrypto
/* bpi_ctx: opaque EVP_CIPHER_CTX allocated by bpi_cipher_ctx_init().
 * EVP_CIPHER_CTX_free() is documented as a no-op on NULL, so callers
 * need not guard the pointer (NOTE: per OpenSSL docs — confirm version).
 */
26 bpi_cipher_ctx_free(void *bpi_ctx)
29 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
32 /** Creates a context in either AES or DES in ECB mode
33 * Depends on openssl libcrypto
/* Used for DOCSIS BPI "runt block" processing. DES_DOCSISBPI selects a
 * DES-ECB cipher, any other algo falls through to AES-128-ECB. The context
 * is always initialised for encryption (see comment below): BPI ECB-encrypts
 * the IV regardless of the overall cipher direction, hence `direction` is
 * __rte_unused. On EVP_EncryptInit_ex() failure the newly allocated context
 * is freed before returning.
 */
36 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
37 enum rte_crypto_cipher_operation direction __rte_unused,
38 uint8_t *key, void **ctx)
40 const EVP_CIPHER *algo = NULL;
42 *ctx = EVP_CIPHER_CTX_new();
49 if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
52 algo = EVP_aes_128_ecb();
54 /* IV will be ECB encrypted whether direction is encrypt or decrypt*/
55 if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
64 EVP_CIPHER_CTX_free(*ctx);
/* Returns non-zero when `algo` appears as a symmetric cipher capability in
 * the device's capability table. The table is scanned linearly until the
 * RTE_CRYPTO_OP_TYPE_UNDEFINED terminator entry; non-symmetric and
 * non-cipher entries are skipped.
 */
69 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
70 struct qat_sym_dev_private *internals)
73 const struct rte_cryptodev_capabilities *capability;
75 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
76 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
77 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
80 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
83 if (capability->sym.cipher.algo == algo)
/* Auth-side twin of qat_is_cipher_alg_supported(): scans the same
 * UNDEFINED-terminated capability table, but matches AUTH xform entries
 * against the requested auth algorithm.
 */
90 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
91 struct qat_sym_dev_private *internals)
94 const struct rte_cryptodev_capabilities *capability;
96 while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
97 RTE_CRYPTO_OP_TYPE_UNDEFINED) {
98 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
101 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
104 if (capability->sym.auth.algo == algo)
/* Tears down the per-driver private data of a symmetric session:
 * frees the optional OpenSSL BPI context, scrubs the private area
 * (memset over the full private size), detaches it from the generic
 * session and returns the object to its originating mempool.
 */
111 qat_sym_session_clear(struct rte_cryptodev *dev,
112 struct rte_cryptodev_sym_session *sess)
114 uint8_t index = dev->driver_id;
115 void *sess_priv = get_sym_session_private_data(sess, index);
116 struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;
120 bpi_cipher_ctx_free(s->bpi_ctx);
121 memset(s, 0, qat_sym_session_get_private_size(dev));
/* Look up the owning mempool from the object itself so no extra
 * bookkeeping is needed to return it.
 */
122 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
124 set_sym_session_private_data(sess, index, NULL);
125 rte_mempool_put(sess_mp, sess_priv);
/* Maps an rte_crypto xform chain onto a QAT firmware LA command id:
 * single cipher -> CMD_CIPHER, single auth -> CMD_AUTH, AEAD -> direction-
 * dependent CIPHER_HASH/HASH_CIPHER (see comment below), and two-element
 * cipher/auth chains -> CIPHER_HASH or HASH_CIPHER according to order.
 * Unrecognised chains fall through to the (elided) error return.
 */
130 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
133 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
134 return ICP_QAT_FW_LA_CMD_CIPHER;
136 /* Authentication Only */
137 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
138 return ICP_QAT_FW_LA_CMD_AUTH;
141 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
142 /* AES-GCM and AES-CCM works with different direction
143 * GCM first encrypts and generate hash where AES-CCM
144 * first generate hash and encrypts. Similar relation
145 * applies to decryption.
147 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
148 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
149 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
151 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
153 if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
154 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
156 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
/* Past this point a two-element chain is required. */
159 if (xform->next == NULL)
162 /* Cipher then Authenticate */
163 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
164 xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
165 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
167 /* Authenticate then Cipher */
168 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
169 xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
170 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
/* Walks an xform chain and returns a pointer to the first AUTH xform's
 * auth sub-structure (chain-walk body elided in this listing).
 */
175 static struct rte_crypto_auth_xform *
176 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
179 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
/* Walks an xform chain and returns a pointer to the first CIPHER xform's
 * cipher sub-structure (chain-walk body elided in this listing).
 */
188 static struct rte_crypto_cipher_xform *
189 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
192 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
193 return &xform->cipher;
/* Configures the cipher half of a QAT session from the crypto xform chain:
 * records the IV offset/length, maps the cryptodev cipher algorithm onto a
 * QAT hardware algo + mode (validating the key size per algorithm), sets
 * the cipher direction, and builds the cipher content descriptor.
 * DOCSIS BPI variants additionally create an OpenSSL ECB context
 * (session->bpi_ctx) for runt-block processing; the error path at the
 * bottom releases it again.
 */
202 qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
203 struct rte_crypto_sym_xform *xform,
204 struct qat_sym_session *session)
206 struct qat_sym_dev_private *internals = dev->data->dev_private;
207 struct rte_crypto_cipher_xform *cipher_xform = NULL;
210 /* Get cipher xform from crypto xform chain */
211 cipher_xform = qat_get_cipher_xform(xform);
213 session->cipher_iv.offset = cipher_xform->iv.offset;
214 session->cipher_iv.length = cipher_xform->iv.length;
216 switch (cipher_xform->algo) {
217 case RTE_CRYPTO_CIPHER_AES_CBC:
218 if (qat_sym_validate_aes_key(cipher_xform->key.length,
219 &session->qat_cipher_alg) != 0) {
220 QAT_LOG(ERR, "Invalid AES cipher key size");
224 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
226 case RTE_CRYPTO_CIPHER_AES_CTR:
227 if (qat_sym_validate_aes_key(cipher_xform->key.length,
228 &session->qat_cipher_alg) != 0) {
229 QAT_LOG(ERR, "Invalid AES cipher key size");
233 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
235 case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
236 if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
237 &session->qat_cipher_alg) != 0) {
238 QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
242 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
/* NULL cipher is expressed to the hardware as NULL algo in CTR mode. */
244 case RTE_CRYPTO_CIPHER_NULL:
245 session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
246 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
248 case RTE_CRYPTO_CIPHER_KASUMI_F8:
249 if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
250 &session->qat_cipher_alg) != 0) {
251 QAT_LOG(ERR, "Invalid KASUMI cipher key size");
255 session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
257 case RTE_CRYPTO_CIPHER_3DES_CBC:
258 if (qat_sym_validate_3des_key(cipher_xform->key.length,
259 &session->qat_cipher_alg) != 0) {
260 QAT_LOG(ERR, "Invalid 3DES cipher key size");
264 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
266 case RTE_CRYPTO_CIPHER_DES_CBC:
267 if (qat_sym_validate_des_key(cipher_xform->key.length,
268 &session->qat_cipher_alg) != 0) {
269 QAT_LOG(ERR, "Invalid DES cipher key size");
273 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
275 case RTE_CRYPTO_CIPHER_3DES_CTR:
276 if (qat_sym_validate_3des_key(cipher_xform->key.length,
277 &session->qat_cipher_alg) != 0) {
278 QAT_LOG(ERR, "Invalid 3DES cipher key size");
282 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
/* DOCSIS BPI: needs an auxiliary OpenSSL ECB context in addition to
 * the normal QAT CBC configuration (ctx args partly elided here).
 */
284 case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
285 ret = bpi_cipher_ctx_init(
288 cipher_xform->key.data,
291 QAT_LOG(ERR, "failed to create DES BPI ctx");
294 if (qat_sym_validate_des_key(cipher_xform->key.length,
295 &session->qat_cipher_alg) != 0) {
296 QAT_LOG(ERR, "Invalid DES cipher key size");
300 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
302 case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
303 ret = bpi_cipher_ctx_init(
306 cipher_xform->key.data,
309 QAT_LOG(ERR, "failed to create AES BPI ctx");
312 if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
313 &session->qat_cipher_alg) != 0) {
314 QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
318 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
/* ZUC support is device-dependent, so consult the capability table first. */
320 case RTE_CRYPTO_CIPHER_ZUC_EEA3:
321 if (!qat_is_cipher_alg_supported(
322 cipher_xform->algo, internals)) {
323 QAT_LOG(ERR, "%s not supported on this device",
324 rte_crypto_cipher_algorithm_strings
325 [cipher_xform->algo]);
329 if (qat_sym_validate_zuc_key(cipher_xform->key.length,
330 &session->qat_cipher_alg) != 0) {
331 QAT_LOG(ERR, "Invalid ZUC cipher key size");
335 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
/* Recognised by the API but not implemented by this PMD. */
337 case RTE_CRYPTO_CIPHER_3DES_ECB:
338 case RTE_CRYPTO_CIPHER_AES_ECB:
339 case RTE_CRYPTO_CIPHER_AES_F8:
340 case RTE_CRYPTO_CIPHER_AES_XTS:
341 case RTE_CRYPTO_CIPHER_ARC4:
342 QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
347 QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
353 if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
354 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
356 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
358 if (qat_sym_session_aead_create_cd_cipher(session,
359 cipher_xform->key.data,
360 cipher_xform->key.length)) {
/* Error path: release the BPI context acquired above so a failed
 * configure does not leak the OpenSSL allocation.
 */
368 if (session->bpi_ctx) {
369 bpi_cipher_ctx_free(session->bpi_ctx);
370 session->bpi_ctx = NULL;
/* Cryptodev session-configure entry point: takes a private-data object
 * from the mempool, fills it via qat_sym_session_set_parameters(), and
 * attaches it to the generic session under this driver's id. On parameter
 * failure the object is returned to the mempool so nothing leaks.
 */
376 qat_sym_session_configure(struct rte_cryptodev *dev,
377 struct rte_crypto_sym_xform *xform,
378 struct rte_cryptodev_sym_session *sess,
379 struct rte_mempool *mempool)
381 void *sess_private_data;
384 if (rte_mempool_get(mempool, &sess_private_data)) {
386 "Couldn't get object from session mempool");
390 ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
393 "Crypto QAT PMD: failed to configure session parameters");
395 /* Return session to mempool */
396 rte_mempool_put(mempool, sess_private_data);
400 set_sym_session_private_data(sess, dev->driver_id,
/* Fills a qat_sym_session from the xform chain: computes the physical
 * address of the embedded content descriptor, resolves the FW command id
 * via qat_get_cmd_id(), then dispatches to the cipher/auth/AEAD configure
 * helpers. For combined CIPHER_HASH / HASH_CIPHER commands the two halves
 * are configured in the order the command implies; AEAD xforms take the
 * dedicated AEAD path instead.
 */
407 qat_sym_session_set_parameters(struct rte_cryptodev *dev,
408 struct rte_crypto_sym_xform *xform, void *session_private)
410 struct qat_sym_session *session = session_private;
414 /* Set context descriptor physical address */
/* cd is embedded in the session object, so its IOVA is the session's
 * mempool IOVA plus the field offset.
 */
415 session->cd_paddr = rte_mempool_virt2iova(session) +
416 offsetof(struct qat_sym_session, cd);
418 session->min_qat_dev_gen = QAT_GEN1;
420 /* Get requested QAT command id */
421 qat_cmd_id = qat_get_cmd_id(xform);
422 if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
423 QAT_LOG(ERR, "Unsupported xform chain requested");
426 session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
427 switch (session->qat_cmd) {
428 case ICP_QAT_FW_LA_CMD_CIPHER:
429 ret = qat_sym_session_configure_cipher(dev, xform, session);
433 case ICP_QAT_FW_LA_CMD_AUTH:
434 ret = qat_sym_session_configure_auth(dev, xform, session);
438 case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
439 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
440 ret = qat_sym_session_configure_aead(xform,
445 ret = qat_sym_session_configure_cipher(dev,
449 ret = qat_sym_session_configure_auth(dev,
455 case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
456 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
457 ret = qat_sym_session_configure_aead(xform,
462 ret = qat_sym_session_configure_auth(dev,
466 ret = qat_sym_session_configure_cipher(dev,
/* Firmware services the symmetric PMD does not expose. */
472 case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
473 case ICP_QAT_FW_LA_CMD_TRNG_TEST:
474 case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
475 case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
476 case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
477 case ICP_QAT_FW_LA_CMD_MGF1:
478 case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
479 case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
480 case ICP_QAT_FW_LA_CMD_DELIMITER:
481 QAT_LOG(ERR, "Unsupported Service %u",
485 QAT_LOG(ERR, "Unsupported Service %u",
/* Configures the auth half of a QAT session: maps the cryptodev auth
 * algorithm to a QAT hardware hash algo, records the auth IV, and builds
 * the auth content descriptor. AES-GMAC is special-cased: it is realised
 * as a cipher+hash command (GCM engine), so both cipher and auth
 * descriptors are built and qat_cmd/qat_dir are set per GENERATE/VERIFY,
 * with qat_cmd restored to plain AUTH afterwards.
 */
494 qat_sym_session_configure_auth(struct rte_cryptodev *dev,
495 struct rte_crypto_sym_xform *xform,
496 struct qat_sym_session *session)
498 struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
499 struct qat_sym_dev_private *internals = dev->data->dev_private;
500 uint8_t *key_data = auth_xform->key.data;
501 uint8_t key_length = auth_xform->key.length;
502 session->aes_cmac = 0;
504 switch (auth_xform->algo) {
505 case RTE_CRYPTO_AUTH_SHA1_HMAC:
506 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
508 case RTE_CRYPTO_AUTH_SHA224_HMAC:
509 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
511 case RTE_CRYPTO_AUTH_SHA256_HMAC:
512 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
514 case RTE_CRYPTO_AUTH_SHA384_HMAC:
515 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
517 case RTE_CRYPTO_AUTH_SHA512_HMAC:
518 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
520 case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
521 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
/* CMAC reuses the XCBC hardware algo; the aes_cmac flag makes the
 * precompute path derive CMAC subkeys instead of XCBC keys.
 */
523 case RTE_CRYPTO_AUTH_AES_CMAC:
524 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
525 session->aes_cmac = 1;
527 case RTE_CRYPTO_AUTH_AES_GMAC:
528 if (qat_sym_validate_aes_key(auth_xform->key.length,
529 &session->qat_cipher_alg) != 0) {
530 QAT_LOG(ERR, "Invalid AES key size");
533 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
534 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
537 case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
538 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
540 case RTE_CRYPTO_AUTH_MD5_HMAC:
541 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
543 case RTE_CRYPTO_AUTH_NULL:
544 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
546 case RTE_CRYPTO_AUTH_KASUMI_F9:
547 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
/* ZUC EIA3 is device-dependent: verify against the capability table. */
549 case RTE_CRYPTO_AUTH_ZUC_EIA3:
550 if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
551 QAT_LOG(ERR, "%s not supported on this device",
552 rte_crypto_auth_algorithm_strings
556 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
/* Plain (non-HMAC) hashes are recognised but not implemented. */
558 case RTE_CRYPTO_AUTH_SHA1:
559 case RTE_CRYPTO_AUTH_SHA256:
560 case RTE_CRYPTO_AUTH_SHA512:
561 case RTE_CRYPTO_AUTH_SHA224:
562 case RTE_CRYPTO_AUTH_SHA384:
563 case RTE_CRYPTO_AUTH_MD5:
564 case RTE_CRYPTO_AUTH_AES_CBC_MAC:
565 QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
569 QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
574 session->auth_iv.offset = auth_xform->iv.offset;
575 session->auth_iv.length = auth_xform->iv.length;
577 if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
578 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
579 session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
580 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
582 * It needs to create cipher desc content first,
583 * then authentication
586 if (qat_sym_session_aead_create_cd_cipher(session,
587 auth_xform->key.data,
588 auth_xform->key.length))
591 if (qat_sym_session_aead_create_cd_auth(session,
595 auth_xform->digest_length,
599 session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
600 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
602 * It needs to create authentication desc content first,
606 if (qat_sym_session_aead_create_cd_auth(session,
610 auth_xform->digest_length,
614 if (qat_sym_session_aead_create_cd_cipher(session,
615 auth_xform->key.data,
616 auth_xform->key.length))
619 /* Restore to authentication only only */
620 session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
622 if (qat_sym_session_aead_create_cd_auth(session,
626 auth_xform->digest_length,
631 session->digest_length = auth_xform->digest_length;
/* Configures an AEAD (AES-GCM / AES-CCM) session. The AEAD IV is stored
 * in the cipher_iv slot (see comment below). Descriptor build order
 * mirrors qat_get_cmd_id(): GCM-encrypt and CCM-decrypt build cipher first
 * then auth; the opposite combinations build auth first. The auth op
 * (GENERATE vs VERIFY) is likewise derived from the algo/op pairing.
 */
636 qat_sym_session_configure_aead(struct rte_crypto_sym_xform *xform,
637 struct qat_sym_session *session)
639 struct rte_crypto_aead_xform *aead_xform = &xform->aead;
640 enum rte_crypto_auth_operation crypto_operation;
643 * Store AEAD IV parameters as cipher IV,
644 * to avoid unnecessary memory usage
646 session->cipher_iv.offset = xform->aead.iv.offset;
647 session->cipher_iv.length = xform->aead.iv.length;
649 switch (aead_xform->algo) {
650 case RTE_CRYPTO_AEAD_AES_GCM:
651 if (qat_sym_validate_aes_key(aead_xform->key.length,
652 &session->qat_cipher_alg) != 0) {
653 QAT_LOG(ERR, "Invalid AES key size");
656 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
657 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
659 case RTE_CRYPTO_AEAD_AES_CCM:
660 if (qat_sym_validate_aes_key(aead_xform->key.length,
661 &session->qat_cipher_alg) != 0) {
662 QAT_LOG(ERR, "Invalid AES key size");
665 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
666 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
669 QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
/* GCM encrypt / CCM decrypt: hardware ciphers first, so build the
 * cipher descriptor before the auth descriptor.
 */
674 if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
675 aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
676 (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
677 aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
678 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
680 * It needs to create cipher desc content first,
681 * then authentication
683 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
684 RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;
686 if (qat_sym_session_aead_create_cd_cipher(session,
687 aead_xform->key.data,
688 aead_xform->key.length))
691 if (qat_sym_session_aead_create_cd_auth(session,
692 aead_xform->key.data,
693 aead_xform->key.length,
694 aead_xform->aad_length,
695 aead_xform->digest_length,
699 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
701 * It needs to create authentication desc content first,
705 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
706 RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;
708 if (qat_sym_session_aead_create_cd_auth(session,
709 aead_xform->key.data,
710 aead_xform->key.length,
711 aead_xform->aad_length,
712 aead_xform->digest_length,
716 if (qat_sym_session_aead_create_cd_cipher(session,
717 aead_xform->key.data,
718 aead_xform->key.length))
722 session->digest_length = aead_xform->digest_length;
/* Size of the per-session private data: sizeof(struct qat_sym_session)
 * rounded up to an 8-byte multiple. Device-independent, hence __rte_unused.
 */
726 unsigned int qat_sym_session_get_private_size(
727 struct rte_cryptodev *dev __rte_unused)
729 return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
732 /* returns block size in bytes per cipher algo */
/* Unrecognised algos hit the error log below and the (elided) error
 * return, so callers must check for a negative result.
 */
733 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
735 switch (qat_cipher_alg) {
736 case ICP_QAT_HW_CIPHER_ALGO_DES:
737 return ICP_QAT_HW_DES_BLK_SZ;
738 case ICP_QAT_HW_CIPHER_ALGO_3DES:
739 return ICP_QAT_HW_3DES_BLK_SZ;
740 case ICP_QAT_HW_CIPHER_ALGO_AES128:
741 case ICP_QAT_HW_CIPHER_ALGO_AES192:
742 case ICP_QAT_HW_CIPHER_ALGO_AES256:
743 return ICP_QAT_HW_AES_BLK_SZ;
745 QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
752 * Returns size in bytes per hash algo for state1 size field in cd_ctrl
753 * This is digest size rounded up to nearest quadword
/* DELIMITER is accepted deliberately: it yields the maximum state1 size
 * (SHA512), used when sizing worst-case buffers.
 */
755 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
757 switch (qat_hash_alg) {
758 case ICP_QAT_HW_AUTH_ALGO_SHA1:
759 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
760 QAT_HW_DEFAULT_ALIGNMENT);
761 case ICP_QAT_HW_AUTH_ALGO_SHA224:
762 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
763 QAT_HW_DEFAULT_ALIGNMENT);
764 case ICP_QAT_HW_AUTH_ALGO_SHA256:
765 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
766 QAT_HW_DEFAULT_ALIGNMENT);
767 case ICP_QAT_HW_AUTH_ALGO_SHA384:
768 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
769 QAT_HW_DEFAULT_ALIGNMENT);
770 case ICP_QAT_HW_AUTH_ALGO_SHA512:
771 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
772 QAT_HW_DEFAULT_ALIGNMENT);
773 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
774 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
775 QAT_HW_DEFAULT_ALIGNMENT);
776 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
777 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
778 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
779 QAT_HW_DEFAULT_ALIGNMENT);
780 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
781 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
782 QAT_HW_DEFAULT_ALIGNMENT);
783 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
784 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
785 QAT_HW_DEFAULT_ALIGNMENT);
786 case ICP_QAT_HW_AUTH_ALGO_MD5:
787 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
788 QAT_HW_DEFAULT_ALIGNMENT);
789 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
790 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
791 QAT_HW_DEFAULT_ALIGNMENT);
792 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
793 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
794 QAT_HW_DEFAULT_ALIGNMENT);
795 case ICP_QAT_HW_AUTH_ALGO_NULL:
796 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
797 QAT_HW_DEFAULT_ALIGNMENT);
798 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
799 /* return maximum state1 size in this case */
800 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
801 QAT_HW_DEFAULT_ALIGNMENT);
803 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
809 /* returns digest size in bytes per hash algo */
/* Like qat_hash_get_state1_size() but unrounded; DELIMITER again means
 * "give me the maximum" for worst-case buffer sizing.
 */
810 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
812 switch (qat_hash_alg) {
813 case ICP_QAT_HW_AUTH_ALGO_SHA1:
814 return ICP_QAT_HW_SHA1_STATE1_SZ;
815 case ICP_QAT_HW_AUTH_ALGO_SHA224:
816 return ICP_QAT_HW_SHA224_STATE1_SZ;
817 case ICP_QAT_HW_AUTH_ALGO_SHA256:
818 return ICP_QAT_HW_SHA256_STATE1_SZ;
819 case ICP_QAT_HW_AUTH_ALGO_SHA384:
820 return ICP_QAT_HW_SHA384_STATE1_SZ;
821 case ICP_QAT_HW_AUTH_ALGO_SHA512:
822 return ICP_QAT_HW_SHA512_STATE1_SZ;
823 case ICP_QAT_HW_AUTH_ALGO_MD5:
824 return ICP_QAT_HW_MD5_STATE1_SZ;
825 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
826 return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
827 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
828 /* return maximum digest size in this case */
829 return ICP_QAT_HW_SHA512_STATE1_SZ;
831 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
837 /* returns block size in byes per hash algo */
/* Block sizes come from OpenSSL's *_CBLOCK constants (the precompute code
 * below hashes one such block at a time); DELIMITER maps to the maximum.
 */
838 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
840 switch (qat_hash_alg) {
841 case ICP_QAT_HW_AUTH_ALGO_SHA1:
843 case ICP_QAT_HW_AUTH_ALGO_SHA224:
844 return SHA256_CBLOCK;
845 case ICP_QAT_HW_AUTH_ALGO_SHA256:
846 return SHA256_CBLOCK;
847 case ICP_QAT_HW_AUTH_ALGO_SHA384:
848 return SHA512_CBLOCK;
849 case ICP_QAT_HW_AUTH_ALGO_SHA512:
850 return SHA512_CBLOCK;
851 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
853 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
854 return ICP_QAT_HW_AES_BLK_SZ;
855 case ICP_QAT_HW_AUTH_ALGO_MD5:
857 case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
858 /* return maximum block size in this case */
859 return SHA512_CBLOCK;
861 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
/* Runs one SHA1 compression round over a single block (data_in) and copies
 * the raw internal state (not a finalised digest) to data_out — this is the
 * "partial hash" format the QAT hardware expects for HMAC precomputes.
 */
867 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
871 if (!SHA1_Init(&ctx))
873 SHA1_Transform(&ctx, data_in);
874 rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH)
/* SHA224 partial hash: SHA224 init, one SHA256 compression round (SHA224
 * shares the SHA256 state), raw state copied out (SHA256_DIGEST_LENGTH
 * bytes — the full internal state, wider than a SHA224 digest).
 */
878 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
882 if (!SHA224_Init(&ctx))
884 SHA256_Transform(&ctx, data_in);
885 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
/* SHA256 partial hash: one compression round, raw state copied out. */
889 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
893 if (!SHA256_Init(&ctx))
895 SHA256_Transform(&ctx, data_in);
896 rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
/* SHA384 partial hash: SHA384 init, one SHA512 compression round (shared
 * state), full SHA512-width state copied out.
 */
900 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
904 if (!SHA384_Init(&ctx))
906 SHA512_Transform(&ctx, data_in);
907 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
/* SHA512 partial hash: one compression round, raw state copied out. */
911 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
915 if (!SHA512_Init(&ctx))
917 SHA512_Transform(&ctx, data_in);
918 rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
/* MD5 partial hash: one compression round, raw (little-endian) state copied
 * out. Unlike the SHA variants, the caller uses this output directly with
 * no byte-swapping (see partial_hash_compute()).
 */
922 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
928 MD5_Transform(&ctx, data_in);
929 rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
/* Dispatches to the per-algorithm partial-hash helper, then byte-swaps the
 * resulting state words into big-endian as the QAT hardware requires:
 * 32-bit swaps for SHA1/224/256, 64-bit swaps for SHA384/512. MD5 state is
 * written straight into data_out with no swap. digest_size (from
 * qat_hash_get_digest_size) determines how many words are swapped.
 */
934 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
/* Scratch digest sized for the largest supported algorithm. */
939 uint8_t digest[qat_hash_get_digest_size(
940 ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
941 uint32_t *hash_state_out_be32;
942 uint64_t *hash_state_out_be64;
945 digest_size = qat_hash_get_digest_size(hash_alg);
946 if (digest_size <= 0)
949 hash_state_out_be32 = (uint32_t *)data_out;
950 hash_state_out_be64 = (uint64_t *)data_out;
953 case ICP_QAT_HW_AUTH_ALGO_SHA1:
954 if (partial_hash_sha1(data_in, digest))
956 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
957 *hash_state_out_be32 =
958 rte_bswap32(*(((uint32_t *)digest)+i));
960 case ICP_QAT_HW_AUTH_ALGO_SHA224:
961 if (partial_hash_sha224(data_in, digest))
963 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
964 *hash_state_out_be32 =
965 rte_bswap32(*(((uint32_t *)digest)+i));
967 case ICP_QAT_HW_AUTH_ALGO_SHA256:
968 if (partial_hash_sha256(data_in, digest))
970 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
971 *hash_state_out_be32 =
972 rte_bswap32(*(((uint32_t *)digest)+i));
974 case ICP_QAT_HW_AUTH_ALGO_SHA384:
975 if (partial_hash_sha384(data_in, digest))
977 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
978 *hash_state_out_be64 =
979 rte_bswap64(*(((uint64_t *)digest)+i));
981 case ICP_QAT_HW_AUTH_ALGO_SHA512:
982 if (partial_hash_sha512(data_in, digest))
984 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
985 *hash_state_out_be64 =
986 rte_bswap64(*(((uint64_t *)digest)+i));
988 case ICP_QAT_HW_AUTH_ALGO_MD5:
989 if (partial_hash_md5(data_in, data_out))
993 QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
/* HMAC inner/outer pad bytes per RFC 2104; XCBC derives 3 precomputed keys. */
999 #define HMAC_IPAD_VALUE 0x36
1000 #define HMAC_OPAD_VALUE 0x5c
1001 #define HASH_XCBC_PRECOMP_KEY_NUM 3
/* All-zero block (zero-initialised static): encrypted under the user key to
 * produce K0 for CMAC subkey derivation in qat_sym_do_precomputes().
 */
1003 static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];
/* Derives a CMAC subkey: left-shifts the 128-bit block `base` by one bit
 * into `derived`, then XORs the constant Rb into the last byte (per the
 * NIST SP 800-38B subkey generation; the MSB-conditional part of the
 * algorithm is in lines elided from this listing — confirm against full
 * source).
 */
1005 static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
1009 derived[0] = base[0] << 1;
1010 for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
1011 derived[i] = base[i] << 1;
/* Carry the shifted-out top bit of byte i into byte i-1. */
1012 derived[i - 1] |= base[i] >> 7;
1016 derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
/* Builds the hardware precompute state for the auth algorithm:
 *  - AES_XCBC_MAC (or CMAC via the aes_cmac session flag): derives the
 *    three XCBC keys — or K1/K2 CMAC subkeys — by AES-encrypting fixed
 *    seeds under the user key, writing them into p_state_buf;
 *  - GALOIS_128/64 (GCM/GMAC): computes the hash key H = AES_k(0^128)
 *    into a zeroed H/LEN_A/E_CTR0 region;
 *  - everything else (HMAC family): builds RFC 2104 ipad/opad blocks from
 *    the key and stores their single-round partial hashes (state1 then
 *    opad at state1_size offset).
 * Secret intermediates (k0, ipad, opad) are memset before return; plain
 * memset may be elided by the compiler (NOTE: review — memset_s/
 * explicit_bzero would be stronger).
 */
1019 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
1020 const uint8_t *auth_key,
1021 uint16_t auth_keylen,
1022 uint8_t *p_state_buf,
1023 uint16_t *p_state_len,
/* Pad buffers sized for the largest supported hash block. */
1027 uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1028 uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1031 if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
1037 uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
1040 auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;
1042 in = rte_zmalloc("AES CMAC K1",
1043 ICP_QAT_HW_AES_128_KEY_SZ, 16);
1046 QAT_LOG(ERR, "Failed to alloc memory");
/* CMAC path: K0 = AES_k(zero seed), then derive K1/K2 from it. */
1050 rte_memcpy(in, AES_CMAC_SEED,
1051 ICP_QAT_HW_AES_128_KEY_SZ);
1052 rte_memcpy(p_state_buf, auth_key, auth_keylen);
1054 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1060 AES_encrypt(in, k0, &enc_key);
1062 k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1063 k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1065 aes_cmac_key_derive(k0, k1);
1066 aes_cmac_key_derive(k1, k2);
/* Scrub the intermediate K0 before returning. */
1068 memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
1069 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
/* XCBC path: three fixed seeds (0x01.., 0x02.., 0x03..) each encrypted
 * under the user key to yield the three XCBC subkeys.
 */
1073 static uint8_t qat_aes_xcbc_key_seed[
1074 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1075 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1076 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1077 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1078 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1079 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1080 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1084 uint8_t *out = p_state_buf;
1088 in = rte_zmalloc("working mem for key",
1089 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1091 QAT_LOG(ERR, "Failed to alloc memory");
1095 rte_memcpy(in, qat_aes_xcbc_key_seed,
1096 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1097 for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1098 if (AES_set_encrypt_key(auth_key,
1102 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1104 (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1105 0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1108 AES_encrypt(in, out, &enc_key);
1109 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1110 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1112 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
/* `in` was advanced in the loop; rewind to the allocation base to free. */
1113 rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1117 } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1118 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
1120 uint8_t *out = p_state_buf;
/* GCM/GMAC: state is H | len(A) | E(K, ctr0); zero it, then H =
 * AES_k(0^128) written at the start.
 */
1123 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1124 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1125 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1126 in = rte_zmalloc("working mem for key",
1127 ICP_QAT_HW_GALOIS_H_SZ, 16);
1129 QAT_LOG(ERR, "Failed to alloc memory");
1133 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1134 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1138 AES_encrypt(in, out, &enc_key);
1139 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1140 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1141 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
/* HMAC path below (RFC 2104). */
1146 block_size = qat_hash_get_block_size(hash_alg);
1149 /* init ipad and opad from key and xor with fixed values */
1150 memset(ipad, 0, block_size);
1151 memset(opad, 0, block_size);
/* Keys longer than the block are rejected here; the standard pre-hash
 * of long keys is expected to have happened at a higher layer.
 */
1153 if (auth_keylen > (unsigned int)block_size) {
1154 QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
1157 rte_memcpy(ipad, auth_key, auth_keylen);
1158 rte_memcpy(opad, auth_key, auth_keylen);
1160 for (i = 0; i < block_size; i++) {
1161 uint8_t *ipad_ptr = ipad + i;
1162 uint8_t *opad_ptr = opad + i;
1163 *ipad_ptr ^= HMAC_IPAD_VALUE;
1164 *opad_ptr ^= HMAC_OPAD_VALUE;
1167 /* do partial hash of ipad and copy to state1 */
1168 if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
1169 memset(ipad, 0, block_size);
1170 memset(opad, 0, block_size);
1171 QAT_LOG(ERR, "ipad precompute failed");
1176 * State len is a multiple of 8, so may be larger than the digest.
1177 * Put the partial hash of opad state_len bytes after state1
1179 *p_state_len = qat_hash_get_state1_size(hash_alg);
1180 if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1181 memset(ipad, 0, block_size);
1182 memset(opad, 0, block_size);
1183 QAT_LOG(ERR, "opad precompute failed");
1187 /* don't leave data lying around */
1188 memset(ipad, 0, block_size);
1189 memset(opad, 0, block_size);
/* Initialises the firmware request common header shared by all session
 * types: LA service type, 64-bit flat-pointer descriptor flags, no partial
 * processing, 16-byte IV field, then the protocol flag (NONE/CCM/GCM/
 * SNOW3G/ZUC) from proto_flags. State update and digest-in-buffer are
 * disabled by default at the end.
 */
1194 qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
1195 enum qat_sym_proto_flag proto_flags)
1198 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
1199 header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
1200 header->comn_req_flags =
1201 ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
1202 QAT_COMN_PTR_TYPE_FLAT);
1203 ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
1204 ICP_QAT_FW_LA_PARTIAL_NONE);
1205 ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
1206 ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
1208 switch (proto_flags) {
1209 case QAT_CRYPTO_PROTO_FLAG_NONE:
1210 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1211 ICP_QAT_FW_LA_NO_PROTO);
1213 case QAT_CRYPTO_PROTO_FLAG_CCM:
1214 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1215 ICP_QAT_FW_LA_CCM_PROTO);
1217 case QAT_CRYPTO_PROTO_FLAG_GCM:
1218 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1219 ICP_QAT_FW_LA_GCM_PROTO);
1221 case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
1222 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1223 ICP_QAT_FW_LA_SNOW_3G_PROTO);
/* ZUC uses a dedicated flag field, not the generic PROTO field. */
1225 case QAT_CRYPTO_PROTO_FLAG_ZUC:
1226 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
1227 ICP_QAT_FW_LA_ZUC_3G_PROTO);
1231 ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
1232 ICP_QAT_FW_LA_NO_UPDATE_STATE);
1233 ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
1234 ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
1238 * Snow3G and ZUC should never use this function
1239 * and set its protocol flag in both cipher and auth part of content
1240 * descriptor building function
/* Extracts the protocol from packed serv_specif_flags and maps it back to
 * the qat_sym_proto_flag enum; anything other than GCM/CCM yields
 * QAT_CRYPTO_PROTO_FLAG_NONE.
 */
1242 static enum qat_sym_proto_flag
1243 qat_get_crypto_proto_flag(uint16_t flags)
1245 int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1246 enum qat_sym_proto_flag qat_proto_flag =
1247 QAT_CRYPTO_PROTO_FLAG_NONE;
1250 case ICP_QAT_FW_LA_GCM_PROTO:
1251 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1253 case ICP_QAT_FW_LA_CCM_PROTO:
1254 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1258 return qat_proto_flag;
1261 int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
1263 uint32_t cipherkeylen)
1265 struct icp_qat_hw_cipher_algo_blk *cipher;
1266 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1267 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1268 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1269 void *ptr = &req_tmpl->cd_ctrl;
1270 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1271 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1272 enum icp_qat_hw_cipher_convert key_convert;
1273 enum qat_sym_proto_flag qat_proto_flag =
1274 QAT_CRYPTO_PROTO_FLAG_NONE;
1275 uint32_t total_key_size;
1276 uint16_t cipher_offset, cd_size;
1277 uint32_t wordIndex = 0;
1278 uint32_t *temp_key = NULL;
1280 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
1281 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1282 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1283 ICP_QAT_FW_SLICE_CIPHER);
1284 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1285 ICP_QAT_FW_SLICE_DRAM_WR);
1286 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1287 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1288 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1289 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1290 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1291 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1292 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1293 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1294 ICP_QAT_FW_SLICE_CIPHER);
1295 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1296 ICP_QAT_FW_SLICE_AUTH);
1297 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1298 ICP_QAT_FW_SLICE_AUTH);
1299 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1300 ICP_QAT_FW_SLICE_DRAM_WR);
1301 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1302 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1303 QAT_LOG(ERR, "Invalid param, must be a cipher command.");
1307 if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
1309 * CTR Streaming ciphers are a special case. Decrypt = encrypt
1310 * Overriding default values previously set
1312 cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
1313 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1314 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
1315 || cdesc->qat_cipher_alg ==
1316 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
1317 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1318 else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
1319 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1321 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1323 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
1324 total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
1325 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1326 cipher_cd_ctrl->cipher_state_sz =
1327 ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1328 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1330 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1331 total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
1332 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
1333 cipher_cd_ctrl->cipher_padding_sz =
1334 (2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
1335 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
1336 total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
1337 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
1339 qat_get_crypto_proto_flag(header->serv_specif_flags);
1340 } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
1341 total_key_size = ICP_QAT_HW_DES_KEY_SZ;
1342 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
1344 qat_get_crypto_proto_flag(header->serv_specif_flags);
1345 } else if (cdesc->qat_cipher_alg ==
1346 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
1347 total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
1348 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1349 cipher_cd_ctrl->cipher_state_sz =
1350 ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1351 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1352 cdesc->min_qat_dev_gen = QAT_GEN2;
1354 total_key_size = cipherkeylen;
1355 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
1357 qat_get_crypto_proto_flag(header->serv_specif_flags);
1359 cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
1360 cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1361 cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;
1363 header->service_cmd_id = cdesc->qat_cmd;
1364 qat_sym_session_init_common_hdr(header, qat_proto_flag);
1366 cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
1367 cipher->cipher_config.val =
1368 ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
1369 cdesc->qat_cipher_alg, key_convert,
1372 if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1373 temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
1374 sizeof(struct icp_qat_hw_cipher_config)
1376 memcpy(cipher->key, cipherkey, cipherkeylen);
1377 memcpy(temp_key, cipherkey, cipherkeylen);
1379 /* XOR Key with KASUMI F8 key modifier at 4 bytes level */
1380 for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
1382 temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;
1384 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1385 cipherkeylen + cipherkeylen;
1387 memcpy(cipher->key, cipherkey, cipherkeylen);
1388 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1392 if (total_key_size > cipherkeylen) {
1393 uint32_t padding_size = total_key_size-cipherkeylen;
1394 if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1395 && (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
1396 /* K3 not provided so use K1 = K3*/
1397 memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
1398 } else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1399 && (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
1400 /* K2 and K3 not provided so use K1 = K2 = K3*/
1401 memcpy(cdesc->cd_cur_ptr, cipherkey,
1403 memcpy(cdesc->cd_cur_ptr+cipherkeylen,
1404 cipherkey, cipherkeylen);
1406 memset(cdesc->cd_cur_ptr, 0, padding_size);
1408 cdesc->cd_cur_ptr += padding_size;
1410 cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1411 cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1416 int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
1418 uint32_t authkeylen,
1419 uint32_t aad_length,
1420 uint32_t digestsize,
1421 unsigned int operation)
1423 struct icp_qat_hw_auth_setup *hash;
1424 struct icp_qat_hw_cipher_algo_blk *cipherconfig;
1425 struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1426 struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1427 struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1428 void *ptr = &req_tmpl->cd_ctrl;
1429 struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1430 struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1431 struct icp_qat_fw_la_auth_req_params *auth_param =
1432 (struct icp_qat_fw_la_auth_req_params *)
1433 ((char *)&req_tmpl->serv_specif_rqpars +
1434 sizeof(struct icp_qat_fw_la_cipher_req_params));
1435 uint16_t state1_size = 0, state2_size = 0;
1436 uint16_t hash_offset, cd_size;
1437 uint32_t *aad_len = NULL;
1438 uint32_t wordIndex = 0;
1440 enum qat_sym_proto_flag qat_proto_flag =
1441 QAT_CRYPTO_PROTO_FLAG_NONE;
1443 if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
1444 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1445 ICP_QAT_FW_SLICE_AUTH);
1446 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1447 ICP_QAT_FW_SLICE_DRAM_WR);
1448 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1449 } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1450 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1451 ICP_QAT_FW_SLICE_AUTH);
1452 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1453 ICP_QAT_FW_SLICE_CIPHER);
1454 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1455 ICP_QAT_FW_SLICE_CIPHER);
1456 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1457 ICP_QAT_FW_SLICE_DRAM_WR);
1458 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1459 } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1460 QAT_LOG(ERR, "Invalid param, must be a hash command.");
1464 if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
1465 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1466 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1467 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1468 ICP_QAT_FW_LA_CMP_AUTH_RES);
1469 cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
1471 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1472 ICP_QAT_FW_LA_RET_AUTH_RES);
1473 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1474 ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1475 cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
1479 * Setup the inner hash config
1481 hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1482 hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
1483 hash->auth_config.reserved = 0;
1484 hash->auth_config.config =
1485 ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
1486 cdesc->qat_hash_alg, digestsize);
1488 if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
1489 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
1490 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
1491 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
1492 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
1493 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
1495 hash->auth_counter.counter = 0;
1497 int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);
1501 hash->auth_counter.counter = rte_bswap32(block_size);
1504 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);
1507 * cd_cur_ptr now points at the state1 information.
1509 switch (cdesc->qat_hash_alg) {
1510 case ICP_QAT_HW_AUTH_ALGO_SHA1:
1511 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
1512 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1514 QAT_LOG(ERR, "(SHA)precompute failed");
1517 state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
1519 case ICP_QAT_HW_AUTH_ALGO_SHA224:
1520 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
1521 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1523 QAT_LOG(ERR, "(SHA)precompute failed");
1526 state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
1528 case ICP_QAT_HW_AUTH_ALGO_SHA256:
1529 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
1530 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1532 QAT_LOG(ERR, "(SHA)precompute failed");
1535 state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
1537 case ICP_QAT_HW_AUTH_ALGO_SHA384:
1538 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
1539 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1541 QAT_LOG(ERR, "(SHA)precompute failed");
1544 state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
1546 case ICP_QAT_HW_AUTH_ALGO_SHA512:
1547 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
1548 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1550 QAT_LOG(ERR, "(SHA)precompute failed");
1553 state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
1555 case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1556 state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1558 if (cdesc->aes_cmac)
1559 memset(cdesc->cd_cur_ptr, 0, state1_size);
1560 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
1561 authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
1562 &state2_size, cdesc->aes_cmac)) {
1563 cdesc->aes_cmac ? QAT_LOG(ERR,
1564 "(CMAC)precompute failed")
1566 "(XCBC)precompute failed");
1570 case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1571 case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
1572 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1573 state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
1574 if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
1575 authkeylen, cdesc->cd_cur_ptr + state1_size,
1576 &state2_size, cdesc->aes_cmac)) {
1577 QAT_LOG(ERR, "(GCM)precompute failed");
1581 * Write (the length of AAD) into bytes 16-19 of state2
1582 * in big-endian format. This field is 8 bytes
1584 auth_param->u2.aad_sz =
1585 RTE_ALIGN_CEIL(aad_length, 16);
1586 auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;
1588 aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
1589 ICP_QAT_HW_GALOIS_128_STATE1_SZ +
1590 ICP_QAT_HW_GALOIS_H_SZ);
1591 *aad_len = rte_bswap32(aad_length);
1592 cdesc->aad_len = aad_length;
1594 case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
1595 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1596 state1_size = qat_hash_get_state1_size(
1597 ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
1598 state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
1599 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1601 cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
1602 (cdesc->cd_cur_ptr + state1_size + state2_size);
1603 cipherconfig->cipher_config.val =
1604 ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
1605 ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
1606 ICP_QAT_HW_CIPHER_KEY_CONVERT,
1607 ICP_QAT_HW_CIPHER_ENCRYPT);
1608 memcpy(cipherconfig->key, authkey, authkeylen);
1609 memset(cipherconfig->key + authkeylen,
1610 0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
1611 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1612 authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1613 auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1615 case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
1616 hash->auth_config.config =
1617 ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
1618 cdesc->qat_hash_alg, digestsize);
1619 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1620 state1_size = qat_hash_get_state1_size(
1621 ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
1622 state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
1623 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
1624 + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);
1626 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1627 cdesc->cd_cur_ptr += state1_size + state2_size
1628 + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1629 auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1630 cdesc->min_qat_dev_gen = QAT_GEN2;
1633 case ICP_QAT_HW_AUTH_ALGO_MD5:
1634 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
1635 authkeylen, cdesc->cd_cur_ptr, &state1_size,
1637 QAT_LOG(ERR, "(MD5)precompute failed");
1640 state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
1642 case ICP_QAT_HW_AUTH_ALGO_NULL:
1643 state1_size = qat_hash_get_state1_size(
1644 ICP_QAT_HW_AUTH_ALGO_NULL);
1645 state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
1647 case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
1648 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1649 state1_size = qat_hash_get_state1_size(
1650 ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
1651 state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
1652 ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;
1654 if (aad_length > 0) {
1655 aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
1656 ICP_QAT_HW_CCM_AAD_LEN_INFO;
1657 auth_param->u2.aad_sz =
1658 RTE_ALIGN_CEIL(aad_length,
1659 ICP_QAT_HW_CCM_AAD_ALIGNMENT);
1661 auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
1663 cdesc->aad_len = aad_length;
1664 hash->auth_counter.counter = 0;
1666 hash_cd_ctrl->outer_prefix_sz = digestsize;
1667 auth_param->hash_state_sz = digestsize;
1669 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1671 case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
1672 state1_size = qat_hash_get_state1_size(
1673 ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
1674 state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
1675 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1676 pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
1679 * The Inner Hash Initial State2 block must contain IK
1680 * (Initialisation Key), followed by IK XOR-ed with KM
1681 * (Key Modifier): IK||(IK^KM).
1683 /* write the auth key */
1684 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1685 /* initialise temp key with auth key */
1686 memcpy(pTempKey, authkey, authkeylen);
1687 /* XOR Key with KASUMI F9 key modifier at 4 bytes level */
1688 for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
1689 pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
1692 QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
1696 /* Request template setup */
1697 qat_sym_session_init_common_hdr(header, qat_proto_flag);
1698 header->service_cmd_id = cdesc->qat_cmd;
1700 /* Auth CD config setup */
1701 hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
1702 hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
1703 hash_cd_ctrl->inner_res_sz = digestsize;
1704 hash_cd_ctrl->final_sz = digestsize;
1705 hash_cd_ctrl->inner_state1_sz = state1_size;
1706 auth_param->auth_res_sz = digestsize;
1708 hash_cd_ctrl->inner_state2_sz = state2_size;
1709 hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
1710 ((sizeof(struct icp_qat_hw_auth_setup) +
1711 RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
1714 cdesc->cd_cur_ptr += state1_size + state2_size;
1715 cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1717 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1718 cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1723 int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1726 case ICP_QAT_HW_AES_128_KEY_SZ:
1727 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1729 case ICP_QAT_HW_AES_192_KEY_SZ:
1730 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
1732 case ICP_QAT_HW_AES_256_KEY_SZ:
1733 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
1741 int qat_sym_validate_aes_docsisbpi_key(int key_len,
1742 enum icp_qat_hw_cipher_algo *alg)
1745 case ICP_QAT_HW_AES_128_KEY_SZ:
1746 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1754 int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1757 case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
1758 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
1766 int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1769 case ICP_QAT_HW_KASUMI_KEY_SZ:
1770 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
1778 int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1781 case ICP_QAT_HW_DES_KEY_SZ:
1782 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
1790 int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1793 case QAT_3DES_KEY_SZ_OPT1:
1794 case QAT_3DES_KEY_SZ_OPT2:
1795 case QAT_3DES_KEY_SZ_OPT3:
1796 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
1804 int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1807 case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
1808 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;