New upstream version 18.08
[deb_dpdk.git] / drivers / crypto / qat / qat_sym_session.c
1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
2  * Copyright(c) 2015-2018 Intel Corporation
3  */
4
5 #include <openssl/sha.h>        /* Needed to calculate pre-compute values */
6 #include <openssl/aes.h>        /* Needed to calculate pre-compute values */
7 #include <openssl/md5.h>        /* Needed to calculate pre-compute values */
8 #include <openssl/evp.h>        /* Needed for bpi runt block processing */
9
10 #include <rte_memcpy.h>
11 #include <rte_common.h>
12 #include <rte_spinlock.h>
13 #include <rte_byteorder.h>
14 #include <rte_log.h>
15 #include <rte_malloc.h>
16 #include <rte_crypto_sym.h>
17
18 #include "qat_logs.h"
19 #include "qat_sym_session.h"
20 #include "qat_sym_pmd.h"
21
22 /** Frees a BPI cipher context previously created by bpi_cipher_ctx_init()
23  *  Depends on OpenSSL libcrypto
24  */
25 static void
26 bpi_cipher_ctx_free(void *bpi_ctx)
27 {
28         if (bpi_ctx != NULL)
29                 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
30 }
31
32 /** Creates an AES-128 or DES cipher context in ECB mode
33  *  Depends on OpenSSL libcrypto
34  */
35 static int
36 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
37                 enum rte_crypto_cipher_operation direction __rte_unused,
38                 uint8_t *key, void **ctx)
39 {
40         const EVP_CIPHER *algo = NULL;
41         int ret;
42         *ctx = EVP_CIPHER_CTX_new();
43
44         if (*ctx == NULL) {
45                 ret = -ENOMEM;
46                 goto ctx_init_err;
47         }
48
49         if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
50                 algo = EVP_des_ecb();
51         else
52                 algo = EVP_aes_128_ecb();
53
54         /* IV will be ECB encrypted whether direction is encrypt or decrypt */
55         if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
56                 ret = -EINVAL;
57                 goto ctx_init_err;
58         }
59
60         return 0;
61
62 ctx_init_err:
63         if (*ctx != NULL)
64                 EVP_CIPHER_CTX_free(*ctx);
65         return ret;
66 }
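/*
 * Note on the ECB, encrypt-only context above (an explanatory aside, not
 * upstream documentation): DOCSIS BPI uses CBC for whole blocks and,
 * roughly speaking, a CFB-style construction for the trailing partial
 * ("runt") block.  That construction ECB-encrypts the IV (or the previous
 * ciphertext block) and XORs the result with the runt data, so the
 * underlying block cipher always runs in the encrypt direction regardless
 * of whether the packet is being encrypted or decrypted.
 */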
67
68 static int
69 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
70                 struct qat_sym_dev_private *internals)
71 {
72         int i = 0;
73         const struct rte_cryptodev_capabilities *capability;
74
75         while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
76                         RTE_CRYPTO_OP_TYPE_UNDEFINED) {
77                 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
78                         continue;
79
80                 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
81                         continue;
82
83                 if (capability->sym.cipher.algo == algo)
84                         return 1;
85         }
86         return 0;
87 }
88
89 static int
90 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
91                 struct qat_sym_dev_private *internals)
92 {
93         int i = 0;
94         const struct rte_cryptodev_capabilities *capability;
95
96         while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
97                         RTE_CRYPTO_OP_TYPE_UNDEFINED) {
98                 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
99                         continue;
100
101                 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
102                         continue;
103
104                 if (capability->sym.auth.algo == algo)
105                         return 1;
106         }
107         return 0;
108 }
109
110 void
111 qat_sym_session_clear(struct rte_cryptodev *dev,
112                 struct rte_cryptodev_sym_session *sess)
113 {
114         uint8_t index = dev->driver_id;
115         void *sess_priv = get_sym_session_private_data(sess, index);
116         struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;
117
118         if (sess_priv) {
119                 if (s->bpi_ctx)
120                         bpi_cipher_ctx_free(s->bpi_ctx);
121                 memset(s, 0, qat_sym_session_get_private_size(dev));
122                 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
123
124                 set_sym_session_private_data(sess, index, NULL);
125                 rte_mempool_put(sess_mp, sess_priv);
126         }
127 }
128
129 static int
130 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
131 {
132         /* Cipher Only */
133         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
134                 return ICP_QAT_FW_LA_CMD_CIPHER;
135
136         /* Authentication Only */
137         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
138                 return ICP_QAT_FW_LA_CMD_AUTH;
139
140         /* AEAD */
141         if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
142                 /* AES-GCM and AES-CCM work in different orders:
143                  * GCM first encrypts and then generates the hash,
144                  * whereas AES-CCM first generates the hash and then
145                  * encrypts. A similar relation applies to decryption.
146                  */
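                /*
                 * Resulting command mapping (a summary of the branches
                 * below):
                 *   GCM encrypt -> CIPHER_HASH, GCM decrypt -> HASH_CIPHER
                 *   CCM encrypt -> HASH_CIPHER, CCM decrypt -> CIPHER_HASH
                 */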
147                 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
148                         if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
149                                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
150                         else
151                                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
152                 else
153                         if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
154                                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
155                         else
156                                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
157         }
158
159         if (xform->next == NULL)
160                 return -1;
161
162         /* Cipher then Authenticate */
163         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
164                         xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
165                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
166
167         /* Authenticate then Cipher */
168         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
169                         xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
170                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
171
172         return -1;
173 }
174
175 static struct rte_crypto_auth_xform *
176 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
177 {
178         do {
179                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
180                         return &xform->auth;
181
182                 xform = xform->next;
183         } while (xform);
184
185         return NULL;
186 }
187
188 static struct rte_crypto_cipher_xform *
189 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
190 {
191         do {
192                 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
193                         return &xform->cipher;
194
195                 xform = xform->next;
196         } while (xform);
197
198         return NULL;
199 }
200
201 int
202 qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
203                 struct rte_crypto_sym_xform *xform,
204                 struct qat_sym_session *session)
205 {
206         struct qat_sym_dev_private *internals = dev->data->dev_private;
207         struct rte_crypto_cipher_xform *cipher_xform = NULL;
208         int ret;
209
210         /* Get cipher xform from crypto xform chain */
211         cipher_xform = qat_get_cipher_xform(xform);
212
213         session->cipher_iv.offset = cipher_xform->iv.offset;
214         session->cipher_iv.length = cipher_xform->iv.length;
215
216         switch (cipher_xform->algo) {
217         case RTE_CRYPTO_CIPHER_AES_CBC:
218                 if (qat_sym_validate_aes_key(cipher_xform->key.length,
219                                 &session->qat_cipher_alg) != 0) {
220                         QAT_LOG(ERR, "Invalid AES cipher key size");
221                         ret = -EINVAL;
222                         goto error_out;
223                 }
224                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
225                 break;
226         case RTE_CRYPTO_CIPHER_AES_CTR:
227                 if (qat_sym_validate_aes_key(cipher_xform->key.length,
228                                 &session->qat_cipher_alg) != 0) {
229                         QAT_LOG(ERR, "Invalid AES cipher key size");
230                         ret = -EINVAL;
231                         goto error_out;
232                 }
233                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
234                 break;
235         case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
236                 if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
237                                         &session->qat_cipher_alg) != 0) {
238                         QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
239                         ret = -EINVAL;
240                         goto error_out;
241                 }
242                 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
243                 break;
244         case RTE_CRYPTO_CIPHER_NULL:
245                 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
246                 break;
247         case RTE_CRYPTO_CIPHER_KASUMI_F8:
248                 if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
249                                         &session->qat_cipher_alg) != 0) {
250                         QAT_LOG(ERR, "Invalid KASUMI cipher key size");
251                         ret = -EINVAL;
252                         goto error_out;
253                 }
254                 session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
255                 break;
256         case RTE_CRYPTO_CIPHER_3DES_CBC:
257                 if (qat_sym_validate_3des_key(cipher_xform->key.length,
258                                 &session->qat_cipher_alg) != 0) {
259                         QAT_LOG(ERR, "Invalid 3DES cipher key size");
260                         ret = -EINVAL;
261                         goto error_out;
262                 }
263                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
264                 break;
265         case RTE_CRYPTO_CIPHER_DES_CBC:
266                 if (qat_sym_validate_des_key(cipher_xform->key.length,
267                                 &session->qat_cipher_alg) != 0) {
268                         QAT_LOG(ERR, "Invalid DES cipher key size");
269                         ret = -EINVAL;
270                         goto error_out;
271                 }
272                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
273                 break;
274         case RTE_CRYPTO_CIPHER_3DES_CTR:
275                 if (qat_sym_validate_3des_key(cipher_xform->key.length,
276                                 &session->qat_cipher_alg) != 0) {
277                         QAT_LOG(ERR, "Invalid 3DES cipher key size");
278                         ret = -EINVAL;
279                         goto error_out;
280                 }
281                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
282                 break;
283         case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
284                 ret = bpi_cipher_ctx_init(
285                                         cipher_xform->algo,
286                                         cipher_xform->op,
287                                         cipher_xform->key.data,
288                                         &session->bpi_ctx);
289                 if (ret != 0) {
290                         QAT_LOG(ERR, "failed to create DES BPI ctx");
291                         goto error_out;
292                 }
293                 if (qat_sym_validate_des_key(cipher_xform->key.length,
294                                 &session->qat_cipher_alg) != 0) {
295                         QAT_LOG(ERR, "Invalid DES cipher key size");
296                         ret = -EINVAL;
297                         goto error_out;
298                 }
299                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
300                 break;
301         case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
302                 ret = bpi_cipher_ctx_init(
303                                         cipher_xform->algo,
304                                         cipher_xform->op,
305                                         cipher_xform->key.data,
306                                         &session->bpi_ctx);
307                 if (ret != 0) {
308                         QAT_LOG(ERR, "failed to create AES BPI ctx");
309                         goto error_out;
310                 }
311                 if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
312                                 &session->qat_cipher_alg) != 0) {
313                         QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
314                         ret = -EINVAL;
315                         goto error_out;
316                 }
317                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
318                 break;
319         case RTE_CRYPTO_CIPHER_ZUC_EEA3:
320                 if (!qat_is_cipher_alg_supported(
321                         cipher_xform->algo, internals)) {
322                         QAT_LOG(ERR, "%s not supported on this device",
323                                 rte_crypto_cipher_algorithm_strings
324                                         [cipher_xform->algo]);
325                         ret = -ENOTSUP;
326                         goto error_out;
327                 }
328                 if (qat_sym_validate_zuc_key(cipher_xform->key.length,
329                                 &session->qat_cipher_alg) != 0) {
330                         QAT_LOG(ERR, "Invalid ZUC cipher key size");
331                         ret = -EINVAL;
332                         goto error_out;
333                 }
334                 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
335                 break;
336         case RTE_CRYPTO_CIPHER_3DES_ECB:
337         case RTE_CRYPTO_CIPHER_AES_ECB:
338         case RTE_CRYPTO_CIPHER_AES_F8:
339         case RTE_CRYPTO_CIPHER_AES_XTS:
340         case RTE_CRYPTO_CIPHER_ARC4:
341                 QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
342                                 cipher_xform->algo);
343                 ret = -ENOTSUP;
344                 goto error_out;
345         default:
346                 QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u",
347                                 cipher_xform->algo);
348                 ret = -EINVAL;
349                 goto error_out;
350         }
351
352         if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
353                 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
354         else
355                 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
356
357         if (qat_sym_session_aead_create_cd_cipher(session,
358                                                 cipher_xform->key.data,
359                                                 cipher_xform->key.length)) {
360                 ret = -EINVAL;
361                 goto error_out;
362         }
363
364         return 0;
365
366 error_out:
367         if (session->bpi_ctx) {
368                 bpi_cipher_ctx_free(session->bpi_ctx);
369                 session->bpi_ctx = NULL;
370         }
371         return ret;
372 }
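/*
 * Illustrative example (application side, not part of this driver) of a
 * cipher-only xform that the function above accepts.  Field names follow
 * rte_crypto_sym.h; the key buffer and IV_OFFSET are placeholders:
 *
 *      uint8_t aes_key[16] = { 0 };
 *      struct rte_crypto_sym_xform cipher_xform = {
 *              .type = RTE_CRYPTO_SYM_XFORM_CIPHER,
 *              .next = NULL,
 *              .cipher = {
 *                      .op = RTE_CRYPTO_CIPHER_OP_ENCRYPT,
 *                      .algo = RTE_CRYPTO_CIPHER_AES_CBC,
 *                      .key = { .data = aes_key, .length = sizeof(aes_key) },
 *                      .iv = { .offset = IV_OFFSET, .length = 16 },
 *              },
 *      };
 */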
373
374 int
375 qat_sym_session_configure(struct rte_cryptodev *dev,
376                 struct rte_crypto_sym_xform *xform,
377                 struct rte_cryptodev_sym_session *sess,
378                 struct rte_mempool *mempool)
379 {
380         void *sess_private_data;
381         int ret;
382
383         if (rte_mempool_get(mempool, &sess_private_data)) {
384                 CDEV_LOG_ERR(
385                         "Couldn't get object from session mempool");
386                 return -ENOMEM;
387         }
388
389         ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
390         if (ret != 0) {
391                 QAT_LOG(ERR,
392                     "Crypto QAT PMD: failed to configure session parameters");
393
394                 /* Return session to mempool */
395                 rte_mempool_put(mempool, sess_private_data);
396                 return ret;
397         }
398
399         set_sym_session_private_data(sess, dev->driver_id,
400                 sess_private_data);
401
402         return 0;
403 }
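/*
 * A rough sketch (for orientation only) of how an application reaches the
 * function above through the public cryptodev API in this release;
 * dev_id, xform and sess_mp are placeholders:
 *
 *      struct rte_cryptodev_sym_session *sess =
 *              rte_cryptodev_sym_session_create(sess_mp);
 *      if (sess == NULL ||
 *          rte_cryptodev_sym_session_init(dev_id, sess, &xform, sess_mp))
 *              ... handle error ...
 *
 * rte_cryptodev_sym_session_init() ends up invoking the PMD's
 * sym_session_configure op, i.e. qat_sym_session_configure() above.
 */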
404
405 int
406 qat_sym_session_set_parameters(struct rte_cryptodev *dev,
407                 struct rte_crypto_sym_xform *xform, void *session_private)
408 {
409         struct qat_sym_session *session = session_private;
410         int ret;
411         int qat_cmd_id;
412
413         /* Set context descriptor physical address */
414         session->cd_paddr = rte_mempool_virt2iova(session) +
415                         offsetof(struct qat_sym_session, cd);
416
417         session->min_qat_dev_gen = QAT_GEN1;
418
419         /* Get requested QAT command id */
420         qat_cmd_id = qat_get_cmd_id(xform);
421         if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
422                 QAT_LOG(ERR, "Unsupported xform chain requested");
423                 return -ENOTSUP;
424         }
425         session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
426         switch (session->qat_cmd) {
427         case ICP_QAT_FW_LA_CMD_CIPHER:
428                 ret = qat_sym_session_configure_cipher(dev, xform, session);
429                 if (ret < 0)
430                         return ret;
431                 break;
432         case ICP_QAT_FW_LA_CMD_AUTH:
433                 ret = qat_sym_session_configure_auth(dev, xform, session);
434                 if (ret < 0)
435                         return ret;
436                 break;
437         case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
438                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
439                         ret = qat_sym_session_configure_aead(xform,
440                                         session);
441                         if (ret < 0)
442                                 return ret;
443                 } else {
444                         ret = qat_sym_session_configure_cipher(dev,
445                                         xform, session);
446                         if (ret < 0)
447                                 return ret;
448                         ret = qat_sym_session_configure_auth(dev,
449                                         xform, session);
450                         if (ret < 0)
451                                 return ret;
452                 }
453                 break;
454         case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
455                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
456                         ret = qat_sym_session_configure_aead(xform,
457                                         session);
458                         if (ret < 0)
459                                 return ret;
460                 } else {
461                         ret = qat_sym_session_configure_auth(dev,
462                                         xform, session);
463                         if (ret < 0)
464                                 return ret;
465                         ret = qat_sym_session_configure_cipher(dev,
466                                         xform, session);
467                         if (ret < 0)
468                                 return ret;
469                 }
470                 break;
471         case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
472         case ICP_QAT_FW_LA_CMD_TRNG_TEST:
473         case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
474         case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
475         case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
476         case ICP_QAT_FW_LA_CMD_MGF1:
477         case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
478         case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
479         case ICP_QAT_FW_LA_CMD_DELIMITER:
480                 QAT_LOG(ERR, "Unsupported Service %u",
481                                 session->qat_cmd);
482                 return -ENOTSUP;
483         default:
484                 QAT_LOG(ERR, "Unsupported Service %u",
485                                 session->qat_cmd);
486                 return -ENOTSUP;
487         }
488
489         return 0;
490 }
491
492 int
493 qat_sym_session_configure_auth(struct rte_cryptodev *dev,
494                                 struct rte_crypto_sym_xform *xform,
495                                 struct qat_sym_session *session)
496 {
497         struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
498         struct qat_sym_dev_private *internals = dev->data->dev_private;
499         uint8_t *key_data = auth_xform->key.data;
500         uint8_t key_length = auth_xform->key.length;
501
502         switch (auth_xform->algo) {
503         case RTE_CRYPTO_AUTH_SHA1_HMAC:
504                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
505                 break;
506         case RTE_CRYPTO_AUTH_SHA224_HMAC:
507                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
508                 break;
509         case RTE_CRYPTO_AUTH_SHA256_HMAC:
510                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
511                 break;
512         case RTE_CRYPTO_AUTH_SHA384_HMAC:
513                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
514                 break;
515         case RTE_CRYPTO_AUTH_SHA512_HMAC:
516                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
517                 break;
518         case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
519                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
520                 break;
521         case RTE_CRYPTO_AUTH_AES_GMAC:
522                 if (qat_sym_validate_aes_key(auth_xform->key.length,
523                                 &session->qat_cipher_alg) != 0) {
524                         QAT_LOG(ERR, "Invalid AES key size");
525                         return -EINVAL;
526                 }
527                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
528                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
529
530                 break;
531         case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
532                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
533                 break;
534         case RTE_CRYPTO_AUTH_MD5_HMAC:
535                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
536                 break;
537         case RTE_CRYPTO_AUTH_NULL:
538                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
539                 break;
540         case RTE_CRYPTO_AUTH_KASUMI_F9:
541                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
542                 break;
543         case RTE_CRYPTO_AUTH_ZUC_EIA3:
544                 if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
545                         QAT_LOG(ERR, "%s not supported on this device",
546                                 rte_crypto_auth_algorithm_strings
547                                 [auth_xform->algo]);
548                         return -ENOTSUP;
549                 }
550                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
551                 break;
552         case RTE_CRYPTO_AUTH_SHA1:
553         case RTE_CRYPTO_AUTH_SHA256:
554         case RTE_CRYPTO_AUTH_SHA512:
555         case RTE_CRYPTO_AUTH_SHA224:
556         case RTE_CRYPTO_AUTH_SHA384:
557         case RTE_CRYPTO_AUTH_MD5:
558         case RTE_CRYPTO_AUTH_AES_CMAC:
559         case RTE_CRYPTO_AUTH_AES_CBC_MAC:
560                 QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
561                                 auth_xform->algo);
562                 return -ENOTSUP;
563         default:
564                 QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
565                                 auth_xform->algo);
566                 return -EINVAL;
567         }
568
569         session->auth_iv.offset = auth_xform->iv.offset;
570         session->auth_iv.length = auth_xform->iv.length;
571
572         if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
573                 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
574                         session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
575                         session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
576                         /*
577                          * The cipher desc content must be created first,
578                          * then the authentication desc content
579                          */
580
581                         if (qat_sym_session_aead_create_cd_cipher(session,
582                                                 auth_xform->key.data,
583                                                 auth_xform->key.length))
584                                 return -EINVAL;
585
586                         if (qat_sym_session_aead_create_cd_auth(session,
587                                                 key_data,
588                                                 key_length,
589                                                 0,
590                                                 auth_xform->digest_length,
591                                                 auth_xform->op))
592                                 return -EINVAL;
593                 } else {
594                         session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
595                         session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
596                         /*
597                          * The authentication desc content must be created
598                          * first, then the cipher desc content
599                          */
600
601                         if (qat_sym_session_aead_create_cd_auth(session,
602                                         key_data,
603                                         key_length,
604                                         0,
605                                         auth_xform->digest_length,
606                                         auth_xform->op))
607                                 return -EINVAL;
608
609                         if (qat_sym_session_aead_create_cd_cipher(session,
610                                                 auth_xform->key.data,
611                                                 auth_xform->key.length))
612                                 return -EINVAL;
613                 }
614                 /* Restore to authentication only */
615                 session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
616         } else {
617                 if (qat_sym_session_aead_create_cd_auth(session,
618                                 key_data,
619                                 key_length,
620                                 0,
621                                 auth_xform->digest_length,
622                                 auth_xform->op))
623                         return -EINVAL;
624         }
625
626         session->digest_length = auth_xform->digest_length;
627         return 0;
628 }
629
630 int
631 qat_sym_session_configure_aead(struct rte_crypto_sym_xform *xform,
632                                 struct qat_sym_session *session)
633 {
634         struct rte_crypto_aead_xform *aead_xform = &xform->aead;
635         enum rte_crypto_auth_operation crypto_operation;
636
637         /*
638          * Store AEAD IV parameters as cipher IV,
639          * to avoid unnecessary memory usage
640          */
641         session->cipher_iv.offset = xform->aead.iv.offset;
642         session->cipher_iv.length = xform->aead.iv.length;
643
644         switch (aead_xform->algo) {
645         case RTE_CRYPTO_AEAD_AES_GCM:
646                 if (qat_sym_validate_aes_key(aead_xform->key.length,
647                                 &session->qat_cipher_alg) != 0) {
648                         QAT_LOG(ERR, "Invalid AES key size");
649                         return -EINVAL;
650                 }
651                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
652                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
653                 break;
654         case RTE_CRYPTO_AEAD_AES_CCM:
655                 if (qat_sym_validate_aes_key(aead_xform->key.length,
656                                 &session->qat_cipher_alg) != 0) {
657                         QAT_LOG(ERR, "Invalid AES key size");
658                         return -EINVAL;
659                 }
660                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
661                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
662                 break;
663         default:
664                 QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u",
665                                 aead_xform->algo);
666                 return -EINVAL;
667         }
668
669         if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
670                         aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
671                         (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
672                         aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
673                 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
674                 /*
675                  * The cipher desc content must be created first,
676                  * then the authentication desc content
677                  */
678                 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
679                         RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;
680
681                 if (qat_sym_session_aead_create_cd_cipher(session,
682                                         aead_xform->key.data,
683                                         aead_xform->key.length))
684                         return -EINVAL;
685
686                 if (qat_sym_session_aead_create_cd_auth(session,
687                                         aead_xform->key.data,
688                                         aead_xform->key.length,
689                                         aead_xform->aad_length,
690                                         aead_xform->digest_length,
691                                         crypto_operation))
692                         return -EINVAL;
693         } else {
694                 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
695                 /*
696                  * The authentication desc content must be created
697                  * first, then the cipher desc content
698                  */
699
700                 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
701                         RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;
702
703                 if (qat_sym_session_aead_create_cd_auth(session,
704                                         aead_xform->key.data,
705                                         aead_xform->key.length,
706                                         aead_xform->aad_length,
707                                         aead_xform->digest_length,
708                                         crypto_operation))
709                         return -EINVAL;
710
711                 if (qat_sym_session_aead_create_cd_cipher(session,
712                                         aead_xform->key.data,
713                                         aead_xform->key.length))
714                         return -EINVAL;
715         }
716
717         session->digest_length = aead_xform->digest_length;
718         return 0;
719 }
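/*
 * Illustrative example (application side, not part of this driver) of an
 * AEAD xform handled by the function above.  Field names follow
 * rte_crypto_sym.h; the key buffer and IV_OFFSET are placeholders:
 *
 *      uint8_t gcm_key[16] = { 0 };
 *      struct rte_crypto_sym_xform aead_xform = {
 *              .type = RTE_CRYPTO_SYM_XFORM_AEAD,
 *              .next = NULL,
 *              .aead = {
 *                      .op = RTE_CRYPTO_AEAD_OP_ENCRYPT,
 *                      .algo = RTE_CRYPTO_AEAD_AES_GCM,
 *                      .key = { .data = gcm_key, .length = sizeof(gcm_key) },
 *                      .iv = { .offset = IV_OFFSET, .length = 12 },
 *                      .aad_length = 16,
 *                      .digest_length = 16,
 *              },
 *      };
 */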
720
721 unsigned int qat_sym_session_get_private_size(
722                 struct rte_cryptodev *dev __rte_unused)
723 {
724         return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
725 }
726
727 /* returns block size in bytes per cipher algo */
728 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
729 {
730         switch (qat_cipher_alg) {
731         case ICP_QAT_HW_CIPHER_ALGO_DES:
732                 return ICP_QAT_HW_DES_BLK_SZ;
733         case ICP_QAT_HW_CIPHER_ALGO_3DES:
734                 return ICP_QAT_HW_3DES_BLK_SZ;
735         case ICP_QAT_HW_CIPHER_ALGO_AES128:
736         case ICP_QAT_HW_CIPHER_ALGO_AES192:
737         case ICP_QAT_HW_CIPHER_ALGO_AES256:
738                 return ICP_QAT_HW_AES_BLK_SZ;
739         default:
740                 QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
741                 return -EFAULT;
742         };
743         return -EFAULT;
744 }
745
746 /*
747  * Returns size in bytes per hash algo for state1 size field in cd_ctrl
748  * This is the digest size rounded up to the nearest quadword
749  */
750 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
751 {
752         switch (qat_hash_alg) {
753         case ICP_QAT_HW_AUTH_ALGO_SHA1:
754                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
755                                                 QAT_HW_DEFAULT_ALIGNMENT);
756         case ICP_QAT_HW_AUTH_ALGO_SHA224:
757                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
758                                                 QAT_HW_DEFAULT_ALIGNMENT);
759         case ICP_QAT_HW_AUTH_ALGO_SHA256:
760                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
761                                                 QAT_HW_DEFAULT_ALIGNMENT);
762         case ICP_QAT_HW_AUTH_ALGO_SHA384:
763                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
764                                                 QAT_HW_DEFAULT_ALIGNMENT);
765         case ICP_QAT_HW_AUTH_ALGO_SHA512:
766                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
767                                                 QAT_HW_DEFAULT_ALIGNMENT);
768         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
769                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
770                                                 QAT_HW_DEFAULT_ALIGNMENT);
771         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
772         case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
773                 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
774                                                 QAT_HW_DEFAULT_ALIGNMENT);
775         case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
776                 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
777                                                 QAT_HW_DEFAULT_ALIGNMENT);
778         case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
779                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
780                                                 QAT_HW_DEFAULT_ALIGNMENT);
781         case ICP_QAT_HW_AUTH_ALGO_MD5:
782                 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
783                                                 QAT_HW_DEFAULT_ALIGNMENT);
784         case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
785                 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
786                                                 QAT_HW_DEFAULT_ALIGNMENT);
787         case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
788                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
789                                                 QAT_HW_DEFAULT_ALIGNMENT);
790         case ICP_QAT_HW_AUTH_ALGO_NULL:
791                 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
792                                                 QAT_HW_DEFAULT_ALIGNMENT);
793         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
794                 /* return maximum state1 size in this case */
795                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
796                                                 QAT_HW_DEFAULT_ALIGNMENT);
797         default:
798                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
799                 return -EFAULT;
800         };
801         return -EFAULT;
802 }
803
804 /* returns digest size in bytes per hash algo */
805 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
806 {
807         switch (qat_hash_alg) {
808         case ICP_QAT_HW_AUTH_ALGO_SHA1:
809                 return ICP_QAT_HW_SHA1_STATE1_SZ;
810         case ICP_QAT_HW_AUTH_ALGO_SHA224:
811                 return ICP_QAT_HW_SHA224_STATE1_SZ;
812         case ICP_QAT_HW_AUTH_ALGO_SHA256:
813                 return ICP_QAT_HW_SHA256_STATE1_SZ;
814         case ICP_QAT_HW_AUTH_ALGO_SHA384:
815                 return ICP_QAT_HW_SHA384_STATE1_SZ;
816         case ICP_QAT_HW_AUTH_ALGO_SHA512:
817                 return ICP_QAT_HW_SHA512_STATE1_SZ;
818         case ICP_QAT_HW_AUTH_ALGO_MD5:
819                 return ICP_QAT_HW_MD5_STATE1_SZ;
820         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
821                 /* return maximum digest size in this case */
822                 return ICP_QAT_HW_SHA512_STATE1_SZ;
823         default:
824                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
825                 return -EFAULT;
826         };
827         return -EFAULT;
828 }
829
830 /* returns block size in bytes per hash algo */
831 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
832 {
833         switch (qat_hash_alg) {
834         case ICP_QAT_HW_AUTH_ALGO_SHA1:
835                 return SHA_CBLOCK;
836         case ICP_QAT_HW_AUTH_ALGO_SHA224:
837                 return SHA256_CBLOCK;
838         case ICP_QAT_HW_AUTH_ALGO_SHA256:
839                 return SHA256_CBLOCK;
840         case ICP_QAT_HW_AUTH_ALGO_SHA384:
841                 return SHA512_CBLOCK;
842         case ICP_QAT_HW_AUTH_ALGO_SHA512:
843                 return SHA512_CBLOCK;
844         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
845                 return 16;
846         case ICP_QAT_HW_AUTH_ALGO_MD5:
847                 return MD5_CBLOCK;
848         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
849                 /* return maximum block size in this case */
850                 return SHA512_CBLOCK;
851         default:
852                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
853                 return -EFAULT;
854         };
855         return -EFAULT;
856 }
857
858 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
859 {
860         SHA_CTX ctx;
861
862         if (!SHA1_Init(&ctx))
863                 return -EFAULT;
864         SHA1_Transform(&ctx, data_in);
865         rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
866         return 0;
867 }
868
869 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
870 {
871         SHA256_CTX ctx;
872
873         if (!SHA224_Init(&ctx))
874                 return -EFAULT;
875         SHA256_Transform(&ctx, data_in);
876         rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
877         return 0;
878 }
879
880 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
881 {
882         SHA256_CTX ctx;
883
884         if (!SHA256_Init(&ctx))
885                 return -EFAULT;
886         SHA256_Transform(&ctx, data_in);
887         rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
888         return 0;
889 }
890
891 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
892 {
893         SHA512_CTX ctx;
894
895         if (!SHA384_Init(&ctx))
896                 return -EFAULT;
897         SHA512_Transform(&ctx, data_in);
898         rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
899         return 0;
900 }
901
902 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
903 {
904         SHA512_CTX ctx;
905
906         if (!SHA512_Init(&ctx))
907                 return -EFAULT;
908         SHA512_Transform(&ctx, data_in);
909         rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
910         return 0;
911 }
912
913 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
914 {
915         MD5_CTX ctx;
916
917         if (!MD5_Init(&ctx))
918                 return -EFAULT;
919         MD5_Transform(&ctx, data_in);
920         rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
921
922         return 0;
923 }
924
925 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
926                         uint8_t *data_in,
927                         uint8_t *data_out)
928 {
929         int digest_size;
930         uint8_t digest[qat_hash_get_digest_size(
931                         ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
932         uint32_t *hash_state_out_be32;
933         uint64_t *hash_state_out_be64;
934         int i;
935
936         digest_size = qat_hash_get_digest_size(hash_alg);
937         if (digest_size <= 0)
938                 return -EFAULT;
939
940         hash_state_out_be32 = (uint32_t *)data_out;
941         hash_state_out_be64 = (uint64_t *)data_out;
942
943         switch (hash_alg) {
944         case ICP_QAT_HW_AUTH_ALGO_SHA1:
945                 if (partial_hash_sha1(data_in, digest))
946                         return -EFAULT;
947                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
948                         *hash_state_out_be32 =
949                                 rte_bswap32(*(((uint32_t *)digest)+i));
950                 break;
951         case ICP_QAT_HW_AUTH_ALGO_SHA224:
952                 if (partial_hash_sha224(data_in, digest))
953                         return -EFAULT;
954                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
955                         *hash_state_out_be32 =
956                                 rte_bswap32(*(((uint32_t *)digest)+i));
957                 break;
958         case ICP_QAT_HW_AUTH_ALGO_SHA256:
959                 if (partial_hash_sha256(data_in, digest))
960                         return -EFAULT;
961                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
962                         *hash_state_out_be32 =
963                                 rte_bswap32(*(((uint32_t *)digest)+i));
964                 break;
965         case ICP_QAT_HW_AUTH_ALGO_SHA384:
966                 if (partial_hash_sha384(data_in, digest))
967                         return -EFAULT;
968                 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
969                         *hash_state_out_be64 =
970                                 rte_bswap64(*(((uint64_t *)digest)+i));
971                 break;
972         case ICP_QAT_HW_AUTH_ALGO_SHA512:
973                 if (partial_hash_sha512(data_in, digest))
974                         return -EFAULT;
975                 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
976                         *hash_state_out_be64 =
977                                 rte_bswap64(*(((uint64_t *)digest)+i));
978                 break;
979         case ICP_QAT_HW_AUTH_ALGO_MD5:
980                 if (partial_hash_md5(data_in, data_out))
981                         return -EFAULT;
982                 break;
983         default:
984                 QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
985                 return -EFAULT;
986         }
987
988         return 0;
989 }
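/*
 * Note on the byte swapping above: the OpenSSL SHA contexts keep their
 * state words in host order, and the swap loops convert them to the
 * big-endian layout used in the hardware state buffer, hence the
 * rte_bswap32()/rte_bswap64() calls per word.  MD5 is a little-endian
 * algorithm, so its state is copied through unchanged.
 */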
990 #define HMAC_IPAD_VALUE 0x36
991 #define HMAC_OPAD_VALUE 0x5c
992 #define HASH_XCBC_PRECOMP_KEY_NUM 3
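/*
 * Background for the constants above: 0x36 and 0x5c are the standard HMAC
 * inner/outer pad bytes from RFC 2104.  AES-XCBC-MAC (RFC 3566) derives
 * three keys K1..K3 by encrypting the constant blocks 0x01..01, 0x02..02
 * and 0x03..03, which is what the key seed below and
 * HASH_XCBC_PRECOMP_KEY_NUM refer to.
 */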
993
994 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
995                                 const uint8_t *auth_key,
996                                 uint16_t auth_keylen,
997                                 uint8_t *p_state_buf,
998                                 uint16_t *p_state_len)
999 {
1000         int block_size;
1001         uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1002         uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1003         int i;
1004
1005         if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
1006                 static uint8_t qat_aes_xcbc_key_seed[
1007                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1008                         0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1009                         0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1010                         0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1011                         0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1012                         0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1013                         0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1014                 };
1015
1016                 uint8_t *in = NULL;
1017                 uint8_t *out = p_state_buf;
1018                 int x;
1019                 AES_KEY enc_key;
1020
1021                 in = rte_zmalloc("working mem for key",
1022                                 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1023                 if (in == NULL) {
1024                         QAT_LOG(ERR, "Failed to alloc memory");
1025                         return -ENOMEM;
1026                 }
1027
1028                 rte_memcpy(in, qat_aes_xcbc_key_seed,
1029                                 ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1030                 for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1031                         if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1032                                 &enc_key) != 0) {
1033                                 rte_free(in -
1034                                         (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1035                                 memset(out -
1036                                         (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1037                                         0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1038                                 return -EFAULT;
1039                         }
1040                         AES_encrypt(in, out, &enc_key);
1041                         in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1042                         out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1043                 }
1044                 *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1045                 rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1046                 return 0;
1047         } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1048                 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
1049                 uint8_t *in = NULL;
1050                 uint8_t *out = p_state_buf;
1051                 AES_KEY enc_key;
1052
1053                 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1054                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1055                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1056                 in = rte_zmalloc("working mem for key",
1057                                 ICP_QAT_HW_GALOIS_H_SZ, 16);
1058                 if (in == NULL) {
1059                         QAT_LOG(ERR, "Failed to alloc memory");
1060                         return -ENOMEM;
1061                 }
1062
1063                 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1064                 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1065                         &enc_key) != 0) {
                             rte_free(in);  /* don't leak the working buffer */
1066                         return -EFAULT;
1067                 }
1068                 AES_encrypt(in, out, &enc_key);
1069                 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1070                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1071                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
1072                 rte_free(in);
1073                 return 0;
1074         }
1075
1076         block_size = qat_hash_get_block_size(hash_alg);
1077         if (block_size <= 0)
1078                 return -EFAULT;
1079         /* init ipad and opad from key and xor with fixed values */
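        /*
         * This is the standard HMAC precompute: state1 receives the partial
         * hash (one compression-function application) of key XOR ipad, and
         * further below the partial hash of key XOR opad is placed after it,
         * so the hardware can finish the HMAC without seeing the raw key.
         */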
1080         memset(ipad, 0, block_size);
1081         memset(opad, 0, block_size);
1082
1083         if (auth_keylen > (unsigned int)block_size) {
1084                 QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
1085                 return -EFAULT;
1086         }
1087         rte_memcpy(ipad, auth_key, auth_keylen);
1088         rte_memcpy(opad, auth_key, auth_keylen);
1089
1090         for (i = 0; i < block_size; i++) {
1091                 uint8_t *ipad_ptr = ipad + i;
1092                 uint8_t *opad_ptr = opad + i;
1093                 *ipad_ptr ^= HMAC_IPAD_VALUE;
1094                 *opad_ptr ^= HMAC_OPAD_VALUE;
1095         }
1096
1097         /* do partial hash of ipad and copy to state1 */
1098         if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
1099                 memset(ipad, 0, block_size);
1100                 memset(opad, 0, block_size);
1101                 QAT_LOG(ERR, "ipad precompute failed");
1102                 return -EFAULT;
1103         }
1104
1105         /*
1106          * State len is a multiple of 8, so may be larger than the digest.
1107          * Put the partial hash of opad state_len bytes after state1
1108          */
1109         *p_state_len = qat_hash_get_state1_size(hash_alg);
1110         if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1111                 memset(ipad, 0, block_size);
1112                 memset(opad, 0, block_size);
1113                 QAT_LOG(ERR, "opad precompute failed");
1114                 return -EFAULT;
1115         }
1116
1117         /*  don't leave data lying around */
1118         memset(ipad, 0, block_size);
1119         memset(opad, 0, block_size);
1120         return 0;
1121 }
1122
1123 static void
1124 qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
1125                 enum qat_sym_proto_flag proto_flags)
1126 {
1127         header->hdr_flags =
1128                 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
1129         header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
1130         header->comn_req_flags =
1131                 ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
1132                                         QAT_COMN_PTR_TYPE_FLAT);
1133         ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
1134                                   ICP_QAT_FW_LA_PARTIAL_NONE);
1135         ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
1136                                            ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
1137
1138         switch (proto_flags) {
1139         case QAT_CRYPTO_PROTO_FLAG_NONE:
1140                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1141                                         ICP_QAT_FW_LA_NO_PROTO);
1142                 break;
1143         case QAT_CRYPTO_PROTO_FLAG_CCM:
1144                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1145                                         ICP_QAT_FW_LA_CCM_PROTO);
1146                 break;
1147         case QAT_CRYPTO_PROTO_FLAG_GCM:
1148                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1149                                         ICP_QAT_FW_LA_GCM_PROTO);
1150                 break;
1151         case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
1152                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1153                                         ICP_QAT_FW_LA_SNOW_3G_PROTO);
1154                 break;
1155         case QAT_CRYPTO_PROTO_FLAG_ZUC:
1156                 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
1157                         ICP_QAT_FW_LA_ZUC_3G_PROTO);
1158                 break;
1159         }
1160
1161         ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
1162                                            ICP_QAT_FW_LA_NO_UPDATE_STATE);
1163         ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
1164                                         ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
1165 }
1166
1167 /*
1168  *      Snow3G and ZUC should never use this function; they set their
1169  *      protocol flag directly in both the cipher and auth parts of the
1170  *      content descriptor building functions.
1171  */
1172 static enum qat_sym_proto_flag
1173 qat_get_crypto_proto_flag(uint16_t flags)
1174 {
1175         int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1176         enum qat_sym_proto_flag qat_proto_flag =
1177                         QAT_CRYPTO_PROTO_FLAG_NONE;
1178
1179         switch (proto) {
1180         case ICP_QAT_FW_LA_GCM_PROTO:
1181                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1182                 break;
1183         case ICP_QAT_FW_LA_CCM_PROTO:
1184                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1185                 break;
1186         }
1187
1188         return qat_proto_flag;
1189 }
1190
1191 int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
1192                                                 uint8_t *cipherkey,
1193                                                 uint32_t cipherkeylen)
1194 {
1195         struct icp_qat_hw_cipher_algo_blk *cipher;
1196         struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1197         struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1198         struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1199         void *ptr = &req_tmpl->cd_ctrl;
1200         struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1201         struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1202         enum icp_qat_hw_cipher_convert key_convert;
1203         enum qat_sym_proto_flag qat_proto_flag =
1204                 QAT_CRYPTO_PROTO_FLAG_NONE;
1205         uint32_t total_key_size;
1206         uint16_t cipher_offset, cd_size;
1207         uint32_t wordIndex  = 0;
1208         uint32_t *temp_key = NULL;
1209
1210         if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
1211                 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1212                 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1213                                         ICP_QAT_FW_SLICE_CIPHER);
1214                 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1215                                         ICP_QAT_FW_SLICE_DRAM_WR);
1216                 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1217                                         ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1218                 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1219                                         ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1220                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1221         } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1222                 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1223                 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1224                                         ICP_QAT_FW_SLICE_CIPHER);
1225                 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1226                                         ICP_QAT_FW_SLICE_AUTH);
1227                 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1228                                         ICP_QAT_FW_SLICE_AUTH);
1229                 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1230                                         ICP_QAT_FW_SLICE_DRAM_WR);
1231                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1232         } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1233                 QAT_LOG(ERR, "Invalid param, must be a cipher command.");
1234                 return -EFAULT;
1235         }
1236
1237         if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
1238                 /*
1239                  * CTR (streaming) ciphers are a special case: decrypt == encrypt.
1240                  * Override the default values set previously.
1241                  */
1242                 cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
1243                 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1244         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
1245                 || cdesc->qat_cipher_alg ==
1246                         ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
1247                 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1248         else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
1249                 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1250         else
1251                 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1252
1253         if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
1254                 total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
1255                         ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1256                 cipher_cd_ctrl->cipher_state_sz =
1257                         ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1258                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1259
1260         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1261                 total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
1262                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
1263                 cipher_cd_ctrl->cipher_padding_sz =
1264                                         (2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
1265         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
1266                 total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
1267                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
1268                 qat_proto_flag =
1269                         qat_get_crypto_proto_flag(header->serv_specif_flags);
1270         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
1271                 total_key_size = ICP_QAT_HW_DES_KEY_SZ;
1272                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
1273                 qat_proto_flag =
1274                         qat_get_crypto_proto_flag(header->serv_specif_flags);
1275         } else if (cdesc->qat_cipher_alg ==
1276                 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
1277                 total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
1278                         ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1279                 cipher_cd_ctrl->cipher_state_sz =
1280                         ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1281                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1282                 cdesc->min_qat_dev_gen = QAT_GEN2;
1283         } else {
1284                 total_key_size = cipherkeylen;
1285                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
1286                 qat_proto_flag =
1287                         qat_get_crypto_proto_flag(header->serv_specif_flags);
1288         }
1289         cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
1290         cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1291         cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;
1292
1293         header->service_cmd_id = cdesc->qat_cmd;
1294         qat_sym_session_init_common_hdr(header, qat_proto_flag);
1295
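             /*
              * Build the cipher config word and copy the cipher key into the
              * content descriptor; KASUMI F8 additionally stores the key
              * XOR-ed with the key modifier.
              */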
1296         cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
1297         cipher->cipher_config.val =
1298             ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
1299                                         cdesc->qat_cipher_alg, key_convert,
1300                                         cdesc->qat_dir);
1301
1302         if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1303                 temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
1304                                         sizeof(struct icp_qat_hw_cipher_config)
1305                                         + cipherkeylen);
1306                 memcpy(cipher->key, cipherkey, cipherkeylen);
1307                 memcpy(temp_key, cipherkey, cipherkeylen);
1308
1309                 /* XOR key with KASUMI F8 key modifier, 4 bytes at a time */
1310                 for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
1311                                                                 wordIndex++)
1312                         temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;
1313
1314                 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1315                                         cipherkeylen + cipherkeylen;
1316         } else {
1317                 memcpy(cipher->key, cipherkey, cipherkeylen);
1318                 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1319                                         cipherkeylen;
1320         }
1321
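             /*
              * Pad the key area out to total_key_size: the short 3DES key
              * options replicate K1 for the missing key parts, everything
              * else is zero padded.
              */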
1322         if (total_key_size > cipherkeylen) {
1323                 uint32_t padding_size =  total_key_size-cipherkeylen;
1324                 if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1325                         && (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
1326                         /* K3 not provided, so set K3 = K1 */
1327                         memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
1328                 } else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1329                         && (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
1330                         /* K2 and K3 not provided, so set K2 = K3 = K1 */
1331                         memcpy(cdesc->cd_cur_ptr, cipherkey,
1332                                 cipherkeylen);
1333                         memcpy(cdesc->cd_cur_ptr+cipherkeylen,
1334                                 cipherkey, cipherkeylen);
1335                 } else
1336                         memset(cdesc->cd_cur_ptr, 0, padding_size);
1337
1338                 cdesc->cd_cur_ptr += padding_size;
1339         }
1340         cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1341         cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1342
1343         return 0;
1344 }
1345
1346 int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
1347                                                 uint8_t *authkey,
1348                                                 uint32_t authkeylen,
1349                                                 uint32_t aad_length,
1350                                                 uint32_t digestsize,
1351                                                 unsigned int operation)
1352 {
1353         struct icp_qat_hw_auth_setup *hash;
1354         struct icp_qat_hw_cipher_algo_blk *cipherconfig;
1355         struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1356         struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1357         struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1358         void *ptr = &req_tmpl->cd_ctrl;
1359         struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1360         struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1361         struct icp_qat_fw_la_auth_req_params *auth_param =
1362                 (struct icp_qat_fw_la_auth_req_params *)
1363                 ((char *)&req_tmpl->serv_specif_rqpars +
1364                 sizeof(struct icp_qat_fw_la_cipher_req_params));
1365         uint16_t state1_size = 0, state2_size = 0;
1366         uint16_t hash_offset, cd_size;
1367         uint32_t *aad_len = NULL;
1368         uint32_t wordIndex  = 0;
1369         uint32_t *pTempKey;
1370         enum qat_sym_proto_flag qat_proto_flag =
1371                 QAT_CRYPTO_PROTO_FLAG_NONE;
1372
1373         if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
1374                 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1375                                         ICP_QAT_FW_SLICE_AUTH);
1376                 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1377                                         ICP_QAT_FW_SLICE_DRAM_WR);
1378                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1379         } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1380                 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1381                                 ICP_QAT_FW_SLICE_AUTH);
1382                 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1383                                 ICP_QAT_FW_SLICE_CIPHER);
1384                 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1385                                 ICP_QAT_FW_SLICE_CIPHER);
1386                 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1387                                 ICP_QAT_FW_SLICE_DRAM_WR);
1388                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1389         } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1390                 QAT_LOG(ERR, "Invalid param, must be a hash command.");
1391                 return -EFAULT;
1392         }
1393
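             /*
              * Verify: firmware compares against the received digest.
              * Generate: firmware returns the computed digest.
              */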
1394         if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
1395                 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1396                                 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1397                 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1398                                 ICP_QAT_FW_LA_CMP_AUTH_RES);
1399                 cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
1400         } else {
1401                 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1402                                            ICP_QAT_FW_LA_RET_AUTH_RES);
1403                 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1404                                            ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1405                 cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
1406         }
1407
1408         /*
1409          * Set up the inner hash config
1410          */
1411         hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1412         hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
1413         hash->auth_config.reserved = 0;
1414         hash->auth_config.config =
1415                         ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
1416                                 cdesc->qat_hash_alg, digestsize);
1417
1418         if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
1419                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
1420                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3)
1421                 hash->auth_counter.counter = 0;
1422         else
1423                 hash->auth_counter.counter = rte_bswap32(
1424                                 qat_hash_get_block_size(cdesc->qat_hash_alg));
1425
1426         cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);
1427
1428         /*
1429          * cd_cur_ptr now points at the state1 information.
1430          */
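             /* Each case sets state1/state2 sizes and fills in the state data. */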
1431         switch (cdesc->qat_hash_alg) {
1432         case ICP_QAT_HW_AUTH_ALGO_SHA1:
1433                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1,
1434                         authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
1435                         QAT_LOG(ERR, "(SHA)precompute failed");
1436                         return -EFAULT;
1437                 }
1438                 state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
1439                 break;
1440         case ICP_QAT_HW_AUTH_ALGO_SHA224:
1441                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224,
1442                         authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
1443                         QAT_LOG(ERR, "(SHA)precompute failed");
1444                         return -EFAULT;
1445                 }
1446                 state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
1447                 break;
1448         case ICP_QAT_HW_AUTH_ALGO_SHA256:
1449                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256,
1450                         authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
1451                         QAT_LOG(ERR, "(SHA)precompute failed");
1452                         return -EFAULT;
1453                 }
1454                 state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
1455                 break;
1456         case ICP_QAT_HW_AUTH_ALGO_SHA384:
1457                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384,
1458                         authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
1459                         QAT_LOG(ERR, "(SHA)precompute failed");
1460                         return -EFAULT;
1461                 }
1462                 state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
1463                 break;
1464         case ICP_QAT_HW_AUTH_ALGO_SHA512:
1465                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512,
1466                         authkey, authkeylen, cdesc->cd_cur_ptr, &state1_size)) {
1467                         QAT_LOG(ERR, "(SHA)precompute failed");
1468                         return -EFAULT;
1469                 }
1470                 state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
1471                 break;
1472         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1473                 state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1474                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
1475                         authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
1476                         &state2_size)) {
1477                         QAT_LOG(ERR, "(XCBC)precompute failed");
1478                         return -EFAULT;
1479                 }
1480                 break;
1481         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1482         case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
1483                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1484                 state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
1485                 if (qat_sym_do_precomputes(cdesc->qat_hash_alg,
1486                         authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
1487                         &state2_size)) {
1488                         QAT_LOG(ERR, "(GCM)precompute failed");
1489                         return -EFAULT;
1490                 }
1491                 /*
1492                  * Write the AAD length into bytes 16-19 of state2 in
1493                  * big-endian format; the field itself is 8 bytes wide.
1494                  */
1495                 auth_param->u2.aad_sz =
1496                                 RTE_ALIGN_CEIL(aad_length, 16);
1497                 auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;
1498
1499                 aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
1500                                         ICP_QAT_HW_GALOIS_128_STATE1_SZ +
1501                                         ICP_QAT_HW_GALOIS_H_SZ);
1502                 *aad_len = rte_bswap32(aad_length);
1503                 cdesc->aad_len = aad_length;
1504                 break;
1505         case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
1506                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1507                 state1_size = qat_hash_get_state1_size(
1508                                 ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
1509                 state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
1510                 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1511
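                     /*
                      * A cipher config block, the auth key and a zeroed IV
                      * area are appended after state1/state2.
                      */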
1512                 cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
1513                                 (cdesc->cd_cur_ptr + state1_size + state2_size);
1514                 cipherconfig->cipher_config.val =
1515                 ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
1516                         ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
1517                         ICP_QAT_HW_CIPHER_KEY_CONVERT,
1518                         ICP_QAT_HW_CIPHER_ENCRYPT);
1519                 memcpy(cipherconfig->key, authkey, authkeylen);
1520                 memset(cipherconfig->key + authkeylen,
1521                                 0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
1522                 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1523                                 authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1524                 auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1525                 break;
1526         case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
1527                 hash->auth_config.config =
1528                         ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
1529                                 cdesc->qat_hash_alg, digestsize);
1530                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1531                 state1_size = qat_hash_get_state1_size(
1532                                 ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
1533                 state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
1534                 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
1535                         + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);
1536
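                     /* Auth key goes after state1; the rest stays zeroed. */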
1537                 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1538                 cdesc->cd_cur_ptr += state1_size + state2_size
1539                         + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1540                 auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1541                 cdesc->min_qat_dev_gen = QAT_GEN2;
1542
1543                 break;
1544         case ICP_QAT_HW_AUTH_ALGO_MD5:
1545                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5,
1546                         authkey, authkeylen, cdesc->cd_cur_ptr,
1547                         &state1_size)) {
1548                         QAT_LOG(ERR, "(MD5)precompute failed");
1549                         return -EFAULT;
1550                 }
1551                 state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
1552                 break;
1553         case ICP_QAT_HW_AUTH_ALGO_NULL:
1554                 state1_size = qat_hash_get_state1_size(
1555                                 ICP_QAT_HW_AUTH_ALGO_NULL);
1556                 state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
1557                 break;
1558         case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
1559                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1560                 state1_size = qat_hash_get_state1_size(
1561                                 ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
1562                 state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
1563                                 ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;
1564
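                     /*
                      * Account for the CCM B0 block and encoded AAD length
                      * field, then round the AAD size up to the required
                      * alignment.
                      */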
1565                 if (aad_length > 0) {
1566                         aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
1567                         ICP_QAT_HW_CCM_AAD_LEN_INFO;
1568                         auth_param->u2.aad_sz =
1569                         RTE_ALIGN_CEIL(aad_length,
1570                         ICP_QAT_HW_CCM_AAD_ALIGNMENT);
1571                 } else {
1572                         auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
1573                 }
1574                 cdesc->aad_len = aad_length;
1575                 hash->auth_counter.counter = 0;
1576
1577                 hash_cd_ctrl->outer_prefix_sz = digestsize;
1578                 auth_param->hash_state_sz = digestsize;
1579
1580                 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1581                 break;
1582         case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
1583                 state1_size = qat_hash_get_state1_size(
1584                                 ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
1585                 state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
1586                 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1587                 pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
1588                                                         + authkeylen);
1589                 /*
1590                  * The Inner Hash Initial State2 block must contain IK
1591                  * (Initialisation Key), followed by IK XOR-ed with KM
1592                  * (Key Modifier): IK||(IK^KM).
1593                  */
1594                 /* write the auth key */
1595                 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1596                 /* initialise temp key with auth key */
1597                 memcpy(pTempKey, authkey, authkeylen);
1598                 /* XOR key with KASUMI F9 key modifier, 4 bytes at a time */
1599                 for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
1600                         pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
1601                 break;
1602         default:
1603                 QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
1604                 return -EFAULT;
1605         }
1606
1607         /* Request template setup */
1608         qat_sym_session_init_common_hdr(header, qat_proto_flag);
1609         header->service_cmd_id = cdesc->qat_cmd;
1610
1611         /* Auth CD config setup */
1612         hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
1613         hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
1614         hash_cd_ctrl->inner_res_sz = digestsize;
1615         hash_cd_ctrl->final_sz = digestsize;
1616         hash_cd_ctrl->inner_state1_sz = state1_size;
1617         auth_param->auth_res_sz = digestsize;
1618
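             /*
              * state2 follows the 8-byte aligned state1 inside the hash setup
              * block; both offsets are expressed in 8-byte words.
              */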
1619         hash_cd_ctrl->inner_state2_sz  = state2_size;
1620         hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
1621                         ((sizeof(struct icp_qat_hw_auth_setup) +
1622                          RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
1623                                         >> 3);
1624
1625         cdesc->cd_cur_ptr += state1_size + state2_size;
1626         cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1627
1628         cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1629         cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1630
1631         return 0;
1632 }
1633
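     /** Map an AES key length in bytes to the matching QAT cipher algorithm.
      *  Returns 0 on success, -EINVAL for an unsupported key length.
      */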
1634 int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1635 {
1636         switch (key_len) {
1637         case ICP_QAT_HW_AES_128_KEY_SZ:
1638                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1639                 break;
1640         case ICP_QAT_HW_AES_192_KEY_SZ:
1641                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
1642                 break;
1643         case ICP_QAT_HW_AES_256_KEY_SZ:
1644                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
1645                 break;
1646         default:
1647                 return -EINVAL;
1648         }
1649         return 0;
1650 }
1651
1652 int qat_sym_validate_aes_docsisbpi_key(int key_len,
1653                 enum icp_qat_hw_cipher_algo *alg)
1654 {
1655         switch (key_len) {
1656         case ICP_QAT_HW_AES_128_KEY_SZ:
1657                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1658                 break;
1659         default:
1660                 return -EINVAL;
1661         }
1662         return 0;
1663 }
1664
1665 int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1666 {
1667         switch (key_len) {
1668         case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
1669                 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
1670                 break;
1671         default:
1672                 return -EINVAL;
1673         }
1674         return 0;
1675 }
1676
1677 int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1678 {
1679         switch (key_len) {
1680         case ICP_QAT_HW_KASUMI_KEY_SZ:
1681                 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
1682                 break;
1683         default:
1684                 return -EINVAL;
1685         }
1686         return 0;
1687 }
1688
1689 int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1690 {
1691         switch (key_len) {
1692         case ICP_QAT_HW_DES_KEY_SZ:
1693                 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
1694                 break;
1695         default:
1696                 return -EINVAL;
1697         }
1698         return 0;
1699 }
1700
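     /** Accept any of the three 3DES key-length options; shorter keys are
      *  padded by replicating K1 when the cipher content descriptor is built.
      */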
1701 int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1702 {
1703         switch (key_len) {
1704         case QAT_3DES_KEY_SZ_OPT1:
1705         case QAT_3DES_KEY_SZ_OPT2:
1706         case QAT_3DES_KEY_SZ_OPT3:
1707                 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
1708                 break;
1709         default:
1710                 return -EINVAL;
1711         }
1712         return 0;
1713 }
1714
1715 int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1716 {
1717         switch (key_len) {
1718         case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
1719                 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;
1720                 break;
1721         default:
1722                 return -EINVAL;
1723         }
1724         return 0;
1725 }