1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
2  * Copyright(c) 2015-2018 Intel Corporation
3  */
4
5 #include <openssl/sha.h>        /* Needed to calculate pre-compute values */
6 #include <openssl/aes.h>        /* Needed to calculate pre-compute values */
7 #include <openssl/md5.h>        /* Needed to calculate pre-compute values */
8 #include <openssl/evp.h>        /* Needed for bpi runt block processing */
9
10 #include <rte_memcpy.h>
11 #include <rte_common.h>
12 #include <rte_spinlock.h>
13 #include <rte_byteorder.h>
14 #include <rte_log.h>
15 #include <rte_malloc.h>
16 #include <rte_crypto_sym.h>
17
18 #include "qat_logs.h"
19 #include "qat_sym_session.h"
20 #include "qat_sym_pmd.h"
21
22 /** Frees a context previously created
23  *  Depends on openssl libcrypto
24  */
25 static void
26 bpi_cipher_ctx_free(void *bpi_ctx)
27 {
28         if (bpi_ctx != NULL)
29                 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
30 }
31
32 /** Creates an AES-128 or DES context in ECB mode
33  *  Depends on openssl libcrypto
34  */
35 static int
36 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
37                 enum rte_crypto_cipher_operation direction __rte_unused,
38                 uint8_t *key, void **ctx)
39 {
40         const EVP_CIPHER *algo = NULL;
41         int ret;
42         *ctx = EVP_CIPHER_CTX_new();
43
44         if (*ctx == NULL) {
45                 ret = -ENOMEM;
46                 goto ctx_init_err;
47         }
48
49         if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
50                 algo = EVP_des_ecb();
51         else
52                 algo = EVP_aes_128_ecb();
53
54         /* IV will be ECB encrypted whether direction is encrypt or decrypt */
55         if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
56                 ret = -EINVAL;
57                 goto ctx_init_err;
58         }
59
60         return 0;
61
62 ctx_init_err:
63         if (*ctx != NULL)
64                 EVP_CIPHER_CTX_free(*ctx);
65         return ret;
66 }
67
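/* Returns 1 if the given cipher algorithm appears in this device's
 * capability table, 0 otherwise.
 */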
68 static int
69 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
70                 struct qat_sym_dev_private *internals)
71 {
72         int i = 0;
73         const struct rte_cryptodev_capabilities *capability;
74
75         while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
76                         RTE_CRYPTO_OP_TYPE_UNDEFINED) {
77                 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
78                         continue;
79
80                 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
81                         continue;
82
83                 if (capability->sym.cipher.algo == algo)
84                         return 1;
85         }
86         return 0;
87 }
88
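/* Returns 1 if the given auth algorithm appears in this device's
 * capability table, 0 otherwise.
 */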
89 static int
90 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
91                 struct qat_sym_dev_private *internals)
92 {
93         int i = 0;
94         const struct rte_cryptodev_capabilities *capability;
95
96         while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
97                         RTE_CRYPTO_OP_TYPE_UNDEFINED) {
98                 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
99                         continue;
100
101                 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
102                         continue;
103
104                 if (capability->sym.auth.algo == algo)
105                         return 1;
106         }
107         return 0;
108 }
109
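/* Clears a QAT symmetric session: frees the OpenSSL BPI context if one
 * was created, zeroes the private data, detaches it from the generic
 * session and returns it to its mempool.
 */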
110 void
111 qat_sym_session_clear(struct rte_cryptodev *dev,
112                 struct rte_cryptodev_sym_session *sess)
113 {
114         uint8_t index = dev->driver_id;
115         void *sess_priv = get_sym_session_private_data(sess, index);
116         struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;
117
118         if (sess_priv) {
119                 if (s->bpi_ctx)
120                         bpi_cipher_ctx_free(s->bpi_ctx);
121                 memset(s, 0, qat_sym_session_get_private_size(dev));
122                 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
123
124                 set_sym_session_private_data(sess, index, NULL);
125                 rte_mempool_put(sess_mp, sess_priv);
126         }
127 }
128
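/* Maps an xform chain onto a QAT firmware LA command id: cipher-only,
 * auth-only, cipher-then-hash or hash-then-cipher (AEAD is mapped
 * according to algorithm and operation direction).
 * Returns -1 for chains the PMD cannot handle.
 */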
129 static int
130 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
131 {
132         /* Cipher Only */
133         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
134                 return ICP_QAT_FW_LA_CMD_CIPHER;
135
136         /* Authentication Only */
137         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
138                 return ICP_QAT_FW_LA_CMD_AUTH;
139
140         /* AEAD */
141         if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
142                 /* AES-GCM and AES-CCM work in different orders:
143                  * GCM first encrypts and then generates the hash, whereas
144                  * AES-CCM first generates the hash and then encrypts.
145                  * A similar relation applies to decryption.
146                  */
147                 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
148                         if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
149                                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
150                         else
151                                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
152                 else
153                         if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
154                                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
155                         else
156                                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
157         }
158
159         if (xform->next == NULL)
160                 return -1;
161
162         /* Cipher then Authenticate */
163         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
164                         xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
165                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
166
167         /* Authenticate then Cipher */
168         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
169                         xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
170                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
171
172         return -1;
173 }
174
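/* Returns the first auth transform in the xform chain, or NULL if none. */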
175 static struct rte_crypto_auth_xform *
176 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
177 {
178         do {
179                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
180                         return &xform->auth;
181
182                 xform = xform->next;
183         } while (xform);
184
185         return NULL;
186 }
187
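/* Returns the first cipher transform in the xform chain, or NULL if none. */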
188 static struct rte_crypto_cipher_xform *
189 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
190 {
191         do {
192                 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
193                         return &xform->cipher;
194
195                 xform = xform->next;
196         } while (xform);
197
198         return NULL;
199 }
200
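/* Translates a cipher xform into QAT session fields: validates the key
 * size, selects the QAT algorithm/mode pair, sets the direction and
 * builds the cipher part of the content descriptor. For the DOCSIS BPI
 * algorithms an OpenSSL ECB context is also created for runt blocks.
 */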
201 int
202 qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
203                 struct rte_crypto_sym_xform *xform,
204                 struct qat_sym_session *session)
205 {
206         struct qat_sym_dev_private *internals = dev->data->dev_private;
207         struct rte_crypto_cipher_xform *cipher_xform = NULL;
208         int ret;
209
210         /* Get cipher xform from crypto xform chain */
211         cipher_xform = qat_get_cipher_xform(xform);
212
213         session->cipher_iv.offset = cipher_xform->iv.offset;
214         session->cipher_iv.length = cipher_xform->iv.length;
215
216         switch (cipher_xform->algo) {
217         case RTE_CRYPTO_CIPHER_AES_CBC:
218                 if (qat_sym_validate_aes_key(cipher_xform->key.length,
219                                 &session->qat_cipher_alg) != 0) {
220                         QAT_LOG(ERR, "Invalid AES cipher key size");
221                         ret = -EINVAL;
222                         goto error_out;
223                 }
224                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
225                 break;
226         case RTE_CRYPTO_CIPHER_AES_CTR:
227                 if (qat_sym_validate_aes_key(cipher_xform->key.length,
228                                 &session->qat_cipher_alg) != 0) {
229                         QAT_LOG(ERR, "Invalid AES cipher key size");
230                         ret = -EINVAL;
231                         goto error_out;
232                 }
233                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
234                 break;
235         case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
236                 if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
237                                         &session->qat_cipher_alg) != 0) {
238                         QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
239                         ret = -EINVAL;
240                         goto error_out;
241                 }
242                 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
243                 break;
244         case RTE_CRYPTO_CIPHER_NULL:
245                 session->qat_cipher_alg = ICP_QAT_HW_CIPHER_ALGO_NULL;
246                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
247                 break;
248         case RTE_CRYPTO_CIPHER_KASUMI_F8:
249                 if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
250                                         &session->qat_cipher_alg) != 0) {
251                         QAT_LOG(ERR, "Invalid KASUMI cipher key size");
252                         ret = -EINVAL;
253                         goto error_out;
254                 }
255                 session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
256                 break;
257         case RTE_CRYPTO_CIPHER_3DES_CBC:
258                 if (qat_sym_validate_3des_key(cipher_xform->key.length,
259                                 &session->qat_cipher_alg) != 0) {
260                         QAT_LOG(ERR, "Invalid 3DES cipher key size");
261                         ret = -EINVAL;
262                         goto error_out;
263                 }
264                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
265                 break;
266         case RTE_CRYPTO_CIPHER_DES_CBC:
267                 if (qat_sym_validate_des_key(cipher_xform->key.length,
268                                 &session->qat_cipher_alg) != 0) {
269                         QAT_LOG(ERR, "Invalid DES cipher key size");
270                         ret = -EINVAL;
271                         goto error_out;
272                 }
273                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
274                 break;
275         case RTE_CRYPTO_CIPHER_3DES_CTR:
276                 if (qat_sym_validate_3des_key(cipher_xform->key.length,
277                                 &session->qat_cipher_alg) != 0) {
278                         QAT_LOG(ERR, "Invalid 3DES cipher key size");
279                         ret = -EINVAL;
280                         goto error_out;
281                 }
282                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
283                 break;
284         case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
285                 ret = bpi_cipher_ctx_init(
286                                         cipher_xform->algo,
287                                         cipher_xform->op,
288                                         cipher_xform->key.data,
289                                         &session->bpi_ctx);
290                 if (ret != 0) {
291                         QAT_LOG(ERR, "failed to create DES BPI ctx");
292                         goto error_out;
293                 }
294                 if (qat_sym_validate_des_key(cipher_xform->key.length,
295                                 &session->qat_cipher_alg) != 0) {
296                         QAT_LOG(ERR, "Invalid DES cipher key size");
297                         ret = -EINVAL;
298                         goto error_out;
299                 }
300                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
301                 break;
302         case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
303                 ret = bpi_cipher_ctx_init(
304                                         cipher_xform->algo,
305                                         cipher_xform->op,
306                                         cipher_xform->key.data,
307                                         &session->bpi_ctx);
308                 if (ret != 0) {
309                         QAT_LOG(ERR, "failed to create AES BPI ctx");
310                         goto error_out;
311                 }
312                 if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
313                                 &session->qat_cipher_alg) != 0) {
314                         QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
315                         ret = -EINVAL;
316                         goto error_out;
317                 }
318                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
319                 break;
320         case RTE_CRYPTO_CIPHER_ZUC_EEA3:
321                 if (!qat_is_cipher_alg_supported(
322                         cipher_xform->algo, internals)) {
323                         QAT_LOG(ERR, "%s not supported on this device",
324                                 rte_crypto_cipher_algorithm_strings
325                                         [cipher_xform->algo]);
326                         ret = -ENOTSUP;
327                         goto error_out;
328                 }
329                 if (qat_sym_validate_zuc_key(cipher_xform->key.length,
330                                 &session->qat_cipher_alg) != 0) {
331                         QAT_LOG(ERR, "Invalid ZUC cipher key size");
332                         ret = -EINVAL;
333                         goto error_out;
334                 }
335                 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
336                 break;
337         case RTE_CRYPTO_CIPHER_3DES_ECB:
338         case RTE_CRYPTO_CIPHER_AES_ECB:
339         case RTE_CRYPTO_CIPHER_AES_F8:
340         case RTE_CRYPTO_CIPHER_AES_XTS:
341         case RTE_CRYPTO_CIPHER_ARC4:
342                 QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
343                                 cipher_xform->algo);
344                 ret = -ENOTSUP;
345                 goto error_out;
346         default:
347                 QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u\n",
348                                 cipher_xform->algo);
349                 ret = -EINVAL;
350                 goto error_out;
351         }
352
353         if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
354                 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
355         else
356                 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
357
358         if (qat_sym_session_aead_create_cd_cipher(session,
359                                                 cipher_xform->key.data,
360                                                 cipher_xform->key.length)) {
361                 ret = -EINVAL;
362                 goto error_out;
363         }
364
365         return 0;
366
367 error_out:
368         if (session->bpi_ctx) {
369                 bpi_cipher_ctx_free(session->bpi_ctx);
370                 session->bpi_ctx = NULL;
371         }
372         return ret;
373 }
374
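/* Session configure entry point (reached through the cryptodev PMD ops):
 * takes a private-data object from the session mempool, fills it via
 * qat_sym_session_set_parameters() and attaches it to the generic
 * session under this driver's id.
 *
 * Illustrative application-side sketch only; names such as dev_id, key,
 * IV_OFFSET, sess_mempool and sess_priv_mempool are placeholders. It
 * shows a cipher xform that would reach this function through
 * rte_cryptodev_sym_session_init():
 *
 *      struct rte_crypto_sym_xform x = {
 *              .type = RTE_CRYPTO_SYM_XFORM_CIPHER,
 *              .cipher = {
 *                      .op = RTE_CRYPTO_CIPHER_OP_ENCRYPT,
 *                      .algo = RTE_CRYPTO_CIPHER_AES_CBC,
 *                      .key = { .data = key, .length = 16 },
 *                      .iv = { .offset = IV_OFFSET, .length = 16 },
 *              },
 *      };
 *      struct rte_cryptodev_sym_session *sess =
 *              rte_cryptodev_sym_session_create(sess_mempool);
 *      rte_cryptodev_sym_session_init(dev_id, sess, &x, sess_priv_mempool);
 */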
375 int
376 qat_sym_session_configure(struct rte_cryptodev *dev,
377                 struct rte_crypto_sym_xform *xform,
378                 struct rte_cryptodev_sym_session *sess,
379                 struct rte_mempool *mempool)
380 {
381         void *sess_private_data;
382         int ret;
383
384         if (rte_mempool_get(mempool, &sess_private_data)) {
385                 CDEV_LOG_ERR(
386                         "Couldn't get object from session mempool");
387                 return -ENOMEM;
388         }
389
390         ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
391         if (ret != 0) {
392                 QAT_LOG(ERR,
393                     "Crypto QAT PMD: failed to configure session parameters");
394
395                 /* Return session to mempool */
396                 rte_mempool_put(mempool, sess_private_data);
397                 return ret;
398         }
399
400         set_sym_session_private_data(sess, dev->driver_id,
401                 sess_private_data);
402
403         return 0;
404 }
405
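/* Fills the QAT session from the xform chain: records the content
 * descriptor physical address, sets the minimum device generation,
 * derives the firmware command id and dispatches to the cipher/auth/AEAD
 * configure helpers in the order the command requires.
 */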
406 int
407 qat_sym_session_set_parameters(struct rte_cryptodev *dev,
408                 struct rte_crypto_sym_xform *xform, void *session_private)
409 {
410         struct qat_sym_session *session = session_private;
411         int ret;
412         int qat_cmd_id;
413
414         /* Set context descriptor physical address */
415         session->cd_paddr = rte_mempool_virt2iova(session) +
416                         offsetof(struct qat_sym_session, cd);
417
418         session->min_qat_dev_gen = QAT_GEN1;
419
420         /* Get requested QAT command id */
421         qat_cmd_id = qat_get_cmd_id(xform);
422         if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
423                 QAT_LOG(ERR, "Unsupported xform chain requested");
424                 return -ENOTSUP;
425         }
426         session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
427         switch (session->qat_cmd) {
428         case ICP_QAT_FW_LA_CMD_CIPHER:
429                 ret = qat_sym_session_configure_cipher(dev, xform, session);
430                 if (ret < 0)
431                         return ret;
432                 break;
433         case ICP_QAT_FW_LA_CMD_AUTH:
434                 ret = qat_sym_session_configure_auth(dev, xform, session);
435                 if (ret < 0)
436                         return ret;
437                 break;
438         case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
439                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
440                         ret = qat_sym_session_configure_aead(xform,
441                                         session);
442                         if (ret < 0)
443                                 return ret;
444                 } else {
445                         ret = qat_sym_session_configure_cipher(dev,
446                                         xform, session);
447                         if (ret < 0)
448                                 return ret;
449                         ret = qat_sym_session_configure_auth(dev,
450                                         xform, session);
451                         if (ret < 0)
452                                 return ret;
453                 }
454                 break;
455         case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
456                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
457                         ret = qat_sym_session_configure_aead(xform,
458                                         session);
459                         if (ret < 0)
460                                 return ret;
461                 } else {
462                         ret = qat_sym_session_configure_auth(dev,
463                                         xform, session);
464                         if (ret < 0)
465                                 return ret;
466                         ret = qat_sym_session_configure_cipher(dev,
467                                         xform, session);
468                         if (ret < 0)
469                                 return ret;
470                 }
471                 break;
472         case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
473         case ICP_QAT_FW_LA_CMD_TRNG_TEST:
474         case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
475         case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
476         case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
477         case ICP_QAT_FW_LA_CMD_MGF1:
478         case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
479         case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
480         case ICP_QAT_FW_LA_CMD_DELIMITER:
481                 QAT_LOG(ERR, "Unsupported Service %u",
482                                 session->qat_cmd);
483                 return -ENOTSUP;
484         default:
485                 QAT_LOG(ERR, "Unsupported Service %u",
486                                 session->qat_cmd);
487                 return -ENOTSUP;
488         }
489
490         return 0;
491 }
492
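/* Translates an auth xform into QAT session fields and builds the auth
 * part of the content descriptor. AES-GMAC is configured internally as a
 * combined cipher+hash descriptor before the command is restored to
 * plain authentication.
 */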
493 int
494 qat_sym_session_configure_auth(struct rte_cryptodev *dev,
495                                 struct rte_crypto_sym_xform *xform,
496                                 struct qat_sym_session *session)
497 {
498         struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
499         struct qat_sym_dev_private *internals = dev->data->dev_private;
500         uint8_t *key_data = auth_xform->key.data;
501         uint8_t key_length = auth_xform->key.length;
502         session->aes_cmac = 0;
503
504         switch (auth_xform->algo) {
505         case RTE_CRYPTO_AUTH_SHA1_HMAC:
506                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
507                 break;
508         case RTE_CRYPTO_AUTH_SHA224_HMAC:
509                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
510                 break;
511         case RTE_CRYPTO_AUTH_SHA256_HMAC:
512                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
513                 break;
514         case RTE_CRYPTO_AUTH_SHA384_HMAC:
515                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
516                 break;
517         case RTE_CRYPTO_AUTH_SHA512_HMAC:
518                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
519                 break;
520         case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
521                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
522                 break;
523         case RTE_CRYPTO_AUTH_AES_CMAC:
524                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
525                 session->aes_cmac = 1;
526                 break;
527         case RTE_CRYPTO_AUTH_AES_GMAC:
528                 if (qat_sym_validate_aes_key(auth_xform->key.length,
529                                 &session->qat_cipher_alg) != 0) {
530                         QAT_LOG(ERR, "Invalid AES key size");
531                         return -EINVAL;
532                 }
533                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
534                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
535
536                 break;
537         case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
538                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
539                 break;
540         case RTE_CRYPTO_AUTH_MD5_HMAC:
541                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
542                 break;
543         case RTE_CRYPTO_AUTH_NULL:
544                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
545                 break;
546         case RTE_CRYPTO_AUTH_KASUMI_F9:
547                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
548                 break;
549         case RTE_CRYPTO_AUTH_ZUC_EIA3:
550                 if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
551                         QAT_LOG(ERR, "%s not supported on this device",
552                                 rte_crypto_auth_algorithm_strings
553                                 [auth_xform->algo]);
554                         return -ENOTSUP;
555                 }
556                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
557                 break;
558         case RTE_CRYPTO_AUTH_SHA1:
559         case RTE_CRYPTO_AUTH_SHA256:
560         case RTE_CRYPTO_AUTH_SHA512:
561         case RTE_CRYPTO_AUTH_SHA224:
562         case RTE_CRYPTO_AUTH_SHA384:
563         case RTE_CRYPTO_AUTH_MD5:
564         case RTE_CRYPTO_AUTH_AES_CBC_MAC:
565                 QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
566                                 auth_xform->algo);
567                 return -ENOTSUP;
568         default:
569                 QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
570                                 auth_xform->algo);
571                 return -EINVAL;
572         }
573
574         session->auth_iv.offset = auth_xform->iv.offset;
575         session->auth_iv.length = auth_xform->iv.length;
576
577         if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
578                 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
579                         session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
580                         session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
581                         /*
582                          * The cipher descriptor content must be created
583                          * first, followed by the authentication content.
584                          */
585
586                         if (qat_sym_session_aead_create_cd_cipher(session,
587                                                 auth_xform->key.data,
588                                                 auth_xform->key.length))
589                                 return -EINVAL;
590
591                         if (qat_sym_session_aead_create_cd_auth(session,
592                                                 key_data,
593                                                 key_length,
594                                                 0,
595                                                 auth_xform->digest_length,
596                                                 auth_xform->op))
597                                 return -EINVAL;
598                 } else {
599                         session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
600                         session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
601                         /*
602                          * The authentication descriptor content must be
603                          * created first, followed by the cipher content.
604                          */
605
606                         if (qat_sym_session_aead_create_cd_auth(session,
607                                         key_data,
608                                         key_length,
609                                         0,
610                                         auth_xform->digest_length,
611                                         auth_xform->op))
612                                 return -EINVAL;
613
614                         if (qat_sym_session_aead_create_cd_cipher(session,
615                                                 auth_xform->key.data,
616                                                 auth_xform->key.length))
617                                 return -EINVAL;
618                 }
619                 /* Restore to authentication only */
620                 session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
621         } else {
622                 if (qat_sym_session_aead_create_cd_auth(session,
623                                 key_data,
624                                 key_length,
625                                 0,
626                                 auth_xform->digest_length,
627                                 auth_xform->op))
628                         return -EINVAL;
629         }
630
631         session->digest_length = auth_xform->digest_length;
632         return 0;
633 }
634
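/* Translates an AEAD xform (AES-GCM or AES-CCM) into QAT session fields
 * and builds the cipher and auth parts of the content descriptor in the
 * order required by the algorithm and operation direction.
 */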
635 int
636 qat_sym_session_configure_aead(struct rte_crypto_sym_xform *xform,
637                                 struct qat_sym_session *session)
638 {
639         struct rte_crypto_aead_xform *aead_xform = &xform->aead;
640         enum rte_crypto_auth_operation crypto_operation;
641
642         /*
643          * Store AEAD IV parameters as cipher IV,
644          * to avoid unnecessary memory usage
645          */
646         session->cipher_iv.offset = xform->aead.iv.offset;
647         session->cipher_iv.length = xform->aead.iv.length;
648
649         switch (aead_xform->algo) {
650         case RTE_CRYPTO_AEAD_AES_GCM:
651                 if (qat_sym_validate_aes_key(aead_xform->key.length,
652                                 &session->qat_cipher_alg) != 0) {
653                         QAT_LOG(ERR, "Invalid AES key size");
654                         return -EINVAL;
655                 }
656                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
657                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
658                 break;
659         case RTE_CRYPTO_AEAD_AES_CCM:
660                 if (qat_sym_validate_aes_key(aead_xform->key.length,
661                                 &session->qat_cipher_alg) != 0) {
662                         QAT_LOG(ERR, "Invalid AES key size");
663                         return -EINVAL;
664                 }
665                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
666                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
667                 break;
668         default:
669                 QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
670                                 aead_xform->algo);
671                 return -EINVAL;
672         }
673
674         if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
675                         aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
676                         (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
677                         aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
678                 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
679                 /*
680                  * The cipher descriptor content must be created first,
681                  * followed by the authentication content.
682                  */
683                 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
684                         RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;
685
686                 if (qat_sym_session_aead_create_cd_cipher(session,
687                                         aead_xform->key.data,
688                                         aead_xform->key.length))
689                         return -EINVAL;
690
691                 if (qat_sym_session_aead_create_cd_auth(session,
692                                         aead_xform->key.data,
693                                         aead_xform->key.length,
694                                         aead_xform->aad_length,
695                                         aead_xform->digest_length,
696                                         crypto_operation))
697                         return -EINVAL;
698         } else {
699                 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
700                 /*
701                  * The authentication descriptor content must be created
702                  * first, followed by the cipher content.
703                  */
704
705                 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
706                         RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;
707
708                 if (qat_sym_session_aead_create_cd_auth(session,
709                                         aead_xform->key.data,
710                                         aead_xform->key.length,
711                                         aead_xform->aad_length,
712                                         aead_xform->digest_length,
713                                         crypto_operation))
714                         return -EINVAL;
715
716                 if (qat_sym_session_aead_create_cd_cipher(session,
717                                         aead_xform->key.data,
718                                         aead_xform->key.length))
719                         return -EINVAL;
720         }
721
722         session->digest_length = aead_xform->digest_length;
723         return 0;
724 }
725
726 unsigned int qat_sym_session_get_private_size(
727                 struct rte_cryptodev *dev __rte_unused)
728 {
729         return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
730 }
731
732 /* returns block size in bytes per cipher algo */
733 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
734 {
735         switch (qat_cipher_alg) {
736         case ICP_QAT_HW_CIPHER_ALGO_DES:
737                 return ICP_QAT_HW_DES_BLK_SZ;
738         case ICP_QAT_HW_CIPHER_ALGO_3DES:
739                 return ICP_QAT_HW_3DES_BLK_SZ;
740         case ICP_QAT_HW_CIPHER_ALGO_AES128:
741         case ICP_QAT_HW_CIPHER_ALGO_AES192:
742         case ICP_QAT_HW_CIPHER_ALGO_AES256:
743                 return ICP_QAT_HW_AES_BLK_SZ;
744         default:
745                 QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
746                 return -EFAULT;
747         };
748         return -EFAULT;
749 }
750
751 /*
752  * Returns size in bytes per hash algo for state1 size field in cd_ctrl
753  * This is digest size rounded up to nearest quadword
754  */
755 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
756 {
757         switch (qat_hash_alg) {
758         case ICP_QAT_HW_AUTH_ALGO_SHA1:
759                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
760                                                 QAT_HW_DEFAULT_ALIGNMENT);
761         case ICP_QAT_HW_AUTH_ALGO_SHA224:
762                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
763                                                 QAT_HW_DEFAULT_ALIGNMENT);
764         case ICP_QAT_HW_AUTH_ALGO_SHA256:
765                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
766                                                 QAT_HW_DEFAULT_ALIGNMENT);
767         case ICP_QAT_HW_AUTH_ALGO_SHA384:
768                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
769                                                 QAT_HW_DEFAULT_ALIGNMENT);
770         case ICP_QAT_HW_AUTH_ALGO_SHA512:
771                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
772                                                 QAT_HW_DEFAULT_ALIGNMENT);
773         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
774                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
775                                                 QAT_HW_DEFAULT_ALIGNMENT);
776         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
777         case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
778                 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
779                                                 QAT_HW_DEFAULT_ALIGNMENT);
780         case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
781                 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
782                                                 QAT_HW_DEFAULT_ALIGNMENT);
783         case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
784                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
785                                                 QAT_HW_DEFAULT_ALIGNMENT);
786         case ICP_QAT_HW_AUTH_ALGO_MD5:
787                 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
788                                                 QAT_HW_DEFAULT_ALIGNMENT);
789         case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
790                 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
791                                                 QAT_HW_DEFAULT_ALIGNMENT);
792         case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
793                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
794                                                 QAT_HW_DEFAULT_ALIGNMENT);
795         case ICP_QAT_HW_AUTH_ALGO_NULL:
796                 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
797                                                 QAT_HW_DEFAULT_ALIGNMENT);
798         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
799                 /* return maximum state1 size in this case */
800                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
801                                                 QAT_HW_DEFAULT_ALIGNMENT);
802         default:
803                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
804                 return -EFAULT;
805         };
806         return -EFAULT;
807 }
808
809 /* returns digest size in bytes per hash algo */
810 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
811 {
812         switch (qat_hash_alg) {
813         case ICP_QAT_HW_AUTH_ALGO_SHA1:
814                 return ICP_QAT_HW_SHA1_STATE1_SZ;
815         case ICP_QAT_HW_AUTH_ALGO_SHA224:
816                 return ICP_QAT_HW_SHA224_STATE1_SZ;
817         case ICP_QAT_HW_AUTH_ALGO_SHA256:
818                 return ICP_QAT_HW_SHA256_STATE1_SZ;
819         case ICP_QAT_HW_AUTH_ALGO_SHA384:
820                 return ICP_QAT_HW_SHA384_STATE1_SZ;
821         case ICP_QAT_HW_AUTH_ALGO_SHA512:
822                 return ICP_QAT_HW_SHA512_STATE1_SZ;
823         case ICP_QAT_HW_AUTH_ALGO_MD5:
824                 return ICP_QAT_HW_MD5_STATE1_SZ;
825         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
826                 return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
827         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
828                 /* return maximum digest size in this case */
829                 return ICP_QAT_HW_SHA512_STATE1_SZ;
830         default:
831                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
832                 return -EFAULT;
833         };
834         return -EFAULT;
835 }
836
837 /* returns block size in bytes per hash algo */
838 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
839 {
840         switch (qat_hash_alg) {
841         case ICP_QAT_HW_AUTH_ALGO_SHA1:
842                 return SHA_CBLOCK;
843         case ICP_QAT_HW_AUTH_ALGO_SHA224:
844                 return SHA256_CBLOCK;
845         case ICP_QAT_HW_AUTH_ALGO_SHA256:
846                 return SHA256_CBLOCK;
847         case ICP_QAT_HW_AUTH_ALGO_SHA384:
848                 return SHA512_CBLOCK;
849         case ICP_QAT_HW_AUTH_ALGO_SHA512:
850                 return SHA512_CBLOCK;
851         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
852                 return 16;
853         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
854                 return ICP_QAT_HW_AES_BLK_SZ;
855         case ICP_QAT_HW_AUTH_ALGO_MD5:
856                 return MD5_CBLOCK;
857         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
858                 /* return maximum block size in this case */
859                 return SHA512_CBLOCK;
860         default:
861                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
862                 return -EFAULT;
863         };
864         return -EFAULT;
865 }
866
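/* The partial_hash_* helpers below run a single compression-function
 * transform over one block of input and copy out the raw intermediate
 * state (not a finalised digest); these intermediate states are used as
 * the HMAC ipad/opad precomputes further below.
 */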
867 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
868 {
869         SHA_CTX ctx;
870
871         if (!SHA1_Init(&ctx))
872                 return -EFAULT;
873         SHA1_Transform(&ctx, data_in);
874         rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
875         return 0;
876 }
877
878 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
879 {
880         SHA256_CTX ctx;
881
882         if (!SHA224_Init(&ctx))
883                 return -EFAULT;
884         SHA256_Transform(&ctx, data_in);
885         rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
886         return 0;
887 }
888
889 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
890 {
891         SHA256_CTX ctx;
892
893         if (!SHA256_Init(&ctx))
894                 return -EFAULT;
895         SHA256_Transform(&ctx, data_in);
896         rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
897         return 0;
898 }
899
900 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
901 {
902         SHA512_CTX ctx;
903
904         if (!SHA384_Init(&ctx))
905                 return -EFAULT;
906         SHA512_Transform(&ctx, data_in);
907         rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
908         return 0;
909 }
910
911 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
912 {
913         SHA512_CTX ctx;
914
915         if (!SHA512_Init(&ctx))
916                 return -EFAULT;
917         SHA512_Transform(&ctx, data_in);
918         rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
919         return 0;
920 }
921
922 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
923 {
924         MD5_CTX ctx;
925
926         if (!MD5_Init(&ctx))
927                 return -EFAULT;
928         MD5_Transform(&ctx, data_in);
929         rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
930
931         return 0;
932 }
933
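/* Computes the single-block partial hash for the given algorithm and
 * stores the intermediate state in data_out, byte-swapped into
 * big-endian word order (the MD5 state is copied as-is).
 */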
934 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
935                         uint8_t *data_in,
936                         uint8_t *data_out)
937 {
938         int digest_size;
939         uint8_t digest[qat_hash_get_digest_size(
940                         ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
941         uint32_t *hash_state_out_be32;
942         uint64_t *hash_state_out_be64;
943         int i;
944
945         digest_size = qat_hash_get_digest_size(hash_alg);
946         if (digest_size <= 0)
947                 return -EFAULT;
948
949         hash_state_out_be32 = (uint32_t *)data_out;
950         hash_state_out_be64 = (uint64_t *)data_out;
951
952         switch (hash_alg) {
953         case ICP_QAT_HW_AUTH_ALGO_SHA1:
954                 if (partial_hash_sha1(data_in, digest))
955                         return -EFAULT;
956                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
957                         *hash_state_out_be32 =
958                                 rte_bswap32(*(((uint32_t *)digest)+i));
959                 break;
960         case ICP_QAT_HW_AUTH_ALGO_SHA224:
961                 if (partial_hash_sha224(data_in, digest))
962                         return -EFAULT;
963                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
964                         *hash_state_out_be32 =
965                                 rte_bswap32(*(((uint32_t *)digest)+i));
966                 break;
967         case ICP_QAT_HW_AUTH_ALGO_SHA256:
968                 if (partial_hash_sha256(data_in, digest))
969                         return -EFAULT;
970                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
971                         *hash_state_out_be32 =
972                                 rte_bswap32(*(((uint32_t *)digest)+i));
973                 break;
974         case ICP_QAT_HW_AUTH_ALGO_SHA384:
975                 if (partial_hash_sha384(data_in, digest))
976                         return -EFAULT;
977                 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
978                         *hash_state_out_be64 =
979                                 rte_bswap64(*(((uint64_t *)digest)+i));
980                 break;
981         case ICP_QAT_HW_AUTH_ALGO_SHA512:
982                 if (partial_hash_sha512(data_in, digest))
983                         return -EFAULT;
984                 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
985                         *hash_state_out_be64 =
986                                 rte_bswap64(*(((uint64_t *)digest)+i));
987                 break;
988         case ICP_QAT_HW_AUTH_ALGO_MD5:
989                 if (partial_hash_md5(data_in, data_out))
990                         return -EFAULT;
991                 break;
992         default:
993                 QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
994                 return -EFAULT;
995         }
996
997         return 0;
998 }
999 #define HMAC_IPAD_VALUE 0x36
1000 #define HMAC_OPAD_VALUE 0x5c
1001 #define HASH_XCBC_PRECOMP_KEY_NUM 3
1002
1003 static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];
1004
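/* Derives an AES-CMAC subkey as per NIST SP 800-38B: left-shift the
 * input block by one bit and, if the original MSB was set, XOR the last
 * byte with the constant Rb (QAT_AES_CMAC_CONST_RB).
 */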
1005 static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
1006 {
1007         int i;
1008
1009         derived[0] = base[0] << 1;
1010         for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
1011                 derived[i] = base[i] << 1;
1012                 derived[i - 1] |= base[i] >> 7;
1013         }
1014
1015         if (base[0] & 0x80)
1016                 derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
1017 }
1018
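/* Builds the hash state (precompute) buffer for the session:
 *  - AES-XCBC-MAC: derives the three subkeys by encrypting fixed seeds,
 *  - AES-CMAC: stores the key plus the derived K1/K2 subkeys,
 *  - GCM/GMAC (Galois): computes the GHASH key H = E_K(0^128),
 *  - HMAC algorithms: stores the partial hashes of the key XOR ipad and
 *    key XOR opad blocks back to back.
 */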
1019 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
1020                                 const uint8_t *auth_key,
1021                                 uint16_t auth_keylen,
1022                                 uint8_t *p_state_buf,
1023                                 uint16_t *p_state_len,
1024                                 uint8_t aes_cmac)
1025 {
1026         int block_size;
1027         uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1028         uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1029         int i;
1030
1031         if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
1032
1033                 /* CMAC */
1034                 if (aes_cmac) {
1035                         AES_KEY enc_key;
1036                         uint8_t *in = NULL;
1037                         uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
1038                         uint8_t *k1, *k2;
1039
1040                         auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;
1041
1042                         in = rte_zmalloc("AES CMAC K1",
1043                                          ICP_QAT_HW_AES_128_KEY_SZ, 16);
1044
1045                         if (in == NULL) {
1046                                 QAT_LOG(ERR, "Failed to alloc memory");
1047                                 return -ENOMEM;
1048                         }
1049
1050                         rte_memcpy(in, AES_CMAC_SEED,
1051                                    ICP_QAT_HW_AES_128_KEY_SZ);
1052                         rte_memcpy(p_state_buf, auth_key, auth_keylen);
1053
1054                         if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1055                                 &enc_key) != 0) {
1056                                 rte_free(in);
1057                                 return -EFAULT;
1058                         }
1059
1060                         AES_encrypt(in, k0, &enc_key);
1061
1062                         k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1063                         k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1064
1065                         aes_cmac_key_derive(k0, k1);
1066                         aes_cmac_key_derive(k1, k2);
1067
1068                         memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
1069                         *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1070                         rte_free(in);
1071                         return 0;
1072                 } else {
1073                         static uint8_t qat_aes_xcbc_key_seed[
1074                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1075                                 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1076                                 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1077                                 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1078                                 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1079                                 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1080                                 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1081                         };
1082
1083                         uint8_t *in = NULL;
1084                         uint8_t *out = p_state_buf;
1085                         int x;
1086                         AES_KEY enc_key;
1087
1088                         in = rte_zmalloc("working mem for key",
1089                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1090                         if (in == NULL) {
1091                                 QAT_LOG(ERR, "Failed to alloc memory");
1092                                 return -ENOMEM;
1093                         }
1094
1095                         rte_memcpy(in, qat_aes_xcbc_key_seed,
1096                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1097                         for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1098                                 if (AES_set_encrypt_key(auth_key,
1099                                                         auth_keylen << 3,
1100                                                         &enc_key) != 0) {
1101                                         rte_free(in -
1102                                           (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1103                                         memset(out -
1104                                            (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1105                                           0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1106                                         return -EFAULT;
1107                                 }
1108                                 AES_encrypt(in, out, &enc_key);
1109                                 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1110                                 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1111                         }
1112                         *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1113                         rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1114                         return 0;
1115                 }
1116
1117         } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1118                 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
1119                 uint8_t *in = NULL;
1120                 uint8_t *out = p_state_buf;
1121                 AES_KEY enc_key;
1122
1123                 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1124                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1125                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1126                 in = rte_zmalloc("working mem for key",
1127                                 ICP_QAT_HW_GALOIS_H_SZ, 16);
1128                 if (in == NULL) {
1129                         QAT_LOG(ERR, "Failed to alloc memory");
1130                         return -ENOMEM;
1131                 }
1132
1133                 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1134                 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1135                         &enc_key) != 0) {
1136                         rte_free(in);
                             return -EFAULT;
1137                 }
1138                 AES_encrypt(in, out, &enc_key);
1139                 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1140                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1141                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
1142                 rte_free(in);
1143                 return 0;
1144         }
1145
1146         block_size = qat_hash_get_block_size(hash_alg);
1147         if (block_size < 0)
1148                 return block_size;
1149         /* init ipad and opad from key and xor with fixed values */
1150         memset(ipad, 0, block_size);
1151         memset(opad, 0, block_size);
1152
1153         if (auth_keylen > (unsigned int)block_size) {
1154                 QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
1155                 return -EFAULT;
1156         }
1157         rte_memcpy(ipad, auth_key, auth_keylen);
1158         rte_memcpy(opad, auth_key, auth_keylen);
1159
1160         for (i = 0; i < block_size; i++) {
1161                 uint8_t *ipad_ptr = ipad + i;
1162                 uint8_t *opad_ptr = opad + i;
1163                 *ipad_ptr ^= HMAC_IPAD_VALUE;
1164                 *opad_ptr ^= HMAC_OPAD_VALUE;
1165         }
1166
1167         /* do partial hash of ipad and copy to state1 */
1168         if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
1169                 memset(ipad, 0, block_size);
1170                 memset(opad, 0, block_size);
1171                 QAT_LOG(ERR, "ipad precompute failed");
1172                 return -EFAULT;
1173         }
1174
1175         /*
1176          * State len is a multiple of 8, so may be larger than the digest.
1177          * Put the partial hash of opad state_len bytes after state1
1178          */
1179         *p_state_len = qat_hash_get_state1_size(hash_alg);
1180         if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1181                 memset(ipad, 0, block_size);
1182                 memset(opad, 0, block_size);
1183                 QAT_LOG(ERR, "opad precompute failed");
1184                 return -EFAULT;
1185         }
1186
1187         /*  don't leave data lying around */
1188         memset(ipad, 0, block_size);
1189         memset(opad, 0, block_size);
1190         return 0;
1191 }
1192
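/* Initialises the common firmware request header: LA service type,
 * flat buffers with a 64-bit content-descriptor address, no partial
 * processing, 16-byte IV field, and the protocol flag
 * (none/CCM/GCM/SNOW 3G/ZUC) selected for the session.
 */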
1193 static void
1194 qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
1195                 enum qat_sym_proto_flag proto_flags)
1196 {
1197         header->hdr_flags =
1198                 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
1199         header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
1200         header->comn_req_flags =
1201                 ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
1202                                         QAT_COMN_PTR_TYPE_FLAT);
1203         ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
1204                                   ICP_QAT_FW_LA_PARTIAL_NONE);
1205         ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
1206                                            ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
1207
1208         switch (proto_flags) {
1209         case QAT_CRYPTO_PROTO_FLAG_NONE:
1210                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1211                                         ICP_QAT_FW_LA_NO_PROTO);
1212                 break;
1213         case QAT_CRYPTO_PROTO_FLAG_CCM:
1214                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1215                                         ICP_QAT_FW_LA_CCM_PROTO);
1216                 break;
1217         case QAT_CRYPTO_PROTO_FLAG_GCM:
1218                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1219                                         ICP_QAT_FW_LA_GCM_PROTO);
1220                 break;
1221         case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
1222                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1223                                         ICP_QAT_FW_LA_SNOW_3G_PROTO);
1224                 break;
1225         case QAT_CRYPTO_PROTO_FLAG_ZUC:
1226                 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
1227                         ICP_QAT_FW_LA_ZUC_3G_PROTO);
1228                 break;
1229         }
1230
1231         ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
1232                                            ICP_QAT_FW_LA_NO_UPDATE_STATE);
1233         ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
1234                                         ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
1235 }
1236
1237 /*
1238  *      SNOW 3G and ZUC should never use this function; they set
1239  *      their protocol flag directly in both the cipher and auth parts
1240  *      of the content descriptor building functions.
1241  */
1242 static enum qat_sym_proto_flag
1243 qat_get_crypto_proto_flag(uint16_t flags)
1244 {
1245         int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1246         enum qat_sym_proto_flag qat_proto_flag =
1247                         QAT_CRYPTO_PROTO_FLAG_NONE;
1248
1249         switch (proto) {
1250         case ICP_QAT_FW_LA_GCM_PROTO:
1251                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1252                 break;
1253         case ICP_QAT_FW_LA_CCM_PROTO:
1254                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1255                 break;
1256         }
1257
1258         return qat_proto_flag;
1259 }
1260
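/* Builds the cipher part of the content descriptor: sets the slice
 * chaining for cipher-only or cipher+hash commands, decides whether the
 * hardware must convert (expand) the key, and fills in the
 * algorithm-specific key and cipher-state sizes.
 */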
1261 int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
1262                                                 uint8_t *cipherkey,
1263                                                 uint32_t cipherkeylen)
1264 {
1265         struct icp_qat_hw_cipher_algo_blk *cipher;
1266         struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1267         struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1268         struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1269         void *ptr = &req_tmpl->cd_ctrl;
1270         struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1271         struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1272         enum icp_qat_hw_cipher_convert key_convert;
1273         enum qat_sym_proto_flag qat_proto_flag =
1274                 QAT_CRYPTO_PROTO_FLAG_NONE;
1275         uint32_t total_key_size;
1276         uint16_t cipher_offset, cd_size;
1277         uint32_t wordIndex  = 0;
1278         uint32_t *temp_key = NULL;
1279
1280         if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
1281                 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1282                 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1283                                         ICP_QAT_FW_SLICE_CIPHER);
1284                 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1285                                         ICP_QAT_FW_SLICE_DRAM_WR);
1286                 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1287                                         ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1288                 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1289                                         ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1290                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1291         } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1292                 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1293                 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1294                                         ICP_QAT_FW_SLICE_CIPHER);
1295                 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1296                                         ICP_QAT_FW_SLICE_AUTH);
1297                 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1298                                         ICP_QAT_FW_SLICE_AUTH);
1299                 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1300                                         ICP_QAT_FW_SLICE_DRAM_WR);
1301                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1302         } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1303                 QAT_LOG(ERR, "Invalid param, must be a cipher command.");
1304                 return -EFAULT;
1305         }
1306
1307         if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
1308                 /*
1309                  * CTR streaming ciphers are a special case: decrypt == encrypt,
1310                  * so override the default values previously set.
1311                  */
1312                 cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
1313                 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1314         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
1315                 || cdesc->qat_cipher_alg ==
1316                         ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
1317                 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1318         else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
1319                 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1320         else
1321                 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1322
1323         if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
1324                 total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
1325                         ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1326                 cipher_cd_ctrl->cipher_state_sz =
1327                         ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1328                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1329
1330         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1331                 total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
1332                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
1333                 cipher_cd_ctrl->cipher_padding_sz =
1334                                         (2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
1335         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
1336                 total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
1337                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
1338                 qat_proto_flag =
1339                         qat_get_crypto_proto_flag(header->serv_specif_flags);
1340         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
1341                 total_key_size = ICP_QAT_HW_DES_KEY_SZ;
1342                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
1343                 qat_proto_flag =
1344                         qat_get_crypto_proto_flag(header->serv_specif_flags);
1345         } else if (cdesc->qat_cipher_alg ==
1346                 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
1347                 total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
1348                         ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1349                 cipher_cd_ctrl->cipher_state_sz =
1350                         ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1351                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1352                 cdesc->min_qat_dev_gen = QAT_GEN2;
1353         } else {
1354                 total_key_size = cipherkeylen;
1355                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
1356                 qat_proto_flag =
1357                         qat_get_crypto_proto_flag(header->serv_specif_flags);
1358         }
1359         cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
1360         cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1361         cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;
1362
1363         header->service_cmd_id = cdesc->qat_cmd;
1364         qat_sym_session_init_common_hdr(header, qat_proto_flag);
1365
1366         cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
1367         cipher->cipher_config.val =
1368             ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
1369                                         cdesc->qat_cipher_alg, key_convert,
1370                                         cdesc->qat_dir);
1371
1372         if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1373                 temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
1374                                         sizeof(struct icp_qat_hw_cipher_config)
1375                                         + cipherkeylen);
1376                 memcpy(cipher->key, cipherkey, cipherkeylen);
1377                 memcpy(temp_key, cipherkey, cipherkeylen);
1378
1379                 /* XOR Key with KASUMI F8 key modifier at 4 bytes level */
1380                 for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
1381                                                                 wordIndex++)
1382                         temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;
1383
1384                 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1385                                         cipherkeylen + cipherkeylen;
1386         } else {
1387                 memcpy(cipher->key, cipherkey, cipherkeylen);
1388                 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1389                                         cipherkeylen;
1390         }
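
        /*
         * Worked example of the KASUMI F8 key-modifier step above
         * (illustrative only; assumes KASUMI_F8_KEY_MODIFIER_4_BYTES carries
         * the 0x55555555 pattern used by F8): each 32-bit key word is XOR-ed
         * with the modifier, e.g.
         *
         *	0x01234567 ^ 0x55555555 == 0x54761032
         *
         * so the descriptor ends up holding the key followed by the modified
         * key, and cd_cur_ptr advances by sizeof(cipher_config) plus twice
         * the key length.
         */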
1391
1392         if (total_key_size > cipherkeylen) {
1393                 uint32_t padding_size = total_key_size - cipherkeylen;
1394                 if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1395                         && (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
1396                         /* K3 not provided, so use K3 = K1 */
1397                         memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
1398                 } else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1399                         && (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
1400                         /* K2 and K3 not provided, so use K2 = K3 = K1 */
1401                         memcpy(cdesc->cd_cur_ptr, cipherkey,
1402                                 cipherkeylen);
1403                         memcpy(cdesc->cd_cur_ptr+cipherkeylen,
1404                                 cipherkey, cipherkeylen);
1405                 } else
1406                         memset(cdesc->cd_cur_ptr, 0, padding_size);
1407
1408                 cdesc->cd_cur_ptr += padding_size;
1409         }
1410         cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1411         cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1412
1413         return 0;
1414 }
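
/*
 * Minimal usage sketch (illustrative only; variable names are hypothetical,
 * the real call sites are the session configure helpers of this PMD): once
 * the session's qat_cmd, qat_mode, qat_dir and qat_cipher_alg fields are set,
 * the cipher half of the content descriptor is built from the raw key:
 *
 *	if (qat_sym_session_aead_create_cd_cipher(session,
 *			cipher_xform->key.data, cipher_xform->key.length))
 *		return -EINVAL;
 */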
1415
1416 int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
1417                                                 uint8_t *authkey,
1418                                                 uint32_t authkeylen,
1419                                                 uint32_t aad_length,
1420                                                 uint32_t digestsize,
1421                                                 unsigned int operation)
1422 {
1423         struct icp_qat_hw_auth_setup *hash;
1424         struct icp_qat_hw_cipher_algo_blk *cipherconfig;
1425         struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1426         struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1427         struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1428         void *ptr = &req_tmpl->cd_ctrl;
1429         struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1430         struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1431         struct icp_qat_fw_la_auth_req_params *auth_param =
1432                 (struct icp_qat_fw_la_auth_req_params *)
1433                 ((char *)&req_tmpl->serv_specif_rqpars +
1434                 sizeof(struct icp_qat_fw_la_cipher_req_params));
1435         uint16_t state1_size = 0, state2_size = 0;
1436         uint16_t hash_offset, cd_size;
1437         uint32_t *aad_len = NULL;
1438         uint32_t wordIndex  = 0;
1439         uint32_t *pTempKey;
1440         enum qat_sym_proto_flag qat_proto_flag =
1441                 QAT_CRYPTO_PROTO_FLAG_NONE;
1442
1443         if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
1444                 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1445                                         ICP_QAT_FW_SLICE_AUTH);
1446                 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1447                                         ICP_QAT_FW_SLICE_DRAM_WR);
1448                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1449         } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1450                 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1451                                 ICP_QAT_FW_SLICE_AUTH);
1452                 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1453                                 ICP_QAT_FW_SLICE_CIPHER);
1454                 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1455                                 ICP_QAT_FW_SLICE_CIPHER);
1456                 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1457                                 ICP_QAT_FW_SLICE_DRAM_WR);
1458                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1459         } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1460                 QAT_LOG(ERR, "Invalid param, must be a hash command.");
1461                 return -EFAULT;
1462         }
1463
1464         if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
1465                 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1466                                 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1467                 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1468                                 ICP_QAT_FW_LA_CMP_AUTH_RES);
1469                 cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
1470         } else {
1471                 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1472                                            ICP_QAT_FW_LA_RET_AUTH_RES);
1473                 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1474                                            ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1475                 cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
1476         }
1477
1478         /*
1479          * Set up the inner hash config.
1480          */
1481         hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1482         hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
1483         hash->auth_config.reserved = 0;
1484         hash->auth_config.config =
1485                         ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
1486                                 cdesc->qat_hash_alg, digestsize);
1487
1488         if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
1489                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
1490                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
1491                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
1492                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
1493                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
1494                         )
1495                 hash->auth_counter.counter = 0;
1496         else {
1497                 int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);
1498
1499                 if (block_size < 0)
1500                         return block_size;
1501                 hash->auth_counter.counter = rte_bswap32(block_size);
1502         }
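
        /*
         * Illustrative note: for the hash algorithms taking the else branch
         * above, the counter is seeded with the algorithm block size in
         * byte-swapped form, e.g. SHA-256 has a 64-byte block, so
         * auth_counter.counter is set to rte_bswap32(64) == 0x40000000.
         */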
1503
1504         cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);
1505
1506         /*
1507          * cd_cur_ptr now points at the state1 information.
1508          */
1509         switch (cdesc->qat_hash_alg) {
1510         case ICP_QAT_HW_AUTH_ALGO_SHA1:
1511                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
1512                         authkeylen, cdesc->cd_cur_ptr, &state1_size,
1513                         cdesc->aes_cmac)) {
1514                         QAT_LOG(ERR, "(SHA)precompute failed");
1515                         return -EFAULT;
1516                 }
1517                 state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
1518                 break;
1519         case ICP_QAT_HW_AUTH_ALGO_SHA224:
1520                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
1521                         authkeylen, cdesc->cd_cur_ptr, &state1_size,
1522                         cdesc->aes_cmac)) {
1523                         QAT_LOG(ERR, "(SHA)precompute failed");
1524                         return -EFAULT;
1525                 }
1526                 state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
1527                 break;
1528         case ICP_QAT_HW_AUTH_ALGO_SHA256:
1529                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
1530                         authkeylen, cdesc->cd_cur_ptr,  &state1_size,
1531                         cdesc->aes_cmac)) {
1532                         QAT_LOG(ERR, "(SHA)precompute failed");
1533                         return -EFAULT;
1534                 }
1535                 state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
1536                 break;
1537         case ICP_QAT_HW_AUTH_ALGO_SHA384:
1538                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
1539                         authkeylen, cdesc->cd_cur_ptr, &state1_size,
1540                         cdesc->aes_cmac)) {
1541                         QAT_LOG(ERR, "(SHA)precompute failed");
1542                         return -EFAULT;
1543                 }
1544                 state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
1545                 break;
1546         case ICP_QAT_HW_AUTH_ALGO_SHA512:
1547                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
1548                         authkeylen, cdesc->cd_cur_ptr,  &state1_size,
1549                         cdesc->aes_cmac)) {
1550                         QAT_LOG(ERR, "(SHA)precompute failed");
1551                         return -EFAULT;
1552                 }
1553                 state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
1554                 break;
1555         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1556                 state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1557
1558                 if (cdesc->aes_cmac)
1559                         memset(cdesc->cd_cur_ptr, 0, state1_size);
1560                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
1561                         authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
1562                         &state2_size, cdesc->aes_cmac)) {
1563                         cdesc->aes_cmac ? QAT_LOG(ERR,
1564                                                   "(CMAC)precompute failed")
1565                                         : QAT_LOG(ERR,
1566                                                   "(XCBC)precompute failed");
1567                         return -EFAULT;
1568                 }
1569                 break;
1570         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1571         case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
1572                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1573                 state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
1574                 if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
1575                         authkeylen, cdesc->cd_cur_ptr + state1_size,
1576                         &state2_size, cdesc->aes_cmac)) {
1577                         QAT_LOG(ERR, "(GCM)precompute failed");
1578                         return -EFAULT;
1579                 }
1580                 /*
1581                  * Write the AAD length into bytes 16-19 of state2 in
1582                  * big-endian format (the field itself is 8 bytes wide).
1583                  */
1584                 auth_param->u2.aad_sz =
1585                                 RTE_ALIGN_CEIL(aad_length, 16);
1586                 auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;
1587
1588                 aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
1589                                         ICP_QAT_HW_GALOIS_128_STATE1_SZ +
1590                                         ICP_QAT_HW_GALOIS_H_SZ);
1591                 *aad_len = rte_bswap32(aad_length);
1592                 cdesc->aad_len = aad_length;
1593                 break;
1594         case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
1595                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1596                 state1_size = qat_hash_get_state1_size(
1597                                 ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
1598                 state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
1599                 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1600
1601                 cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
1602                                 (cdesc->cd_cur_ptr + state1_size + state2_size);
1603                 cipherconfig->cipher_config.val =
1604                 ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
1605                         ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
1606                         ICP_QAT_HW_CIPHER_KEY_CONVERT,
1607                         ICP_QAT_HW_CIPHER_ENCRYPT);
1608                 memcpy(cipherconfig->key, authkey, authkeylen);
1609                 memset(cipherconfig->key + authkeylen,
1610                                 0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
1611                 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1612                                 authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1613                 auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1614                 break;
1615         case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
1616                 hash->auth_config.config =
1617                         ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
1618                                 cdesc->qat_hash_alg, digestsize);
1619                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1620                 state1_size = qat_hash_get_state1_size(
1621                                 ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
1622                 state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
1623                 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
1624                         + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);
1625
1626                 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1627                 cdesc->cd_cur_ptr += state1_size + state2_size
1628                         + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1629                 auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1630                 cdesc->min_qat_dev_gen = QAT_GEN2;
1631
1632                 break;
1633         case ICP_QAT_HW_AUTH_ALGO_MD5:
1634                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
1635                         authkeylen, cdesc->cd_cur_ptr, &state1_size,
1636                         cdesc->aes_cmac)) {
1637                         QAT_LOG(ERR, "(MD5)precompute failed");
1638                         return -EFAULT;
1639                 }
1640                 state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
1641                 break;
1642         case ICP_QAT_HW_AUTH_ALGO_NULL:
1643                 state1_size = qat_hash_get_state1_size(
1644                                 ICP_QAT_HW_AUTH_ALGO_NULL);
1645                 state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
1646                 break;
1647         case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
1648                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1649                 state1_size = qat_hash_get_state1_size(
1650                                 ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
1651                 state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
1652                                 ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;
1653
1654                 if (aad_length > 0) {
1655                         aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
1656                                         ICP_QAT_HW_CCM_AAD_LEN_INFO;
1657                         auth_param->u2.aad_sz =
1658                                         RTE_ALIGN_CEIL(aad_length,
1659                                         ICP_QAT_HW_CCM_AAD_ALIGNMENT);
1660                 } else {
1661                         auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
1662                 }
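
                /*
                 * Worked example (illustrative; assumes the usual CCM sizes
                 * of 16 bytes for ICP_QAT_HW_CCM_AAD_B0_LEN, 2 bytes for
                 * ICP_QAT_HW_CCM_AAD_LEN_INFO and a 16-byte alignment):
                 * an application AAD of 20 bytes becomes 20 + 16 + 2 = 38,
                 * which RTE_ALIGN_CEIL() rounds up to an aad_sz of 48.
                 */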
1663                 cdesc->aad_len = aad_length;
1664                 hash->auth_counter.counter = 0;
1665
1666                 hash_cd_ctrl->outer_prefix_sz = digestsize;
1667                 auth_param->hash_state_sz = digestsize;
1668
1669                 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1670                 break;
1671         case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
1672                 state1_size = qat_hash_get_state1_size(
1673                                 ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
1674                 state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
1675                 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1676                 pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
1677                                                         + authkeylen);
1678                 /*
1679                  * The Inner Hash Initial State2 block must contain IK
1680                  * (Initialisation Key), followed by IK XOR-ed with KM
1681                  * (Key Modifier): IK||(IK^KM).
1682                  */
1683                 /* write the auth key */
1684                 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1685                 /* initialise temp key with auth key */
1686                 memcpy(pTempKey, authkey, authkeylen);
1687                 /* XOR Key with KASUMI F9 key modifier at 4 bytes level */
1688                 for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
1689                         pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
1690                 break;
1691         default:
1692                 QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
1693                 return -EFAULT;
1694         }
1695
1696         /* Request template setup */
1697         qat_sym_session_init_common_hdr(header, qat_proto_flag);
1698         header->service_cmd_id = cdesc->qat_cmd;
1699
1700         /* Auth CD config setup */
1701         hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
1702         hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
1703         hash_cd_ctrl->inner_res_sz = digestsize;
1704         hash_cd_ctrl->final_sz = digestsize;
1705         hash_cd_ctrl->inner_state1_sz = state1_size;
1706         auth_param->auth_res_sz = digestsize;
1707
1708         hash_cd_ctrl->inner_state2_sz  = state2_size;
1709         hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
1710                         ((sizeof(struct icp_qat_hw_auth_setup) +
1711                          RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
1712                                         >> 3);
1713
1714         cdesc->cd_cur_ptr += state1_size + state2_size;
1715         cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1716
1717         cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1718         cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1719
1720         return 0;
1721 }
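
/*
 * Minimal usage sketch (illustrative only; variable names are hypothetical):
 * for an AEAD such as AES-GCM the auth half of the content descriptor is
 * built after the cipher half, using the AAD length and digest size taken
 * from the crypto transform:
 *
 *	if (qat_sym_session_aead_create_cd_auth(session,
 *			aead_xform->key.data, aead_xform->key.length,
 *			aead_xform->aad_length, aead_xform->digest_length,
 *			RTE_CRYPTO_AUTH_OP_GENERATE))
 *		return -EINVAL;
 */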
1722
1723 int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1724 {
1725         switch (key_len) {
1726         case ICP_QAT_HW_AES_128_KEY_SZ:
1727                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1728                 break;
1729         case ICP_QAT_HW_AES_192_KEY_SZ:
1730                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
1731                 break;
1732         case ICP_QAT_HW_AES_256_KEY_SZ:
1733                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
1734                 break;
1735         default:
1736                 return -EINVAL;
1737         }
1738         return 0;
1739 }
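
/*
 * Illustrative note: each validator in this group follows the same pattern.
 * The caller passes the key length from the transform and, on success, gets
 * back the matching ICP_QAT_HW_CIPHER_ALGO_* value; a non-zero return means
 * the key size is unsupported. Sketch only, names hypothetical:
 *
 *	enum icp_qat_hw_cipher_algo alg;
 *
 *	if (qat_sym_validate_aes_key(cipher_xform->key.length, &alg) != 0)
 *		return -EINVAL;
 */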
1740
1741 int qat_sym_validate_aes_docsisbpi_key(int key_len,
1742                 enum icp_qat_hw_cipher_algo *alg)
1743 {
1744         switch (key_len) {
1745         case ICP_QAT_HW_AES_128_KEY_SZ:
1746                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1747                 break;
1748         default:
1749                 return -EINVAL;
1750         }
1751         return 0;
1752 }
1753
1754 int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1755 {
1756         switch (key_len) {
1757         case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
1758                 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
1759                 break;
1760         default:
1761                 return -EINVAL;
1762         }
1763         return 0;
1764 }
1765
1766 int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1767 {
1768         switch (key_len) {
1769         case ICP_QAT_HW_KASUMI_KEY_SZ:
1770                 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
1771                 break;
1772         default:
1773                 return -EINVAL;
1774         }
1775         return 0;
1776 }
1777
1778 int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1779 {
1780         switch (key_len) {
1781         case ICP_QAT_HW_DES_KEY_SZ:
1782                 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
1783                 break;
1784         default:
1785                 return -EINVAL;
1786         }
1787         return 0;
1788 }
1789
1790 int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1791 {
1792         switch (key_len) {
1793         case QAT_3DES_KEY_SZ_OPT1:
1794         case QAT_3DES_KEY_SZ_OPT2:
1795         case QAT_3DES_KEY_SZ_OPT3:
1796                 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
1797                 break;
1798         default:
1799                 return -EINVAL;
1800         }
1801         return 0;
1802 }
1803
1804 int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1805 {
1806         switch (key_len) {
1807         case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
1808                 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;
1809                 break;
1810         default:
1811                 return -EINVAL;
1812         }
1813         return 0;
1814 }