1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0)
2  * Copyright(c) 2015-2018 Intel Corporation
3  */
4
5 #include <openssl/sha.h>        /* Needed to calculate pre-compute values */
6 #include <openssl/aes.h>        /* Needed to calculate pre-compute values */
7 #include <openssl/md5.h>        /* Needed to calculate pre-compute values */
8 #include <openssl/evp.h>        /* Needed for bpi runt block processing */
9
10 #include <rte_memcpy.h>
11 #include <rte_common.h>
12 #include <rte_spinlock.h>
13 #include <rte_byteorder.h>
14 #include <rte_log.h>
15 #include <rte_malloc.h>
16 #include <rte_crypto_sym.h>
17
18 #include "qat_logs.h"
19 #include "qat_sym_session.h"
20 #include "qat_sym_pmd.h"
21
22 /** Frees a previously created BPI cipher context.
23  *  Depends on openssl libcrypto
24  */
25 static void
26 bpi_cipher_ctx_free(void *bpi_ctx)
27 {
28         if (bpi_ctx != NULL)
29                 EVP_CIPHER_CTX_free((EVP_CIPHER_CTX *)bpi_ctx);
30 }
31
32 /** Creates an AES or DES cipher context in ECB mode.
33  *  Depends on openssl libcrypto
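 *  ECB is sufficient here: DOCSIS BPI handles runt (residual) blocks by
 *  encrypting a single block (the IV or the previous ciphertext block)
 *  and XORing it with the short tail, so no chaining mode is required.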
34  */
35 static int
36 bpi_cipher_ctx_init(enum rte_crypto_cipher_algorithm cryptodev_algo,
37                 enum rte_crypto_cipher_operation direction __rte_unused,
38                 uint8_t *key, void **ctx)
39 {
40         const EVP_CIPHER *algo = NULL;
41         int ret;
42         *ctx = EVP_CIPHER_CTX_new();
43
44         if (*ctx == NULL) {
45                 ret = -ENOMEM;
46                 goto ctx_init_err;
47         }
48
49         if (cryptodev_algo == RTE_CRYPTO_CIPHER_DES_DOCSISBPI)
50                 algo = EVP_des_ecb();
51         else
52                 algo = EVP_aes_128_ecb();
53
54         /* IV will be ECB encrypted whether direction is encrypt or decrypt */
55         if (EVP_EncryptInit_ex(*ctx, algo, NULL, key, 0) != 1) {
56                 ret = -EINVAL;
57                 goto ctx_init_err;
58         }
59
60         return 0;
61
62 ctx_init_err:
63         if (*ctx != NULL)
64                 EVP_CIPHER_CTX_free(*ctx);
65         return ret;
66 }
67
68 static int
69 qat_is_cipher_alg_supported(enum rte_crypto_cipher_algorithm algo,
70                 struct qat_sym_dev_private *internals)
71 {
72         int i = 0;
73         const struct rte_cryptodev_capabilities *capability;
74
75         while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
76                         RTE_CRYPTO_OP_TYPE_UNDEFINED) {
77                 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
78                         continue;
79
80                 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
81                         continue;
82
83                 if (capability->sym.cipher.algo == algo)
84                         return 1;
85         }
86         return 0;
87 }
88
89 static int
90 qat_is_auth_alg_supported(enum rte_crypto_auth_algorithm algo,
91                 struct qat_sym_dev_private *internals)
92 {
93         int i = 0;
94         const struct rte_cryptodev_capabilities *capability;
95
96         while ((capability = &(internals->qat_dev_capabilities[i++]))->op !=
97                         RTE_CRYPTO_OP_TYPE_UNDEFINED) {
98                 if (capability->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
99                         continue;
100
101                 if (capability->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
102                         continue;
103
104                 if (capability->sym.auth.algo == algo)
105                         return 1;
106         }
107         return 0;
108 }
109
110 void
111 qat_sym_session_clear(struct rte_cryptodev *dev,
112                 struct rte_cryptodev_sym_session *sess)
113 {
114         uint8_t index = dev->driver_id;
115         void *sess_priv = get_sym_session_private_data(sess, index);
116         struct qat_sym_session *s = (struct qat_sym_session *)sess_priv;
117
118         if (sess_priv) {
119                 if (s->bpi_ctx)
120                         bpi_cipher_ctx_free(s->bpi_ctx);
121                 memset(s, 0, qat_sym_session_get_private_size(dev));
122                 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
123
124                 set_sym_session_private_data(sess, index, NULL);
125                 rte_mempool_put(sess_mp, sess_priv);
126         }
127 }
128
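/* Maps a crypto xform chain to the QAT firmware LA command id:
 *   cipher only               -> ICP_QAT_FW_LA_CMD_CIPHER
 *   auth only                 -> ICP_QAT_FW_LA_CMD_AUTH
 *   AEAD (GCM enc / CCM dec)  -> ICP_QAT_FW_LA_CMD_CIPHER_HASH
 *   AEAD (GCM dec / CCM enc)  -> ICP_QAT_FW_LA_CMD_HASH_CIPHER
 *   cipher then auth          -> ICP_QAT_FW_LA_CMD_CIPHER_HASH
 *   auth then cipher          -> ICP_QAT_FW_LA_CMD_HASH_CIPHER
 * Any other chain returns -1.
 */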
129 static int
130 qat_get_cmd_id(const struct rte_crypto_sym_xform *xform)
131 {
132         /* Cipher Only */
133         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL)
134                 return ICP_QAT_FW_LA_CMD_CIPHER;
135
136         /* Authentication Only */
137         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH && xform->next == NULL)
138                 return ICP_QAT_FW_LA_CMD_AUTH;
139
140         /* AEAD */
141         if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
142                 /* AES-GCM and AES-CCM work in different orders:
143                  * GCM first encrypts and then generates the hash, whereas
144                  * AES-CCM first generates the hash and then encrypts.
145                  * A similar relation applies to decryption.
146                  */
147                 if (xform->aead.op == RTE_CRYPTO_AEAD_OP_ENCRYPT)
148                         if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
149                                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
150                         else
151                                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
152                 else
153                         if (xform->aead.algo == RTE_CRYPTO_AEAD_AES_GCM)
154                                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
155                         else
156                                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
157         }
158
159         if (xform->next == NULL)
160                 return -1;
161
162         /* Cipher then Authenticate */
163         if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
164                         xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
165                 return ICP_QAT_FW_LA_CMD_CIPHER_HASH;
166
167         /* Authenticate then Cipher */
168         if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
169                         xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
170                 return ICP_QAT_FW_LA_CMD_HASH_CIPHER;
171
172         return -1;
173 }
174
175 static struct rte_crypto_auth_xform *
176 qat_get_auth_xform(struct rte_crypto_sym_xform *xform)
177 {
178         do {
179                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH)
180                         return &xform->auth;
181
182                 xform = xform->next;
183         } while (xform);
184
185         return NULL;
186 }
187
188 static struct rte_crypto_cipher_xform *
189 qat_get_cipher_xform(struct rte_crypto_sym_xform *xform)
190 {
191         do {
192                 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
193                         return &xform->cipher;
194
195                 xform = xform->next;
196         } while (xform);
197
198         return NULL;
199 }
200
201 int
202 qat_sym_session_configure_cipher(struct rte_cryptodev *dev,
203                 struct rte_crypto_sym_xform *xform,
204                 struct qat_sym_session *session)
205 {
206         struct qat_sym_dev_private *internals = dev->data->dev_private;
207         struct rte_crypto_cipher_xform *cipher_xform = NULL;
208         int ret;
209
210         /* Get cipher xform from crypto xform chain */
211         cipher_xform = qat_get_cipher_xform(xform);
212
213         session->cipher_iv.offset = cipher_xform->iv.offset;
214         session->cipher_iv.length = cipher_xform->iv.length;
215
216         switch (cipher_xform->algo) {
217         case RTE_CRYPTO_CIPHER_AES_CBC:
218                 if (qat_sym_validate_aes_key(cipher_xform->key.length,
219                                 &session->qat_cipher_alg) != 0) {
220                         QAT_LOG(ERR, "Invalid AES cipher key size");
221                         ret = -EINVAL;
222                         goto error_out;
223                 }
224                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
225                 break;
226         case RTE_CRYPTO_CIPHER_AES_CTR:
227                 if (qat_sym_validate_aes_key(cipher_xform->key.length,
228                                 &session->qat_cipher_alg) != 0) {
229                         QAT_LOG(ERR, "Invalid AES cipher key size");
230                         ret = -EINVAL;
231                         goto error_out;
232                 }
233                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
234                 break;
235         case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
236                 if (qat_sym_validate_snow3g_key(cipher_xform->key.length,
237                                         &session->qat_cipher_alg) != 0) {
238                         QAT_LOG(ERR, "Invalid SNOW 3G cipher key size");
239                         ret = -EINVAL;
240                         goto error_out;
241                 }
242                 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
243                 break;
244         case RTE_CRYPTO_CIPHER_NULL:
245                 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
246                 break;
247         case RTE_CRYPTO_CIPHER_KASUMI_F8:
248                 if (qat_sym_validate_kasumi_key(cipher_xform->key.length,
249                                         &session->qat_cipher_alg) != 0) {
250                         QAT_LOG(ERR, "Invalid KASUMI cipher key size");
251                         ret = -EINVAL;
252                         goto error_out;
253                 }
254                 session->qat_mode = ICP_QAT_HW_CIPHER_F8_MODE;
255                 break;
256         case RTE_CRYPTO_CIPHER_3DES_CBC:
257                 if (qat_sym_validate_3des_key(cipher_xform->key.length,
258                                 &session->qat_cipher_alg) != 0) {
259                         QAT_LOG(ERR, "Invalid 3DES cipher key size");
260                         ret = -EINVAL;
261                         goto error_out;
262                 }
263                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
264                 break;
265         case RTE_CRYPTO_CIPHER_DES_CBC:
266                 if (qat_sym_validate_des_key(cipher_xform->key.length,
267                                 &session->qat_cipher_alg) != 0) {
268                         QAT_LOG(ERR, "Invalid DES cipher key size");
269                         ret = -EINVAL;
270                         goto error_out;
271                 }
272                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
273                 break;
274         case RTE_CRYPTO_CIPHER_3DES_CTR:
275                 if (qat_sym_validate_3des_key(cipher_xform->key.length,
276                                 &session->qat_cipher_alg) != 0) {
277                         QAT_LOG(ERR, "Invalid 3DES cipher key size");
278                         ret = -EINVAL;
279                         goto error_out;
280                 }
281                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
282                 break;
283         case RTE_CRYPTO_CIPHER_DES_DOCSISBPI:
284                 ret = bpi_cipher_ctx_init(
285                                         cipher_xform->algo,
286                                         cipher_xform->op,
287                                         cipher_xform->key.data,
288                                         &session->bpi_ctx);
289                 if (ret != 0) {
290                         QAT_LOG(ERR, "failed to create DES BPI ctx");
291                         goto error_out;
292                 }
293                 if (qat_sym_validate_des_key(cipher_xform->key.length,
294                                 &session->qat_cipher_alg) != 0) {
295                         QAT_LOG(ERR, "Invalid DES cipher key size");
296                         ret = -EINVAL;
297                         goto error_out;
298                 }
299                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
300                 break;
301         case RTE_CRYPTO_CIPHER_AES_DOCSISBPI:
302                 ret = bpi_cipher_ctx_init(
303                                         cipher_xform->algo,
304                                         cipher_xform->op,
305                                         cipher_xform->key.data,
306                                         &session->bpi_ctx);
307                 if (ret != 0) {
308                         QAT_LOG(ERR, "failed to create AES BPI ctx");
309                         goto error_out;
310                 }
311                 if (qat_sym_validate_aes_docsisbpi_key(cipher_xform->key.length,
312                                 &session->qat_cipher_alg) != 0) {
313                         QAT_LOG(ERR, "Invalid AES DOCSISBPI key size");
314                         ret = -EINVAL;
315                         goto error_out;
316                 }
317                 session->qat_mode = ICP_QAT_HW_CIPHER_CBC_MODE;
318                 break;
319         case RTE_CRYPTO_CIPHER_ZUC_EEA3:
320                 if (!qat_is_cipher_alg_supported(
321                         cipher_xform->algo, internals)) {
322                         QAT_LOG(ERR, "%s not supported on this device",
323                                 rte_crypto_cipher_algorithm_strings
324                                         [cipher_xform->algo]);
325                         ret = -ENOTSUP;
326                         goto error_out;
327                 }
328                 if (qat_sym_validate_zuc_key(cipher_xform->key.length,
329                                 &session->qat_cipher_alg) != 0) {
330                         QAT_LOG(ERR, "Invalid ZUC cipher key size");
331                         ret = -EINVAL;
332                         goto error_out;
333                 }
334                 session->qat_mode = ICP_QAT_HW_CIPHER_ECB_MODE;
335                 break;
336         case RTE_CRYPTO_CIPHER_3DES_ECB:
337         case RTE_CRYPTO_CIPHER_AES_ECB:
338         case RTE_CRYPTO_CIPHER_AES_F8:
339         case RTE_CRYPTO_CIPHER_AES_XTS:
340         case RTE_CRYPTO_CIPHER_ARC4:
341                 QAT_LOG(ERR, "Crypto QAT PMD: Unsupported Cipher alg %u",
342                                 cipher_xform->algo);
343                 ret = -ENOTSUP;
344                 goto error_out;
345         default:
346                 QAT_LOG(ERR, "Crypto: Undefined Cipher specified %u",
347                                 cipher_xform->algo);
348                 ret = -EINVAL;
349                 goto error_out;
350         }
351
352         if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
353                 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
354         else
355                 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
356
357         if (qat_sym_session_aead_create_cd_cipher(session,
358                                                 cipher_xform->key.data,
359                                                 cipher_xform->key.length)) {
360                 ret = -EINVAL;
361                 goto error_out;
362         }
363
364         return 0;
365
366 error_out:
367         if (session->bpi_ctx) {
368                 bpi_cipher_ctx_free(session->bpi_ctx);
369                 session->bpi_ctx = NULL;
370         }
371         return ret;
372 }
373
374 int
375 qat_sym_session_configure(struct rte_cryptodev *dev,
376                 struct rte_crypto_sym_xform *xform,
377                 struct rte_cryptodev_sym_session *sess,
378                 struct rte_mempool *mempool)
379 {
380         void *sess_private_data;
381         int ret;
382
383         if (rte_mempool_get(mempool, &sess_private_data)) {
384                 CDEV_LOG_ERR(
385                         "Couldn't get object from session mempool");
386                 return -ENOMEM;
387         }
388
389         ret = qat_sym_session_set_parameters(dev, xform, sess_private_data);
390         if (ret != 0) {
391                 QAT_LOG(ERR,
392                     "Crypto QAT PMD: failed to configure session parameters");
393
394                 /* Return session to mempool */
395                 rte_mempool_put(mempool, sess_private_data);
396                 return ret;
397         }
398
399         set_sym_session_private_data(sess, dev->driver_id,
400                 sess_private_data);
401
402         return 0;
403 }
404
405 int
406 qat_sym_session_set_parameters(struct rte_cryptodev *dev,
407                 struct rte_crypto_sym_xform *xform, void *session_private)
408 {
409         struct qat_sym_session *session = session_private;
410         int ret;
411         int qat_cmd_id;
412
413         /* Set context descriptor physical address */
414         session->cd_paddr = rte_mempool_virt2iova(session) +
415                         offsetof(struct qat_sym_session, cd);
416
417         session->min_qat_dev_gen = QAT_GEN1;
418
419         /* Get requested QAT command id */
420         qat_cmd_id = qat_get_cmd_id(xform);
421         if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
422                 QAT_LOG(ERR, "Unsupported xform chain requested");
423                 return -ENOTSUP;
424         }
425         session->qat_cmd = (enum icp_qat_fw_la_cmd_id)qat_cmd_id;
426         switch (session->qat_cmd) {
427         case ICP_QAT_FW_LA_CMD_CIPHER:
428                 ret = qat_sym_session_configure_cipher(dev, xform, session);
429                 if (ret < 0)
430                         return ret;
431                 break;
432         case ICP_QAT_FW_LA_CMD_AUTH:
433                 ret = qat_sym_session_configure_auth(dev, xform, session);
434                 if (ret < 0)
435                         return ret;
436                 break;
437         case ICP_QAT_FW_LA_CMD_CIPHER_HASH:
438                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
439                         ret = qat_sym_session_configure_aead(xform,
440                                         session);
441                         if (ret < 0)
442                                 return ret;
443                 } else {
444                         ret = qat_sym_session_configure_cipher(dev,
445                                         xform, session);
446                         if (ret < 0)
447                                 return ret;
448                         ret = qat_sym_session_configure_auth(dev,
449                                         xform, session);
450                         if (ret < 0)
451                                 return ret;
452                 }
453                 break;
454         case ICP_QAT_FW_LA_CMD_HASH_CIPHER:
455                 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD) {
456                         ret = qat_sym_session_configure_aead(xform,
457                                         session);
458                         if (ret < 0)
459                                 return ret;
460                 } else {
461                         ret = qat_sym_session_configure_auth(dev,
462                                         xform, session);
463                         if (ret < 0)
464                                 return ret;
465                         ret = qat_sym_session_configure_cipher(dev,
466                                         xform, session);
467                         if (ret < 0)
468                                 return ret;
469                 }
470                 break;
471         case ICP_QAT_FW_LA_CMD_TRNG_GET_RANDOM:
472         case ICP_QAT_FW_LA_CMD_TRNG_TEST:
473         case ICP_QAT_FW_LA_CMD_SSL3_KEY_DERIVE:
474         case ICP_QAT_FW_LA_CMD_TLS_V1_1_KEY_DERIVE:
475         case ICP_QAT_FW_LA_CMD_TLS_V1_2_KEY_DERIVE:
476         case ICP_QAT_FW_LA_CMD_MGF1:
477         case ICP_QAT_FW_LA_CMD_AUTH_PRE_COMP:
478         case ICP_QAT_FW_LA_CMD_CIPHER_PRE_COMP:
479         case ICP_QAT_FW_LA_CMD_DELIMITER:
480                 QAT_LOG(ERR, "Unsupported Service %u",
481                                 session->qat_cmd);
482                 return -ENOTSUP;
483         default:
484                 QAT_LOG(ERR, "Unsupported Service %u",
485                                 session->qat_cmd);
486                 return -ENOTSUP;
487         }
488
489         return 0;
490 }
491
492 int
493 qat_sym_session_configure_auth(struct rte_cryptodev *dev,
494                                 struct rte_crypto_sym_xform *xform,
495                                 struct qat_sym_session *session)
496 {
497         struct rte_crypto_auth_xform *auth_xform = qat_get_auth_xform(xform);
498         struct qat_sym_dev_private *internals = dev->data->dev_private;
499         uint8_t *key_data = auth_xform->key.data;
500         uint8_t key_length = auth_xform->key.length;
501         session->aes_cmac = 0;
502
503         switch (auth_xform->algo) {
504         case RTE_CRYPTO_AUTH_SHA1_HMAC:
505                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA1;
506                 break;
507         case RTE_CRYPTO_AUTH_SHA224_HMAC:
508                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA224;
509                 break;
510         case RTE_CRYPTO_AUTH_SHA256_HMAC:
511                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA256;
512                 break;
513         case RTE_CRYPTO_AUTH_SHA384_HMAC:
514                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA384;
515                 break;
516         case RTE_CRYPTO_AUTH_SHA512_HMAC:
517                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SHA512;
518                 break;
519         case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
520                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
521                 break;
522         case RTE_CRYPTO_AUTH_AES_CMAC:
523                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC;
524                 session->aes_cmac = 1;
525                 break;
526         case RTE_CRYPTO_AUTH_AES_GMAC:
527                 if (qat_sym_validate_aes_key(auth_xform->key.length,
528                                 &session->qat_cipher_alg) != 0) {
529                         QAT_LOG(ERR, "Invalid AES key size");
530                         return -EINVAL;
531                 }
532                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
533                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
534
535                 break;
536         case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
537                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2;
538                 break;
539         case RTE_CRYPTO_AUTH_MD5_HMAC:
540                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_MD5;
541                 break;
542         case RTE_CRYPTO_AUTH_NULL:
543                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_NULL;
544                 break;
545         case RTE_CRYPTO_AUTH_KASUMI_F9:
546                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_KASUMI_F9;
547                 break;
548         case RTE_CRYPTO_AUTH_ZUC_EIA3:
549                 if (!qat_is_auth_alg_supported(auth_xform->algo, internals)) {
550                         QAT_LOG(ERR, "%s not supported on this device",
551                                 rte_crypto_auth_algorithm_strings
552                                 [auth_xform->algo]);
553                         return -ENOTSUP;
554                 }
555                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3;
556                 break;
557         case RTE_CRYPTO_AUTH_SHA1:
558         case RTE_CRYPTO_AUTH_SHA256:
559         case RTE_CRYPTO_AUTH_SHA512:
560         case RTE_CRYPTO_AUTH_SHA224:
561         case RTE_CRYPTO_AUTH_SHA384:
562         case RTE_CRYPTO_AUTH_MD5:
563         case RTE_CRYPTO_AUTH_AES_CBC_MAC:
564                 QAT_LOG(ERR, "Crypto: Unsupported hash alg %u",
565                                 auth_xform->algo);
566                 return -ENOTSUP;
567         default:
568                 QAT_LOG(ERR, "Crypto: Undefined Hash algo %u specified",
569                                 auth_xform->algo);
570                 return -EINVAL;
571         }
572
573         session->auth_iv.offset = auth_xform->iv.offset;
574         session->auth_iv.length = auth_xform->iv.length;
575
576         if (auth_xform->algo == RTE_CRYPTO_AUTH_AES_GMAC) {
577                 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE) {
578                         session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
579                         session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
580                         /*
581                          * The cipher descriptor content must be created
582                          * first, then the authentication content.
583                          */
584
585                         if (qat_sym_session_aead_create_cd_cipher(session,
586                                                 auth_xform->key.data,
587                                                 auth_xform->key.length))
588                                 return -EINVAL;
589
590                         if (qat_sym_session_aead_create_cd_auth(session,
591                                                 key_data,
592                                                 key_length,
593                                                 0,
594                                                 auth_xform->digest_length,
595                                                 auth_xform->op))
596                                 return -EINVAL;
597                 } else {
598                         session->qat_cmd = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
599                         session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
600                         /*
601                          * The authentication descriptor content must be
602                          * created first, then the cipher content.
603                          */
604
605                         if (qat_sym_session_aead_create_cd_auth(session,
606                                         key_data,
607                                         key_length,
608                                         0,
609                                         auth_xform->digest_length,
610                                         auth_xform->op))
611                                 return -EINVAL;
612
613                         if (qat_sym_session_aead_create_cd_cipher(session,
614                                                 auth_xform->key.data,
615                                                 auth_xform->key.length))
616                                 return -EINVAL;
617                 }
618                 /* Restore to authentication only */
619                 session->qat_cmd = ICP_QAT_FW_LA_CMD_AUTH;
620         } else {
621                 if (qat_sym_session_aead_create_cd_auth(session,
622                                 key_data,
623                                 key_length,
624                                 0,
625                                 auth_xform->digest_length,
626                                 auth_xform->op))
627                         return -EINVAL;
628         }
629
630         session->digest_length = auth_xform->digest_length;
631         return 0;
632 }
633
634 int
635 qat_sym_session_configure_aead(struct rte_crypto_sym_xform *xform,
636                                 struct qat_sym_session *session)
637 {
638         struct rte_crypto_aead_xform *aead_xform = &xform->aead;
639         enum rte_crypto_auth_operation crypto_operation;
640
641         /*
642          * Store AEAD IV parameters as cipher IV,
643          * to avoid unnecessary memory usage
644          */
645         session->cipher_iv.offset = xform->aead.iv.offset;
646         session->cipher_iv.length = xform->aead.iv.length;
647
648         switch (aead_xform->algo) {
649         case RTE_CRYPTO_AEAD_AES_GCM:
650                 if (qat_sym_validate_aes_key(aead_xform->key.length,
651                                 &session->qat_cipher_alg) != 0) {
652                         QAT_LOG(ERR, "Invalid AES key size");
653                         return -EINVAL;
654                 }
655                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
656                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
657                 break;
658         case RTE_CRYPTO_AEAD_AES_CCM:
659                 if (qat_sym_validate_aes_key(aead_xform->key.length,
660                                 &session->qat_cipher_alg) != 0) {
661                         QAT_LOG(ERR, "Invalid AES key size");
662                         return -EINVAL;
663                 }
664                 session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
665                 session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
666                 break;
667         default:
668                 QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u",
669                                 aead_xform->algo);
670                 return -EINVAL;
671         }
672
673         if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
674                         aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
675                         (aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
676                         aead_xform->algo == RTE_CRYPTO_AEAD_AES_CCM)) {
677                 session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
678                 /*
679                  * The cipher descriptor content must be created first,
680                  * then the authentication content.
681                  */
682                 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
683                         RTE_CRYPTO_AUTH_OP_GENERATE : RTE_CRYPTO_AUTH_OP_VERIFY;
684
685                 if (qat_sym_session_aead_create_cd_cipher(session,
686                                         aead_xform->key.data,
687                                         aead_xform->key.length))
688                         return -EINVAL;
689
690                 if (qat_sym_session_aead_create_cd_auth(session,
691                                         aead_xform->key.data,
692                                         aead_xform->key.length,
693                                         aead_xform->aad_length,
694                                         aead_xform->digest_length,
695                                         crypto_operation))
696                         return -EINVAL;
697         } else {
698                 session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
699                 /*
700                  * The authentication descriptor content must be created
701                  * first, then the cipher content.
702                  */
703
704                 crypto_operation = aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM ?
705                         RTE_CRYPTO_AUTH_OP_VERIFY : RTE_CRYPTO_AUTH_OP_GENERATE;
706
707                 if (qat_sym_session_aead_create_cd_auth(session,
708                                         aead_xform->key.data,
709                                         aead_xform->key.length,
710                                         aead_xform->aad_length,
711                                         aead_xform->digest_length,
712                                         crypto_operation))
713                         return -EINVAL;
714
715                 if (qat_sym_session_aead_create_cd_cipher(session,
716                                         aead_xform->key.data,
717                                         aead_xform->key.length))
718                         return -EINVAL;
719         }
720
721         session->digest_length = aead_xform->digest_length;
722         return 0;
723 }
724
725 unsigned int qat_sym_session_get_private_size(
726                 struct rte_cryptodev *dev __rte_unused)
727 {
728         return RTE_ALIGN_CEIL(sizeof(struct qat_sym_session), 8);
729 }
730
731 /* returns block size in bytes per cipher algo */
732 int qat_cipher_get_block_size(enum icp_qat_hw_cipher_algo qat_cipher_alg)
733 {
734         switch (qat_cipher_alg) {
735         case ICP_QAT_HW_CIPHER_ALGO_DES:
736                 return ICP_QAT_HW_DES_BLK_SZ;
737         case ICP_QAT_HW_CIPHER_ALGO_3DES:
738                 return ICP_QAT_HW_3DES_BLK_SZ;
739         case ICP_QAT_HW_CIPHER_ALGO_AES128:
740         case ICP_QAT_HW_CIPHER_ALGO_AES192:
741         case ICP_QAT_HW_CIPHER_ALGO_AES256:
742                 return ICP_QAT_HW_AES_BLK_SZ;
743         default:
744                 QAT_LOG(ERR, "invalid block cipher alg %u", qat_cipher_alg);
745                 return -EFAULT;
746         };
747         return -EFAULT;
748 }
749
750 /*
751  * Returns size in bytes per hash algo for state1 size field in cd_ctrl
752  * This is digest size rounded up to nearest quadword
753  */
754 static int qat_hash_get_state1_size(enum icp_qat_hw_auth_algo qat_hash_alg)
755 {
756         switch (qat_hash_alg) {
757         case ICP_QAT_HW_AUTH_ALGO_SHA1:
758                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA1_STATE1_SZ,
759                                                 QAT_HW_DEFAULT_ALIGNMENT);
760         case ICP_QAT_HW_AUTH_ALGO_SHA224:
761                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA224_STATE1_SZ,
762                                                 QAT_HW_DEFAULT_ALIGNMENT);
763         case ICP_QAT_HW_AUTH_ALGO_SHA256:
764                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA256_STATE1_SZ,
765                                                 QAT_HW_DEFAULT_ALIGNMENT);
766         case ICP_QAT_HW_AUTH_ALGO_SHA384:
767                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA384_STATE1_SZ,
768                                                 QAT_HW_DEFAULT_ALIGNMENT);
769         case ICP_QAT_HW_AUTH_ALGO_SHA512:
770                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
771                                                 QAT_HW_DEFAULT_ALIGNMENT);
772         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
773                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ,
774                                                 QAT_HW_DEFAULT_ALIGNMENT);
775         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
776         case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
777                 return QAT_HW_ROUND_UP(ICP_QAT_HW_GALOIS_128_STATE1_SZ,
778                                                 QAT_HW_DEFAULT_ALIGNMENT);
779         case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
780                 return QAT_HW_ROUND_UP(ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ,
781                                                 QAT_HW_DEFAULT_ALIGNMENT);
782         case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
783                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ,
784                                                 QAT_HW_DEFAULT_ALIGNMENT);
785         case ICP_QAT_HW_AUTH_ALGO_MD5:
786                 return QAT_HW_ROUND_UP(ICP_QAT_HW_MD5_STATE1_SZ,
787                                                 QAT_HW_DEFAULT_ALIGNMENT);
788         case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
789                 return QAT_HW_ROUND_UP(ICP_QAT_HW_KASUMI_F9_STATE1_SZ,
790                                                 QAT_HW_DEFAULT_ALIGNMENT);
791         case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
792                 return QAT_HW_ROUND_UP(ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ,
793                                                 QAT_HW_DEFAULT_ALIGNMENT);
794         case ICP_QAT_HW_AUTH_ALGO_NULL:
795                 return QAT_HW_ROUND_UP(ICP_QAT_HW_NULL_STATE1_SZ,
796                                                 QAT_HW_DEFAULT_ALIGNMENT);
797         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
798                 /* return maximum state1 size in this case */
799                 return QAT_HW_ROUND_UP(ICP_QAT_HW_SHA512_STATE1_SZ,
800                                                 QAT_HW_DEFAULT_ALIGNMENT);
801         default:
802                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
803                 return -EFAULT;
804         };
805         return -EFAULT;
806 }
807
808 /* returns digest size in bytes per hash algo */
809 static int qat_hash_get_digest_size(enum icp_qat_hw_auth_algo qat_hash_alg)
810 {
811         switch (qat_hash_alg) {
812         case ICP_QAT_HW_AUTH_ALGO_SHA1:
813                 return ICP_QAT_HW_SHA1_STATE1_SZ;
814         case ICP_QAT_HW_AUTH_ALGO_SHA224:
815                 return ICP_QAT_HW_SHA224_STATE1_SZ;
816         case ICP_QAT_HW_AUTH_ALGO_SHA256:
817                 return ICP_QAT_HW_SHA256_STATE1_SZ;
818         case ICP_QAT_HW_AUTH_ALGO_SHA384:
819                 return ICP_QAT_HW_SHA384_STATE1_SZ;
820         case ICP_QAT_HW_AUTH_ALGO_SHA512:
821                 return ICP_QAT_HW_SHA512_STATE1_SZ;
822         case ICP_QAT_HW_AUTH_ALGO_MD5:
823                 return ICP_QAT_HW_MD5_STATE1_SZ;
824         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
825                 return ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
826         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
827                 /* return maximum digest size in this case */
828                 return ICP_QAT_HW_SHA512_STATE1_SZ;
829         default:
830                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
831                 return -EFAULT;
832         };
833         return -EFAULT;
834 }
835
836 /* returns block size in bytes per hash algo */
837 static int qat_hash_get_block_size(enum icp_qat_hw_auth_algo qat_hash_alg)
838 {
839         switch (qat_hash_alg) {
840         case ICP_QAT_HW_AUTH_ALGO_SHA1:
841                 return SHA_CBLOCK;
842         case ICP_QAT_HW_AUTH_ALGO_SHA224:
843                 return SHA256_CBLOCK;
844         case ICP_QAT_HW_AUTH_ALGO_SHA256:
845                 return SHA256_CBLOCK;
846         case ICP_QAT_HW_AUTH_ALGO_SHA384:
847                 return SHA512_CBLOCK;
848         case ICP_QAT_HW_AUTH_ALGO_SHA512:
849                 return SHA512_CBLOCK;
850         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
851                 return 16;
852         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
853                 return ICP_QAT_HW_AES_BLK_SZ;
854         case ICP_QAT_HW_AUTH_ALGO_MD5:
855                 return MD5_CBLOCK;
856         case ICP_QAT_HW_AUTH_ALGO_DELIMITER:
857                 /* return maximum block size in this case */
858                 return SHA512_CBLOCK;
859         default:
860                 QAT_LOG(ERR, "invalid hash alg %u", qat_hash_alg);
861                 return -EFAULT;
862         };
863         return -EFAULT;
864 }
865
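/* The partial_hash_* helpers below run a single compression-function
 * call (e.g. SHA1_Transform) over one block and copy out the raw
 * internal state, without padding or finalisation. This intermediate
 * state is what the HMAC ipad/opad precompute needs.
 */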
866 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
867 {
868         SHA_CTX ctx;
869
870         if (!SHA1_Init(&ctx))
871                 return -EFAULT;
872         SHA1_Transform(&ctx, data_in);
873         rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
874         return 0;
875 }
876
877 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
878 {
879         SHA256_CTX ctx;
880
881         if (!SHA224_Init(&ctx))
882                 return -EFAULT;
883         SHA256_Transform(&ctx, data_in);
884         rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
885         return 0;
886 }
887
888 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
889 {
890         SHA256_CTX ctx;
891
892         if (!SHA256_Init(&ctx))
893                 return -EFAULT;
894         SHA256_Transform(&ctx, data_in);
895         rte_memcpy(data_out, &ctx, SHA256_DIGEST_LENGTH);
896         return 0;
897 }
898
899 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
900 {
901         SHA512_CTX ctx;
902
903         if (!SHA384_Init(&ctx))
904                 return -EFAULT;
905         SHA512_Transform(&ctx, data_in);
906         rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
907         return 0;
908 }
909
910 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
911 {
912         SHA512_CTX ctx;
913
914         if (!SHA512_Init(&ctx))
915                 return -EFAULT;
916         SHA512_Transform(&ctx, data_in);
917         rte_memcpy(data_out, &ctx, SHA512_DIGEST_LENGTH);
918         return 0;
919 }
920
921 static int partial_hash_md5(uint8_t *data_in, uint8_t *data_out)
922 {
923         MD5_CTX ctx;
924
925         if (!MD5_Init(&ctx))
926                 return -EFAULT;
927         MD5_Transform(&ctx, data_in);
928         rte_memcpy(data_out, &ctx, MD5_DIGEST_LENGTH);
929
930         return 0;
931 }
932
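/* Computes the partial hash of one block and writes the state words to
 * data_out: SHA state words are byte-swapped to big-endian (the layout
 * used in the content descriptor), while the MD5 state is copied
 * through unchanged.
 */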
933 static int partial_hash_compute(enum icp_qat_hw_auth_algo hash_alg,
934                         uint8_t *data_in,
935                         uint8_t *data_out)
936 {
937         int digest_size;
938         uint8_t digest[qat_hash_get_digest_size(
939                         ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
940         uint32_t *hash_state_out_be32;
941         uint64_t *hash_state_out_be64;
942         int i;
943
944         digest_size = qat_hash_get_digest_size(hash_alg);
945         if (digest_size <= 0)
946                 return -EFAULT;
947
948         hash_state_out_be32 = (uint32_t *)data_out;
949         hash_state_out_be64 = (uint64_t *)data_out;
950
951         switch (hash_alg) {
952         case ICP_QAT_HW_AUTH_ALGO_SHA1:
953                 if (partial_hash_sha1(data_in, digest))
954                         return -EFAULT;
955                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
956                         *hash_state_out_be32 =
957                                 rte_bswap32(*(((uint32_t *)digest)+i));
958                 break;
959         case ICP_QAT_HW_AUTH_ALGO_SHA224:
960                 if (partial_hash_sha224(data_in, digest))
961                         return -EFAULT;
962                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
963                         *hash_state_out_be32 =
964                                 rte_bswap32(*(((uint32_t *)digest)+i));
965                 break;
966         case ICP_QAT_HW_AUTH_ALGO_SHA256:
967                 if (partial_hash_sha256(data_in, digest))
968                         return -EFAULT;
969                 for (i = 0; i < digest_size >> 2; i++, hash_state_out_be32++)
970                         *hash_state_out_be32 =
971                                 rte_bswap32(*(((uint32_t *)digest)+i));
972                 break;
973         case ICP_QAT_HW_AUTH_ALGO_SHA384:
974                 if (partial_hash_sha384(data_in, digest))
975                         return -EFAULT;
976                 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
977                         *hash_state_out_be64 =
978                                 rte_bswap64(*(((uint64_t *)digest)+i));
979                 break;
980         case ICP_QAT_HW_AUTH_ALGO_SHA512:
981                 if (partial_hash_sha512(data_in, digest))
982                         return -EFAULT;
983                 for (i = 0; i < digest_size >> 3; i++, hash_state_out_be64++)
984                         *hash_state_out_be64 =
985                                 rte_bswap64(*(((uint64_t *)digest)+i));
986                 break;
987         case ICP_QAT_HW_AUTH_ALGO_MD5:
988                 if (partial_hash_md5(data_in, data_out))
989                         return -EFAULT;
990                 break;
991         default:
992                 QAT_LOG(ERR, "invalid hash alg %u", hash_alg);
993                 return -EFAULT;
994         }
995
996         return 0;
997 }
998 #define HMAC_IPAD_VALUE 0x36
999 #define HMAC_OPAD_VALUE 0x5c
1000 #define HASH_XCBC_PRECOMP_KEY_NUM 3
1001
1002 static const uint8_t AES_CMAC_SEED[ICP_QAT_HW_AES_128_KEY_SZ];
1003
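/* Doubles a 128-bit value as in NIST SP 800-38B / RFC 4493 CMAC subkey
 * generation: shift left by one bit and, if the MSB of the input was
 * set, XOR the low byte with the Rb constant (0x87 for AES). The caller
 * derives K1 = dbl(AES-ECB(key, 0^128)) and K2 = dbl(K1).
 */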
1004 static void aes_cmac_key_derive(uint8_t *base, uint8_t *derived)
1005 {
1006         int i;
1007
1008         derived[0] = base[0] << 1;
1009         for (i = 1; i < ICP_QAT_HW_AES_BLK_SZ ; i++) {
1010                 derived[i] = base[i] << 1;
1011                 derived[i - 1] |= base[i] >> 7;
1012         }
1013
1014         if (base[0] & 0x80)
1015                 derived[ICP_QAT_HW_AES_BLK_SZ - 1] ^= QAT_AES_CMAC_CONST_RB;
1016 }
1017
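/* Precomputes the authentication state stored in the content descriptor
 * so the raw key does not have to accompany every request:
 * - HMAC algos: partial hashes of (key XOR ipad) and (key XOR opad),
 *   following HMAC(K, m) = H((K ^ opad) || H((K ^ ipad) || m)).
 * - AES-XCBC-MAC / AES-CMAC: the derived subkeys.
 * - GCM/GMAC: the hash key H = AES-ECB(key, 0^128) plus zeroed
 *   length/counter fields.
 */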
1018 static int qat_sym_do_precomputes(enum icp_qat_hw_auth_algo hash_alg,
1019                                 const uint8_t *auth_key,
1020                                 uint16_t auth_keylen,
1021                                 uint8_t *p_state_buf,
1022                                 uint16_t *p_state_len,
1023                                 uint8_t aes_cmac)
1024 {
1025         int block_size;
1026         uint8_t ipad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1027         uint8_t opad[qat_hash_get_block_size(ICP_QAT_HW_AUTH_ALGO_DELIMITER)];
1028         int i;
1029
1030         if (hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC) {
1031
1032                 /* CMAC */
1033                 if (aes_cmac) {
1034                         AES_KEY enc_key;
1035                         uint8_t *in = NULL;
1036                         uint8_t k0[ICP_QAT_HW_AES_128_KEY_SZ];
1037                         uint8_t *k1, *k2;
1038
1039                         auth_keylen = ICP_QAT_HW_AES_128_KEY_SZ;
1040
1041                         in = rte_zmalloc("AES CMAC K1",
1042                                          ICP_QAT_HW_AES_128_KEY_SZ, 16);
1043
1044                         if (in == NULL) {
1045                                 QAT_LOG(ERR, "Failed to alloc memory");
1046                                 return -ENOMEM;
1047                         }
1048
1049                         rte_memcpy(in, AES_CMAC_SEED,
1050                                    ICP_QAT_HW_AES_128_KEY_SZ);
1051                         rte_memcpy(p_state_buf, auth_key, auth_keylen);
1052
1053                         if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1054                                 &enc_key) != 0) {
1055                                 rte_free(in);
1056                                 return -EFAULT;
1057                         }
1058
1059                         AES_encrypt(in, k0, &enc_key);
1060
1061                         k1 = p_state_buf + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1062                         k2 = k1 + ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1063
1064                         aes_cmac_key_derive(k0, k1);
1065                         aes_cmac_key_derive(k1, k2);
1066
1067                         memset(k0, 0, ICP_QAT_HW_AES_128_KEY_SZ);
1068                         *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1069                         rte_free(in);
1070                         return 0;
1071                 } else {
1072                         static uint8_t qat_aes_xcbc_key_seed[
1073                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ] = {
1074                                 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1075                                 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
1076                                 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1077                                 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
1078                                 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1079                                 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03,
1080                         };
1081
1082                         uint8_t *in = NULL;
1083                         uint8_t *out = p_state_buf;
1084                         int x;
1085                         AES_KEY enc_key;
1086
1087                         in = rte_zmalloc("working mem for key",
1088                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ, 16);
1089                         if (in == NULL) {
1090                                 QAT_LOG(ERR, "Failed to alloc memory");
1091                                 return -ENOMEM;
1092                         }
1093
1094                         rte_memcpy(in, qat_aes_xcbc_key_seed,
1095                                         ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1096                         for (x = 0; x < HASH_XCBC_PRECOMP_KEY_NUM; x++) {
1097                                 if (AES_set_encrypt_key(auth_key,
1098                                                         auth_keylen << 3,
1099                                                         &enc_key) != 0) {
1100                                         rte_free(in -
1101                                           (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ));
1102                                         memset(out -
1103                                            (x * ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ),
1104                                           0, ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ);
1105                                         return -EFAULT;
1106                                 }
1107                                 AES_encrypt(in, out, &enc_key);
1108                                 in += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1109                                 out += ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ;
1110                         }
1111                         *p_state_len = ICP_QAT_HW_AES_XCBC_MAC_STATE2_SZ;
1112                         rte_free(in - x*ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ);
1113                         return 0;
1114                 }
1115
1116         } else if ((hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128) ||
1117                 (hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64)) {
1118                 uint8_t *in = NULL;
1119                 uint8_t *out = p_state_buf;
1120                 AES_KEY enc_key;
1121
1122                 memset(p_state_buf, 0, ICP_QAT_HW_GALOIS_H_SZ +
1123                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1124                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ);
1125                 in = rte_zmalloc("working mem for key",
1126                                 ICP_QAT_HW_GALOIS_H_SZ, 16);
1127                 if (in == NULL) {
1128                         QAT_LOG(ERR, "Failed to alloc memory");
1129                         return -ENOMEM;
1130                 }
1131
1132                 memset(in, 0, ICP_QAT_HW_GALOIS_H_SZ);
1133                 if (AES_set_encrypt_key(auth_key, auth_keylen << 3,
1134                         &enc_key) != 0) {
                        rte_free(in);
1135                         return -EFAULT;
1136                 }
1137                 AES_encrypt(in, out, &enc_key);
1138                 *p_state_len = ICP_QAT_HW_GALOIS_H_SZ +
1139                                 ICP_QAT_HW_GALOIS_LEN_A_SZ +
1140                                 ICP_QAT_HW_GALOIS_E_CTR0_SZ;
1141                 rte_free(in);
1142                 return 0;
1143         }
1144
1145         block_size = qat_hash_get_block_size(hash_alg);
1146         if (block_size < 0)
1147                 return block_size;
1148         /* init ipad and opad from key and xor with fixed values */
1149         memset(ipad, 0, block_size);
1150         memset(opad, 0, block_size);
1151
1152         if (auth_keylen > (unsigned int)block_size) {
1153                 QAT_LOG(ERR, "invalid keylen %u", auth_keylen);
1154                 return -EFAULT;
1155         }
1156         rte_memcpy(ipad, auth_key, auth_keylen);
1157         rte_memcpy(opad, auth_key, auth_keylen);
1158
1159         for (i = 0; i < block_size; i++) {
1160                 uint8_t *ipad_ptr = ipad + i;
1161                 uint8_t *opad_ptr = opad + i;
1162                 *ipad_ptr ^= HMAC_IPAD_VALUE;
1163                 *opad_ptr ^= HMAC_OPAD_VALUE;
1164         }
1165
1166         /* do partial hash of ipad and copy to state1 */
1167         if (partial_hash_compute(hash_alg, ipad, p_state_buf)) {
1168                 memset(ipad, 0, block_size);
1169                 memset(opad, 0, block_size);
1170                 QAT_LOG(ERR, "ipad precompute failed");
1171                 return -EFAULT;
1172         }
1173
1174         /*
1175          * State1 size is a multiple of 8, so it may be larger than the
1176          * digest. Put the partial hash of opad state_len bytes after state1.
1177          */
1178         *p_state_len = qat_hash_get_state1_size(hash_alg);
1179         if (partial_hash_compute(hash_alg, opad, p_state_buf + *p_state_len)) {
1180                 memset(ipad, 0, block_size);
1181                 memset(opad, 0, block_size);
1182                 QAT_LOG(ERR, "opad precompute failed");
1183                 return -EFAULT;
1184         }
1185
1186         /* don't leave data lying around */
1187         memset(ipad, 0, block_size);
1188         memset(opad, 0, block_size);
1189         return 0;
1190 }
1191
1192 static void
1193 qat_sym_session_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header,
1194                 enum qat_sym_proto_flag proto_flags)
1195 {
1196         header->hdr_flags =
1197                 ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
1198         header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
1199         header->comn_req_flags =
1200                 ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
1201                                         QAT_COMN_PTR_TYPE_FLAT);
1202         ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
1203                                   ICP_QAT_FW_LA_PARTIAL_NONE);
1204         ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
1205                                            ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
1206
1207         switch (proto_flags) {
1208         case QAT_CRYPTO_PROTO_FLAG_NONE:
1209                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1210                                         ICP_QAT_FW_LA_NO_PROTO);
1211                 break;
1212         case QAT_CRYPTO_PROTO_FLAG_CCM:
1213                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1214                                         ICP_QAT_FW_LA_CCM_PROTO);
1215                 break;
1216         case QAT_CRYPTO_PROTO_FLAG_GCM:
1217                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1218                                         ICP_QAT_FW_LA_GCM_PROTO);
1219                 break;
1220         case QAT_CRYPTO_PROTO_FLAG_SNOW3G:
1221                 ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
1222                                         ICP_QAT_FW_LA_SNOW_3G_PROTO);
1223                 break;
1224         case QAT_CRYPTO_PROTO_FLAG_ZUC:
1225                 ICP_QAT_FW_LA_ZUC_3G_PROTO_FLAG_SET(header->serv_specif_flags,
1226                         ICP_QAT_FW_LA_ZUC_3G_PROTO);
1227                 break;
1228         }
1229
1230         ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
1231                                            ICP_QAT_FW_LA_NO_UPDATE_STATE);
1232         ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
1233                                         ICP_QAT_FW_LA_NO_DIGEST_IN_BUFFER);
1234 }
1235
1236 /*
1237  *      SNOW 3G and ZUC should never use this function; they set their
1238  *      protocol flag directly in both the cipher and auth parts of the
1239  *      content descriptor building functions.
1240  */
1241 static enum qat_sym_proto_flag
1242 qat_get_crypto_proto_flag(uint16_t flags)
1243 {
1244         int proto = ICP_QAT_FW_LA_PROTO_GET(flags);
1245         enum qat_sym_proto_flag qat_proto_flag =
1246                         QAT_CRYPTO_PROTO_FLAG_NONE;
1247
1248         switch (proto) {
1249         case ICP_QAT_FW_LA_GCM_PROTO:
1250                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1251                 break;
1252         case ICP_QAT_FW_LA_CCM_PROTO:
1253                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1254                 break;
1255         }
1256
1257         return qat_proto_flag;
1258 }
1259
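/* Builds the cipher portion of the session's content descriptor: sets
 * up the slice chaining in the request template, decides whether the
 * hardware must convert (expand) the key, and records the per-algorithm
 * key and cipher-state sizes used to lay out the descriptor.
 */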
1260 int qat_sym_session_aead_create_cd_cipher(struct qat_sym_session *cdesc,
1261                                                 uint8_t *cipherkey,
1262                                                 uint32_t cipherkeylen)
1263 {
1264         struct icp_qat_hw_cipher_algo_blk *cipher;
1265         struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1266         struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1267         struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1268         void *ptr = &req_tmpl->cd_ctrl;
1269         struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1270         struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1271         enum icp_qat_hw_cipher_convert key_convert;
1272         enum qat_sym_proto_flag qat_proto_flag =
1273                 QAT_CRYPTO_PROTO_FLAG_NONE;
1274         uint32_t total_key_size;
1275         uint16_t cipher_offset, cd_size;
1276         uint32_t wordIndex  = 0;
1277         uint32_t *temp_key = NULL;
1278
1279         if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER) {
1280                 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1281                 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1282                                         ICP_QAT_FW_SLICE_CIPHER);
1283                 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1284                                         ICP_QAT_FW_SLICE_DRAM_WR);
1285                 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1286                                         ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1287                 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1288                                         ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1289                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1290         } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1291                 cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1292                 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1293                                         ICP_QAT_FW_SLICE_CIPHER);
1294                 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1295                                         ICP_QAT_FW_SLICE_AUTH);
1296                 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1297                                         ICP_QAT_FW_SLICE_AUTH);
1298                 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1299                                         ICP_QAT_FW_SLICE_DRAM_WR);
1300                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1301         } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1302                 QAT_LOG(ERR, "Invalid param, must be a cipher command.");
1303                 return -EFAULT;
1304         }
1305
1306         if (cdesc->qat_mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
1307                 /*
1308                  * CTR streaming ciphers are a special case: decrypt == encrypt.
1309                  * Override the default values set previously.
1310                  */
1311                 cdesc->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
1312                 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1313         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2
1314                 || cdesc->qat_cipher_alg ==
1315                         ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3)
1316                 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
1317         else if (cdesc->qat_dir == ICP_QAT_HW_CIPHER_ENCRYPT)
1318                 key_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
1319         else
1320                 key_convert = ICP_QAT_HW_CIPHER_KEY_CONVERT;
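        /*
         * Summary of the key_convert selection above: CTR-mode sessions are
         * forced to the encrypt direction and use the key as supplied;
         * SNOW 3G and ZUC always request a key conversion; for the remaining
         * ciphers, encrypt uses the key as-is and decrypt requests a
         * conversion.
         */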
1321
1322         if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2) {
1323                 total_key_size = ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
1324                         ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1325                 cipher_cd_ctrl->cipher_state_sz =
1326                         ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1327                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1328
1329         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1330                 total_key_size = ICP_QAT_HW_KASUMI_F8_KEY_SZ;
1331                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_KASUMI_BLK_SZ >> 3;
1332                 cipher_cd_ctrl->cipher_padding_sz =
1333                                         (2 * ICP_QAT_HW_KASUMI_BLK_SZ) >> 3;
1334         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES) {
1335                 total_key_size = ICP_QAT_HW_3DES_KEY_SZ;
1336                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_3DES_BLK_SZ >> 3;
1337                 qat_proto_flag =
1338                         qat_get_crypto_proto_flag(header->serv_specif_flags);
1339         } else if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_DES) {
1340                 total_key_size = ICP_QAT_HW_DES_KEY_SZ;
1341                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_DES_BLK_SZ >> 3;
1342                 qat_proto_flag =
1343                         qat_get_crypto_proto_flag(header->serv_specif_flags);
1344         } else if (cdesc->qat_cipher_alg ==
1345                 ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3) {
1346                 total_key_size = ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
1347                         ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1348                 cipher_cd_ctrl->cipher_state_sz =
1349                         ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1350                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1351                 cdesc->min_qat_dev_gen = QAT_GEN2;
1352         } else {
1353                 total_key_size = cipherkeylen;
1354                 cipher_cd_ctrl->cipher_state_sz = ICP_QAT_HW_AES_BLK_SZ >> 3;
1355                 qat_proto_flag =
1356                         qat_get_crypto_proto_flag(header->serv_specif_flags);
1357         }
1358         cipher_cd_ctrl->cipher_key_sz = total_key_size >> 3;
1359         cipher_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1360         cipher_cd_ctrl->cipher_cfg_offset = cipher_offset >> 3;
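        /*
         * Note: the key size and config offset programmed into the cipher CD
         * control header are expressed in 8-byte (quad-word) units, hence the
         * ">> 3" conversions above.
         */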
1361
1362         header->service_cmd_id = cdesc->qat_cmd;
1363         qat_sym_session_init_common_hdr(header, qat_proto_flag);
1364
1365         cipher = (struct icp_qat_hw_cipher_algo_blk *)cdesc->cd_cur_ptr;
1366         cipher->cipher_config.val =
1367             ICP_QAT_HW_CIPHER_CONFIG_BUILD(cdesc->qat_mode,
1368                                         cdesc->qat_cipher_alg, key_convert,
1369                                         cdesc->qat_dir);
1370
1371         if (cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_KASUMI) {
1372                 temp_key = (uint32_t *)(cdesc->cd_cur_ptr +
1373                                         sizeof(struct icp_qat_hw_cipher_config)
1374                                         + cipherkeylen);
1375                 memcpy(cipher->key, cipherkey, cipherkeylen);
1376                 memcpy(temp_key, cipherkey, cipherkeylen);
1377
1378                 /* XOR the key with the KASUMI F8 key modifier, 4 bytes at a time */
1379                 for (wordIndex = 0; wordIndex < (cipherkeylen >> 2);
1380                                                                 wordIndex++)
1381                         temp_key[wordIndex] ^= KASUMI_F8_KEY_MODIFIER_4_BYTES;
1382
1383                 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1384                                         cipherkeylen + cipherkeylen;
1385         } else {
1386                 memcpy(cipher->key, cipherkey, cipherkeylen);
1387                 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1388                                         cipherkeylen;
1389         }
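        /*
         * At this point the content descriptor holds, for KASUMI F8,
         * cipher_config || key || (key XOR F8 key modifier); for every other
         * cipher it holds cipher_config || key.
         */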
1390
1391         if (total_key_size > cipherkeylen) {
1392                 uint32_t padding_size = total_key_size - cipherkeylen;
1393                 if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1394                         && (cipherkeylen == QAT_3DES_KEY_SZ_OPT2)) {
1395                         /* K3 not provided so use K3 = K1 */
1396                         memcpy(cdesc->cd_cur_ptr, cipherkey, padding_size);
1397                 } else if ((cdesc->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_3DES)
1398                         && (cipherkeylen == QAT_3DES_KEY_SZ_OPT3)) {
1399                         /* K2 and K3 not provided so use K2 = K3 = K1 */
1400                         memcpy(cdesc->cd_cur_ptr, cipherkey,
1401                                 cipherkeylen);
1402                         memcpy(cdesc->cd_cur_ptr+cipherkeylen,
1403                                 cipherkey, cipherkeylen);
1404                 } else
1405                         memset(cdesc->cd_cur_ptr, 0, padding_size);
1406
1407                 cdesc->cd_cur_ptr += padding_size;
1408         }
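        /*
         * Worked example of the 3DES padding above (assuming the full 3DES
         * key size is 24 bytes, OPT2 is a 16-byte two-key input and OPT3 an
         * 8-byte single-key input): with OPT2, padding_size is 8 and the
         * first 8 key bytes (K1) are appended, giving K1||K2||K1; with OPT3,
         * padding_size is 16 and the single key is appended twice, giving
         * K1||K1||K1. Other short keys are simply zero-padded.
         */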
1409         cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1410         cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1411
1412         return 0;
1413 }
1414
1415 int qat_sym_session_aead_create_cd_auth(struct qat_sym_session *cdesc,
1416                                                 uint8_t *authkey,
1417                                                 uint32_t authkeylen,
1418                                                 uint32_t aad_length,
1419                                                 uint32_t digestsize,
1420                                                 unsigned int operation)
1421 {
1422         struct icp_qat_hw_auth_setup *hash;
1423         struct icp_qat_hw_cipher_algo_blk *cipherconfig;
1424         struct icp_qat_fw_la_bulk_req *req_tmpl = &cdesc->fw_req;
1425         struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
1426         struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
1427         void *ptr = &req_tmpl->cd_ctrl;
1428         struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
1429         struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
1430         struct icp_qat_fw_la_auth_req_params *auth_param =
1431                 (struct icp_qat_fw_la_auth_req_params *)
1432                 ((char *)&req_tmpl->serv_specif_rqpars +
1433                 sizeof(struct icp_qat_fw_la_cipher_req_params));
1434         uint16_t state1_size = 0, state2_size = 0;
1435         uint16_t hash_offset, cd_size;
1436         uint32_t *aad_len = NULL;
1437         uint32_t wordIndex  = 0;
1438         uint32_t *pTempKey;
1439         enum qat_sym_proto_flag qat_proto_flag =
1440                 QAT_CRYPTO_PROTO_FLAG_NONE;
1441
1442         if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_AUTH) {
1443                 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1444                                         ICP_QAT_FW_SLICE_AUTH);
1445                 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1446                                         ICP_QAT_FW_SLICE_DRAM_WR);
1447                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1448         } else if (cdesc->qat_cmd == ICP_QAT_FW_LA_CMD_HASH_CIPHER) {
1449                 ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl,
1450                                 ICP_QAT_FW_SLICE_AUTH);
1451                 ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl,
1452                                 ICP_QAT_FW_SLICE_CIPHER);
1453                 ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl,
1454                                 ICP_QAT_FW_SLICE_CIPHER);
1455                 ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl,
1456                                 ICP_QAT_FW_SLICE_DRAM_WR);
1457                 cdesc->cd_cur_ptr = (uint8_t *)&cdesc->cd;
1458         } else if (cdesc->qat_cmd != ICP_QAT_FW_LA_CMD_CIPHER_HASH) {
1459                 QAT_LOG(ERR, "Invalid param, must be a hash command.");
1460                 return -EFAULT;
1461         }
1462
1463         if (operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
1464                 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1465                                 ICP_QAT_FW_LA_NO_RET_AUTH_RES);
1466                 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1467                                 ICP_QAT_FW_LA_CMP_AUTH_RES);
1468                 cdesc->auth_op = ICP_QAT_HW_AUTH_VERIFY;
1469         } else {
1470                 ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
1471                                            ICP_QAT_FW_LA_RET_AUTH_RES);
1472                 ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
1473                                            ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
1474                 cdesc->auth_op = ICP_QAT_HW_AUTH_GENERATE;
1475         }
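        /*
         * For verify operations the firmware is asked to compare the digest
         * rather than return it; for generate operations it returns the
         * digest and skips the comparison.
         */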
1476
1477         /*
1478          * Setup the inner hash config
1479          */
1480         hash_offset = cdesc->cd_cur_ptr-((uint8_t *)&cdesc->cd);
1481         hash = (struct icp_qat_hw_auth_setup *)cdesc->cd_cur_ptr;
1482         hash->auth_config.reserved = 0;
1483         hash->auth_config.config =
1484                         ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
1485                                 cdesc->qat_hash_alg, digestsize);
1486
1487         if (cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2
1488                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_KASUMI_F9
1489                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3
1490                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC
1491                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC
1492                 || cdesc->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL
1493                         )
1494                 hash->auth_counter.counter = 0;
1495         else {
1496                 int block_size = qat_hash_get_block_size(cdesc->qat_hash_alg);
1497
1498                 if (block_size < 0)
1499                         return block_size;
1500                 hash->auth_counter.counter = rte_bswap32(block_size);
1501         }
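        /*
         * Algorithms not in the list above get their counter preloaded with
         * the hash block size (byte-swapped); the listed algorithms start the
         * counter at zero.
         */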
1502
1503         cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_auth_setup);
1504
1505         /*
1506          * cd_cur_ptr now points at the state1 information.
1507          */
1508         switch (cdesc->qat_hash_alg) {
1509         case ICP_QAT_HW_AUTH_ALGO_SHA1:
1510                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA1, authkey,
1511                         authkeylen, cdesc->cd_cur_ptr, &state1_size,
1512                         cdesc->aes_cmac)) {
1513                         QAT_LOG(ERR, "(SHA)precompute failed");
1514                         return -EFAULT;
1515                 }
1516                 state2_size = RTE_ALIGN_CEIL(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
1517                 break;
1518         case ICP_QAT_HW_AUTH_ALGO_SHA224:
1519                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA224, authkey,
1520                         authkeylen, cdesc->cd_cur_ptr, &state1_size,
1521                         cdesc->aes_cmac)) {
1522                         QAT_LOG(ERR, "(SHA)precompute failed");
1523                         return -EFAULT;
1524                 }
1525                 state2_size = ICP_QAT_HW_SHA224_STATE2_SZ;
1526                 break;
1527         case ICP_QAT_HW_AUTH_ALGO_SHA256:
1528                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA256, authkey,
1529                         authkeylen, cdesc->cd_cur_ptr,  &state1_size,
1530                         cdesc->aes_cmac)) {
1531                         QAT_LOG(ERR, "(SHA)precompute failed");
1532                         return -EFAULT;
1533                 }
1534                 state2_size = ICP_QAT_HW_SHA256_STATE2_SZ;
1535                 break;
1536         case ICP_QAT_HW_AUTH_ALGO_SHA384:
1537                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA384, authkey,
1538                         authkeylen, cdesc->cd_cur_ptr, &state1_size,
1539                         cdesc->aes_cmac)) {
1540                         QAT_LOG(ERR, "(SHA)precompute failed");
1541                         return -EFAULT;
1542                 }
1543                 state2_size = ICP_QAT_HW_SHA384_STATE2_SZ;
1544                 break;
1545         case ICP_QAT_HW_AUTH_ALGO_SHA512:
1546                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_SHA512, authkey,
1547                         authkeylen, cdesc->cd_cur_ptr,  &state1_size,
1548                         cdesc->aes_cmac)) {
1549                         QAT_LOG(ERR, "(SHA)precompute failed");
1550                         return -EFAULT;
1551                 }
1552                 state2_size = ICP_QAT_HW_SHA512_STATE2_SZ;
1553                 break;
1554         case ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC:
1555                 state1_size = ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ;
1556
1557                 if (cdesc->aes_cmac)
1558                         memset(cdesc->cd_cur_ptr, 0, state1_size);
1559                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC,
1560                         authkey, authkeylen, cdesc->cd_cur_ptr + state1_size,
1561                         &state2_size, cdesc->aes_cmac)) {
1562                         cdesc->aes_cmac ? QAT_LOG(ERR,
1563                                                   "(CMAC)precompute failed")
1564                                         : QAT_LOG(ERR,
1565                                                   "(XCBC)precompute failed");
1566                         return -EFAULT;
1567                 }
1568                 break;
1569         case ICP_QAT_HW_AUTH_ALGO_GALOIS_128:
1570         case ICP_QAT_HW_AUTH_ALGO_GALOIS_64:
1571                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_GCM;
1572                 state1_size = ICP_QAT_HW_GALOIS_128_STATE1_SZ;
1573                 if (qat_sym_do_precomputes(cdesc->qat_hash_alg, authkey,
1574                         authkeylen, cdesc->cd_cur_ptr + state1_size,
1575                         &state2_size, cdesc->aes_cmac)) {
1576                         QAT_LOG(ERR, "(GCM)precompute failed");
1577                         return -EFAULT;
1578                 }
1579                 /*
1580                  * Write the AAD length into bytes 16-19 of state2 in
1581                  * big-endian format; the state2 AAD-length field is 8 bytes wide.
1582                  */
1583                 auth_param->u2.aad_sz =
1584                                 RTE_ALIGN_CEIL(aad_length, 16);
1585                 auth_param->hash_state_sz = (auth_param->u2.aad_sz) >> 3;
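                /*
                 * For example, a 20-byte AAD is rounded up to aad_sz = 32 and
                 * hash_state_sz = 4 (the latter counted in 8-byte words).
                 */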
1586
1587                 aad_len = (uint32_t *)(cdesc->cd_cur_ptr +
1588                                         ICP_QAT_HW_GALOIS_128_STATE1_SZ +
1589                                         ICP_QAT_HW_GALOIS_H_SZ);
1590                 *aad_len = rte_bswap32(aad_length);
1591                 cdesc->aad_len = aad_length;
1592                 break;
1593         case ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2:
1594                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_SNOW3G;
1595                 state1_size = qat_hash_get_state1_size(
1596                                 ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2);
1597                 state2_size = ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ;
1598                 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1599
1600                 cipherconfig = (struct icp_qat_hw_cipher_algo_blk *)
1601                                 (cdesc->cd_cur_ptr + state1_size + state2_size);
1602                 cipherconfig->cipher_config.val =
1603                 ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_ECB_MODE,
1604                         ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
1605                         ICP_QAT_HW_CIPHER_KEY_CONVERT,
1606                         ICP_QAT_HW_CIPHER_ENCRYPT);
1607                 memcpy(cipherconfig->key, authkey, authkeylen);
1608                 memset(cipherconfig->key + authkeylen,
1609                                 0, ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
1610                 cdesc->cd_cur_ptr += sizeof(struct icp_qat_hw_cipher_config) +
1611                                 authkeylen + ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
1612                 auth_param->hash_state_sz = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ >> 3;
1613                 break;
1614         case ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3:
1615                 hash->auth_config.config =
1616                         ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE0,
1617                                 cdesc->qat_hash_alg, digestsize);
1618                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_ZUC;
1619                 state1_size = qat_hash_get_state1_size(
1620                                 ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3);
1621                 state2_size = ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ;
1622                 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size
1623                         + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);
1624
1625                 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1626                 cdesc->cd_cur_ptr += state1_size + state2_size
1627                         + ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
1628                 auth_param->hash_state_sz = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ >> 3;
1629                 cdesc->min_qat_dev_gen = QAT_GEN2;
1630
1631                 break;
1632         case ICP_QAT_HW_AUTH_ALGO_MD5:
1633                 if (qat_sym_do_precomputes(ICP_QAT_HW_AUTH_ALGO_MD5, authkey,
1634                         authkeylen, cdesc->cd_cur_ptr, &state1_size,
1635                         cdesc->aes_cmac)) {
1636                         QAT_LOG(ERR, "(MD5)precompute failed");
1637                         return -EFAULT;
1638                 }
1639                 state2_size = ICP_QAT_HW_MD5_STATE2_SZ;
1640                 break;
1641         case ICP_QAT_HW_AUTH_ALGO_NULL:
1642                 state1_size = qat_hash_get_state1_size(
1643                                 ICP_QAT_HW_AUTH_ALGO_NULL);
1644                 state2_size = ICP_QAT_HW_NULL_STATE2_SZ;
1645                 break;
1646         case ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC:
1647                 qat_proto_flag = QAT_CRYPTO_PROTO_FLAG_CCM;
1648                 state1_size = qat_hash_get_state1_size(
1649                                 ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC);
1650                 state2_size = ICP_QAT_HW_AES_CBC_MAC_KEY_SZ +
1651                                 ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ;
1652
1653                 if (aad_length > 0) {
1654                         aad_length += ICP_QAT_HW_CCM_AAD_B0_LEN +
1655                         ICP_QAT_HW_CCM_AAD_LEN_INFO;
1656                         auth_param->u2.aad_sz =
1657                         RTE_ALIGN_CEIL(aad_length,
1658                         ICP_QAT_HW_CCM_AAD_ALIGNMENT);
1659                 } else {
1660                         auth_param->u2.aad_sz = ICP_QAT_HW_CCM_AAD_B0_LEN;
1661                 }
1662                 cdesc->aad_len = aad_length;
1663                 hash->auth_counter.counter = 0;
1664
1665                 hash_cd_ctrl->outer_prefix_sz = digestsize;
1666                 auth_param->hash_state_sz = digestsize;
1667
1668                 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1669                 break;
1670         case ICP_QAT_HW_AUTH_ALGO_KASUMI_F9:
1671                 state1_size = qat_hash_get_state1_size(
1672                                 ICP_QAT_HW_AUTH_ALGO_KASUMI_F9);
1673                 state2_size = ICP_QAT_HW_KASUMI_F9_STATE2_SZ;
1674                 memset(cdesc->cd_cur_ptr, 0, state1_size + state2_size);
1675                 pTempKey = (uint32_t *)(cdesc->cd_cur_ptr + state1_size
1676                                                         + authkeylen);
1677                 /*
1678                  * The Inner Hash Initial State2 block must contain IK
1679                  * (Initialisation Key), followed by IK XOR-ed with KM
1680                  * (Key Modifier): IK||(IK^KM).
1681                  */
1682                 /* write the auth key */
1683                 memcpy(cdesc->cd_cur_ptr + state1_size, authkey, authkeylen);
1684                 /* initialise temp key with auth key */
1685                 memcpy(pTempKey, authkey, authkeylen);
1686                 /* XOR the key with the KASUMI F9 key modifier, 4 bytes at a time */
1687                 for (wordIndex = 0; wordIndex < (authkeylen >> 2); wordIndex++)
1688                         pTempKey[wordIndex] ^= KASUMI_F9_KEY_MODIFIER_4_BYTES;
1689                 break;
1690         default:
1691                 QAT_LOG(ERR, "Invalid HASH alg %u", cdesc->qat_hash_alg);
1692                 return -EFAULT;
1693         }
1694
1695         /* Request template setup */
1696         qat_sym_session_init_common_hdr(header, qat_proto_flag);
1697         header->service_cmd_id = cdesc->qat_cmd;
1698
1699         /* Auth CD config setup */
1700         hash_cd_ctrl->hash_cfg_offset = hash_offset >> 3;
1701         hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
1702         hash_cd_ctrl->inner_res_sz = digestsize;
1703         hash_cd_ctrl->final_sz = digestsize;
1704         hash_cd_ctrl->inner_state1_sz = state1_size;
1705         auth_param->auth_res_sz = digestsize;
1706
1707         hash_cd_ctrl->inner_state2_sz  = state2_size;
1708         hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
1709                         ((sizeof(struct icp_qat_hw_auth_setup) +
1710                          RTE_ALIGN_CEIL(hash_cd_ctrl->inner_state1_sz, 8))
1711                                         >> 3);
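        /*
         * inner_state2_offset is hash_cfg_offset plus the auth setup block
         * and the 8-byte-aligned state1 region, converted to 8-byte words.
         */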
1712
1713         cdesc->cd_cur_ptr += state1_size + state2_size;
1714         cd_size = cdesc->cd_cur_ptr-(uint8_t *)&cdesc->cd;
1715
1716         cd_pars->u.s.content_desc_addr = cdesc->cd_paddr;
1717         cd_pars->u.s.content_desc_params_sz = RTE_ALIGN_CEIL(cd_size, 8) >> 3;
1718
1719         return 0;
1720 }
1721
1722 int qat_sym_validate_aes_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1723 {
1724         switch (key_len) {
1725         case ICP_QAT_HW_AES_128_KEY_SZ:
1726                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1727                 break;
1728         case ICP_QAT_HW_AES_192_KEY_SZ:
1729                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
1730                 break;
1731         case ICP_QAT_HW_AES_256_KEY_SZ:
1732                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
1733                 break;
1734         default:
1735                 return -EINVAL;
1736         }
1737         return 0;
1738 }
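/*
 * Illustrative sketch of how the qat_sym_validate_*_key() helpers are meant
 * to be used (hypothetical caller code, not part of this driver):
 *
 *      enum icp_qat_hw_cipher_algo alg;
 *
 *      if (qat_sym_validate_aes_key(cipher_xform->key.length, &alg) != 0)
 *              return -EINVAL;         (unsupported AES key size)
 *
 * On success, alg is set to the matching ICP_QAT_HW_CIPHER_ALGO_* value and
 * can be stored in the session. "cipher_xform" above stands for a
 * struct rte_crypto_cipher_xform supplied by the application.
 */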
1739
1740 int qat_sym_validate_aes_docsisbpi_key(int key_len,
1741                 enum icp_qat_hw_cipher_algo *alg)
1742 {
1743         switch (key_len) {
1744         case ICP_QAT_HW_AES_128_KEY_SZ:
1745                 *alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
1746                 break;
1747         default:
1748                 return -EINVAL;
1749         }
1750         return 0;
1751 }
1752
1753 int qat_sym_validate_snow3g_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1754 {
1755         switch (key_len) {
1756         case ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ:
1757                 *alg = ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2;
1758                 break;
1759         default:
1760                 return -EINVAL;
1761         }
1762         return 0;
1763 }
1764
1765 int qat_sym_validate_kasumi_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1766 {
1767         switch (key_len) {
1768         case ICP_QAT_HW_KASUMI_KEY_SZ:
1769                 *alg = ICP_QAT_HW_CIPHER_ALGO_KASUMI;
1770                 break;
1771         default:
1772                 return -EINVAL;
1773         }
1774         return 0;
1775 }
1776
1777 int qat_sym_validate_des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1778 {
1779         switch (key_len) {
1780         case ICP_QAT_HW_DES_KEY_SZ:
1781                 *alg = ICP_QAT_HW_CIPHER_ALGO_DES;
1782                 break;
1783         default:
1784                 return -EINVAL;
1785         }
1786         return 0;
1787 }
1788
1789 int qat_sym_validate_3des_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1790 {
1791         switch (key_len) {
1792         case QAT_3DES_KEY_SZ_OPT1:
1793         case QAT_3DES_KEY_SZ_OPT2:
1794         case QAT_3DES_KEY_SZ_OPT3:
1795                 *alg = ICP_QAT_HW_CIPHER_ALGO_3DES;
1796                 break;
1797         default:
1798                 return -EINVAL;
1799         }
1800         return 0;
1801 }
1802
1803 int qat_sym_validate_zuc_key(int key_len, enum icp_qat_hw_cipher_algo *alg)
1804 {
1805         switch (key_len) {
1806         case ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ:
1807                 *alg = ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3;
1808                 break;
1809         default:
1810                 return -EINVAL;
1811         }
1812         return 0;
1813 }