/* app/test-crypto-perf/cperf_ops.c (deb_dpdk.git, upstream DPDK 17.11.4) */
/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2016-2017 Intel Corporation. All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <rte_cryptodev.h>

#include "cperf_ops.h"
#include "cperf_test_vectors.h"

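/*
 * Note on the cperf_set_ops_*() populate helpers below: each one fills a
 * burst of nb_ops crypto operations. The source and destination mbufs are
 * pre-allocated by the test setup in the same mempool object as the op
 * itself, so they are reached by adding src_buf_offset/dst_buf_offset bytes
 * to the op pointer. A dst_buf_offset of 0 means the test runs in-place and
 * m_dst is left NULL. Per-op IV (and, for AEAD, AAD) data lives after the
 * op at iv_offset.
 */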
static int
cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused,
		uint16_t iv_offset __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* cipher parameters */
		sym_op->cipher.data.length = options->test_buffer_size;
		sym_op->cipher.data.offset = 0;
	}

	return 0;
}

static int
cperf_set_ops_null_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused,
		uint16_t iv_offset __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* auth parameters */
		sym_op->auth.data.length = options->test_buffer_size;
		sym_op->auth.data.offset = 0;
	}

	return 0;
}

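/*
 * Cipher-only case. SNOW3G/KASUMI/ZUC ciphers take the data length in bits,
 * hence the "<< 3"; other algorithms use bytes. In verify mode the reference
 * IV from the test vector is copied into each op so the output can be
 * checked against the vector.
 */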
static int
cperf_set_ops_cipher(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* cipher parameters */
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
			sym_op->cipher.data.length = options->test_buffer_size << 3;
		else
			sym_op->cipher.data.length = options->test_buffer_size;

		sym_op->cipher.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->cipher_iv.data,
					test_vector->cipher_iv.length);
		}
	}

	return 0;
}

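/*
 * Auth-only case. For generate operations the digest is written right after
 * the test payload: the segment walk below finds the mbuf segment that holds
 * that offset and, if the tail of the segment is smaller than digest_sz,
 * moves the digest to the start of the next segment. For verify operations
 * the reference digest from the test vector is used instead. As with the
 * ciphers, SNOW3G/KASUMI/ZUC lengths are expressed in bits.
 */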
static int
cperf_set_ops_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		if (test_vector->auth_iv.length) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
								uint8_t *,
								iv_offset);
			memcpy(iv_ptr, test_vector->auth_iv.data,
					test_vector->auth_iv.length);
		}

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in the segment,
				 * place the digest in the next segment.
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);
		}

		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length = options->test_buffer_size << 3;
		else
			sym_op->auth.data.length = options->test_buffer_size;

		sym_op->auth.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		if (test_vector->auth_iv.length) {
			for (i = 0; i < nb_ops; i++) {
				uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
						uint8_t *, iv_offset);

				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}
	}
	return 0;
}

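/*
 * Chained cipher+auth case: the per-op setup is the cipher setup followed by
 * the auth setup used above. The cipher IV is stored at iv_offset and, when
 * the auth transform needs its own IV, that IV is stored immediately after
 * the cipher IV (see the verify-mode copy at the end of this function and
 * the iv.offset arithmetic in cperf_create_session()).
 */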
static int
cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* cipher parameters */
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
			sym_op->cipher.data.length = options->test_buffer_size << 3;
		else
			sym_op->cipher.data.length = options->test_buffer_size;

		sym_op->cipher.data.offset = 0;

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in the segment,
				 * place the digest in the next segment.
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);
		}

		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length = options->test_buffer_size << 3;
		else
			sym_op->auth.data.length = options->test_buffer_size;

		sym_op->auth.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->cipher_iv.data,
					test_vector->cipher_iv.length);
			if (test_vector->auth_iv.length) {
				/*
				 * Copy the auth IV right after the cipher IV
				 * (both live after the crypto operation).
				 */
				iv_ptr += test_vector->cipher_iv.length;
				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}
	}

	return 0;
}

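/*
 * AEAD case (e.g. AES-GCM/CCM). The IV sits at iv_offset and the AAD is
 * placed right after it, with the IV area padded to a 16-byte boundary.
 * For AES-CCM the DPDK API expects the nonce one byte into the IV field
 * and the AAD 18 bytes into the AAD buffer, which is why verify mode
 * copies the vectors at those offsets.
 */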
static int
cperf_set_ops_aead(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;
	/* AAD is placed after the IV */
	uint16_t aad_offset = iv_offset +
			RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* AEAD parameters */
		sym_op->aead.data.length = options->test_buffer_size;
		sym_op->aead.data.offset = 0;

		sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, aad_offset);
		sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
					aad_offset);

		if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
			sym_op->aead.digest.data = test_vector->digest.data;
			sym_op->aead.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			uint32_t offset = sym_op->aead.data.length +
						sym_op->aead.data.offset;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in the segment,
				 * place the digest in the next segment.
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->aead.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);
		}
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			/*
			 * For AES-CCM, the nonce is copied one byte after the
			 * start of the IV field, and the AAD is copied 18 bytes
			 * after the start of the AAD field.
			 */
			if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
				memcpy(iv_ptr + 1, test_vector->aead_iv.data,
					test_vector->aead_iv.length);

				memcpy(ops[i]->sym->aead.aad.data + 18,
					test_vector->aad.data,
					test_vector->aad.length);
			} else {
				memcpy(iv_ptr, test_vector->aead_iv.data,
					test_vector->aead_iv.length);

				memcpy(ops[i]->sym->aead.aad.data,
					test_vector->aad.data,
					test_vector->aad.length);
			}
		}
	}

	return 0;
}

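/*
 * Build a symmetric session for the configured operation type: a single
 * cipher, auth or AEAD transform, or a cipher+auth chain ordered according
 * to op_type. Note that the return codes of rte_cryptodev_sym_session_init()
 * are not checked here; the caller only sees the session pointer.
 */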
static struct rte_cryptodev_sym_session *
cperf_create_session(struct rte_mempool *sess_mp,
	uint8_t dev_id,
	const struct cperf_options *options,
	const struct cperf_test_vector *test_vector,
	uint16_t iv_offset)
{
	struct rte_crypto_sym_xform cipher_xform;
	struct rte_crypto_sym_xform auth_xform;
	struct rte_crypto_sym_xform aead_xform;
	struct rte_cryptodev_sym_session *sess = NULL;

	sess = rte_cryptodev_sym_session_create(sess_mp);
	/*
	 * cipher only
	 */
	if (options->op_type == CPERF_CIPHER_ONLY) {
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;

		/* cipher other than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
					test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
					test_vector->cipher_key.length;
			cipher_xform.cipher.iv.length =
					test_vector->cipher_iv.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
			cipher_xform.cipher.iv.length = 0;
		}
		/* create crypto session */
		rte_cryptodev_sym_session_init(dev_id, sess, &cipher_xform,
				sess_mp);
	/*
	 * auth only
	 */
	} else if (options->op_type == CPERF_AUTH_ONLY) {
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.next = NULL;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;
		auth_xform.auth.iv.offset = iv_offset;

		/* auth other than null */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length =
					options->digest_sz;
			auth_xform.auth.key.length =
					test_vector->auth_key.length;
			auth_xform.auth.key.data = test_vector->auth_key.data;
			auth_xform.auth.iv.length =
					test_vector->auth_iv.length;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}
		/* create crypto session */
		rte_cryptodev_sym_session_init(dev_id, sess, &auth_xform,
				sess_mp);
	/*
	 * cipher and auth
	 */
	} else if (options->op_type == CPERF_CIPHER_THEN_AUTH
			|| options->op_type == CPERF_AUTH_THEN_CIPHER) {
		/*
		 * cipher
		 */
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;

		/* cipher other than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
					test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
					test_vector->cipher_key.length;
			cipher_xform.cipher.iv.length =
					test_vector->cipher_iv.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
			cipher_xform.cipher.iv.length = 0;
		}

		/*
		 * auth
		 */
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.next = NULL;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;
		auth_xform.auth.iv.offset = iv_offset +
			cipher_xform.cipher.iv.length;

		/* auth other than null */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length = options->digest_sz;
			auth_xform.auth.iv.length = test_vector->auth_iv.length;
			auth_xform.auth.key.length =
					test_vector->auth_key.length;
			auth_xform.auth.key.data =
					test_vector->auth_key.data;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}

		/* cipher then auth */
		if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
			cipher_xform.next = &auth_xform;
			/* create crypto session */
			rte_cryptodev_sym_session_init(dev_id,
					sess, &cipher_xform, sess_mp);
		} else { /* auth then cipher */
			auth_xform.next = &cipher_xform;
			/* create crypto session */
			rte_cryptodev_sym_session_init(dev_id,
					sess, &auth_xform, sess_mp);
		}
	} else { /* options->op_type == CPERF_AEAD */
		aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
		aead_xform.next = NULL;
		aead_xform.aead.algo = options->aead_algo;
		aead_xform.aead.op = options->aead_op;
		aead_xform.aead.iv.offset = iv_offset;

		aead_xform.aead.key.data =
					test_vector->aead_key.data;
		aead_xform.aead.key.length =
					test_vector->aead_key.length;
		aead_xform.aead.iv.length = test_vector->aead_iv.length;

		aead_xform.aead.digest_length = options->digest_sz;
		aead_xform.aead.aad_length =
					options->aead_aad_sz;

		/* Create crypto session */
		rte_cryptodev_sym_session_init(dev_id,
					sess, &aead_xform, sess_mp);
	}

	return sess;
}

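/*
 * Pick the session constructor and the op population callback for the
 * configured operation type. A simplified sketch of how the test runners
 * are expected to use the returned function pointers (illustrative only,
 * local variable names are made up):
 *
 *	struct cperf_op_fns fns;
 *	cperf_get_op_functions(opts, &fns);
 *	sess = fns.sess_create(sess_mp, dev_id, opts, vector, iv_offset);
 *	fns.populate_ops(ops, src_off, dst_off, burst_sz, sess, opts,
 *			vector, iv_offset);
 *	rte_cryptodev_enqueue_burst(dev_id, qp_id, ops, burst_sz);
 */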
int
cperf_get_op_functions(const struct cperf_options *options,
		struct cperf_op_fns *op_fns)
{
	memset(op_fns, 0, sizeof(struct cperf_op_fns));

	op_fns->sess_create = cperf_create_session;

	if (options->op_type == CPERF_AEAD) {
		op_fns->populate_ops = cperf_set_ops_aead;
		return 0;
	}

	if (options->op_type == CPERF_AUTH_THEN_CIPHER
			|| options->op_type == CPERF_CIPHER_THEN_AUTH) {
		op_fns->populate_ops = cperf_set_ops_cipher_auth;
		return 0;
	}
	if (options->op_type == CPERF_AUTH_ONLY) {
		if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
			op_fns->populate_ops = cperf_set_ops_null_auth;
		else
			op_fns->populate_ops = cperf_set_ops_auth;
		return 0;
	}
	if (options->op_type == CPERF_CIPHER_ONLY) {
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
			op_fns->populate_ops = cperf_set_ops_null_cipher;
		else
			op_fns->populate_ops = cperf_set_ops_cipher;
		return 0;
	}

	return -1;
}