/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2016-2017 Intel Corporation. All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <rte_cryptodev.h>

#include "cperf_ops.h"
#include "cperf_test_vectors.h"

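/*
 * Populate a burst of crypto ops for a NULL cipher: attach the session,
 * set the source/destination mbufs and cover the whole test buffer
 * starting at offset 0. No IV or key material is needed.
 */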
static int
cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector __rte_unused)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* cipher parameters */
                sym_op->cipher.data.length = options->test_buffer_size;
                sym_op->cipher.data.offset = 0;
        }

        return 0;
}

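/*
 * Populate a burst of crypto ops for NULL authentication: attach the
 * session, set the source/destination mbufs and authenticate the whole
 * test buffer starting at offset 0.
 */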
static int
cperf_set_ops_null_auth(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector __rte_unused)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* auth parameters */
                sym_op->auth.data.length = options->test_buffer_size;
                sym_op->auth.data.offset = 0;
        }

        return 0;
}

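/*
 * Populate cipher-only ops: take the IV from the test vector and cover
 * the whole test buffer with the cipher region. SNOW 3G, KASUMI and ZUC
 * expect the data length in bits, so it is shifted left by 3 for those
 * algorithms.
 */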
static int
cperf_set_ops_cipher(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* cipher parameters */
                sym_op->cipher.iv.data = test_vector->iv.data;
                sym_op->cipher.iv.phys_addr = test_vector->iv.phys_addr;
                sym_op->cipher.iv.length = test_vector->iv.length;

                if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
                        sym_op->cipher.data.length = options->test_buffer_size << 3;
                else
                        sym_op->cipher.data.length = options->test_buffer_size;

                sym_op->cipher.data.offset = 0;
        }

        return 0;
}

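/*
 * Populate auth-only ops. For verify operations the digest comes from the
 * test vector; for generate operations the digest is written right after
 * the payload, walking the mbuf chain to find the segment that holds that
 * offset. Bit-oriented algorithms (SNOW 3G, KASUMI, ZUC) take the data
 * length in bits.
 */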
static int
cperf_set_ops_auth(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* authentication parameters */
                if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
                        sym_op->auth.digest.data = test_vector->digest.data;
                        sym_op->auth.digest.phys_addr =
                                        test_vector->digest.phys_addr;
                        sym_op->auth.digest.length = options->auth_digest_sz;
                } else {

                        uint32_t offset = options->test_buffer_size;
                        struct rte_mbuf *buf, *tbuf;

                        if (options->out_of_place) {
                                buf = bufs_out[i];
                        } else {
                                tbuf = bufs_in[i];
                                while ((tbuf->next != NULL) &&
                                                (offset >= tbuf->data_len)) {
                                        offset -= tbuf->data_len;
                                        tbuf = tbuf->next;
                                }
                                buf = tbuf;
                        }

                        sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
                                        uint8_t *, offset);
                        sym_op->auth.digest.phys_addr =
                                        rte_pktmbuf_mtophys_offset(buf, offset);
                        sym_op->auth.digest.length = options->auth_digest_sz;
                        sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
                        sym_op->auth.aad.data = test_vector->aad.data;
                        sym_op->auth.aad.length = options->auth_aad_sz;

                }

                if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
                        sym_op->auth.data.length = options->test_buffer_size << 3;
                else
                        sym_op->auth.data.length = options->test_buffer_size;

                sym_op->auth.data.offset = 0;
        }

        return 0;
}

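/*
 * Populate chained cipher + auth ops: a combination of the cipher and
 * auth setup above, with both regions covering the whole test buffer.
 */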
static int
cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* cipher parameters */
                sym_op->cipher.iv.data = test_vector->iv.data;
                sym_op->cipher.iv.phys_addr = test_vector->iv.phys_addr;
                sym_op->cipher.iv.length = test_vector->iv.length;

                if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
                        sym_op->cipher.data.length = options->test_buffer_size << 3;
                else
                        sym_op->cipher.data.length = options->test_buffer_size;

                sym_op->cipher.data.offset = 0;

                /* authentication parameters */
                if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
                        sym_op->auth.digest.data = test_vector->digest.data;
                        sym_op->auth.digest.phys_addr =
                                        test_vector->digest.phys_addr;
                        sym_op->auth.digest.length = options->auth_digest_sz;
                } else {

                        uint32_t offset = options->test_buffer_size;
                        struct rte_mbuf *buf, *tbuf;

                        if (options->out_of_place) {
                                buf = bufs_out[i];
                        } else {
                                tbuf = bufs_in[i];
                                while ((tbuf->next != NULL) &&
                                                (offset >= tbuf->data_len)) {
                                        offset -= tbuf->data_len;
                                        tbuf = tbuf->next;
                                }
                                buf = tbuf;
                        }

                        sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
                                        uint8_t *, offset);
                        sym_op->auth.digest.phys_addr =
                                        rte_pktmbuf_mtophys_offset(buf, offset);
                        sym_op->auth.digest.length = options->auth_digest_sz;
                        sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
                        sym_op->auth.aad.data = test_vector->aad.data;
                        sym_op->auth.aad.length = options->auth_aad_sz;
                }

                if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
                        sym_op->auth.data.length = options->test_buffer_size << 3;
                else
                        sym_op->auth.data.length = options->test_buffer_size;

                sym_op->auth.data.offset = 0;
        }

        return 0;
}

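/*
 * Populate AEAD (AES-GCM) ops: the AAD is read from the start of the
 * source mbuf, the cipher region starts at a 16-byte aligned offset past
 * the AAD, and the digest is either taken from the test vector (verify)
 * or placed right after the payload (generate), walking the mbuf chain
 * to the segment that holds that offset.
 */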
static int
cperf_set_ops_aead(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* cipher parameters */
                sym_op->cipher.iv.data = test_vector->iv.data;
                sym_op->cipher.iv.phys_addr = test_vector->iv.phys_addr;
                sym_op->cipher.iv.length = test_vector->iv.length;

                sym_op->cipher.data.length = options->test_buffer_size;
                sym_op->cipher.data.offset =
                                RTE_ALIGN_CEIL(options->auth_aad_sz, 16);

                sym_op->auth.aad.data = rte_pktmbuf_mtod(bufs_in[i], uint8_t *);
                sym_op->auth.aad.phys_addr = rte_pktmbuf_mtophys(bufs_in[i]);
                sym_op->auth.aad.length = options->auth_aad_sz;

                /* authentication parameters */
                if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
                        sym_op->auth.digest.data = test_vector->digest.data;
                        sym_op->auth.digest.phys_addr =
                                        test_vector->digest.phys_addr;
                        sym_op->auth.digest.length = options->auth_digest_sz;
                } else {

                        uint32_t offset = sym_op->cipher.data.length +
                                                sym_op->cipher.data.offset;
                        struct rte_mbuf *buf, *tbuf;

                        if (options->out_of_place) {
                                buf = bufs_out[i];
                        } else {
                                tbuf = bufs_in[i];
                                while ((tbuf->next != NULL) &&
                                                (offset >= tbuf->data_len)) {
                                        offset -= tbuf->data_len;
                                        tbuf = tbuf->next;
                                }
                                buf = tbuf;
                        }

                        sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
                                        uint8_t *, offset);
                        sym_op->auth.digest.phys_addr =
                                        rte_pktmbuf_mtophys_offset(buf, offset);

                        sym_op->auth.digest.length = options->auth_digest_sz;
                }

                sym_op->auth.data.length = options->test_buffer_size;
                sym_op->auth.data.offset = options->auth_aad_sz;
        }

        return 0;
}

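/*
 * Build the cipher and/or auth transforms from the test options and the
 * test vector and create a symmetric session on the given device. For
 * AES-GCM the transform chain order follows the cipher direction
 * (cipher -> auth on encrypt, auth -> cipher on decrypt); otherwise it
 * follows the requested operation type.
 */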
static struct rte_cryptodev_sym_session *
cperf_create_session(uint8_t dev_id,
        const struct cperf_options *options,
        const struct cperf_test_vector *test_vector)
{
        struct rte_crypto_sym_xform cipher_xform;
        struct rte_crypto_sym_xform auth_xform;
        struct rte_cryptodev_sym_session *sess = NULL;

        /*
         * cipher only
         */
        if (options->op_type == CPERF_CIPHER_ONLY) {
                cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
                cipher_xform.next = NULL;
                cipher_xform.cipher.algo = options->cipher_algo;
                cipher_xform.cipher.op = options->cipher_op;

                /* cipher different than null */
                if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
                        cipher_xform.cipher.key.data =
                                        test_vector->cipher_key.data;
                        cipher_xform.cipher.key.length =
                                        test_vector->cipher_key.length;
                } else {
                        cipher_xform.cipher.key.data = NULL;
                        cipher_xform.cipher.key.length = 0;
                }
                /* create crypto session */
                sess = rte_cryptodev_sym_session_create(dev_id, &cipher_xform);
        /*
         * auth only
         */
        } else if (options->op_type == CPERF_AUTH_ONLY) {
                auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
                auth_xform.next = NULL;
                auth_xform.auth.algo = options->auth_algo;
                auth_xform.auth.op = options->auth_op;

                /* auth different than null */
                if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
                        auth_xform.auth.digest_length =
                                        options->auth_digest_sz;
                        auth_xform.auth.add_auth_data_length =
                                        options->auth_aad_sz;
                        auth_xform.auth.key.length =
                                        test_vector->auth_key.length;
                        auth_xform.auth.key.data = test_vector->auth_key.data;
                } else {
                        auth_xform.auth.digest_length = 0;
                        auth_xform.auth.add_auth_data_length = 0;
                        auth_xform.auth.key.length = 0;
                        auth_xform.auth.key.data = NULL;
                }
                /* create crypto session */
                sess = rte_cryptodev_sym_session_create(dev_id, &auth_xform);
        /*
         * cipher and auth
         */
        } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
                        || options->op_type == CPERF_AUTH_THEN_CIPHER
                        || options->op_type == CPERF_AEAD) {

                /*
                 * cipher
                 */
                cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
                cipher_xform.next = NULL;
                cipher_xform.cipher.algo = options->cipher_algo;
                cipher_xform.cipher.op = options->cipher_op;

                /* cipher different than null */
                if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
                        cipher_xform.cipher.key.data =
                                        test_vector->cipher_key.data;
                        cipher_xform.cipher.key.length =
                                        test_vector->cipher_key.length;
                } else {
                        cipher_xform.cipher.key.data = NULL;
                        cipher_xform.cipher.key.length = 0;
                }

                /*
                 * auth
                 */
                auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
                auth_xform.next = NULL;
                auth_xform.auth.algo = options->auth_algo;
                auth_xform.auth.op = options->auth_op;

                /* auth different than null */
                if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
                        auth_xform.auth.digest_length = options->auth_digest_sz;
                        auth_xform.auth.add_auth_data_length =
                                        options->auth_aad_sz;
                        /* auth options for aes gcm */
                        if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
                                options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM) {
                                auth_xform.auth.key.length = 0;
                                auth_xform.auth.key.data = NULL;
                        } else { /* auth options for others */
                                auth_xform.auth.key.length =
                                        test_vector->auth_key.length;
                                auth_xform.auth.key.data =
                                                test_vector->auth_key.data;
                        }
                } else {
                        auth_xform.auth.digest_length = 0;
                        auth_xform.auth.add_auth_data_length = 0;
                        auth_xform.auth.key.length = 0;
                        auth_xform.auth.key.data = NULL;
                }

                /* create crypto session for aes gcm */
                if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM) {
                        if (options->cipher_op ==
                                        RTE_CRYPTO_CIPHER_OP_ENCRYPT) {
                                cipher_xform.next = &auth_xform;
                                /* create crypto session */
                                sess = rte_cryptodev_sym_session_create(dev_id,
                                        &cipher_xform);
                        } else { /* decrypt */
                                auth_xform.next = &cipher_xform;
                                /* create crypto session */
                                sess = rte_cryptodev_sym_session_create(dev_id,
                                        &auth_xform);
                        }
                } else { /* create crypto session for other */
                        /* cipher then auth */
                        if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
                                cipher_xform.next = &auth_xform;
                                /* create crypto session */
                                sess = rte_cryptodev_sym_session_create(dev_id,
                                                &cipher_xform);
                        } else { /* auth then cipher */
                                auth_xform.next = &cipher_xform;
                                /* create crypto session */
                                sess = rte_cryptodev_sym_session_create(dev_id,
                                                &auth_xform);
                        }
                }
        }
        return sess;
}

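/*
 * Select the op-population callback that matches the requested operation
 * type and algorithms; the session constructor is common to all cases.
 * Returns 0 on success, -1 for an unknown operation type.
 */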
int
cperf_get_op_functions(const struct cperf_options *options,
                struct cperf_op_fns *op_fns)
{
        memset(op_fns, 0, sizeof(struct cperf_op_fns));

        op_fns->sess_create = cperf_create_session;

        if (options->op_type == CPERF_AEAD
                        || options->op_type == CPERF_AUTH_THEN_CIPHER
                        || options->op_type == CPERF_CIPHER_THEN_AUTH) {
                if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
                                options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM)
                        op_fns->populate_ops = cperf_set_ops_aead;
                else
                        op_fns->populate_ops = cperf_set_ops_cipher_auth;
                return 0;
        }
        if (options->op_type == CPERF_AUTH_ONLY) {
                if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
                        op_fns->populate_ops = cperf_set_ops_null_auth;
                else
                        op_fns->populate_ops = cperf_set_ops_auth;
                return 0;
        }
        if (options->op_type == CPERF_CIPHER_ONLY) {
                if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
                        op_fns->populate_ops = cperf_set_ops_null_cipher;
                else
                        op_fns->populate_ops = cperf_set_ops_cipher;
                return 0;
        }

        return -1;
}