app/test-crypto-perf/cperf_ops.c
/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2016-2017 Intel Corporation. All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <string.h>	/* memset() */

#include <rte_cryptodev.h>

#include "cperf_ops.h"
#include "cperf_test_vectors.h"

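/*
 * Populate ops for a NULL cipher: attach the session, wire up the
 * source/destination mbufs and cover the whole test buffer at offset 0.
 */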
static int
cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector __rte_unused)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* cipher parameters */
                sym_op->cipher.data.length = options->test_buffer_size;
                sym_op->cipher.data.offset = 0;
        }

        return 0;
}

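/*
 * Populate ops for NULL authentication: same wiring as the NULL cipher
 * case, with the length/offset set on the auth portion of the op instead.
 */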
static int
cperf_set_ops_null_auth(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector __rte_unused)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* auth parameters */
                sym_op->auth.data.length = options->test_buffer_size;
                sym_op->auth.data.offset = 0;
        }

        return 0;
}

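/*
 * Populate ops for cipher-only tests: take the IV from the test vector and
 * set the cipher length in bits for SNOW 3G/KASUMI/ZUC, in bytes otherwise.
 */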
static int
cperf_set_ops_cipher(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* cipher parameters */
                sym_op->cipher.iv.data = test_vector->iv.data;
                sym_op->cipher.iv.phys_addr = test_vector->iv.phys_addr;
                sym_op->cipher.iv.length = test_vector->iv.length;

                if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
                        sym_op->cipher.data.length = options->test_buffer_size << 3;
                else
                        sym_op->cipher.data.length = options->test_buffer_size;

                sym_op->cipher.data.offset = 0;
        }

        return 0;
}

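/*
 * Populate ops for auth-only tests. For verify operations the digest comes
 * from the test vector; for generate operations it is placed right after the
 * payload, walking the mbuf chain to find the segment holding that offset.
 */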
static int
cperf_set_ops_auth(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* authentication parameters */
                if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
                        sym_op->auth.digest.data = test_vector->digest.data;
                        sym_op->auth.digest.phys_addr =
                                        test_vector->digest.phys_addr;
                        sym_op->auth.digest.length = options->auth_digest_sz;
                } else {

                        uint32_t offset = options->test_buffer_size;
                        struct rte_mbuf *buf, *tbuf;

                        if (options->out_of_place) {
                                buf = bufs_out[i];
                        } else {
                                buf = bufs_in[i];

                                tbuf = buf;
                                while ((tbuf->next != NULL) &&
                                                (offset >= tbuf->data_len)) {
                                        offset -= tbuf->data_len;
                                        tbuf = tbuf->next;
                                }
                        }

                        sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
                                        uint8_t *, offset);
                        sym_op->auth.digest.phys_addr =
                                        rte_pktmbuf_mtophys_offset(buf, offset);
                        sym_op->auth.digest.length = options->auth_digest_sz;
                        sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
                        sym_op->auth.aad.data = test_vector->aad.data;
                        sym_op->auth.aad.length = options->auth_aad_sz;
                }

                if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
                        sym_op->auth.data.length = options->test_buffer_size << 3;
                else
                        sym_op->auth.data.length = options->test_buffer_size;

                sym_op->auth.data.offset = 0;
        }

        return 0;
}

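/*
 * Populate ops for chained cipher + auth tests: a combination of the
 * cipher-only and auth-only cases above applied to the same op.
 */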
static int
cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* cipher parameters */
                sym_op->cipher.iv.data = test_vector->iv.data;
                sym_op->cipher.iv.phys_addr = test_vector->iv.phys_addr;
                sym_op->cipher.iv.length = test_vector->iv.length;

                if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
                                options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
                        sym_op->cipher.data.length = options->test_buffer_size << 3;
                else
                        sym_op->cipher.data.length = options->test_buffer_size;

                sym_op->cipher.data.offset = 0;

                /* authentication parameters */
                if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
                        sym_op->auth.digest.data = test_vector->digest.data;
                        sym_op->auth.digest.phys_addr =
                                        test_vector->digest.phys_addr;
                        sym_op->auth.digest.length = options->auth_digest_sz;
                } else {

                        uint32_t offset = options->test_buffer_size;
                        struct rte_mbuf *buf, *tbuf;

                        if (options->out_of_place) {
                                buf = bufs_out[i];
                        } else {
                                buf = bufs_in[i];

                                tbuf = buf;
                                while ((tbuf->next != NULL) &&
                                                (offset >= tbuf->data_len)) {
                                        offset -= tbuf->data_len;
                                        tbuf = tbuf->next;
                                }
                        }

                        sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
                                        uint8_t *, offset);
                        sym_op->auth.digest.phys_addr =
                                        rte_pktmbuf_mtophys_offset(buf, offset);
                        sym_op->auth.digest.length = options->auth_digest_sz;
                        sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
                        sym_op->auth.aad.data = test_vector->aad.data;
                        sym_op->auth.aad.length = options->auth_aad_sz;
                }

                if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
                                options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
                        sym_op->auth.data.length = options->test_buffer_size << 3;
                else
                        sym_op->auth.data.length = options->test_buffer_size;

                sym_op->auth.data.offset = 0;
        }

        return 0;
}

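/*
 * Populate ops for AEAD (AES-GCM) tests: the AAD sits at the start of the
 * mbuf, the ciphered data follows at a 16-byte aligned offset and the digest
 * is appended after the payload (or taken from the test vector on verify).
 */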
static int
cperf_set_ops_aead(struct rte_crypto_op **ops,
                struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
                uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
                const struct cperf_options *options,
                const struct cperf_test_vector *test_vector)
{
        uint16_t i;

        for (i = 0; i < nb_ops; i++) {
                struct rte_crypto_sym_op *sym_op = ops[i]->sym;

                rte_crypto_op_attach_sym_session(ops[i], sess);

                sym_op->m_src = bufs_in[i];
                sym_op->m_dst = bufs_out[i];

                /* cipher parameters */
                sym_op->cipher.iv.data = test_vector->iv.data;
                sym_op->cipher.iv.phys_addr = test_vector->iv.phys_addr;
                sym_op->cipher.iv.length = test_vector->iv.length;

                sym_op->cipher.data.length = options->test_buffer_size;
                sym_op->cipher.data.offset =
                                RTE_ALIGN_CEIL(options->auth_aad_sz, 16);

                sym_op->auth.aad.data = rte_pktmbuf_mtod(bufs_in[i], uint8_t *);
                sym_op->auth.aad.phys_addr = rte_pktmbuf_mtophys(bufs_in[i]);
                sym_op->auth.aad.length = options->auth_aad_sz;

                /* authentication parameters */
                if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
                        sym_op->auth.digest.data = test_vector->digest.data;
                        sym_op->auth.digest.phys_addr =
                                        test_vector->digest.phys_addr;
                        sym_op->auth.digest.length = options->auth_digest_sz;
                } else {

                        uint32_t offset = sym_op->cipher.data.length +
                                                sym_op->cipher.data.offset;
                        struct rte_mbuf *buf, *tbuf;

                        if (options->out_of_place) {
                                buf = bufs_out[i];
                        } else {
                                buf = bufs_in[i];

                                tbuf = buf;
                                while ((tbuf->next != NULL) &&
                                                (offset >= tbuf->data_len)) {
                                        offset -= tbuf->data_len;
                                        tbuf = tbuf->next;
                                }
                        }

                        sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
                                        uint8_t *, offset);
                        sym_op->auth.digest.phys_addr =
                                        rte_pktmbuf_mtophys_offset(buf, offset);
                        sym_op->auth.digest.length = options->auth_digest_sz;
                }

                sym_op->auth.data.length = options->test_buffer_size;
                sym_op->auth.data.offset = options->auth_aad_sz;
        }

        return 0;
}

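/*
 * Build the cipher/auth transform chain for the requested operation type and
 * create the symmetric session on the given device.
 */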
static struct rte_cryptodev_sym_session *
cperf_create_session(uint8_t dev_id,
        const struct cperf_options *options,
        const struct cperf_test_vector *test_vector)
{
        struct rte_crypto_sym_xform cipher_xform;
        struct rte_crypto_sym_xform auth_xform;
        struct rte_cryptodev_sym_session *sess = NULL;

        /*
         * cipher only
         */
        if (options->op_type == CPERF_CIPHER_ONLY) {
                cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
                cipher_xform.next = NULL;
                cipher_xform.cipher.algo = options->cipher_algo;
                cipher_xform.cipher.op = options->cipher_op;

                /* cipher different than null */
                if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
                        cipher_xform.cipher.key.data =
                                        test_vector->cipher_key.data;
                        cipher_xform.cipher.key.length =
                                        test_vector->cipher_key.length;
                } else {
                        cipher_xform.cipher.key.data = NULL;
                        cipher_xform.cipher.key.length = 0;
                }
                /* create crypto session */
                sess = rte_cryptodev_sym_session_create(dev_id, &cipher_xform);
        /*
         * auth only
         */
        } else if (options->op_type == CPERF_AUTH_ONLY) {
                auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
                auth_xform.next = NULL;
                auth_xform.auth.algo = options->auth_algo;
                auth_xform.auth.op = options->auth_op;

                /* auth different than null */
                if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
                        auth_xform.auth.digest_length =
                                        options->auth_digest_sz;
                        auth_xform.auth.add_auth_data_length =
                                        options->auth_aad_sz;
                        auth_xform.auth.key.length =
                                        test_vector->auth_key.length;
                        auth_xform.auth.key.data = test_vector->auth_key.data;
                } else {
                        auth_xform.auth.digest_length = 0;
                        auth_xform.auth.add_auth_data_length = 0;
                        auth_xform.auth.key.length = 0;
                        auth_xform.auth.key.data = NULL;
                }
                /* create crypto session */
                sess = rte_cryptodev_sym_session_create(dev_id, &auth_xform);
        /*
         * cipher and auth
         */
        } else if (options->op_type == CPERF_CIPHER_THEN_AUTH
                        || options->op_type == CPERF_AUTH_THEN_CIPHER
                        || options->op_type == CPERF_AEAD) {

                /*
                 * cipher
                 */
                cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
                cipher_xform.next = NULL;
                cipher_xform.cipher.algo = options->cipher_algo;
                cipher_xform.cipher.op = options->cipher_op;

                /* cipher different than null */
                if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
                        cipher_xform.cipher.key.data =
                                        test_vector->cipher_key.data;
                        cipher_xform.cipher.key.length =
                                        test_vector->cipher_key.length;
                } else {
                        cipher_xform.cipher.key.data = NULL;
                        cipher_xform.cipher.key.length = 0;
                }

                /*
                 * auth
                 */
                auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
                auth_xform.next = NULL;
                auth_xform.auth.algo = options->auth_algo;
                auth_xform.auth.op = options->auth_op;

                /* auth different than null */
                if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
                        auth_xform.auth.digest_length = options->auth_digest_sz;
                        auth_xform.auth.add_auth_data_length =
                                        options->auth_aad_sz;
                        /* auth options for aes gcm */
                        if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
                                options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM) {
                                auth_xform.auth.key.length = 0;
                                auth_xform.auth.key.data = NULL;
                        } else { /* auth options for others */
                                auth_xform.auth.key.length =
                                        test_vector->auth_key.length;
                                auth_xform.auth.key.data =
                                                test_vector->auth_key.data;
                        }
                } else {
                        auth_xform.auth.digest_length = 0;
                        auth_xform.auth.add_auth_data_length = 0;
                        auth_xform.auth.key.length = 0;
                        auth_xform.auth.key.data = NULL;
                }

                /* create crypto session for aes gcm */
                if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM) {
                        if (options->cipher_op ==
                                        RTE_CRYPTO_CIPHER_OP_ENCRYPT) {
                                cipher_xform.next = &auth_xform;
                                /* create crypto session */
                                sess = rte_cryptodev_sym_session_create(dev_id,
                                        &cipher_xform);
                        } else { /* decrypt */
                                auth_xform.next = &cipher_xform;
                                /* create crypto session */
                                sess = rte_cryptodev_sym_session_create(dev_id,
                                        &auth_xform);
                        }
                } else { /* create crypto session for other */
                        /* cipher then auth */
                        if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
                                cipher_xform.next = &auth_xform;
                                /* create crypto session */
                                sess = rte_cryptodev_sym_session_create(dev_id,
                                                &cipher_xform);
                        } else { /* auth then cipher */
                                auth_xform.next = &cipher_xform;
                                /* create crypto session */
                                sess = rte_cryptodev_sym_session_create(dev_id,
                                                &auth_xform);
                        }
                }
        }
        return sess;
}

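/*
 * Select the session-creation and op-population callbacks matching the
 * requested operation type and algorithms.
 */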
int
cperf_get_op_functions(const struct cperf_options *options,
                struct cperf_op_fns *op_fns)
{
        memset(op_fns, 0, sizeof(struct cperf_op_fns));

        op_fns->sess_create = cperf_create_session;

        if (options->op_type == CPERF_AEAD
                        || options->op_type == CPERF_AUTH_THEN_CIPHER
                        || options->op_type == CPERF_CIPHER_THEN_AUTH) {
                if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
                                options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM)
                        op_fns->populate_ops = cperf_set_ops_aead;
                else
                        op_fns->populate_ops = cperf_set_ops_cipher_auth;
                return 0;
        }
        if (options->op_type == CPERF_AUTH_ONLY) {
                if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
                        op_fns->populate_ops = cperf_set_ops_null_auth;
                else
                        op_fns->populate_ops = cperf_set_ops_auth;
                return 0;
        }
        if (options->op_type == CPERF_CIPHER_ONLY) {
                if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
                        op_fns->populate_ops = cperf_set_ops_null_cipher;
                else
                        op_fns->populate_ops = cperf_set_ops_cipher;
                return 0;
        }

        return -1;
}