/*
 * Copyright (c) 2019 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef included_vnet_crypto_crypto_h
#define included_vnet_crypto_crypto_h

#include <vlib/vlib.h>

/* maximum number of elements carried by one async crypto frame */
#define VNET_CRYPTO_FRAME_SIZE 64
#define VNET_CRYPTO_FRAME_POOL_SIZE 1024

/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */
#define foreach_crypto_cipher_alg \
  _(DES_CBC, "des-cbc", 7) \
  _(3DES_CBC, "3des-cbc", 24) \
  _(AES_128_CBC, "aes-128-cbc", 16) \
  _(AES_192_CBC, "aes-192-cbc", 24) \
  _(AES_256_CBC, "aes-256-cbc", 32) \
  _(AES_128_CTR, "aes-128-ctr", 16) \
  _(AES_192_CTR, "aes-192-ctr", 24) \
  _(AES_256_CTR, "aes-256-ctr", 32)

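/*
 * The foreach_* lists in this file are X-macros: a consumer defines _() to
 * emit one entry per algorithm, expands the list, then undefines _(). A
 * minimal sketch of the pattern (the MY_CIPHER_* names are hypothetical,
 * not part of this API):
 *
 *   #define _(id, name, key_len) MY_CIPHER_##id,
 *   typedef enum { foreach_crypto_cipher_alg } my_cipher_t;
 *   #undef _
 */
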
/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */
#define foreach_crypto_aead_alg \
  _(AES_128_GCM, "aes-128-gcm", 16) \
  _(AES_192_GCM, "aes-192-gcm", 24) \
  _(AES_256_GCM, "aes-256-gcm", 32) \
  _(CHACHA20_POLY1305, "chacha20-poly1305", 32)

/* CRYPTO_ID, PRETTY_NAME */
#define foreach_crypto_hash_alg \
  _ (SHA1, "sha-1") \
  _ (SHA224, "sha-224") \
  _ (SHA256, "sha-256") \
  _ (SHA384, "sha-384") \
  _ (SHA512, "sha-512")

/* CRYPTO_ID, PRETTY_NAME */
#define foreach_crypto_hmac_alg \
  _(MD5, "md5") \
  _(SHA1, "sha-1") \
  _(SHA224, "sha-224") \
  _(SHA256, "sha-256") \
  _(SHA384, "sha-384") \
  _(SHA512, "sha-512")

#define foreach_crypto_op_type \
  _ (ENCRYPT, "encrypt") \
  _ (DECRYPT, "decrypt") \
  _ (AEAD_ENCRYPT, "aead-encrypt") \
  _ (AEAD_DECRYPT, "aead-decrypt") \
  _ (HMAC, "hmac") \
  _ (HASH, "hash")

typedef enum
{
#define _(n, s) VNET_CRYPTO_OP_TYPE_##n,
  foreach_crypto_op_type
#undef _
  VNET_CRYPTO_OP_N_TYPES,
} vnet_crypto_op_type_t;

#define foreach_crypto_op_status \
  _(PENDING, "pending") \
  _(WORK_IN_PROGRESS, "work-in-progress") \
  _(COMPLETED, "completed") \
  _(FAIL_NO_HANDLER, "no-handler") \
  _(FAIL_BAD_HMAC, "bad-hmac") \
  _(FAIL_ENGINE_ERR, "engine-error")

/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, TAG_LEN, AAD_LEN */
#define foreach_crypto_aead_async_alg \
  _ (AES_128_GCM, "aes-128-gcm-aad8", 16, 16, 8) \
  _ (AES_128_GCM, "aes-128-gcm-aad12", 16, 16, 12) \
  _ (AES_192_GCM, "aes-192-gcm-aad8", 24, 16, 8) \
  _ (AES_192_GCM, "aes-192-gcm-aad12", 24, 16, 12) \
  _ (AES_256_GCM, "aes-256-gcm-aad8", 32, 16, 8) \
  _ (AES_256_GCM, "aes-256-gcm-aad12", 32, 16, 12) \
  _ (CHACHA20_POLY1305, "chacha20-poly1305-aad8", 32, 16, 8) \
  _ (CHACHA20_POLY1305, "chacha20-poly1305-aad12", 32, 16, 12) \
  _ (CHACHA20_POLY1305, "chacha20-poly1305", 32, 16, 0)

/* CRYPTO_ID, INTEG_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, DIGEST_LEN */
#define foreach_crypto_link_async_alg \
  _ (3DES_CBC, MD5, "3des-cbc-hmac-md5", 24, 12) \
  _ (AES_128_CBC, MD5, "aes-128-cbc-hmac-md5", 16, 12) \
  _ (AES_192_CBC, MD5, "aes-192-cbc-hmac-md5", 24, 12) \
  _ (AES_256_CBC, MD5, "aes-256-cbc-hmac-md5", 32, 12) \
  _ (3DES_CBC, SHA1, "3des-cbc-hmac-sha-1", 24, 12) \
  _ (AES_128_CBC, SHA1, "aes-128-cbc-hmac-sha-1", 16, 12) \
  _ (AES_192_CBC, SHA1, "aes-192-cbc-hmac-sha-1", 24, 12) \
  _ (AES_256_CBC, SHA1, "aes-256-cbc-hmac-sha-1", 32, 12) \
  _ (3DES_CBC, SHA224, "3des-cbc-hmac-sha-224", 24, 14) \
  _ (AES_128_CBC, SHA224, "aes-128-cbc-hmac-sha-224", 16, 14) \
  _ (AES_192_CBC, SHA224, "aes-192-cbc-hmac-sha-224", 24, 14) \
  _ (AES_256_CBC, SHA224, "aes-256-cbc-hmac-sha-224", 32, 14) \
  _ (3DES_CBC, SHA256, "3des-cbc-hmac-sha-256", 24, 16) \
  _ (AES_128_CBC, SHA256, "aes-128-cbc-hmac-sha-256", 16, 16) \
  _ (AES_192_CBC, SHA256, "aes-192-cbc-hmac-sha-256", 24, 16) \
  _ (AES_256_CBC, SHA256, "aes-256-cbc-hmac-sha-256", 32, 16) \
  _ (3DES_CBC, SHA384, "3des-cbc-hmac-sha-384", 24, 24) \
  _ (AES_128_CBC, SHA384, "aes-128-cbc-hmac-sha-384", 16, 24) \
  _ (AES_192_CBC, SHA384, "aes-192-cbc-hmac-sha-384", 24, 24) \
  _ (AES_256_CBC, SHA384, "aes-256-cbc-hmac-sha-384", 32, 24) \
  _ (3DES_CBC, SHA512, "3des-cbc-hmac-sha-512", 24, 32) \
  _ (AES_128_CBC, SHA512, "aes-128-cbc-hmac-sha-512", 16, 32) \
  _ (AES_192_CBC, SHA512, "aes-192-cbc-hmac-sha-512", 24, 32) \
  _ (AES_256_CBC, SHA512, "aes-256-cbc-hmac-sha-512", 32, 32) \
  _ (AES_128_CTR, SHA1, "aes-128-ctr-hmac-sha-1", 16, 12) \
  _ (AES_192_CTR, SHA1, "aes-192-ctr-hmac-sha-1", 24, 12) \
  _ (AES_256_CTR, SHA1, "aes-256-ctr-hmac-sha-1", 32, 12)

#define foreach_crypto_async_op_type \
  _(ENCRYPT, "async-encrypt") \
  _(DECRYPT, "async-decrypt")

typedef enum
{
  VNET_CRYPTO_KEY_OP_ADD,
  VNET_CRYPTO_KEY_OP_DEL,
  VNET_CRYPTO_KEY_OP_MODIFY,
} vnet_crypto_key_op_t;

typedef enum
{
#define _(n, s) VNET_CRYPTO_OP_STATUS_##n,
  foreach_crypto_op_status
#undef _
  VNET_CRYPTO_OP_N_STATUS,
} vnet_crypto_op_status_t;

typedef enum
{
  VNET_CRYPTO_ALG_NONE = 0,
#define _(n, s, l) VNET_CRYPTO_ALG_##n,
  foreach_crypto_cipher_alg foreach_crypto_aead_alg
#undef _
#define _(n, s) VNET_CRYPTO_ALG_HMAC_##n,
  foreach_crypto_hmac_alg
#undef _
#define _(n, s) VNET_CRYPTO_ALG_HASH_##n,
  foreach_crypto_hash_alg
#undef _
  VNET_CRYPTO_N_ALGS,
} vnet_crypto_alg_t;

typedef enum
{
#define _(n, s) VNET_CRYPTO_ASYNC_OP_TYPE_##n,
  foreach_crypto_async_op_type
#undef _
  VNET_CRYPTO_ASYNC_OP_N_TYPES,
} vnet_crypto_async_op_type_t;

typedef enum
{
  VNET_CRYPTO_ASYNC_ALG_NONE = 0,
#define _(n, s, k, t, a) \
  VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a,
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k, d) \
  VNET_CRYPTO_ALG_##c##_##h##_TAG##d,
  foreach_crypto_link_async_alg
#undef _
  VNET_CRYPTO_N_ASYNC_ALGS,
} vnet_crypto_async_alg_t;

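/*
 * Example of the generated identifiers: the list entry
 *   _ (AES_128_GCM, "aes-128-gcm-aad8", 16, 16, 8)
 * expands here to VNET_CRYPTO_ALG_AES_128_GCM_TAG16_AAD8, and the linked
 * entry
 *   _ (AES_128_CBC, SHA1, "aes-128-cbc-hmac-sha-1", 16, 12)
 * to VNET_CRYPTO_ALG_AES_128_CBC_SHA1_TAG12.
 */
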
typedef enum
{
  VNET_CRYPTO_ASYNC_OP_NONE = 0,
#define _(n, s, k, t, a) \
  VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \
  VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC,
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k, d) \
  VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \
  VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC,
  foreach_crypto_link_async_alg
#undef _
  VNET_CRYPTO_ASYNC_OP_N_IDS,
} vnet_crypto_async_op_id_t;

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  union
  {
    struct
    {
      /* simple (data) key */
      u8 *data;
      vnet_crypto_alg_t alg:8;
    };
    struct
    {
      /* linked key: references two previously added keys (cipher + integ) */
      u32 index_crypto;
      u32 index_integ;
      vnet_crypto_async_alg_t async_alg:8;
    };
  };
#define VNET_CRYPTO_KEY_TYPE_DATA 0
#define VNET_CRYPTO_KEY_TYPE_LINK 1
  u8 type;
} vnet_crypto_key_t;

typedef enum
{
  VNET_CRYPTO_OP_NONE = 0,
#define _(n, s, l) VNET_CRYPTO_OP_##n##_ENC, VNET_CRYPTO_OP_##n##_DEC,
  foreach_crypto_cipher_alg foreach_crypto_aead_alg
#undef _
#define _(n, s) VNET_CRYPTO_OP_##n##_HMAC,
  foreach_crypto_hmac_alg
#undef _
#define _(n, s) VNET_CRYPTO_OP_##n##_HASH,
  foreach_crypto_hash_alg
#undef _
  VNET_CRYPTO_N_OP_IDS,
} vnet_crypto_op_id_t;

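/*
 * Example: for AES_128_CBC the expansions above yield the op ids
 * VNET_CRYPTO_OP_AES_128_CBC_ENC and VNET_CRYPTO_OP_AES_128_CBC_DEC;
 * for HMAC-SHA256 they yield VNET_CRYPTO_OP_SHA256_HMAC.
 */
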
typedef enum
{
  CRYPTO_OP_SIMPLE,
  CRYPTO_OP_CHAINED,
  CRYPTO_OP_BOTH,
} crypto_op_class_type_t;

typedef struct
{
  char *name;
  vnet_crypto_op_id_t op_by_type[VNET_CRYPTO_OP_N_TYPES];
} vnet_crypto_alg_data_t;

typedef struct
{
  u8 *src;
  u8 *dst;
  u32 len;
} vnet_crypto_op_chunk_t;

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  uword user_data;
  vnet_crypto_op_id_t op:16;
  vnet_crypto_op_status_t status:8;
  u8 flags;
#define VNET_CRYPTO_OP_FLAG_HMAC_CHECK (1 << 0)
#define VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS (1 << 1)

  union
  {
    u8 digest_len;
    u8 tag_len;
  };
  u16 aad_len;
  u32 key_index;

  union
  {
    struct
    {
      u8 *src;
      u8 *dst;

      union
      {
	/* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is not set */
	u32 len;
	/* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is set */
	u16 n_chunks;
      };
    };
    /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is set */
    u32 chunk_index;
  };

  u8 *iv;
  u8 *aad;

  union
  {
    u8 *tag;
    u8 *digest;
  };
} vnet_crypto_op_t;

STATIC_ASSERT_SIZEOF (vnet_crypto_op_t, CLIB_CACHE_LINE_BYTES);

typedef struct
{
  vnet_crypto_op_type_t type;
  vnet_crypto_alg_t alg;
  u32 active_engine_index_simple;
  u32 active_engine_index_chained;
} vnet_crypto_op_data_t;

typedef struct
{
  vnet_crypto_async_op_type_t type;
  vnet_crypto_async_alg_t alg;
  u32 active_engine_index_async;
} vnet_crypto_async_op_data_t;

typedef struct
{
  char *name;
  vnet_crypto_async_op_id_t op_by_type[VNET_CRYPTO_ASYNC_OP_N_TYPES];
} vnet_crypto_async_alg_data_t;

typedef struct
{
  u8 *iv;
  union
  {
    u8 *digest;
    u8 *tag;
  };
  u8 *aad;
  u32 key_index;
  u32 crypto_total_length;
  i16 crypto_start_offset; /* first buffer offset */
  i16 integ_start_offset;
  /* adjust total_length for integ, e.g. 4 bytes for IPSec ESN */
  i16 integ_length_adj;
  vnet_crypto_op_status_t status : 8;
  u8 flags; /**< share same VNET_CRYPTO_OP_FLAG_* values */
} vnet_crypto_async_frame_elt_t;

/* Assert the size so the compiler will warn us when it changes */
STATIC_ASSERT_SIZEOF (vnet_crypto_async_frame_elt_t, 5 * sizeof (u64));

typedef enum vnet_crypto_async_frame_state_t_
{
  VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED,
  /* frame waiting to be processed */
  VNET_CRYPTO_FRAME_STATE_PENDING,
  VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS,
  VNET_CRYPTO_FRAME_STATE_SUCCESS,
  VNET_CRYPTO_FRAME_STATE_ELT_ERROR
} __clib_packed vnet_crypto_async_frame_state_t;

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  vnet_crypto_async_frame_state_t state;
  vnet_crypto_async_op_id_t op:8;
  u16 n_elts;
  vnet_crypto_async_frame_elt_t elts[VNET_CRYPTO_FRAME_SIZE];
  u32 buffer_indices[VNET_CRYPTO_FRAME_SIZE];
  u16 next_node_index[VNET_CRYPTO_FRAME_SIZE];
  u32 enqueue_thread_index;
} vnet_crypto_async_frame_t;

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  vnet_crypto_async_frame_t *frame_pool;
} vnet_crypto_thread_t;

typedef u32 vnet_crypto_key_index_t;

typedef u32 (vnet_crypto_chained_ops_handler_t) (vlib_main_t * vm,
						 vnet_crypto_op_t * ops[],
						 vnet_crypto_op_chunk_t *
						 chunks, u32 n_ops);

typedef u32 (vnet_crypto_ops_handler_t) (vlib_main_t * vm,
					 vnet_crypto_op_t * ops[], u32 n_ops);

typedef void (vnet_crypto_key_handler_t) (vlib_main_t * vm,
					  vnet_crypto_key_op_t kop,
					  vnet_crypto_key_index_t idx);

/** async crypto function handlers **/
typedef int
  (vnet_crypto_frame_enqueue_t) (vlib_main_t * vm,
				 vnet_crypto_async_frame_t * frame);
typedef vnet_crypto_async_frame_t *
  (vnet_crypto_frame_dequeue_t) (vlib_main_t * vm, u32 * nb_elts_processed,
				 u32 * enqueue_thread_idx);

u32 vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio,
				 char *desc);

void vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index,
				       vnet_crypto_op_id_t opt,
				       vnet_crypto_ops_handler_t * oph);

void vnet_crypto_register_chained_ops_handler (vlib_main_t * vm,
					       u32 engine_index,
					       vnet_crypto_op_id_t opt,
					       vnet_crypto_chained_ops_handler_t
					       * oph);

void vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index,
					vnet_crypto_op_id_t opt,
					vnet_crypto_ops_handler_t * fn,
					vnet_crypto_chained_ops_handler_t *
					cfn);

void vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
				       vnet_crypto_key_handler_t * keyh);

/** async crypto register functions */
u32 vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name);

void
vnet_crypto_register_enqueue_handler (vlib_main_t *vm, u32 engine_index,
				      vnet_crypto_async_op_id_t opt,
				      vnet_crypto_frame_enqueue_t *enq_fn);

void
vnet_crypto_register_dequeue_handler (vlib_main_t *vm, u32 engine_index,
				      vnet_crypto_frame_dequeue_t *deq_fn);

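/*
 * Typical engine bootstrap (sketch only; the my_engine_* handler names are
 * illustrative, not part of this header):
 *
 *   u32 eidx;
 *   eidx = vnet_crypto_register_engine (vm, "my-engine", 100,
 *                                       "example engine");
 *   vnet_crypto_register_ops_handler (vm, eidx,
 *                                     VNET_CRYPTO_OP_AES_128_CBC_ENC,
 *                                     my_engine_aes_128_cbc_enc);
 *   vnet_crypto_register_key_handler (vm, eidx, my_engine_key_handler);
 */
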
typedef struct
{
  char *name;
  char *desc;
  int priority;
  vnet_crypto_key_handler_t *key_op_handler;
  vnet_crypto_ops_handler_t *ops_handlers[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_chained_ops_handler_t
    *chained_ops_handlers[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_frame_enqueue_t *enqueue_handlers[VNET_CRYPTO_ASYNC_OP_N_IDS];
  vnet_crypto_frame_dequeue_t *dequeue_handler;
} vnet_crypto_engine_t;

typedef struct
{
  u32 node_idx;
  u32 next_idx;
} vnet_crypto_async_next_node_t;

typedef struct
{
  vnet_crypto_alg_data_t *algs;
  vnet_crypto_thread_t *threads;
  vnet_crypto_ops_handler_t **ops_handlers;
  vnet_crypto_chained_ops_handler_t **chained_ops_handlers;
  vnet_crypto_frame_enqueue_t **enqueue_handlers;
  vnet_crypto_frame_dequeue_t **dequeue_handlers;
  vnet_crypto_op_data_t opt_data[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_async_op_data_t async_opt_data[VNET_CRYPTO_ASYNC_OP_N_IDS];
  vnet_crypto_engine_t *engines;
  vnet_crypto_key_t *keys;
  uword *engine_index_by_name;
  uword *alg_index_by_name;
  uword *async_alg_index_by_name;
  vnet_crypto_async_alg_data_t *async_algs;
  vnet_crypto_async_next_node_t *next_nodes;
  u32 crypto_node_index;
} vnet_crypto_main_t;

extern vnet_crypto_main_t crypto_main;

u32 vnet_crypto_process_chained_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
				     vnet_crypto_op_chunk_t * chunks,
				     u32 n_ops);
u32 vnet_crypto_process_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
			     u32 n_ops);

int vnet_crypto_set_handler2 (char *ops_handler_name, char *engine,
			      crypto_op_class_type_t oct);
int vnet_crypto_is_set_handler (vnet_crypto_alg_t alg);

u32 vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg,
			 u8 * data, u16 length);
void vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index);

/**
 * Use two previously added keys to generate a new key for linked algs
 * (cipher + integ). The returned key index is to be used for linked alg
 * operations only.
 */
u32 vnet_crypto_key_add_linked (vlib_main_t * vm,
				vnet_crypto_key_index_t index_crypto,
				vnet_crypto_key_index_t index_integ);

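/*
 * Sketch of linked-key creation (key data and lengths are illustrative):
 *
 *   u32 ci = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_AES_128_CBC,
 *                                 crypto_key_data, 16);
 *   u32 ii = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_HMAC_SHA256,
 *                                 integ_key_data, 32);
 *   u32 linked = vnet_crypto_key_add_linked (vm, ci, ii);
 *
 * 'linked' may then be used as the key index of async linked-alg frame
 * elements, but not for simple (data) key operations.
 */
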
int vnet_crypto_set_async_handler2 (char *alg_name, char *engine);

int vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t opt);

vnet_crypto_async_alg_t vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
					       vnet_crypto_alg_t integ_alg);

format_function_t format_vnet_crypto_alg;
format_function_t format_vnet_crypto_engine;
format_function_t format_vnet_crypto_op;
format_function_t format_vnet_crypto_op_type;
format_function_t format_vnet_crypto_op_status;
unformat_function_t unformat_vnet_crypto_alg;

format_function_t format_vnet_crypto_async_op;
format_function_t format_vnet_crypto_async_alg;
format_function_t format_vnet_crypto_async_op_type;

static_always_inline void
vnet_crypto_op_init (vnet_crypto_op_t * op, vnet_crypto_op_id_t type)
{
  if (CLIB_DEBUG > 0)
    clib_memset (op, 0xfe, sizeof (*op));
  op->op = type;
  op->flags = 0;
  op->key_index = ~0;
  op->n_chunks = 0;
}

static_always_inline vnet_crypto_op_type_t
vnet_crypto_get_op_type (vnet_crypto_op_id_t id)
{
  vnet_crypto_main_t *cm = &crypto_main;
  ASSERT (id < VNET_CRYPTO_N_OP_IDS);
  vnet_crypto_op_data_t *od = cm->opt_data + id;
  return od->type;
}

static_always_inline vnet_crypto_key_t *
vnet_crypto_get_key (vnet_crypto_key_index_t index)
{
  vnet_crypto_main_t *cm = &crypto_main;
  return vec_elt_at_index (cm->keys, index);
}

static_always_inline int
vnet_crypto_set_handler (char *alg_name, char *engine)
{
  return vnet_crypto_set_handler2 (alg_name, engine, CRYPTO_OP_BOTH);
}

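/*
 * Synchronous path usage (sketch; buffer and key setup elided, field names
 * as defined in vnet_crypto_op_t above):
 *
 *   vnet_crypto_op_t op;
 *   vnet_crypto_op_init (&op, VNET_CRYPTO_OP_AES_128_CBC_ENC);
 *   op.key_index = key_index;
 *   op.iv = iv;
 *   op.src = plaintext;
 *   op.dst = ciphertext;
 *   op.len = len;
 *   vnet_crypto_process_ops (vm, &op, 1);
 *   if (op.status != VNET_CRYPTO_OP_STATUS_COMPLETED)
 *     ... handle failure ...
 */
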
/** async crypto inline functions **/

static_always_inline vnet_crypto_async_frame_t *
vnet_crypto_async_get_frame (vlib_main_t * vm, vnet_crypto_async_op_id_t opt)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
  vnet_crypto_async_frame_t *f = NULL;

  pool_get_aligned (ct->frame_pool, f, CLIB_CACHE_LINE_BYTES);
  if (CLIB_DEBUG > 0)
    clib_memset (f, 0xfe, sizeof (*f));
  f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
  f->op = opt;
  f->n_elts = 0;

  return f;
}

static_always_inline void
vnet_crypto_async_free_frame (vlib_main_t * vm,
			      vnet_crypto_async_frame_t * frame)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
  pool_put (ct->frame_pool, frame);
}

static_always_inline int
vnet_crypto_async_submit_open_frame (vlib_main_t * vm,
				     vnet_crypto_async_frame_t * frame)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  u32 i;
  vlib_node_t *n;

  frame->state = VNET_CRYPTO_FRAME_STATE_PENDING;
  frame->enqueue_thread_index = vm->thread_index;

  if (PREDICT_FALSE (cm->enqueue_handlers == NULL))
    {
      frame->state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;
      return -1;
    }

  int ret = (cm->enqueue_handlers[frame->op]) (vm, frame);

  if (PREDICT_TRUE (ret == 0))
    {
      n = vlib_get_node (vm, cm->crypto_node_index);
      if (n->state == VLIB_NODE_STATE_INTERRUPT)
	{
	  for (i = 0; i < tm->n_vlib_mains; i++)
	    vlib_node_set_interrupt_pending (vlib_get_main_by_index (i),
					     cm->crypto_node_index);
	}
    }
  else
    frame->state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;

  return ret;
}

static_always_inline void
vnet_crypto_async_add_to_frame (vlib_main_t *vm, vnet_crypto_async_frame_t *f,
				u32 key_index, u32 crypto_len,
				i16 integ_len_adj, i16 crypto_start_offset,
				i16 integ_start_offset, u32 buffer_index,
				u16 next_node, u8 *iv, u8 *tag, u8 *aad,
				u8 flags)
{
  vnet_crypto_async_frame_elt_t *fe;
  u16 index;

  ASSERT (f->n_elts < VNET_CRYPTO_FRAME_SIZE);

  index = f->n_elts;
  fe = &f->elts[index];
  f->n_elts++;
  fe->key_index = key_index;
  fe->crypto_total_length = crypto_len;
  fe->crypto_start_offset = crypto_start_offset;
  fe->integ_start_offset = integ_start_offset;
  fe->integ_length_adj = integ_len_adj;
  fe->iv = iv;
  fe->tag = tag;
  fe->aad = aad;
  fe->flags = flags;
  f->buffer_indices[index] = buffer_index;
  f->next_node_index[index] = next_node;
}

static_always_inline void
vnet_crypto_async_reset_frame (vnet_crypto_async_frame_t * f)
{
  vnet_crypto_async_op_id_t opt;
  ASSERT (f != 0);
  ASSERT ((f->state == VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED
	   || f->state == VNET_CRYPTO_FRAME_STATE_ELT_ERROR));
  opt = f->op;
  if (CLIB_DEBUG > 0)
    clib_memset (f, 0xfe, sizeof (*f));
  f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
  f->op = opt;
  f->n_elts = 0;
}

static_always_inline u8
vnet_crypto_async_frame_is_full (const vnet_crypto_async_frame_t *f)
{
  return (f->n_elts == VNET_CRYPTO_FRAME_SIZE);
}

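/*
 * Async path lifecycle (sketch; per-element arguments elided):
 *
 *   f = vnet_crypto_async_get_frame
 *         (vm, VNET_CRYPTO_OP_AES_128_GCM_TAG16_AAD8_ENC);
 *   while (have_work && !vnet_crypto_async_frame_is_full (f))
 *     vnet_crypto_async_add_to_frame (vm, f, ...);
 *   vnet_crypto_async_submit_open_frame (vm, f);
 *
 * Completed frames are recovered by the crypto dispatch node through the
 * engine's dequeue handler and eventually released with
 * vnet_crypto_async_free_frame().
 */
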
#endif /* included_vnet_crypto_crypto_h */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */