author    Damjan Marion <damarion@cisco.com>        2025-01-16 12:47:01 +0000
committer Andrew Yourtchenko <ayourtch@gmail.com>   2025-01-16 14:53:36 +0000
commit    4e003776cb9a7f7989ae4c49baec12d74ad16fa7
tree      07b2eb1625a37086d4ecc7a6c19d17b18f3b3996
parent    574c4574cd93e698af74d09992541af04150ead4
crypto: combine sync and async algos and ops
Type: improvement
Change-Id: I4d507b105e5b5ba7dd68d373c7f1ab156a9fc9f1
Signed-off-by: Damjan Marion <damarion@cisco.com>
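
For orientation before the diff: the patch merges the previously separate sync and async algorithm/op enums into single vnet_crypto_alg_t and vnet_crypto_op_id_t spaces, and replaces the per-category handler vectors (ops_handlers, chained_ops_handlers, enqueue_handlers) with one per-op table indexed by a handler type. The excerpt below restates the central types as they appear in the src/vnet/crypto/crypto.h hunk further down.

    /* New handler categories (from crypto.h in this patch) */
    #define foreach_crypto_handler_type                                       \
      _ (SIMPLE, "simple")                                                    \
      _ (CHAINED, "chained")                                                  \
      _ (ASYNC, "async")

    typedef enum
    {
    #define _(n, s) VNET_CRYPTO_HANDLER_TYPE_##n,
      foreach_crypto_handler_type
    #undef _
        VNET_CRYPTO_HANDLER_N_TYPES
    } vnet_crypto_handler_type_t;

    /* Per-op state: one active engine and one handler per category,
       replacing active_engine_index_simple/_chained/_async */
    typedef struct
    {
      vnet_crypto_op_type_t type;
      vnet_crypto_alg_t alg;
      u8 active_engine_index[VNET_CRYPTO_HANDLER_N_TYPES];
      void *handlers[VNET_CRYPTO_HANDLER_N_TYPES];
    } vnet_crypto_op_data_t;

    /* Per-engine, per-op handler slots, replacing the separate
       ops_handlers / chained_ops_handlers / enqueue_handlers arrays */
    typedef struct
    {
      void *handlers[VNET_CRYPTO_HANDLER_N_TYPES];
    } vnet_crypto_engine_op_t;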
 src/crypto_engines/native/crypto_native.h |   4
 src/plugins/crypto_sw_scheduler/main.c    |   2
 src/plugins/dpdk/cryptodev/cryptodev.c    |   4
 src/plugins/unittest/crypto_test.c        |  51
 src/vnet/CMakeLists.txt                   |   1
 src/vnet/crypto/cli.c                     | 319
 src/vnet/crypto/crypto.c                  | 410
 src/vnet/crypto/crypto.h                  | 289
 src/vnet/crypto/crypto_api.c              |  29
 src/vnet/crypto/engine.h                  |   6
 src/vnet/crypto/format.c                  |   6
 src/vnet/crypto/main.c                    | 108
 src/vnet/crypto/node.c                    |   9
 src/vnet/ipsec/esp_decrypt.c              |   4
 src/vnet/ipsec/esp_encrypt.c              |   4
 src/vnet/ipsec/ipsec_sa.c                 |   4
16 files changed, 440 insertions, 810 deletions
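
As a usage sketch (not part of the patch): vnet_crypto_set_handler2() and vnet_crypto_set_async_handler2() are folded into a single vnet_crypto_set_handlers() taking an argument struct, defined in the crypto.h/crypto.c hunks below. The algorithm and engine names in this sketch are illustrative placeholders, not values mandated by the patch.

    /* Minimal sketch of the consolidated engine-selection API; "aes-256-gcm"
       and "openssl" are example names only. */
    #include <vnet/crypto/crypto.h>

    static int
    prefer_engine_example (void)
    {
      vnet_crypto_set_handlers_args_t args = {
        .handler_name = "aes-256-gcm", /* looked up in cm->alg_index_by_name */
        .engine = "openssl",           /* looked up in cm->engine_index_by_name */
        .set_simple = 1,               /* same effect as CLI keyword "simple" */
        .set_chained = 1,              /* ... "chained" */
        .set_async = 0,                /* ... "async" */
      };

      /* returns 0 on success, -1 for an unknown algorithm or engine name */
      return vnet_crypto_set_handlers (&args);
    }

Per the cli.c hunk, the matching debug CLI becomes "set crypto handler cipher [cipher2 cipher3 ...] engine [simple|chained|async]", and the separate "set crypto async handler" command is removed.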
diff --git a/src/crypto_engines/native/crypto_native.h b/src/crypto_engines/native/crypto_native.h index 0fcb6a99524..9e2a6b42e52 100644 --- a/src/crypto_engines/native/crypto_native.h +++ b/src/crypto_engines/native/crypto_native.h @@ -25,8 +25,8 @@ typedef struct crypto_native_op_handler { struct crypto_native_op_handler *next; vnet_crypto_op_id_t op_id; - vnet_crypto_ops_handler_t *fn; - vnet_crypto_chained_ops_handler_t *cfn; + vnet_crypto_simple_op_fn_t *fn; + vnet_crypto_chained_op_fn_t *cfn; crypto_native_variant_probe_t *probe; int priority; } crypto_native_op_handler_t; diff --git a/src/plugins/crypto_sw_scheduler/main.c b/src/plugins/crypto_sw_scheduler/main.c index 81f13912b6b..dc97ce937d9 100644 --- a/src/plugins/crypto_sw_scheduler/main.c +++ b/src/plugins/crypto_sw_scheduler/main.c @@ -401,7 +401,7 @@ crypto_sw_scheduler_process_link (vlib_main_t *vm, } static_always_inline int -convert_async_crypto_id (vnet_crypto_async_op_id_t async_op_id, u32 *crypto_op, +convert_async_crypto_id (vnet_crypto_op_id_t async_op_id, u32 *crypto_op, u32 *auth_op_or_aad_len, u16 *digest_len, u8 *is_enc) { switch (async_op_id) diff --git a/src/plugins/dpdk/cryptodev/cryptodev.c b/src/plugins/dpdk/cryptodev/cryptodev.c index 4f533406fca..c60f9c886ff 100644 --- a/src/plugins/dpdk/cryptodev/cryptodev.c +++ b/src/plugins/dpdk/cryptodev/cryptodev.c @@ -111,7 +111,7 @@ prepare_linked_xform (struct rte_crypto_sym_xform *xforms, xform_auth->type = RTE_CRYPTO_SYM_XFORM_AUTH; xforms->next = xforms + 1; - switch (key->async_alg) + switch (key->alg) { #define _(a, b, c, d, e) \ case VNET_CRYPTO_ALG_##a##_##d##_TAG##e: \ @@ -251,7 +251,7 @@ cryptodev_check_supported_vnet_alg (vnet_crypto_key_t *key) if (key->is_link) { - switch (key->async_alg) + switch (key->alg) { #define _(a, b, c, d, e) \ case VNET_CRYPTO_ALG_##a##_##d##_TAG##e: \ diff --git a/src/plugins/unittest/crypto_test.c b/src/plugins/unittest/crypto_test.c index eb3f9d05166..0254e1a29b8 100644 --- a/src/plugins/unittest/crypto_test.c +++ b/src/plugins/unittest/crypto_test.c @@ -220,9 +220,10 @@ restore_engines (u32 * engs) if (engs[i] != ~0) { - ce = vec_elt_at_index (cm->engines, engs[i]); - od->active_engine_index_simple = engs[i]; - cm->ops_handlers[i] = ce->ops_handlers[i]; + vnet_crypto_handler_type_t t = VNET_CRYPTO_HANDLER_TYPE_SIMPLE; + ce = vec_elt_at_index (cm->engines, engs[i]); + od->active_engine_index[t] = engs[i]; + cm->opt_data[i].handlers[t] = ce->ops[i].handlers[t]; } } @@ -247,12 +248,13 @@ save_current_engines (u32 * engs) for (i = 1; i < VNET_CRYPTO_N_OP_IDS; i++) { vnet_crypto_op_data_t *od = &cm->opt_data[i]; - if (od->active_engine_index_simple != ~0) + if (od->active_engine_index[VNET_CRYPTO_HANDLER_TYPE_SIMPLE]) { /* save engine index */ - engs[i] = od->active_engine_index_simple; - od->active_engine_index_simple = ce - cm->engines; - cm->ops_handlers[i] = ce->ops_handlers[i]; + vnet_crypto_handler_type_t t = VNET_CRYPTO_HANDLER_TYPE_SIMPLE; + engs[i] = od->active_engine_index[t]; + od->active_engine_index[t] = ce - cm->engines; + cm->opt_data[i].handlers[t] = ce->ops[i].handlers[t]; } } @@ -289,7 +291,7 @@ test_crypto_incremental (vlib_main_t * vm, crypto_test_main_t * tm, { r = rv[i]; int t; - ad = vec_elt_at_index (cm->algs, r->alg); + ad = cm->algs + r->alg; for (t = 0; t < VNET_CRYPTO_OP_N_TYPES; t++) { vnet_crypto_op_id_t id = ad->op_by_type[t]; @@ -345,7 +347,7 @@ test_crypto_incremental (vlib_main_t * vm, crypto_test_main_t * tm, { r = rv[i]; int t; - ad = vec_elt_at_index (cm->algs, r->alg); + ad = cm->algs + 
r->alg; for (t = 0; t < VNET_CRYPTO_OP_N_TYPES; t++) { vnet_crypto_op_id_t id = ad->op_by_type[t]; @@ -442,7 +444,7 @@ test_crypto_static (vlib_main_t * vm, crypto_test_main_t * tm, { r = rv[i]; int t; - ad = vec_elt_at_index (cm->algs, r->alg); + ad = cm->algs + r->alg; for (t = 0; t < VNET_CRYPTO_OP_N_TYPES; t++) { vnet_crypto_op_id_t id = ad->op_by_type[t]; @@ -648,29 +650,8 @@ test_crypto_static (vlib_main_t * vm, crypto_test_main_t * tm, static u32 test_crypto_get_key_sz (vnet_crypto_alg_t alg) { - switch (alg) - { -#define _(n, s, l) \ - case VNET_CRYPTO_ALG_##n: \ - return l; - foreach_crypto_cipher_alg - foreach_crypto_aead_alg -#undef _ - case VNET_CRYPTO_ALG_HMAC_MD5: - case VNET_CRYPTO_ALG_HMAC_SHA1: - return 20; - case VNET_CRYPTO_ALG_HMAC_SHA224: - return 28; - case VNET_CRYPTO_ALG_HMAC_SHA256: - return 32; - case VNET_CRYPTO_ALG_HMAC_SHA384: - return 48; - case VNET_CRYPTO_ALG_HMAC_SHA512: - return 64; - default: - return 0; - } - return 0; + vnet_crypto_main_t *cm = &crypto_main; + return cm->algs[alg].key_length; } static clib_error_t * @@ -702,7 +683,7 @@ test_crypto (vlib_main_t * vm, crypto_test_main_t * tm) else vec_add1 (static_tests, r); - ad = vec_elt_at_index (cm->algs, r->alg); + ad = cm->algs + r->alg; for (i = 0; i < VNET_CRYPTO_OP_N_TYPES; i++) { @@ -836,7 +817,7 @@ test_crypto_perf (vlib_main_t * vm, crypto_test_main_t * tm) u32 n_buffers, n_alloc = 0, warmup_rounds, rounds; u32 *buffer_indices = 0; vnet_crypto_op_t *ops1 = 0, *ops2 = 0, *op1, *op2; - vnet_crypto_alg_data_t *ad = vec_elt_at_index (cm->algs, tm->alg); + vnet_crypto_alg_data_t *ad = cm->algs + tm->alg; vnet_crypto_key_index_t key_index = ~0; u8 key[64]; int buffer_size = vlib_buffer_get_default_data_size (vm); diff --git a/src/vnet/CMakeLists.txt b/src/vnet/CMakeLists.txt index 36cbb7e634b..b6227d45a2a 100644 --- a/src/vnet/CMakeLists.txt +++ b/src/vnet/CMakeLists.txt @@ -524,6 +524,7 @@ list(APPEND VNET_SOURCES crypto/cli.c crypto/crypto.c crypto/format.c + crypto/main.c crypto/node.c crypto/crypto_api.c ) diff --git a/src/vnet/crypto/cli.c b/src/vnet/crypto/cli.c index 2ca66f228c3..4cfa1bb1abc 100644 --- a/src/vnet/crypto/cli.c +++ b/src/vnet/crypto/cli.c @@ -1,19 +1,7 @@ -/* - * Copyright (c) 2019 Cisco and/or its affiliates. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at: - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. +/* SPDX-License-Identifier: Apache-2.0 + * Copyright(c) 2025 Cisco Systems, Inc. 
*/ -#include <stdbool.h> #include <vlib/vlib.h> #include <vnet/crypto/crypto.h> @@ -38,7 +26,8 @@ show_crypto_engines_command_fn (vlib_main_t * vm, vlib_cli_output (vm, "%-20s%-8s%s", "Name", "Prio", "Description"); vec_foreach (p, cm->engines) { - vlib_cli_output (vm, "%-20s%-8u%s", p->name, p->priority, p->desc); + if (p->name) + vlib_cli_output (vm, "%-20s%-8u%s", p->name, p->priority, p->desc); } return 0; } @@ -50,100 +39,53 @@ VLIB_CLI_COMMAND (show_crypto_engines_command, static) = .function = show_crypto_engines_command_fn, }; -static u8 * -format_vnet_crypto_engine_candidates (u8 * s, va_list * args) -{ - vnet_crypto_engine_t *e; - vnet_crypto_main_t *cm = &crypto_main; - u32 id = va_arg (*args, u32); - u32 ei = va_arg (*args, u32); - int is_chained = va_arg (*args, int); - int is_async = va_arg (*args, int); - - if (is_async) - { - vec_foreach (e, cm->engines) - { - if (e->enqueue_handlers[id] && e->dequeue_handler) - { - s = format (s, "%U", format_vnet_crypto_engine, e - cm->engines); - if (ei == e - cm->engines) - s = format (s, "%c ", '*'); - else - s = format (s, " "); - } - } - - return s; - } - else - { - vec_foreach (e, cm->engines) - { - void * h = is_chained ? (void *) e->chained_ops_handlers[id] - : (void *) e->ops_handlers[id]; - - if (h) - { - s = format (s, "%U", format_vnet_crypto_engine, e - cm->engines); - if (ei == e - cm->engines) - s = format (s, "%c ", '*'); - else - s = format (s, " "); - } - } - return s; - } -} - -static u8 * -format_vnet_crypto_handlers (u8 * s, va_list * args) -{ - vnet_crypto_alg_t alg = va_arg (*args, vnet_crypto_alg_t); - vnet_crypto_main_t *cm = &crypto_main; - vnet_crypto_alg_data_t *d = vec_elt_at_index (cm->algs, alg); - u32 indent = format_get_indent (s); - int i, first = 1; - - for (i = 0; i < VNET_CRYPTO_OP_N_TYPES; i++) - { - vnet_crypto_op_data_t *od; - vnet_crypto_op_id_t id = d->op_by_type[i]; - - if (id == 0) - continue; - - od = cm->opt_data + id; - if (first == 0) - s = format (s, "\n%U", format_white_space, indent); - s = format (s, "%-16U", format_vnet_crypto_op_type, od->type); - - s = format (s, "%-28U", format_vnet_crypto_engine_candidates, id, - od->active_engine_index_simple, 0, 0); - s = format (s, "%U", format_vnet_crypto_engine_candidates, id, - od->active_engine_index_chained, 1, 0); - first = 0; - } - return s; -} - - static clib_error_t * show_crypto_handlers_command_fn (vlib_main_t * vm, unformat_input_t * input, vlib_cli_command_t * cmd) { + vnet_crypto_main_t *cm = &crypto_main; unformat_input_t _line_input, *line_input = &_line_input; - int i; + u8 *s = 0; + char *handler_type_str[] = { +#define _(n, s) [VNET_CRYPTO_HANDLER_TYPE_##n] = s, + foreach_crypto_handler_type + }; if (unformat_user (input, unformat_line_input, line_input)) unformat_free (line_input); - vlib_cli_output (vm, "%-16s%-16s%-28s%s", "Algo", "Type", "Simple", - "Chained"); + FOREACH_ARRAY_ELT (a, cm->algs) + { + if (a == cm->algs) + continue; - for (i = 0; i < VNET_CRYPTO_N_ALGS; i++) - vlib_cli_output (vm, "%-20U%U", format_vnet_crypto_alg, i, - format_vnet_crypto_handlers, i); + vlib_cli_output (vm, "\n%s:", a->name); + for (u32 i = 0; i < VNET_CRYPTO_OP_N_TYPES; i++) + if (a->op_by_type[i] != VNET_CRYPTO_OP_NONE) + { + vlib_cli_output (vm, " %U:", format_vnet_crypto_op_type, i); + vnet_crypto_op_id_t id = a->op_by_type[i]; + vnet_crypto_op_data_t *od = cm->opt_data + id; + vnet_crypto_engine_t *e; + + for (u32 i = 0; i < VNET_CRYPTO_HANDLER_N_TYPES; i++) + { + vec_foreach (e, cm->engines) + { + if (e->ops[id].handlers[i]) + { + s = 
format (s, " %s", e->name); + if (e->ops[id].handlers[i] == od->handlers[i]) + s = format (s, "*"); + } + } + + vlib_cli_output (vm, " %s:%v", handler_type_str[i], s); + vec_reset_length (s); + } + } + } + vec_free (s); return 0; } @@ -163,10 +105,10 @@ set_crypto_handler_command_fn (vlib_main_t * vm, unformat_input_t _line_input, *line_input = &_line_input; vnet_crypto_main_t *cm = &crypto_main; int rc = 0; - char **args = 0, *s, **arg, *engine = 0; + char **args = 0, *s, **arg; int all = 0; clib_error_t *error = 0; - crypto_op_class_type_t oct = CRYPTO_OP_BOTH; + vnet_crypto_set_handlers_args_t ha = {}; if (!unformat_user (input, unformat_line_input, line_input)) return 0; @@ -176,11 +118,13 @@ set_crypto_handler_command_fn (vlib_main_t * vm, if (unformat (line_input, "all")) all = 1; else if (unformat (line_input, "simple")) - oct = CRYPTO_OP_SIMPLE; + ha.set_simple = 1; else if (unformat (line_input, "chained")) - oct = CRYPTO_OP_CHAINED; + ha.set_chained = 1; else if (unformat (line_input, "both")) - oct = CRYPTO_OP_BOTH; + ha.set_simple = ha.set_chained = 1; + else if (unformat (line_input, "async")) + ha.set_async = 1; else if (unformat (line_input, "%s", &s)) vec_add1 (args, s); else @@ -196,7 +140,7 @@ set_crypto_handler_command_fn (vlib_main_t * vm, goto done; } - engine = vec_elt_at_index (args, vec_len (args) - 1)[0]; + ha.engine = vec_elt_at_index (args, vec_len (args) - 1)[0]; vec_del1 (args, vec_len (args) - 1); if (all) @@ -207,7 +151,8 @@ set_crypto_handler_command_fn (vlib_main_t * vm, hash_foreach_mem (key, value, cm->alg_index_by_name, ({ (void) value; - rc += vnet_crypto_set_handler2 (key, engine, oct); + ha.handler_name = key; + rc += vnet_crypto_set_handlers (&ha); })); if (rc) @@ -217,88 +162,29 @@ set_crypto_handler_command_fn (vlib_main_t * vm, { vec_foreach (arg, args) { - rc = vnet_crypto_set_handler2 (arg[0], engine, oct); - if (rc) - { - vlib_cli_output (vm, "failed to set engine %s for %s!", - engine, arg[0]); - } + ha.handler_name = arg[0]; + rc = vnet_crypto_set_handlers (&ha); + if (rc) + vlib_cli_output (vm, "failed to set engine %s for %s!", ha.engine, + arg[0]); } } done: - vec_free (engine); + vec_free (ha.engine); vec_foreach (arg, args) vec_free (arg[0]); vec_free (args); unformat_free (line_input); return error; } -VLIB_CLI_COMMAND (set_crypto_handler_command, static) = -{ +VLIB_CLI_COMMAND (set_crypto_handler_command, static) = { .path = "set crypto handler", .short_help = "set crypto handler cipher [cipher2 cipher3 ...] 
engine" - " [simple|chained]", + " [simple|chained|async]", .function = set_crypto_handler_command_fn, }; -static u8 * -format_vnet_crypto_async_handlers (u8 * s, va_list * args) -{ - vnet_crypto_async_alg_t alg = va_arg (*args, vnet_crypto_async_alg_t); - vnet_crypto_main_t *cm = &crypto_main; - vnet_crypto_async_alg_data_t *d = vec_elt_at_index (cm->async_algs, alg); - u32 indent = format_get_indent (s); - int i, first = 1; - - for (i = 0; i < VNET_CRYPTO_ASYNC_OP_N_TYPES; i++) - { - vnet_crypto_async_op_data_t *od; - vnet_crypto_async_op_id_t id = d->op_by_type[i]; - - if (id == 0) - continue; - - od = cm->async_opt_data + id; - if (first == 0) - s = format (s, "\n%U", format_white_space, indent); - s = format (s, "%-16U", format_vnet_crypto_async_op_type, od->type); - - s = format (s, "%U", format_vnet_crypto_engine_candidates, id, - od->active_engine_index_async, 0, 1); - first = 0; - } - return s; -} - -static clib_error_t * -show_crypto_async_handlers_command_fn (vlib_main_t * vm, - unformat_input_t * input, - vlib_cli_command_t * cmd) -{ - unformat_input_t _line_input, *line_input = &_line_input; - int i; - - if (unformat_user (input, unformat_line_input, line_input)) - unformat_free (line_input); - - vlib_cli_output (vm, "%-28s%-16s%s", "Algo", "Type", "Handler"); - - for (i = 0; i < VNET_CRYPTO_N_ASYNC_ALGS; i++) - vlib_cli_output (vm, "%-28U%U", format_vnet_crypto_async_alg, i, - format_vnet_crypto_async_handlers, i); - - return 0; -} - -VLIB_CLI_COMMAND (show_crypto_async_handlers_command, static) = -{ - .path = "show crypto async handlers", - .short_help = "show crypto async handlers", - .function = show_crypto_async_handlers_command_fn, -}; - - static clib_error_t * show_crypto_async_status_command_fn (vlib_main_t * vm, unformat_input_t * input, @@ -334,85 +220,6 @@ VLIB_CLI_COMMAND (show_crypto_async_status_command, static) = }; static clib_error_t * -set_crypto_async_handler_command_fn (vlib_main_t * vm, - unformat_input_t * input, - vlib_cli_command_t * cmd) -{ - unformat_input_t _line_input, *line_input = &_line_input; - vnet_crypto_main_t *cm = &crypto_main; - int rc = 0; - char **args = 0, *s, **arg, *engine = 0; - int all = 0; - clib_error_t *error = 0; - - if (!unformat_user (input, unformat_line_input, line_input)) - return 0; - - while (unformat_check_input (line_input) != UNFORMAT_END_OF_INPUT) - { - if (unformat (line_input, "all")) - all = 1; - else if (unformat (line_input, "%s", &s)) - vec_add1 (args, s); - else - { - error = clib_error_return (0, "invalid params"); - goto done; - } - } - - if ((vec_len (args) < 2 && !all) || (vec_len (args) == 0 && all)) - { - error = clib_error_return (0, "missing cipher or engine!"); - goto done; - } - - engine = vec_elt_at_index (args, vec_len (args) - 1)[0]; - vec_del1 (args, vec_len (args) - 1); - - if (all) - { - char *key; - u8 *value; - - hash_foreach_mem (key, value, cm->async_alg_index_by_name, - ({ - (void) value; - rc += vnet_crypto_set_async_handler2 (key, engine); - })); - - if (rc) - vlib_cli_output (vm, "failed to set crypto engine!"); - } - else - { - vec_foreach (arg, args) - { - rc = vnet_crypto_set_async_handler2 (arg[0], engine); - if (rc) - { - vlib_cli_output (vm, "failed to set engine %s for %s!", - engine, arg[0]); - } - } - } - -done: - vec_free (engine); - vec_foreach (arg, args) vec_free (arg[0]); - vec_free (args); - unformat_free (line_input); - return error; -} - -VLIB_CLI_COMMAND (set_crypto_async_handler_command, static) = -{ - .path = "set crypto async handler", - .short_help = "set crypto 
async handler type [type2 type3 ...] engine", - .function = set_crypto_async_handler_command_fn, -}; - -static clib_error_t * set_crypto_async_dispatch_command_fn (vlib_main_t *vm, unformat_input_t *input, vlib_cli_command_t *cmd) { @@ -450,11 +257,3 @@ VLIB_CLI_COMMAND (set_crypto_async_dispatch_mode_command, static) = { .short_help = "set crypto async dispatch mode <polling|interrupt|adaptive>", .function = set_crypto_async_dispatch_command_fn, }; - -/* - * fd.io coding-style-patch-verification: ON - * - * Local Variables: - * eval: (c-set-style "gnu") - * End: - */ diff --git a/src/vnet/crypto/crypto.c b/src/vnet/crypto/crypto.c index 396b8d4ca6a..35e7768375d 100644 --- a/src/vnet/crypto/crypto.c +++ b/src/vnet/crypto/crypto.c @@ -1,16 +1,5 @@ -/* - * Copyright (c) 2018 Cisco and/or its affiliates. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at: - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. +/* SPDX-License-Identifier: Apache-2.0 + * Copyright(c) 2025 Cisco Systems, Inc. */ #include <stdbool.h> @@ -22,8 +11,6 @@ #include <dlfcn.h> #include <dirent.h> -vnet_crypto_main_t crypto_main; - VLIB_REGISTER_LOG_CLASS (crypto_main_log, static) = { .class_name = "crypto", .subclass_name = "main", @@ -52,26 +39,31 @@ vnet_crypto_process_ops_call_handler (vlib_main_t * vm, vnet_crypto_op_chunk_t * chunks, u32 n_ops) { + vnet_crypto_op_data_t *od = cm->opt_data + opt; u32 rv = 0; if (n_ops == 0) return 0; if (chunks) { + vnet_crypto_chained_op_fn_t *fn = + od->handlers[VNET_CRYPTO_HANDLER_TYPE_CHAINED]; - if (cm->chained_ops_handlers[opt] == 0) + if (fn == 0) crypto_set_op_status (ops, n_ops, VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER); else - rv = (cm->chained_ops_handlers[opt]) (vm, ops, chunks, n_ops); + rv = fn (vm, ops, chunks, n_ops); } else { - if (cm->ops_handlers[opt] == 0) + vnet_crypto_simple_op_fn_t *fn = + od->handlers[VNET_CRYPTO_HANDLER_TYPE_SIMPLE]; + if (fn == 0) crypto_set_op_status (ops, n_ops, VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER); else - rv = (cm->ops_handlers[opt]) (vm, ops, n_ops); + rv = fn (vm, ops, n_ops); } return rv; } @@ -141,48 +133,34 @@ vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio, } static_always_inline void -crypto_set_active_engine (vnet_crypto_op_data_t * od, - vnet_crypto_op_id_t id, u32 ei, - crypto_op_class_type_t oct) +crypto_set_active_engine (vnet_crypto_op_data_t *od, vnet_crypto_op_id_t id, + u32 ei, vnet_crypto_handler_type_t t) { vnet_crypto_main_t *cm = &crypto_main; vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei); - if (oct == CRYPTO_OP_BOTH || oct == CRYPTO_OP_CHAINED) + if (ce->ops[id].handlers[t]) { - if (ce->chained_ops_handlers[id]) - { - od->active_engine_index_chained = ei; - cm->chained_ops_handlers[id] = ce->chained_ops_handlers[id]; - } - } - - if (oct == CRYPTO_OP_BOTH || oct == CRYPTO_OP_SIMPLE) - { - if (ce->ops_handlers[id]) - { - od->active_engine_index_simple = ei; - cm->ops_handlers[id] = ce->ops_handlers[id]; - } + od->active_engine_index[t] = ei; + cm->opt_data[id].handlers[t] = ce->ops[id].handlers[t]; } } int -vnet_crypto_set_handler2 (char 
*alg_name, char *engine, - crypto_op_class_type_t oct) +vnet_crypto_set_handlers (vnet_crypto_set_handlers_args_t *a) { uword *p; vnet_crypto_main_t *cm = &crypto_main; vnet_crypto_alg_data_t *ad; int i; - p = hash_get_mem (cm->alg_index_by_name, alg_name); + p = hash_get_mem (cm->alg_index_by_name, a->handler_name); if (!p) return -1; - ad = vec_elt_at_index (cm->algs, p[0]); + ad = cm->algs + p[0]; - p = hash_get_mem (cm->engine_index_by_name, engine); + p = hash_get_mem (cm->engine_index_by_name, a->engine); if (!p) return -1; @@ -194,7 +172,15 @@ vnet_crypto_set_handler2 (char *alg_name, char *engine, continue; od = cm->opt_data + id; - crypto_set_active_engine (od, id, p[0], oct); + if (a->set_async) + crypto_set_active_engine (od, id, p[0], + VNET_CRYPTO_HANDLER_TYPE_ASYNC); + if (a->set_simple) + crypto_set_active_engine (od, id, p[0], + VNET_CRYPTO_HANDLER_TYPE_SIMPLE); + if (a->set_chained) + crypto_set_active_engine (od, id, p[0], + VNET_CRYPTO_HANDLER_TYPE_CHAINED); } return 0; @@ -207,117 +193,109 @@ vnet_crypto_is_set_handler (vnet_crypto_alg_t alg) vnet_crypto_op_id_t opt = 0; int i; - if (alg >= vec_len (cm->algs)) + if (alg >= ARRAY_LEN (cm->algs)) return 0; for (i = 0; i < VNET_CRYPTO_OP_N_TYPES; i++) if ((opt = cm->algs[alg].op_by_type[i]) != 0) break; - if (opt >= vec_len (cm->ops_handlers)) - return 0; - - return NULL != cm->ops_handlers[opt]; + return NULL != cm->opt_data[opt].handlers[VNET_CRYPTO_HANDLER_TYPE_SIMPLE]; } void -vnet_crypto_register_ops_handler_inline (vlib_main_t * vm, u32 engine_index, +vnet_crypto_register_ops_handler_inline (vlib_main_t *vm, u32 engine_index, vnet_crypto_op_id_t opt, - vnet_crypto_ops_handler_t * fn, - vnet_crypto_chained_ops_handler_t * - cfn) + vnet_crypto_simple_op_fn_t *fn, + vnet_crypto_chained_op_fn_t *cfn) { vnet_crypto_main_t *cm = &crypto_main; vnet_crypto_engine_t *ae, *e = vec_elt_at_index (cm->engines, engine_index); vnet_crypto_op_data_t *otd = cm->opt_data + opt; - vec_validate_aligned (cm->ops_handlers, VNET_CRYPTO_N_OP_IDS - 1, - CLIB_CACHE_LINE_BYTES); - vec_validate_aligned (cm->chained_ops_handlers, VNET_CRYPTO_N_OP_IDS - 1, - CLIB_CACHE_LINE_BYTES); if (fn) { - e->ops_handlers[opt] = fn; - if (otd->active_engine_index_simple == ~0) + vnet_crypto_handler_type_t t = VNET_CRYPTO_HANDLER_TYPE_SIMPLE; + e->ops[opt].handlers[t] = fn; + if (!otd->active_engine_index[t]) { - otd->active_engine_index_simple = engine_index; - cm->ops_handlers[opt] = fn; + otd->active_engine_index[t] = engine_index; + cm->opt_data[opt].handlers[t] = fn; } - ae = vec_elt_at_index (cm->engines, otd->active_engine_index_simple); + ae = vec_elt_at_index (cm->engines, otd->active_engine_index[t]); if (ae->priority < e->priority) - crypto_set_active_engine (otd, opt, engine_index, CRYPTO_OP_SIMPLE); + crypto_set_active_engine (otd, opt, engine_index, t); } if (cfn) { - e->chained_ops_handlers[opt] = cfn; - if (otd->active_engine_index_chained == ~0) + vnet_crypto_handler_type_t t = VNET_CRYPTO_HANDLER_TYPE_CHAINED; + e->ops[opt].handlers[t] = cfn; + if (otd->active_engine_index[t]) { - otd->active_engine_index_chained = engine_index; - cm->chained_ops_handlers[opt] = cfn; + otd->active_engine_index[t] = engine_index; + cm->opt_data[opt].handlers[t] = cfn; } - ae = vec_elt_at_index (cm->engines, otd->active_engine_index_chained); + ae = vec_elt_at_index (cm->engines, otd->active_engine_index[t]); if (ae->priority < e->priority) - crypto_set_active_engine (otd, opt, engine_index, CRYPTO_OP_CHAINED); + crypto_set_active_engine (otd, opt, 
engine_index, t); } return; } void -vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index, +vnet_crypto_register_ops_handler (vlib_main_t *vm, u32 engine_index, vnet_crypto_op_id_t opt, - vnet_crypto_ops_handler_t * fn) + vnet_crypto_simple_op_fn_t *fn) { vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, fn, 0); } void -vnet_crypto_register_chained_ops_handler (vlib_main_t * vm, u32 engine_index, +vnet_crypto_register_chained_ops_handler (vlib_main_t *vm, u32 engine_index, vnet_crypto_op_id_t opt, - vnet_crypto_chained_ops_handler_t * - fn) + vnet_crypto_chained_op_fn_t *fn) { vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, 0, fn); } void -vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index, +vnet_crypto_register_ops_handlers (vlib_main_t *vm, u32 engine_index, vnet_crypto_op_id_t opt, - vnet_crypto_ops_handler_t * fn, - vnet_crypto_chained_ops_handler_t * cfn) + vnet_crypto_simple_op_fn_t *fn, + vnet_crypto_chained_op_fn_t *cfn) { vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, fn, cfn); } void vnet_crypto_register_enqueue_handler (vlib_main_t *vm, u32 engine_index, - vnet_crypto_async_op_id_t opt, - vnet_crypto_frame_enqueue_t *enqueue_hdl) + vnet_crypto_op_id_t opt, + vnet_crypto_frame_enq_fn_t *enqueue_hdl) { vnet_crypto_main_t *cm = &crypto_main; vnet_crypto_engine_t *ae, *e = vec_elt_at_index (cm->engines, engine_index); - vnet_crypto_async_op_data_t *otd = cm->async_opt_data + opt; - vec_validate_aligned (cm->enqueue_handlers, VNET_CRYPTO_ASYNC_OP_N_IDS, - CLIB_CACHE_LINE_BYTES); + vnet_crypto_op_data_t *otd = cm->opt_data + opt; + vnet_crypto_handler_type_t t = VNET_CRYPTO_HANDLER_TYPE_ASYNC; if (!enqueue_hdl) return; - e->enqueue_handlers[opt] = enqueue_hdl; - if (otd->active_engine_index_async == ~0) + e->ops[opt].handlers[t] = enqueue_hdl; + if (!otd->active_engine_index[t]) { - otd->active_engine_index_async = engine_index; - cm->enqueue_handlers[opt] = enqueue_hdl; + otd->active_engine_index[t] = engine_index; + otd->handlers[t] = enqueue_hdl; } - ae = vec_elt_at_index (cm->engines, otd->active_engine_index_async); + ae = vec_elt_at_index (cm->engines, otd->active_engine_index[t]); if (ae->priority <= e->priority) { - otd->active_engine_index_async = engine_index; - cm->enqueue_handlers[opt] = enqueue_hdl; + otd->active_engine_index[t] = engine_index; + otd->handlers[t] = enqueue_hdl; } return; @@ -340,21 +318,23 @@ static void vnet_crypto_update_cm_dequeue_handlers (void) { vnet_crypto_main_t *cm = &crypto_main; - vnet_crypto_async_op_data_t *otd; + vnet_crypto_op_data_t *otd; vnet_crypto_engine_t *e; u32 *active_engines = 0, *ei, last_ei = ~0, i; vec_reset_length (cm->dequeue_handlers); - for (i = 0; i < VNET_CRYPTO_ASYNC_OP_N_IDS; i++) + for (i = 0; i < VNET_CRYPTO_N_OP_IDS; i++) { - otd = cm->async_opt_data + i; - if (otd->active_engine_index_async == ~0) + otd = cm->opt_data + i; + if (!otd->active_engine_index[VNET_CRYPTO_HANDLER_TYPE_ASYNC]) continue; - e = cm->engines + otd->active_engine_index_async; + e = + cm->engines + otd->active_engine_index[VNET_CRYPTO_HANDLER_TYPE_ASYNC]; if (!e->dequeue_handler) continue; - vec_add1 (active_engines, otd->active_engine_index_async); + vec_add1 (active_engines, + otd->active_engine_index[VNET_CRYPTO_HANDLER_TYPE_ASYNC]); } vec_sort_with_function (active_engines, engine_index_cmp); @@ -392,8 +372,8 @@ vnet_crypto_register_dequeue_handler (vlib_main_t *vm, u32 engine_index, } void -vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index, - 
vnet_crypto_key_handler_t * key_handler) +vnet_crypto_register_key_handler (vlib_main_t *vm, u32 engine_index, + vnet_crypto_key_fn_t *key_handler) { vnet_crypto_main_t *cm = &crypto_main; vnet_crypto_engine_t *e = vec_elt_at_index (cm->engines, engine_index); @@ -401,40 +381,6 @@ vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index, return; } -static int -vnet_crypto_key_len_check (vnet_crypto_alg_t alg, u16 length) -{ - switch (alg) - { - case VNET_CRYPTO_N_ALGS: - return 0; - case VNET_CRYPTO_ALG_NONE: - return 1; - -#define _(n, s, l) \ - case VNET_CRYPTO_ALG_##n: \ - if ((l) == length) \ - return 1; \ - break; - foreach_crypto_cipher_alg foreach_crypto_aead_alg -#undef _ - /* HMAC allows any key length */ -#define _(n, s) \ - case VNET_CRYPTO_ALG_HMAC_##n: \ - return 1; - foreach_crypto_hmac_alg -#undef _ - -#define _(n, s) \ - case VNET_CRYPTO_ALG_HASH_##n: \ - return 1; - foreach_crypto_hash_alg -#undef _ - } - - return 0; -} - u32 vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg, u8 * data, u16 length) @@ -443,13 +389,24 @@ vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg, u8 * data, vnet_crypto_main_t *cm = &crypto_main; vnet_crypto_engine_t *engine; vnet_crypto_key_t *key, **kp; + vnet_crypto_alg_data_t *ad = cm->algs + alg; u32 alloc_sz = sizeof (vnet_crypto_key_t) + round_pow2 (length, 16); - u8 need_barrier_sync = 0; - if (!vnet_crypto_key_len_check (alg, length)) + ASSERT (alg != 0); + + if (length == 0) return ~0; + if (ad->variable_key_length == 0) + { + if (ad->key_length == 0) + return ~0; + + if (ad->key_length != length) + return ~0; + } + need_barrier_sync = pool_get_will_expand (cm->keys); /* If the cm->keys will expand, stop the parade. */ if (need_barrier_sync) @@ -503,7 +460,7 @@ vnet_crypto_key_update (vlib_main_t *vm, vnet_crypto_key_index_t index) engine->key_op_handler (VNET_CRYPTO_KEY_OP_MODIFY, index); } -vnet_crypto_async_alg_t +vnet_crypto_alg_t vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg, vnet_crypto_alg_t integ_alg) { @@ -525,7 +482,7 @@ vnet_crypto_key_add_linked (vlib_main_t * vm, vnet_crypto_main_t *cm = &crypto_main; vnet_crypto_engine_t *engine; vnet_crypto_key_t *key_crypto, *key_integ, *key, **kp; - vnet_crypto_async_alg_t linked_alg; + vnet_crypto_alg_t linked_alg; key_crypto = cm->keys[index_crypto]; key_integ = cm->keys[index_integ]; @@ -553,7 +510,7 @@ vnet_crypto_key_add_linked (vlib_main_t * vm, .is_link = 1, .index_crypto = index_crypto, .index_integ = index_integ, - .async_alg = linked_alg, + .alg = linked_alg, }; vec_foreach (engine, cm->engines) @@ -563,54 +520,6 @@ vnet_crypto_key_add_linked (vlib_main_t * vm, return index; } -static_always_inline void -crypto_set_active_async_engine (vnet_crypto_async_op_data_t * od, - vnet_crypto_async_op_id_t id, u32 ei) -{ - vnet_crypto_main_t *cm = &crypto_main; - vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei); - - if (ce->enqueue_handlers[id] && ce->dequeue_handler) - { - od->active_engine_index_async = ei; - cm->enqueue_handlers[id] = ce->enqueue_handlers[id]; - } -} - -int -vnet_crypto_set_async_handler2 (char *alg_name, char *engine) -{ - uword *p; - vnet_crypto_main_t *cm = &crypto_main; - vnet_crypto_async_alg_data_t *ad; - int i; - - p = hash_get_mem (cm->async_alg_index_by_name, alg_name); - if (!p) - return -1; - - ad = vec_elt_at_index (cm->async_algs, p[0]); - - p = hash_get_mem (cm->engine_index_by_name, engine); - if (!p) - return -1; - - for (i = 0; i < VNET_CRYPTO_ASYNC_OP_N_TYPES; i++) - { - vnet_crypto_async_op_data_t 
*od; - vnet_crypto_async_op_id_t id = ad->op_by_type[i]; - if (id == 0) - continue; - - od = cm->async_opt_data + id; - crypto_set_active_async_engine (od, id, p[0]); - } - - vnet_crypto_update_cm_dequeue_handlers (); - - return 0; -} - u32 vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name) { @@ -656,84 +565,6 @@ vnet_crypto_set_async_dispatch (u8 mode, u8 adaptive) } } -int -vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t op) -{ - vnet_crypto_main_t *cm = &crypto_main; - - return (op < vec_len (cm->enqueue_handlers) && - NULL != cm->enqueue_handlers[op]); -} - -static void -vnet_crypto_init_cipher_data (vnet_crypto_alg_t alg, vnet_crypto_op_id_t eid, - vnet_crypto_op_id_t did, char *name, u8 is_aead) -{ - vnet_crypto_main_t *cm = &crypto_main; - - cm->algs[alg].name = name; - cm->algs[alg].is_aead = is_aead; - cm->opt_data[eid].alg = cm->opt_data[did].alg = alg; - cm->opt_data[eid].active_engine_index_simple = ~0; - cm->opt_data[did].active_engine_index_simple = ~0; - cm->opt_data[eid].active_engine_index_chained = ~0; - cm->opt_data[did].active_engine_index_chained = ~0; - cm->opt_data[eid].type = VNET_CRYPTO_OP_TYPE_ENCRYPT; - cm->opt_data[did].type = VNET_CRYPTO_OP_TYPE_DECRYPT; - cm->algs[alg].op_by_type[VNET_CRYPTO_OP_TYPE_ENCRYPT] = eid; - cm->algs[alg].op_by_type[VNET_CRYPTO_OP_TYPE_DECRYPT] = did; - hash_set_mem (cm->alg_index_by_name, name, alg); -} - -static void -vnet_crypto_init_hash_data (vnet_crypto_alg_t alg, vnet_crypto_op_id_t id, - char *name) -{ - vnet_crypto_main_t *cm = &crypto_main; - cm->algs[alg].name = name; - cm->algs[alg].op_by_type[VNET_CRYPTO_OP_TYPE_HASH] = id; - cm->opt_data[id].alg = alg; - cm->opt_data[id].active_engine_index_simple = ~0; - cm->opt_data[id].active_engine_index_chained = ~0; - cm->opt_data[id].type = VNET_CRYPTO_OP_TYPE_HASH; - hash_set_mem (cm->alg_index_by_name, name, alg); -} - -static void -vnet_crypto_init_hmac_data (vnet_crypto_alg_t alg, - vnet_crypto_op_id_t id, char *name) -{ - vnet_crypto_main_t *cm = &crypto_main; - cm->algs[alg].name = name; - cm->algs[alg].op_by_type[VNET_CRYPTO_OP_TYPE_HMAC] = id; - cm->opt_data[id].alg = alg; - cm->opt_data[id].active_engine_index_simple = ~0; - cm->opt_data[id].active_engine_index_chained = ~0; - cm->opt_data[id].type = VNET_CRYPTO_OP_TYPE_HMAC; - hash_set_mem (cm->alg_index_by_name, name, alg); -} - -static void -vnet_crypto_init_async_data (vnet_crypto_async_alg_t alg, - vnet_crypto_async_op_id_t eid, - vnet_crypto_async_op_id_t did, char *name) -{ - vnet_crypto_main_t *cm = &crypto_main; - - cm->async_algs[alg].name = name; - cm->async_algs[alg].op_by_type[VNET_CRYPTO_ASYNC_OP_TYPE_ENCRYPT] = eid; - cm->async_algs[alg].op_by_type[VNET_CRYPTO_ASYNC_OP_TYPE_DECRYPT] = did; - cm->async_opt_data[eid].type = VNET_CRYPTO_ASYNC_OP_TYPE_ENCRYPT; - cm->async_opt_data[eid].alg = alg; - cm->async_opt_data[eid].active_engine_index_async = ~0; - cm->async_opt_data[eid].active_engine_index_async = ~0; - cm->async_opt_data[did].type = VNET_CRYPTO_ASYNC_OP_TYPE_DECRYPT; - cm->async_opt_data[did].alg = alg; - cm->async_opt_data[did].active_engine_index_async = ~0; - cm->async_opt_data[did].active_engine_index_async = ~0; - hash_set_mem (cm->async_alg_index_by_name, name, alg); -} - static void vnet_crypto_load_engines (vlib_main_t *vm) { @@ -851,54 +682,21 @@ vnet_crypto_init (vlib_main_t * vm) vnet_crypto_main_t *cm = &crypto_main; vlib_thread_main_t *tm = vlib_get_thread_main (); vnet_crypto_thread_t *ct = 0; + vnet_crypto_engine_t *p; + vec_add2 (cm->engines, p, 1); 
cm->engine_index_by_name = hash_create_string ( /* size */ 0, sizeof (uword)); cm->alg_index_by_name = hash_create_string (0, sizeof (uword)); - cm->async_alg_index_by_name = hash_create_string (0, sizeof (uword)); vec_validate_aligned (cm->threads, tm->n_vlib_mains, CLIB_CACHE_LINE_BYTES); vec_foreach (ct, cm->threads) pool_init_fixed (ct->frame_pool, VNET_CRYPTO_FRAME_POOL_SIZE); - vec_validate (cm->algs, VNET_CRYPTO_N_ALGS); - vec_validate (cm->async_algs, VNET_CRYPTO_N_ASYNC_ALGS); - -#define _(n, s, l) \ - vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \ - VNET_CRYPTO_OP_##n##_ENC, \ - VNET_CRYPTO_OP_##n##_DEC, s, 0); - foreach_crypto_cipher_alg; -#undef _ -#define _(n, s, l) \ - vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \ - VNET_CRYPTO_OP_##n##_ENC, \ - VNET_CRYPTO_OP_##n##_DEC, s, 1); - foreach_crypto_aead_alg; -#undef _ -#define _(n, s) \ - vnet_crypto_init_hmac_data (VNET_CRYPTO_ALG_HMAC_##n, \ - VNET_CRYPTO_OP_##n##_HMAC, "hmac-" s); - foreach_crypto_hmac_alg; -#undef _ -#define _(n, s) \ - vnet_crypto_init_hash_data (VNET_CRYPTO_ALG_HASH_##n, \ - VNET_CRYPTO_OP_##n##_HASH, s); - foreach_crypto_hash_alg; -#undef _ -#define _(n, s, k, t, a) \ - vnet_crypto_init_async_data (VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a, \ - VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \ - VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC, \ - s); - foreach_crypto_aead_async_alg -#undef _ -#define _(c, h, s, k ,d) \ - vnet_crypto_init_async_data (VNET_CRYPTO_ALG_##c##_##h##_TAG##d, \ - VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \ - VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC, \ - s); - foreach_crypto_link_async_alg -#undef _ - cm->crypto_node_index = + + FOREACH_ARRAY_ELT (e, cm->algs) + if (e->name) + hash_set_mem (cm->alg_index_by_name, e->name, e - cm->algs); + + cm->crypto_node_index = vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch")->index; vnet_crypto_load_engines (vm); @@ -907,11 +705,3 @@ vnet_crypto_init (vlib_main_t * vm) } VLIB_INIT_FUNCTION (vnet_crypto_init); - -/* - * fd.io coding-style-patch-verification: ON - * - * Local Variables: - * eval: (c-set-style "gnu") - * End: - */ diff --git a/src/vnet/crypto/crypto.h b/src/vnet/crypto/crypto.h index daaff8e0333..ae959251603 100644 --- a/src/vnet/crypto/crypto.h +++ b/src/vnet/crypto/crypto.h @@ -21,42 +21,35 @@ #define VNET_CRYPTO_FRAME_SIZE 64 #define VNET_CRYPTO_FRAME_POOL_SIZE 1024 -/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */ -#define foreach_crypto_cipher_alg \ - _(DES_CBC, "des-cbc", 7) \ - _(3DES_CBC, "3des-cbc", 24) \ - _(AES_128_CBC, "aes-128-cbc", 16) \ - _(AES_192_CBC, "aes-192-cbc", 24) \ - _(AES_256_CBC, "aes-256-cbc", 32) \ - _(AES_128_CTR, "aes-128-ctr", 16) \ - _(AES_192_CTR, "aes-192-ctr", 24) \ - _(AES_256_CTR, "aes-256-ctr", 32) - -/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */ +/* CRYPTO_ID, PRETTY_NAME, ARGS*/ +#define foreach_crypto_cipher_alg \ + _ (DES_CBC, "des-cbc", .key_length = 7) \ + _ (3DES_CBC, "3des-cbc", .key_length = 24) \ + _ (AES_128_CBC, "aes-128-cbc", .key_length = 16) \ + _ (AES_192_CBC, "aes-192-cbc", .key_length = 24) \ + _ (AES_256_CBC, "aes-256-cbc", .key_length = 32) \ + _ (AES_128_CTR, "aes-128-ctr", .key_length = 16) \ + _ (AES_192_CTR, "aes-192-ctr", .key_length = 24) \ + _ (AES_256_CTR, "aes-256-ctr", .key_length = 32) + +/* CRYPTO_ID, PRETTY_NAME, ARGS */ #define foreach_crypto_aead_alg \ - _ (AES_128_GCM, "aes-128-gcm", 16) \ - _ (AES_192_GCM, "aes-192-gcm", 24) \ - _ (AES_256_GCM, "aes-256-gcm", 32) \ - _ (AES_128_NULL_GMAC, "aes-128-null-gmac", 16) \ - _ (AES_192_NULL_GMAC, 
"aes-192-null-gmac", 24) \ - _ (AES_256_NULL_GMAC, "aes-256-null-gmac", 32) \ - _ (CHACHA20_POLY1305, "chacha20-poly1305", 32) + _ (AES_128_GCM, "aes-128-gcm", .is_aead = 1, .key_length = 16) \ + _ (AES_192_GCM, "aes-192-gcm", .is_aead = 1, .key_length = 24) \ + _ (AES_256_GCM, "aes-256-gcm", .is_aead = 1, .key_length = 32) \ + _ (AES_128_NULL_GMAC, "aes-128-null-gmac", .is_aead = 1, .key_length = 16) \ + _ (AES_192_NULL_GMAC, "aes-192-null-gmac", .is_aead = 1, .key_length = 24) \ + _ (AES_256_NULL_GMAC, "aes-256-null-gmac", .is_aead = 1, .key_length = 32) \ + _ (CHACHA20_POLY1305, "chacha20-poly1305", .is_aead = 1, .key_length = 32) #define foreach_crypto_hash_alg \ + _ (MD5, "md5") \ _ (SHA1, "sha-1") \ _ (SHA224, "sha-224") \ _ (SHA256, "sha-256") \ _ (SHA384, "sha-384") \ _ (SHA512, "sha-512") -#define foreach_crypto_hmac_alg \ - _(MD5, "md5") \ - _(SHA1, "sha-1") \ - _(SHA224, "sha-224") \ - _(SHA256, "sha-256") \ - _(SHA384, "sha-384") \ - _(SHA512, "sha-512") - #define foreach_crypto_op_type \ _ (ENCRYPT, "encrypt") \ _ (DECRYPT, "decrypt") \ @@ -98,7 +91,7 @@ typedef enum _ (AES_256_NULL_GMAC, "aes-256-null-gmac-aad12", 32, 16, 12) \ _ (CHACHA20_POLY1305, "chacha20-poly1305-aad8", 32, 16, 8) \ _ (CHACHA20_POLY1305, "chacha20-poly1305-aad12", 32, 16, 12) \ - _ (CHACHA20_POLY1305, "chacha20-poly1305", 32, 16, 0) + _ (CHACHA20_POLY1305, "chacha20-poly1305-aad0", 32, 16, 0) /* CRYPTO_ID, INTEG_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, DIGEST_LEN */ #define foreach_crypto_link_async_alg \ @@ -130,10 +123,6 @@ typedef enum _ (AES_192_CTR, SHA1, "aes-192-ctr-hmac-sha-1", 24, 12) \ _ (AES_256_CTR, SHA1, "aes-256-ctr-hmac-sha-1", 32, 12) -#define foreach_crypto_async_op_type \ - _(ENCRYPT, "async-encrypt") \ - _(DECRYPT, "async-decrypt") - typedef enum { VNET_CRYPTO_KEY_OP_ADD, @@ -152,72 +141,35 @@ typedef enum typedef enum { VNET_CRYPTO_ALG_NONE = 0, -#define _(n, s, l) VNET_CRYPTO_ALG_##n, +#define _(n, s, ...) 
VNET_CRYPTO_ALG_##n, foreach_crypto_cipher_alg foreach_crypto_aead_alg #undef _ -#define _(n, s) VNET_CRYPTO_ALG_HMAC_##n, - foreach_crypto_hmac_alg -#undef _ -#define _(n, s) VNET_CRYPTO_ALG_HASH_##n, - foreach_crypto_hash_alg -#undef _ - VNET_CRYPTO_N_ALGS, -} vnet_crypto_alg_t; - -typedef enum -{ -#define _(n, s) VNET_CRYPTO_ASYNC_OP_TYPE_##n, - foreach_crypto_async_op_type +#define _(n, s) VNET_CRYPTO_ALG_HASH_##n, VNET_CRYPTO_ALG_HMAC_##n, + foreach_crypto_hash_alg #undef _ - VNET_CRYPTO_ASYNC_OP_N_TYPES, -} vnet_crypto_async_op_type_t; - -typedef enum -{ - VNET_CRYPTO_ASYNC_ALG_NONE = 0, #define _(n, s, k, t, a) \ VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a, - foreach_crypto_aead_async_alg + foreach_crypto_aead_async_alg #undef _ #define _(c, h, s, k ,d) \ VNET_CRYPTO_ALG_##c##_##h##_TAG##d, - foreach_crypto_link_async_alg -#undef _ - VNET_CRYPTO_N_ASYNC_ALGS, -} vnet_crypto_async_alg_t; - -typedef enum -{ - VNET_CRYPTO_ASYNC_OP_NONE = 0, -#define _(n, s, k, t, a) \ - VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \ - VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC, - foreach_crypto_aead_async_alg -#undef _ -#define _(c, h, s, k ,d) \ - VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \ - VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC, - foreach_crypto_link_async_alg + foreach_crypto_link_async_alg #undef _ - VNET_CRYPTO_ASYNC_OP_N_IDS, -} vnet_crypto_async_op_id_t; + VNET_CRYPTO_N_ALGS, +} vnet_crypto_alg_t; typedef struct { u32 index; u16 length; u8 is_link : 1; + vnet_crypto_alg_t alg : 8; union { struct { - vnet_crypto_alg_t alg:8; - }; - struct - { u32 index_crypto; u32 index_integ; - vnet_crypto_async_alg_t async_alg:8; }; }; u8 data[]; @@ -226,29 +178,31 @@ typedef struct typedef enum { VNET_CRYPTO_OP_NONE = 0, -#define _(n, s, l) VNET_CRYPTO_OP_##n##_ENC, VNET_CRYPTO_OP_##n##_DEC, +#define _(n, s, ...) 
VNET_CRYPTO_OP_##n##_ENC, VNET_CRYPTO_OP_##n##_DEC, foreach_crypto_cipher_alg foreach_crypto_aead_alg #undef _ -#define _(n, s) VNET_CRYPTO_OP_##n##_HMAC, - foreach_crypto_hmac_alg +#define _(n, s) VNET_CRYPTO_OP_##n##_HASH, VNET_CRYPTO_OP_##n##_HMAC, + foreach_crypto_hash_alg #undef _ -#define _(n, s) VNET_CRYPTO_OP_##n##_HASH, - foreach_crypto_hash_alg +#define _(n, s, k, t, a) \ + VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \ + VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC, + foreach_crypto_aead_async_alg #undef _ - VNET_CRYPTO_N_OP_IDS, -} vnet_crypto_op_id_t; - -typedef enum -{ - CRYPTO_OP_SIMPLE, - CRYPTO_OP_CHAINED, - CRYPTO_OP_BOTH, -} crypto_op_class_type_t; +#define _(c, h, s, k, d) \ + VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \ + VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC, + foreach_crypto_link_async_alg +#undef _ + VNET_CRYPTO_N_OP_IDS, +} __clib_packed vnet_crypto_op_id_t; typedef struct { char *name; + u16 key_length; u8 is_aead : 1; + u8 variable_key_length : 1; vnet_crypto_op_id_t op_by_type[VNET_CRYPTO_OP_N_TYPES]; } vnet_crypto_alg_data_t; @@ -263,7 +217,7 @@ typedef struct { CLIB_CACHE_LINE_ALIGN_MARK (cacheline0); uword user_data; - vnet_crypto_op_id_t op:16; + vnet_crypto_op_id_t op; vnet_crypto_op_status_t status:8; u8 flags; #define VNET_CRYPTO_OP_FLAG_HMAC_CHECK (1 << 0) @@ -308,26 +262,19 @@ typedef struct STATIC_ASSERT_SIZEOF (vnet_crypto_op_t, CLIB_CACHE_LINE_BYTES); -typedef struct -{ - vnet_crypto_op_type_t type; - vnet_crypto_alg_t alg; - u32 active_engine_index_simple; - u32 active_engine_index_chained; -} vnet_crypto_op_data_t; +#define foreach_crypto_handler_type \ + _ (SIMPLE, "simple") \ + _ (CHAINED, "chained") \ + _ (ASYNC, "async") -typedef struct +typedef enum { - vnet_crypto_async_op_type_t type; - vnet_crypto_async_alg_t alg; - u32 active_engine_index_async; -} vnet_crypto_async_op_data_t; +#define _(n, s) VNET_CRYPTO_HANDLER_TYPE_##n, + foreach_crypto_handler_type +#undef _ + VNET_CRYPTO_HANDLER_N_TYPES -typedef struct -{ - char *name; - vnet_crypto_async_op_id_t op_by_type[VNET_CRYPTO_ASYNC_OP_N_TYPES]; -} vnet_crypto_async_alg_data_t; +} vnet_crypto_handler_type_t; typedef struct { @@ -365,7 +312,7 @@ typedef struct { CLIB_CACHE_LINE_ALIGN_MARK (cacheline0); vnet_crypto_async_frame_state_t state; - vnet_crypto_async_op_id_t op:8; + vnet_crypto_op_id_t op : 8; u16 n_elts; vnet_crypto_async_frame_elt_t elts[VNET_CRYPTO_FRAME_SIZE]; u32 buffer_indices[VNET_CRYPTO_FRAME_SIZE]; @@ -383,21 +330,20 @@ typedef struct typedef u32 vnet_crypto_key_index_t; -typedef u32 (vnet_crypto_chained_ops_handler_t) (vlib_main_t * vm, - vnet_crypto_op_t * ops[], - vnet_crypto_op_chunk_t * - chunks, u32 n_ops); +typedef u32 (vnet_crypto_chained_op_fn_t) (vlib_main_t *vm, + vnet_crypto_op_t *ops[], + vnet_crypto_op_chunk_t *chunks, + u32 n_ops); -typedef u32 (vnet_crypto_ops_handler_t) (vlib_main_t * vm, - vnet_crypto_op_t * ops[], u32 n_ops); +typedef u32 (vnet_crypto_simple_op_fn_t) (vlib_main_t *vm, + vnet_crypto_op_t *ops[], u32 n_ops); -typedef void (vnet_crypto_key_handler_t) (vnet_crypto_key_op_t kop, - vnet_crypto_key_index_t idx); +typedef void (vnet_crypto_key_fn_t) (vnet_crypto_key_op_t kop, + vnet_crypto_key_index_t idx); /** async crypto function handlers **/ -typedef int - (vnet_crypto_frame_enqueue_t) (vlib_main_t * vm, - vnet_crypto_async_frame_t * frame); +typedef int (vnet_crypto_frame_enq_fn_t) (vlib_main_t *vm, + vnet_crypto_async_frame_t *frame); typedef vnet_crypto_async_frame_t * (vnet_crypto_frame_dequeue_t) (vlib_main_t * vm, u32 * nb_elts_processed, 
u32 * enqueue_thread_idx); @@ -406,32 +352,29 @@ u32 vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio, char *desc); -void vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index, +void vnet_crypto_register_ops_handler (vlib_main_t *vm, u32 engine_index, vnet_crypto_op_id_t opt, - vnet_crypto_ops_handler_t * oph); + vnet_crypto_simple_op_fn_t *oph); -void vnet_crypto_register_chained_ops_handler (vlib_main_t * vm, - u32 engine_index, - vnet_crypto_op_id_t opt, - vnet_crypto_chained_ops_handler_t - * oph); +void +vnet_crypto_register_chained_ops_handler (vlib_main_t *vm, u32 engine_index, + vnet_crypto_op_id_t opt, + vnet_crypto_chained_op_fn_t *oph); -void vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index, +void vnet_crypto_register_ops_handlers (vlib_main_t *vm, u32 engine_index, vnet_crypto_op_id_t opt, - vnet_crypto_ops_handler_t * fn, - vnet_crypto_chained_ops_handler_t * - cfn); + vnet_crypto_simple_op_fn_t *fn, + vnet_crypto_chained_op_fn_t *cfn); -void vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index, - vnet_crypto_key_handler_t * keyh); +void vnet_crypto_register_key_handler (vlib_main_t *vm, u32 engine_index, + vnet_crypto_key_fn_t *keyh); /** async crypto register functions */ u32 vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name); -void -vnet_crypto_register_enqueue_handler (vlib_main_t *vm, u32 engine_index, - vnet_crypto_async_op_id_t opt, - vnet_crypto_frame_enqueue_t *enq_fn); +void vnet_crypto_register_enqueue_handler (vlib_main_t *vm, u32 engine_index, + vnet_crypto_op_id_t opt, + vnet_crypto_frame_enq_fn_t *enq_fn); void vnet_crypto_register_dequeue_handler (vlib_main_t *vm, u32 engine_index, @@ -439,14 +382,16 @@ vnet_crypto_register_dequeue_handler (vlib_main_t *vm, u32 engine_index, typedef struct { + void *handlers[VNET_CRYPTO_HANDLER_N_TYPES]; +} vnet_crypto_engine_op_t; + +typedef struct +{ char *name; char *desc; int priority; - vnet_crypto_key_handler_t *key_op_handler; - vnet_crypto_ops_handler_t *ops_handlers[VNET_CRYPTO_N_OP_IDS]; - vnet_crypto_chained_ops_handler_t - * chained_ops_handlers[VNET_CRYPTO_N_OP_IDS]; - vnet_crypto_frame_enqueue_t *enqueue_handlers[VNET_CRYPTO_ASYNC_OP_N_IDS]; + vnet_crypto_engine_op_t ops[VNET_CRYPTO_N_OP_IDS]; + vnet_crypto_key_fn_t *key_op_handler; vnet_crypto_frame_dequeue_t *dequeue_handler; } vnet_crypto_engine_t; @@ -458,20 +403,22 @@ typedef struct typedef struct { - vnet_crypto_alg_data_t *algs; + vnet_crypto_op_type_t type; + vnet_crypto_alg_t alg; + u8 active_engine_index[VNET_CRYPTO_HANDLER_N_TYPES]; + void *handlers[VNET_CRYPTO_HANDLER_N_TYPES]; +} vnet_crypto_op_data_t; + +typedef struct +{ + vnet_crypto_alg_data_t algs[VNET_CRYPTO_N_ALGS]; vnet_crypto_thread_t *threads; - vnet_crypto_ops_handler_t **ops_handlers; - vnet_crypto_chained_ops_handler_t **chained_ops_handlers; - vnet_crypto_frame_enqueue_t **enqueue_handlers; vnet_crypto_frame_dequeue_t **dequeue_handlers; vnet_crypto_op_data_t opt_data[VNET_CRYPTO_N_OP_IDS]; - vnet_crypto_async_op_data_t async_opt_data[VNET_CRYPTO_ASYNC_OP_N_IDS]; vnet_crypto_engine_t *engines; vnet_crypto_key_t **keys; uword *engine_index_by_name; uword *alg_index_by_name; - uword *async_alg_index_by_name; - vnet_crypto_async_alg_data_t *async_algs; vnet_crypto_async_next_node_t *next_nodes; u32 crypto_node_index; } vnet_crypto_main_t; @@ -485,8 +432,17 @@ u32 vnet_crypto_process_ops (vlib_main_t * vm, vnet_crypto_op_t ops[], u32 n_ops); void vnet_crypto_set_async_dispatch (u8 mode, u8 adaptive); 
-int vnet_crypto_set_handler2 (char *ops_handler_name, char *engine, - crypto_op_class_type_t oct); + +typedef struct +{ + char *handler_name; + char *engine; + u8 set_simple : 1; + u8 set_chained : 1; + u8 set_async : 1; +} vnet_crypto_set_handlers_args_t; + +int vnet_crypto_set_handlers (vnet_crypto_set_handlers_args_t *); int vnet_crypto_is_set_handler (vnet_crypto_alg_t alg); u32 vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg, @@ -502,12 +458,8 @@ u32 vnet_crypto_key_add_linked (vlib_main_t * vm, vnet_crypto_key_index_t index_crypto, vnet_crypto_key_index_t index_integ); -int vnet_crypto_set_async_handler2 (char *alg_name, char *engine); - -int vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t opt); - -vnet_crypto_async_alg_t vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg, - vnet_crypto_alg_t integ_alg); +vnet_crypto_alg_t vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg, + vnet_crypto_alg_t integ_alg); format_function_t format_vnet_crypto_alg; format_function_t format_vnet_crypto_engine; @@ -516,10 +468,6 @@ format_function_t format_vnet_crypto_op_type; format_function_t format_vnet_crypto_op_status; unformat_function_t unformat_vnet_crypto_alg; -format_function_t format_vnet_crypto_async_op; -format_function_t format_vnet_crypto_async_alg; -format_function_t format_vnet_crypto_async_op_type; - static_always_inline void vnet_crypto_op_init (vnet_crypto_op_t * op, vnet_crypto_op_id_t type) { @@ -547,16 +495,10 @@ vnet_crypto_get_key (vnet_crypto_key_index_t index) return cm->keys[index]; } -static_always_inline int -vnet_crypto_set_handler (char *alg_name, char *engine) -{ - return vnet_crypto_set_handler2 (alg_name, engine, CRYPTO_OP_BOTH); -} - /** async crypto inline functions **/ static_always_inline vnet_crypto_async_frame_t * -vnet_crypto_async_get_frame (vlib_main_t * vm, vnet_crypto_async_op_id_t opt) +vnet_crypto_async_get_frame (vlib_main_t *vm, vnet_crypto_op_id_t opt) { vnet_crypto_main_t *cm = &crypto_main; vnet_crypto_thread_t *ct = cm->threads + vm->thread_index; @@ -591,19 +533,22 @@ vnet_crypto_async_submit_open_frame (vlib_main_t * vm, { vnet_crypto_main_t *cm = &crypto_main; vlib_thread_main_t *tm = vlib_get_thread_main (); + vnet_crypto_op_id_t op = frame->op; + vnet_crypto_frame_enq_fn_t *fn = + cm->opt_data[op].handlers[VNET_CRYPTO_HANDLER_TYPE_ASYNC]; u32 i; vlib_node_t *n; frame->state = VNET_CRYPTO_FRAME_STATE_PENDING; frame->enqueue_thread_index = vm->thread_index; - if (PREDICT_FALSE (cm->enqueue_handlers == NULL)) + if (PREDICT_FALSE (fn == 0)) { frame->state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR; return -1; } - int ret = (cm->enqueue_handlers[frame->op]) (vm, frame); + int ret = fn (vm, frame); if (PREDICT_TRUE (ret == 0)) { @@ -655,7 +600,7 @@ vnet_crypto_async_add_to_frame (vlib_main_t *vm, vnet_crypto_async_frame_t *f, static_always_inline void vnet_crypto_async_reset_frame (vnet_crypto_async_frame_t * f) { - vnet_crypto_async_op_id_t opt; + vnet_crypto_op_id_t opt; ASSERT (f != 0); ASSERT ((f->state == VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED || f->state == VNET_CRYPTO_FRAME_STATE_ELT_ERROR)); diff --git a/src/vnet/crypto/crypto_api.c b/src/vnet/crypto/crypto_api.c index e701864a5ba..e7322cdd553 100644 --- a/src/vnet/crypto/crypto_api.c +++ b/src/vnet/crypto/crypto_api.c @@ -68,18 +68,23 @@ vl_api_crypto_set_handler_t_handler (vl_api_crypto_set_handler_t * mp) { vl_api_crypto_set_handler_reply_t *rmp; int rv = 0; - char *engine; - char *alg_name; - crypto_op_class_type_t oct; - - engine = (char *) mp->engine; - alg_name = 
(char *) mp->alg_name; - oct = (crypto_op_class_type_t) mp->oct; - - if (mp->is_async) - rv = vnet_crypto_set_async_handler2 (alg_name, engine); - else - rv = vnet_crypto_set_handler2 (alg_name, engine, oct); + + enum + { + CRYPTO_OP_SIMPLE, + CRYPTO_OP_CHAINED, + CRYPTO_OP_BOTH, + } oct = (typeof (oct)) mp->oct; + + vnet_crypto_set_handlers_args_t args = { + .engine = (char *) mp->engine, + .handler_name = (char *) mp->alg_name, + .set_async = mp->is_async != 0, + .set_simple = oct == CRYPTO_OP_SIMPLE || oct == CRYPTO_OP_BOTH, + .set_chained = oct == CRYPTO_OP_CHAINED || oct == CRYPTO_OP_BOTH, + }; + + rv = vnet_crypto_set_handlers (&args); REPLY_MACRO (VL_API_CRYPTO_SET_HANDLER_REPLY); } diff --git a/src/vnet/crypto/engine.h b/src/vnet/crypto/engine.h index 993befb393a..517b6ec3457 100644 --- a/src/vnet/crypto/engine.h +++ b/src/vnet/crypto/engine.h @@ -12,8 +12,8 @@ typedef unsigned int u32; typedef struct { vnet_crypto_op_id_t opt; - vnet_crypto_ops_handler_t *fn; - vnet_crypto_chained_ops_handler_t *cfn; + vnet_crypto_simple_op_fn_t *fn; + vnet_crypto_chained_op_fn_t *cfn; } vnet_crypto_engine_op_handlers_t; struct vnet_crypto_engine_registration; @@ -31,7 +31,7 @@ typedef struct vnet_crypto_engine_registration u32 num_threads; void *per_thread_data; vnet_crypto_engine_init_fn_t *init_fn; - vnet_crypto_key_handler_t *key_handler; + vnet_crypto_key_fn_t *key_handler; vnet_crypto_engine_op_handlers_t *op_handlers; } vnet_crypto_engine_registration_t; diff --git a/src/vnet/crypto/format.c b/src/vnet/crypto/format.c index c503ac81663..cfcee2f4572 100644 --- a/src/vnet/crypto/format.c +++ b/src/vnet/crypto/format.c @@ -22,7 +22,7 @@ format_vnet_crypto_alg (u8 * s, va_list * args) { vnet_crypto_alg_t alg = va_arg (*args, vnet_crypto_alg_t); vnet_crypto_main_t *cm = &crypto_main; - vnet_crypto_alg_data_t *d = vec_elt_at_index (cm->algs, alg); + vnet_crypto_alg_data_t *d = cm->algs + alg; return format (s, "%s", d->name); } @@ -105,6 +105,7 @@ format_vnet_crypto_engine (u8 * s, va_list * args) return format (s, "%s", e->name); } +#if 0 u8 * format_vnet_crypto_async_op_type (u8 * s, va_list * args) { @@ -125,7 +126,7 @@ format_vnet_crypto_async_op_type (u8 * s, va_list * args) u8 * format_vnet_crypto_async_alg (u8 * s, va_list * args) { - vnet_crypto_async_alg_t alg = va_arg (*args, vnet_crypto_async_alg_t); + vnet_crypto_alg_t alg = va_arg (*args, vnet_crypto_alg_t); vnet_crypto_main_t *cm = &crypto_main; vnet_crypto_async_alg_data_t *d = vec_elt_at_index (cm->async_algs, alg); return format (s, "%s", d->name); @@ -141,6 +142,7 @@ format_vnet_crypto_async_op (u8 * s, va_list * args) return format (s, "%U-%U", format_vnet_crypto_async_op_type, otd->type, format_vnet_crypto_async_alg, otd->alg); } +#endif /* * fd.io coding-style-patch-verification: ON diff --git a/src/vnet/crypto/main.c b/src/vnet/crypto/main.c new file mode 100644 index 00000000000..4f00e9b5c62 --- /dev/null +++ b/src/vnet/crypto/main.c @@ -0,0 +1,108 @@ +/* SPDX-License-Identifier: Apache-2.0 + * Copyright(c) 2025 Cisco Systems, Inc. + */ + +#include <vlib/vlib.h> +#include <vnet/crypto/crypto.h> +#include <vnet/crypto/engine.h> + +vnet_crypto_main_t crypto_main = +{ + .algs = { +#define _(n, s, ...) 
\ + [VNET_CRYPTO_ALG_##n] = { \ + .name = (s), \ + .op_by_type[VNET_CRYPTO_OP_TYPE_ENCRYPT] = VNET_CRYPTO_OP_##n##_ENC, \ + .op_by_type[VNET_CRYPTO_OP_TYPE_DECRYPT] = VNET_CRYPTO_OP_##n##_DEC, \ + __VA_ARGS__, \ + }, + foreach_crypto_cipher_alg foreach_crypto_aead_alg +#undef _ + +#define _(n, s) \ + [VNET_CRYPTO_ALG_HASH_##n] = { \ + .name = (s), \ + .op_by_type[VNET_CRYPTO_OP_TYPE_HASH] = VNET_CRYPTO_OP_##n##_HASH, \ + }, \ + [VNET_CRYPTO_ALG_HMAC_##n] = { \ + .name = ("hmac-" s), \ + .op_by_type[VNET_CRYPTO_OP_TYPE_HMAC] = VNET_CRYPTO_OP_##n##_HMAC, \ + .variable_key_length = 1, \ + }, + foreach_crypto_hash_alg +#undef _ + +#define _(n, s, k, t, a) \ + [VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a] = { \ + .name = (s), \ + .op_by_type[VNET_CRYPTO_OP_TYPE_ENCRYPT] = \ + VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \ + .op_by_type[VNET_CRYPTO_OP_TYPE_DECRYPT] = \ + VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC, \ + }, + foreach_crypto_aead_async_alg +#undef _ + +#define _(c, h, s, k, d) \ + [VNET_CRYPTO_ALG_##c##_##h##_TAG##d] = { \ + .name = (s), \ + .op_by_type[VNET_CRYPTO_OP_TYPE_ENCRYPT] = \ + VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \ + .op_by_type[VNET_CRYPTO_OP_TYPE_DECRYPT] = \ + VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC, \ + }, + foreach_crypto_link_async_alg +#undef _ + + }, + .opt_data = { +#define _(n, s, ...) \ + [VNET_CRYPTO_OP_##n##_ENC] = { \ + .alg = VNET_CRYPTO_ALG_##n, \ + .type = VNET_CRYPTO_OP_TYPE_ENCRYPT, \ + }, \ + [VNET_CRYPTO_OP_##n##_DEC] = { \ + .alg = VNET_CRYPTO_ALG_##n, \ + .type = VNET_CRYPTO_OP_TYPE_DECRYPT, \ + }, + foreach_crypto_cipher_alg foreach_crypto_aead_alg +#undef _ + +#define _(n, s) \ + [VNET_CRYPTO_OP_##n##_HASH] = { \ + .alg = VNET_CRYPTO_ALG_HASH_##n, \ + .type = VNET_CRYPTO_OP_TYPE_HASH, \ + }, \ + [VNET_CRYPTO_OP_##n##_HMAC] = { \ + .alg = VNET_CRYPTO_ALG_HMAC_##n, \ + .type = VNET_CRYPTO_OP_TYPE_HMAC, \ + }, + foreach_crypto_hash_alg +#undef _ + +#define _(n, s, k, t, a) \ + [VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC] = { \ + .alg = VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a, \ + .type = VNET_CRYPTO_OP_TYPE_ENCRYPT, \ + }, \ + [VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC] = { \ + .alg = VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a, \ + .type = VNET_CRYPTO_OP_TYPE_DECRYPT, \ + }, + foreach_crypto_aead_async_alg +#undef _ + +#define _(c, h, s, k, d) \ + [VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC] = { \ + .alg = VNET_CRYPTO_ALG_##c##_##h##_TAG##d, \ + .type = VNET_CRYPTO_OP_TYPE_ENCRYPT, \ + } , \ + [VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC] = { \ + .alg = VNET_CRYPTO_ALG_##c##_##h##_TAG##d, \ + .type = VNET_CRYPTO_OP_TYPE_DECRYPT, \ + }, + foreach_crypto_link_async_alg +#undef _ + + }, +}; diff --git a/src/vnet/crypto/node.c b/src/vnet/crypto/node.c index ee7f344ce68..7d023f3ff9d 100644 --- a/src/vnet/crypto/node.c +++ b/src/vnet/crypto/node.c @@ -45,7 +45,7 @@ typedef enum typedef struct { vnet_crypto_op_status_t op_status; - vnet_crypto_async_op_id_t op; + vnet_crypto_op_id_t op; } crypto_dispatch_trace_t; static u8 * @@ -55,15 +55,14 @@ format_crypto_dispatch_trace (u8 * s, va_list * args) CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *); crypto_dispatch_trace_t *t = va_arg (*args, crypto_dispatch_trace_t *); - s = format (s, "%U: %U", format_vnet_crypto_async_op, t->op, + s = format (s, "%U: %U", format_vnet_crypto_op, t->op, format_vnet_crypto_op_status, t->op_status); return s; } static void -vnet_crypto_async_add_trace (vlib_main_t * vm, vlib_node_runtime_t * node, - vlib_buffer_t * b, - vnet_crypto_async_op_id_t op_id, 
+vnet_crypto_async_add_trace (vlib_main_t *vm, vlib_node_runtime_t *node, + vlib_buffer_t *b, vnet_crypto_op_id_t op_id, vnet_crypto_op_status_t status) { crypto_dispatch_trace_t *tr = vlib_add_trace (vm, node, b, sizeof (*tr)); diff --git a/src/vnet/ipsec/esp_decrypt.c b/src/vnet/ipsec/esp_decrypt.c index 01b2d2971b0..6384bb927a8 100644 --- a/src/vnet/ipsec/esp_decrypt.c +++ b/src/vnet/ipsec/esp_decrypt.c @@ -1104,8 +1104,8 @@ esp_decrypt_inline (vlib_main_t *vm, vlib_node_runtime_t *node, ipsec_sa_t *sa0 = 0; bool anti_replay_result; int is_async = im->async_mode; - vnet_crypto_async_op_id_t async_op = ~0; - vnet_crypto_async_frame_t *async_frames[VNET_CRYPTO_ASYNC_OP_N_IDS]; + vnet_crypto_op_id_t async_op = ~0; + vnet_crypto_async_frame_t *async_frames[VNET_CRYPTO_N_OP_IDS]; esp_decrypt_error_t err; vlib_get_buffers (vm, from, b, n_left); diff --git a/src/vnet/ipsec/esp_encrypt.c b/src/vnet/ipsec/esp_encrypt.c index f6d1ecaed24..4338cb01e5d 100644 --- a/src/vnet/ipsec/esp_encrypt.c +++ b/src/vnet/ipsec/esp_encrypt.c @@ -625,9 +625,9 @@ esp_encrypt_inline (vlib_main_t *vm, vlib_node_runtime_t *node, vlib_buffer_t *lb; vnet_crypto_op_t **crypto_ops = &ptd->crypto_ops; vnet_crypto_op_t **integ_ops = &ptd->integ_ops; - vnet_crypto_async_frame_t *async_frames[VNET_CRYPTO_ASYNC_OP_N_IDS]; + vnet_crypto_async_frame_t *async_frames[VNET_CRYPTO_N_OP_IDS]; int is_async = im->async_mode; - vnet_crypto_async_op_id_t async_op = ~0; + vnet_crypto_op_id_t async_op = ~0; u16 drop_next = (lt == VNET_LINK_IP6 ? ESP_ENCRYPT_NEXT_DROP6 : (lt == VNET_LINK_IP4 ? ESP_ENCRYPT_NEXT_DROP4 : diff --git a/src/vnet/ipsec/ipsec_sa.c b/src/vnet/ipsec/ipsec_sa.c index dfa2bf6b23f..d37d89d5e3e 100644 --- a/src/vnet/ipsec/ipsec_sa.c +++ b/src/vnet/ipsec/ipsec_sa.c @@ -161,7 +161,7 @@ ipsec_sa_set_async_op_ids (ipsec_sa_t * sa) { if (ipsec_sa_is_set_USE_ESN (sa)) { -#define _(n, s, k) \ +#define _(n, s, ...) \ if (sa->crypto_sync_enc_op_id == VNET_CRYPTO_OP_##n##_ENC) \ sa->crypto_async_enc_op_id = VNET_CRYPTO_OP_##n##_TAG16_AAD12_ENC; \ if (sa->crypto_sync_dec_op_id == VNET_CRYPTO_OP_##n##_DEC) \ @@ -171,7 +171,7 @@ ipsec_sa_set_async_op_ids (ipsec_sa_t * sa) } else { -#define _(n, s, k) \ +#define _(n, s, ...) \ if (sa->crypto_sync_enc_op_id == VNET_CRYPTO_OP_##n##_ENC) \ sa->crypto_async_enc_op_id = VNET_CRYPTO_OP_##n##_TAG16_AAD8_ENC; \ if (sa->crypto_sync_dec_op_id == VNET_CRYPTO_OP_##n##_DEC) \ |