path: root/src/vnet/crypto/crypto.c
author	Fan Zhang <roy.fan.zhang@intel.com>	2020-04-29 14:00:03 +0100
committer	Damjan Marion <dmarion@me.com>	2020-04-30 14:38:33 +0000
commit	f539578bac8b64886b57c460c9d74273e6613f8b (patch)
tree	190e09705fe1ebb46ca86a48c19de49fcaf0cbb0	/src/vnet/crypto/crypto.c
parent	162330f25aeec09694fffaaa31ba9b318620eb9c (diff)
crypto: introduce async crypto infra
Type: feature

Signed-off-by: Damjan Marion <damarion@cisco.com>
Signed-off-by: Filip Tehlar <ftehlar@cisco.com>
Signed-off-by: Fan Zhang <roy.fan.zhang@intel.com>
Signed-off-by: Piotr Bronowski <piotrx.bronowski@intel.com>
Signed-off-by: Dariusz Kazimierski <dariuszx.kazimierski@intel.com>
Signed-off-by: Piotr Kleski <piotrx.kleski@intel.com>
Change-Id: I4c3fcccf55c36842b7b48aed260fef2802b5c54b
Diffstat (limited to 'src/vnet/crypto/crypto.c')
-rw-r--r--	src/vnet/crypto/crypto.c	296
1 file changed, 291 insertions, 5 deletions
diff --git a/src/vnet/crypto/crypto.c b/src/vnet/crypto/crypto.c
index 1caff71b3e2..288e227821b 100644
--- a/src/vnet/crypto/crypto.c
+++ b/src/vnet/crypto/crypto.c
@@ -61,7 +61,6 @@ vnet_crypto_process_ops_call_handler (vlib_main_t * vm,
return rv;
}
-
static_always_inline u32
vnet_crypto_process_ops_inline (vlib_main_t * vm, vnet_crypto_op_t ops[],
vnet_crypto_op_chunk_t * chunks, u32 n_ops)
@@ -267,6 +266,44 @@ vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index,
}
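+/* Register an engine's async enqueue and dequeue handlers for one op id
+ * and elect the highest-priority registrant as the active engine. */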
void
+vnet_crypto_register_async_handler (vlib_main_t * vm, u32 engine_index,
+ vnet_crypto_async_op_id_t opt,
+ vnet_crypto_frame_enqueue_t * enqueue_hdl,
+ vnet_crypto_frame_dequeue_t * dequeue_hdl)
+{
+ vnet_crypto_main_t *cm = &crypto_main;
+ vnet_crypto_engine_t *ae, *e = vec_elt_at_index (cm->engines, engine_index);
+ vnet_crypto_async_op_data_t *otd = cm->async_opt_data + opt;
+ vec_validate_aligned (cm->enqueue_handlers, VNET_CRYPTO_ASYNC_OP_N_IDS - 1,
+ CLIB_CACHE_LINE_BYTES);
+ vec_validate_aligned (cm->dequeue_handlers, VNET_CRYPTO_ASYNC_OP_N_IDS - 1,
+ CLIB_CACHE_LINE_BYTES);
+
+  /* both enqueue and dequeue handlers must be present */
+  if (!enqueue_hdl || !dequeue_hdl)
+    return;
+
+ e->enqueue_handlers[opt] = enqueue_hdl;
+ e->dequeue_handlers[opt] = dequeue_hdl;
+ if (otd->active_engine_index_async == ~0)
+ {
+ otd->active_engine_index_async = engine_index;
+ cm->enqueue_handlers[opt] = enqueue_hdl;
+ cm->dequeue_handlers[opt] = dequeue_hdl;
+ }
+
+ ae = vec_elt_at_index (cm->engines, otd->active_engine_index_async);
+ if (ae->priority < e->priority)
+ {
+ otd->active_engine_index_async = engine_index;
+ cm->enqueue_handlers[opt] = enqueue_hdl;
+ cm->dequeue_handlers[opt] = dequeue_hdl;
+ }
+}
+
+void
vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
vnet_crypto_key_handler_t * key_handler)
{
@@ -318,10 +355,10 @@ vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg, u8 * data,
pool_get_zero (cm->keys, key);
index = key - cm->keys;
+ key->type = VNET_CRYPTO_KEY_TYPE_DATA;
key->alg = alg;
vec_validate_aligned (key->data, length - 1, CLIB_CACHE_LINE_BYTES);
clib_memcpy (key->data, data, length);
-
/* *INDENT-OFF* */
vec_foreach (engine, cm->engines)
if (engine->key_op_handler)
@@ -343,11 +380,218 @@ vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index)
engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_DEL, index);
/* *INDENT-ON* */
- clib_memset (key->data, 0, vec_len (key->data));
- vec_free (key->data);
+ if (key->type == VNET_CRYPTO_KEY_TYPE_DATA)
+ {
+ clib_memset (key->data, 0, vec_len (key->data));
+ vec_free (key->data);
+ }
+ else if (key->type == VNET_CRYPTO_KEY_TYPE_LINK)
+ {
+ key->index_crypto = key->index_integ = 0;
+ }
+
pool_put (cm->keys, key);
}
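+/* Map a (crypto alg, integrity alg) pair to the matching linked async
+ * alg, or ~0 when no such combination is defined. */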
+vnet_crypto_async_alg_t
+vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
+ vnet_crypto_alg_t integ_alg)
+{
+#define _(c, h, s, k, d) \
+ if (crypto_alg == VNET_CRYPTO_ALG_##c && \
+ integ_alg == VNET_CRYPTO_ALG_HMAC_##h) \
+ return VNET_CRYPTO_ALG_##c##_##h##_TAG##d;
+ foreach_crypto_link_async_alg
+#undef _
+ return ~0;
+}
+
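+/* Create a linked key that references an existing crypto key and an
+ * existing integrity key; returns the new key index, or ~0 on failure. */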
+u32
+vnet_crypto_key_add_linked (vlib_main_t * vm,
+ vnet_crypto_key_index_t index_crypto,
+ vnet_crypto_key_index_t index_integ)
+{
+ u32 index;
+ vnet_crypto_main_t *cm = &crypto_main;
+ vnet_crypto_engine_t *engine;
+ vnet_crypto_key_t *key_crypto, *key_integ, *key;
+ vnet_crypto_async_alg_t linked_alg;
+
+ key_crypto = pool_elt_at_index (cm->keys, index_crypto);
+ key_integ = pool_elt_at_index (cm->keys, index_integ);
+
+ if (!key_crypto || !key_integ)
+ return ~0;
+
+ linked_alg = vnet_crypto_link_algs (key_crypto->alg, key_integ->alg);
+ if (linked_alg == ~0)
+ return ~0;
+
+ pool_get_zero (cm->keys, key);
+ index = key - cm->keys;
+ key->type = VNET_CRYPTO_KEY_TYPE_LINK;
+ key->index_crypto = index_crypto;
+ key->index_integ = index_integ;
+ key->async_alg = linked_alg;
+
+ /* *INDENT-OFF* */
+ vec_foreach (engine, cm->engines)
+ if (engine->key_op_handler)
+ engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_ADD, index);
+ /* *INDENT-ON* */
+
+ return index;
+}
+
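+/* Switch the crypto-dispatch node between polling and disabled on all
+ * workers, but only when the async reference count justifies the change. */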
+clib_error_t *
+crypto_dispatch_enable_disable (int is_enable)
+{
+ vlib_main_t *vm = vlib_get_main ();
+ vlib_thread_main_t *tm = vlib_get_thread_main ();
+ vlib_node_t *node = vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch");
+ vnet_crypto_main_t *cm = &crypto_main;
+ u32 skip_master = vlib_num_workers () > 0, i;
+ u32 state_change = 0;
+ vlib_node_state_t state;
+
+ if (is_enable && cm->async_refcnt > 0)
+ {
+ state_change = 1;
+ state = VLIB_NODE_STATE_POLLING;
+ }
+
+ if (!is_enable && cm->async_refcnt == 0)
+ {
+ state_change = 1;
+ state = VLIB_NODE_STATE_DISABLED;
+ }
+
+ if (state_change)
+ for (i = skip_master; i < tm->n_vlib_mains; i++)
+ vlib_node_set_state (vlib_mains[i], node->index, state);
+
+ return 0;
+}
+
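+/* Make engine ei active for async op id, provided it registered both
+ * an enqueue and a dequeue handler. */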
+static_always_inline void
+crypto_set_active_async_engine (vnet_crypto_async_op_data_t * od,
+ vnet_crypto_async_op_id_t id, u32 ei)
+{
+ vnet_crypto_main_t *cm = &crypto_main;
+ vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei);
+
+ if (ce->enqueue_handlers[id] && ce->dequeue_handlers[id])
+ {
+ od->active_engine_index_async = ei;
+ cm->enqueue_handlers[id] = ce->enqueue_handlers[id];
+ cm->dequeue_handlers[id] = ce->dequeue_handlers[id];
+ }
+}
+
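+/* Select the named engine for all ops of the named async alg;
+ * returns -1 when the alg or engine is unknown. */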
+int
+vnet_crypto_set_async_handler2 (char *alg_name, char *engine)
+{
+ uword *p;
+ vnet_crypto_main_t *cm = &crypto_main;
+ vnet_crypto_async_alg_data_t *ad;
+ int i;
+
+ p = hash_get_mem (cm->async_alg_index_by_name, alg_name);
+ if (!p)
+ return -1;
+
+ ad = vec_elt_at_index (cm->async_algs, p[0]);
+
+ p = hash_get_mem (cm->engine_index_by_name, engine);
+ if (!p)
+ return -1;
+
+ for (i = 0; i < VNET_CRYPTO_ASYNC_OP_N_TYPES; i++)
+ {
+ vnet_crypto_async_op_data_t *od;
+ vnet_crypto_async_op_id_t id = ad->op_by_type[i];
+ if (id == 0)
+ continue;
+
+ od = cm->async_opt_data + id;
+ crypto_set_active_async_engine (od, id, p[0]);
+ }
+
+ return 0;
+}
+
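+/* Register a post-processing node as a next node of crypto-dispatch
+ * and return its next index; an existing registration is reused. */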
+u32
+vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name)
+{
+ vnet_crypto_main_t *cm = &crypto_main;
+ vnet_crypto_async_next_node_t *nn = 0;
+ vlib_node_t *cc, *pn;
+ uword index = vec_len (cm->next_nodes);
+
+ pn = vlib_get_node_by_name (vm, (u8 *) post_node_name);
+ if (!pn)
+ return ~0;
+
+ /* *INDENT-OFF* */
+  vec_foreach (nn, cm->next_nodes)
+ {
+ if (nn->node_idx == pn->index)
+ return nn->next_idx;
+ }
+ /* *INDENT-ON* */
+
+ vec_validate (cm->next_nodes, index);
+ nn = vec_elt_at_index (cm->next_nodes, index);
+
+ cc = vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch");
+ nn->next_idx = vlib_node_add_named_next (vm, cc->index, post_node_name);
+ nn->node_idx = pn->index;
+
+ return nn->next_idx;
+}
+
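+/* Reference-counted async mode: the first enable switches the
+ * crypto-dispatch node to polling on all workers, the last disable
+ * turns it off again. */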
+void
+vnet_crypto_request_async_mode (int is_enable)
+{
+ vlib_main_t *vm = vlib_get_main ();
+ vlib_thread_main_t *tm = vlib_get_thread_main ();
+ vlib_node_t *node = vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch");
+ vnet_crypto_main_t *cm = &crypto_main;
+ u32 skip_master = vlib_num_workers () > 0, i;
+ u32 state_change = 0;
+ vlib_node_state_t state;
+
+ if (is_enable && cm->async_refcnt == 0)
+ {
+ state_change = 1;
+ state = VLIB_NODE_STATE_POLLING;
+ }
+
+ if (!is_enable && cm->async_refcnt == 1)
+ {
+ state_change = 1;
+ state = VLIB_NODE_STATE_DISABLED;
+ }
+
+ if (state_change)
+ for (i = skip_master; i < tm->n_vlib_mains; i++)
+ vlib_node_set_state (vlib_mains[i], node->index, state);
+
+ if (is_enable)
+ cm->async_refcnt += 1;
+ else if (cm->async_refcnt > 0)
+ cm->async_refcnt -= 1;
+}
+
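+/* Return non-zero when an enqueue handler is registered for this op. */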
+int
+vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t op)
+{
+ vnet_crypto_main_t *cm = &crypto_main;
+
+ return (op < vec_len (cm->enqueue_handlers) &&
+ NULL != cm->enqueue_handlers[op]);
+}
+
static void
vnet_crypto_init_cipher_data (vnet_crypto_alg_t alg, vnet_crypto_op_id_t eid,
vnet_crypto_op_id_t did, char *name, u8 is_aead)
@@ -392,16 +636,44 @@ vnet_crypto_init_hmac_data (vnet_crypto_alg_t alg,
hash_set_mem (cm->alg_index_by_name, name, alg);
}
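+/* Record the name and per-direction op ids of an async alg and reset
+ * the active engine selection for both ops. */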
+static void
+vnet_crypto_init_async_data (vnet_crypto_async_alg_t alg,
+ vnet_crypto_async_op_id_t eid,
+ vnet_crypto_async_op_id_t did, char *name)
+{
+ vnet_crypto_main_t *cm = &crypto_main;
+
+ cm->async_algs[alg].name = name;
+ cm->async_algs[alg].op_by_type[VNET_CRYPTO_ASYNC_OP_TYPE_ENCRYPT] = eid;
+ cm->async_algs[alg].op_by_type[VNET_CRYPTO_ASYNC_OP_TYPE_DECRYPT] = did;
+  cm->async_opt_data[eid].type = VNET_CRYPTO_ASYNC_OP_TYPE_ENCRYPT;
+  cm->async_opt_data[eid].alg = alg;
+  cm->async_opt_data[eid].active_engine_index_async = ~0;
+  cm->async_opt_data[did].type = VNET_CRYPTO_ASYNC_OP_TYPE_DECRYPT;
+  cm->async_opt_data[did].alg = alg;
+  cm->async_opt_data[did].active_engine_index_async = ~0;
+ hash_set_mem (cm->async_alg_index_by_name, name, alg);
+}
+
clib_error_t *
vnet_crypto_init (vlib_main_t * vm)
{
vnet_crypto_main_t *cm = &crypto_main;
vlib_thread_main_t *tm = vlib_get_thread_main ();
+ vnet_crypto_thread_t *ct = 0;
cm->engine_index_by_name = hash_create_string ( /* size */ 0,
sizeof (uword));
cm->alg_index_by_name = hash_create_string (0, sizeof (uword));
+ cm->async_alg_index_by_name = hash_create_string (0, sizeof (uword));
vec_validate_aligned (cm->threads, tm->n_vlib_mains, CLIB_CACHE_LINE_BYTES);
+ vec_foreach (ct, cm->threads)
+ pool_alloc_aligned (ct->frame_pool, 256, CLIB_CACHE_LINE_BYTES);
vec_validate (cm->algs, VNET_CRYPTO_N_ALGS);
+ vec_validate (cm->async_algs, VNET_CRYPTO_N_ASYNC_ALGS);
+ clib_bitmap_validate (cm->async_active_ids, VNET_CRYPTO_ASYNC_OP_N_IDS - 1);
+
#define _(n, s, l) \
vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \
VNET_CRYPTO_OP_##n##_ENC, \
@@ -419,7 +691,21 @@ vnet_crypto_init (vlib_main_t * vm)
VNET_CRYPTO_OP_##n##_HMAC, "hmac-" s);
foreach_crypto_hmac_alg;
#undef _
- return 0;
+#define _(n, s, k, t, a) \
+ vnet_crypto_init_async_data (VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a, \
+ VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \
+ VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC, \
+ s);
+ foreach_crypto_aead_async_alg
+#undef _
+#define _(c, h, s, k, d) \
+ vnet_crypto_init_async_data (VNET_CRYPTO_ALG_##c##_##h##_TAG##d, \
+ VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \
+ VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC, \
+ s);
+ foreach_crypto_link_async_alg
+#undef _
+ return 0;
}
VLIB_INIT_FUNCTION (vnet_crypto_init);