Diffstat (limited to 'src/plugins')
-rw-r--r--  src/plugins/crypto_ipsecmb/ipsecmb.c  |  83
1 file changed, 83 insertions(+), 0 deletions(-)
diff --git a/src/plugins/crypto_ipsecmb/ipsecmb.c b/src/plugins/crypto_ipsecmb/ipsecmb.c
index a9edd98e87f..11e52322775 100644
--- a/src/plugins/crypto_ipsecmb/ipsecmb.c
+++ b/src/plugins/crypto_ipsecmb/ipsecmb.c
@@ -263,6 +263,41 @@ foreach_ipsecmb_cbc_cipher_op;
#define _(a, b) \
static_always_inline u32 \
+ipsecmb_ops_gcm_cipher_enc_##a##_chained (vlib_main_t * vm, \
+ vnet_crypto_op_t * ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
+{ \
+ ipsecmb_main_t *imbm = &ipsecmb_main; \
+ ipsecmb_per_thread_data_t *ptd = vec_elt_at_index (imbm->per_thread_data, \
+ vm->thread_index); \
+ MB_MGR *m = ptd->mgr; \
+ vnet_crypto_op_chunk_t *chp; \
+ u32 i, j; \
+ \
+ for (i = 0; i < n_ops; i++) \
+ { \
+ struct gcm_key_data *kd; \
+ struct gcm_context_data ctx; \
+ vnet_crypto_op_t *op = ops[i]; \
+ \
+ kd = (struct gcm_key_data *) imbm->key_data[op->key_index]; \
+ ASSERT (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS); \
+ IMB_AES##b##_GCM_INIT(m, kd, &ctx, op->iv, op->aad, op->aad_len); \
+ chp = chunks + op->chunk_index; \
+ for (j = 0; j < op->n_chunks; j++) \
+ { \
+ IMB_AES##b##_GCM_ENC_UPDATE (m, kd, &ctx, chp->dst, chp->src, \
+ chp->len); \
+ chp += 1; \
+ } \
+ IMB_AES##b##_GCM_ENC_FINALIZE(m, kd, &ctx, op->tag, op->tag_len); \
+ \
+ op->status = VNET_CRYPTO_OP_STATUS_COMPLETED; \
+ } \
+ \
+ return n_ops; \
+} \
+ \
+static_always_inline u32 \
ipsecmb_ops_gcm_cipher_enc_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
u32 n_ops) \
{ \
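The new _chained variant above swaps the one-shot GCM call for the library's streaming INIT/UPDATE/FINALIZE interface, running a single GCM operation across every chunk of a scattered buffer and emitting the tag once at the end. A minimal standalone sketch of the same pattern directly against intel-ipsec-mb (key, IV, AAD and the two-chunk split are made-up illustration values; init_mb_mgr_auto assumes a v1.0+ library, older releases use init_mb_mgr_sse/avx/avx2/avx512 instead):

#include <intel-ipsec-mb.h>
#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  MB_MGR *m = alloc_mb_mgr (0);
  struct gcm_key_data kd;
  struct gcm_context_data ctx;
  uint8_t key[16] = { 0 }, iv[12] = { 0 }, aad[8] = { 0 };
  uint8_t pt[32] = { 0 }, ct[32], tag[16];

  init_mb_mgr_auto (m, NULL);   /* v1.0+; pick best arch at run time */
  m->gcm128_pre (key, &kd);     /* key expansion; the ad->aes_gcm_pre hook
                                   registered in the init hunk below */

  /* one GCM operation spread over two chunks, as in the chained loop */
  IMB_AES128_GCM_INIT (m, &kd, &ctx, iv, aad, sizeof (aad));
  IMB_AES128_GCM_ENC_UPDATE (m, &kd, &ctx, ct, pt, 20);           /* chunk 0 */
  IMB_AES128_GCM_ENC_UPDATE (m, &kd, &ctx, ct + 20, pt + 20, 12); /* chunk 1 */
  IMB_AES128_GCM_ENC_FINALIZE (m, &kd, &ctx, tag, sizeof (tag));

  printf ("tag[0] = 0x%02x\n", tag[0]);
  free_mb_mgr (m);
  return 0;
}

Because GCM is a streaming mode, the tag produced this way matches what a single one-shot call over the concatenated plaintext would produce, which is what lets the chained handler avoid linearizing the buffer first.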
@@ -289,6 +324,48 @@ ipsecmb_ops_gcm_cipher_enc_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
} \
\
static_always_inline u32 \
+ipsecmb_ops_gcm_cipher_dec_##a##_chained (vlib_main_t * vm, \
+ vnet_crypto_op_t * ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
+{ \
+ ipsecmb_main_t *imbm = &ipsecmb_main; \
+ ipsecmb_per_thread_data_t *ptd = vec_elt_at_index (imbm->per_thread_data, \
+ vm->thread_index); \
+ MB_MGR *m = ptd->mgr; \
+ vnet_crypto_op_chunk_t *chp; \
+ u32 i, j, n_failed = 0; \
+ \
+ for (i = 0; i < n_ops; i++) \
+ { \
+ struct gcm_key_data *kd; \
+ struct gcm_context_data ctx; \
+ vnet_crypto_op_t *op = ops[i]; \
+ u8 scratch[64]; \
+ \
+ kd = (struct gcm_key_data *) imbm->key_data[op->key_index]; \
+ ASSERT (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS); \
+ IMB_AES##b##_GCM_INIT(m, kd, &ctx, op->iv, op->aad, op->aad_len); \
+ chp = chunks + op->chunk_index; \
+ for (j = 0; j < op->n_chunks; j++) \
+ { \
+ IMB_AES##b##_GCM_DEC_UPDATE (m, kd, &ctx, chp->dst, chp->src, \
+ chp->len); \
+ chp += 1; \
+ } \
+ IMB_AES##b##_GCM_DEC_FINALIZE(m, kd, &ctx, scratch, op->tag_len); \
+ \
+ if ((memcmp (op->tag, scratch, op->tag_len))) \
+ { \
+ op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC; \
+ n_failed++; \
+ } \
+ else \
+ op->status = VNET_CRYPTO_OP_STATUS_COMPLETED; \
+ } \
+ \
+ return n_ops - n_failed; \
+} \
+ \
+static_always_inline u32 \
ipsecmb_ops_gcm_cipher_dec_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
u32 n_ops) \
{ \
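The decrypt variant mirrors the encrypt path but finalizes the computed tag into a local scratch buffer, memcmps it against op->tag, flags mismatches as VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC, and returns only the count of ops that verified. Continuing the illustration-only sketch above (same manager, key data and buffers; add <string.h> for memcmp):

  uint8_t scratch[16], out[32];

  IMB_AES128_GCM_INIT (m, &kd, &ctx, iv, aad, sizeof (aad));
  IMB_AES128_GCM_DEC_UPDATE (m, &kd, &ctx, out, ct, 20);           /* chunk 0 */
  IMB_AES128_GCM_DEC_UPDATE (m, &kd, &ctx, out + 20, ct + 20, 12); /* chunk 1 */
  IMB_AES128_GCM_DEC_FINALIZE (m, &kd, &ctx, scratch, sizeof (scratch));

  if (memcmp (tag, scratch, sizeof (scratch)))
    ;  /* bad tag: here the handler sets VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC */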
@@ -493,6 +570,12 @@ crypto_ipsecmb_init (vlib_main_t * vm)
ipsecmb_ops_gcm_cipher_enc_##a); \
vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
ipsecmb_ops_gcm_cipher_dec_##a); \
+ vnet_crypto_register_chained_ops_handler \
+ (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
+ ipsecmb_ops_gcm_cipher_enc_##a##_chained); \
+ vnet_crypto_register_chained_ops_handler \
+ (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
+ ipsecmb_ops_gcm_cipher_dec_##a##_chained); \
ad = imbm->alg_data + VNET_CRYPTO_ALG_##a; \
ad->data_size = sizeof (struct gcm_key_data); \
ad->aes_gcm_pre = m->gcm##b##_pre; \
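Instantiated through the plugin's per-algorithm list (assuming the pre-existing foreach_ipsecmb_gcm_cipher_op entries such as _(AES_128_GCM, 128)), the two new registration calls in this hunk expand to, for example:

  vnet_crypto_register_chained_ops_handler
    (vm, eidx, VNET_CRYPTO_OP_AES_128_GCM_ENC,
     ipsecmb_ops_gcm_cipher_enc_AES_128_GCM_chained);
  vnet_crypto_register_chained_ops_handler
    (vm, eidx, VNET_CRYPTO_OP_AES_128_GCM_DEC,
     ipsecmb_ops_gcm_cipher_dec_AES_128_GCM_chained);

With these registered, ops carrying VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS (the flag the new handlers ASSERT on) are dispatched to the chunk-walking variants, while single-buffer ops keep the existing one-shot path.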