summaryrefslogtreecommitdiffstats
path: root/src
diff options
context:
space:
mode:
authorFilip Tehlar <ftehlar@cisco.com>2020-02-13 22:34:05 +0000
committerDamjan Marion <dmarion@me.com>2020-02-18 22:54:18 +0000
commit2fc4091319bdbbac25ce1132cfb73b5077426b75 (patch)
treee7a380421a39d3d319421fd9d9f0ebd20b86c6c9 /src
parent3fccd0278c38cb3fb07fc51a3f1686aef1ebb542 (diff)
crypto: add chained buffer support in ipsecmb (AES-GCM)
Type: feature Change-Id: Ia65caf38988c7e860e6d028f93659916825ef16b Signed-off-by: Filip Tehlar <ftehlar@cisco.com>
Diffstat (limited to 'src')
-rw-r--r--src/plugins/crypto_ipsecmb/ipsecmb.c83
1 file changed, 83 insertions, 0 deletions
diff --git a/src/plugins/crypto_ipsecmb/ipsecmb.c b/src/plugins/crypto_ipsecmb/ipsecmb.c
index a9edd98e87f..11e52322775 100644
--- a/src/plugins/crypto_ipsecmb/ipsecmb.c
+++ b/src/plugins/crypto_ipsecmb/ipsecmb.c
@@ -263,6 +263,41 @@ foreach_ipsecmb_cbc_cipher_op;
#define _(a, b) \
static_always_inline u32 \
+ipsecmb_ops_gcm_cipher_enc_##a##_chained (vlib_main_t * vm, \
+ vnet_crypto_op_t * ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
+{ \
+ ipsecmb_main_t *imbm = &ipsecmb_main; \
+ ipsecmb_per_thread_data_t *ptd = vec_elt_at_index (imbm->per_thread_data, \
+ vm->thread_index); \
+ MB_MGR *m = ptd->mgr; \
+ vnet_crypto_op_chunk_t *chp; \
+ u32 i, j; \
+ \
+ for (i = 0; i < n_ops; i++) \
+ { \
+ struct gcm_key_data *kd; \
+ struct gcm_context_data ctx; \
+ vnet_crypto_op_t *op = ops[i]; \
+ \
+ kd = (struct gcm_key_data *) imbm->key_data[op->key_index]; \
+ ASSERT (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS); \
+ IMB_AES##b##_GCM_INIT(m, kd, &ctx, op->iv, op->aad, op->aad_len); \
+ chp = chunks + op->chunk_index; \
+ for (j = 0; j < op->n_chunks; j++) \
+ { \
+ IMB_AES##b##_GCM_ENC_UPDATE (m, kd, &ctx, chp->dst, chp->src, \
+ chp->len); \
+ chp += 1; \
+ } \
+ IMB_AES##b##_GCM_ENC_FINALIZE(m, kd, &ctx, op->tag, op->tag_len); \
+ \
+ op->status = VNET_CRYPTO_OP_STATUS_COMPLETED; \
+ } \
+ \
+ return n_ops; \
+} \
+ \
+static_always_inline u32 \
ipsecmb_ops_gcm_cipher_enc_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
u32 n_ops) \
{ \
@@ -289,6 +324,48 @@ ipsecmb_ops_gcm_cipher_enc_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
} \
\
static_always_inline u32 \
+ipsecmb_ops_gcm_cipher_dec_##a##_chained (vlib_main_t * vm, \
+ vnet_crypto_op_t * ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
+{ \
+ ipsecmb_main_t *imbm = &ipsecmb_main; \
+ ipsecmb_per_thread_data_t *ptd = vec_elt_at_index (imbm->per_thread_data, \
+ vm->thread_index); \
+ MB_MGR *m = ptd->mgr; \
+ vnet_crypto_op_chunk_t *chp; \
+ u32 i, j, n_failed = 0; \
+ \
+ for (i = 0; i < n_ops; i++) \
+ { \
+ struct gcm_key_data *kd; \
+ struct gcm_context_data ctx; \
+ vnet_crypto_op_t *op = ops[i]; \
+ u8 scratch[64]; \
+ \
+ kd = (struct gcm_key_data *) imbm->key_data[op->key_index]; \
+ ASSERT (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS); \
+ IMB_AES##b##_GCM_INIT(m, kd, &ctx, op->iv, op->aad, op->aad_len); \
+ chp = chunks + op->chunk_index; \
+ for (j = 0; j < op->n_chunks; j++) \
+ { \
+ IMB_AES##b##_GCM_DEC_UPDATE (m, kd, &ctx, chp->dst, chp->src, \
+ chp->len); \
+ chp += 1; \
+ } \
+ IMB_AES##b##_GCM_DEC_FINALIZE(m, kd, &ctx, scratch, op->tag_len); \
+ \
+ if ((memcmp (op->tag, scratch, op->tag_len))) \
+ { \
+ op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC; \
+ n_failed++; \
+ } \
+ else \
+ op->status = VNET_CRYPTO_OP_STATUS_COMPLETED; \
+ } \
+ \
+ return n_ops - n_failed; \
+} \
+ \
+static_always_inline u32 \
ipsecmb_ops_gcm_cipher_dec_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
u32 n_ops) \
{ \
@@ -493,6 +570,12 @@ crypto_ipsecmb_init (vlib_main_t * vm)
ipsecmb_ops_gcm_cipher_enc_##a); \
vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
ipsecmb_ops_gcm_cipher_dec_##a); \
+ vnet_crypto_register_chained_ops_handler \
+ (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
+ ipsecmb_ops_gcm_cipher_enc_##a##_chained); \
+ vnet_crypto_register_chained_ops_handler \
+ (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
+ ipsecmb_ops_gcm_cipher_dec_##a##_chained); \
ad = imbm->alg_data + VNET_CRYPTO_ALG_##a; \
ad->data_size = sizeof (struct gcm_key_data); \
ad->aes_gcm_pre = m->gcm##b##_pre; \