Diffstat (limited to 'src/plugins/crypto_ipsecmb/ipsecmb.c')
-rw-r--r--  src/plugins/crypto_ipsecmb/ipsecmb.c | 252
1 file changed, 252 insertions(+), 0 deletions(-)
diff --git a/src/plugins/crypto_ipsecmb/ipsecmb.c b/src/plugins/crypto_ipsecmb/ipsecmb.c
index feca6255e62..ad5f7bfe006 100644
--- a/src/plugins/crypto_ipsecmb/ipsecmb.c
+++ b/src/plugins/crypto_ipsecmb/ipsecmb.c
@@ -426,6 +426,243 @@ ipsecmb_ops_gcm_cipher_dec_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
foreach_ipsecmb_gcm_cipher_op;
#undef _
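+/* Retire a completed AEAD job: on failure record the job status; for
+ * decrypt ops (VNET_CRYPTO_OP_FLAG_HMAC_CHECK) verify the computed tag
+ * against the received one; for encrypt ops copy the computed tag out
+ * of the per-op scratch slot. */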
+always_inline void
+ipsecmb_retire_aead_job (IMB_JOB *job, u32 *n_fail)
+{
+ vnet_crypto_op_t *op = job->user_data;
+ u32 len = op->tag_len;
+
+ if (PREDICT_FALSE (STS_COMPLETED != job->status))
+ {
+ op->status = ipsecmb_status_job (job->status);
+ *n_fail = *n_fail + 1;
+ return;
+ }
+
+ if (op->flags & VNET_CRYPTO_OP_FLAG_HMAC_CHECK)
+ {
+ if (memcmp (op->tag, job->auth_tag_output, len))
+ {
+ *n_fail = *n_fail + 1;
+ op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
+ return;
+ }
+    }
+  else
+    clib_memcpy_fast (op->tag, job->auth_tag_output, len);
+
+ op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
+}
+
+static_always_inline u32
+ipsecmb_ops_chacha_poly (vlib_main_t *vm, vnet_crypto_op_t *ops[], u32 n_ops,
+ IMB_CIPHER_DIRECTION dir)
+{
+ ipsecmb_main_t *imbm = &ipsecmb_main;
+ ipsecmb_per_thread_data_t *ptd =
+ vec_elt_at_index (imbm->per_thread_data, vm->thread_index);
+ struct IMB_JOB *job;
+ MB_MGR *m = ptd->mgr;
+ u32 i, n_fail = 0, last_key_index = ~0;
+ u8 scratch[VLIB_FRAME_SIZE][16];
+ u8 iv_data[16];
+ u8 *key = 0;
+
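+  /* Jobs can complete out of order, so each op gets its own 16-byte
+   * scratch slot to receive the Poly1305 tag computed by the manager. */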
+ for (i = 0; i < n_ops; i++)
+ {
+ vnet_crypto_op_t *op = ops[i];
+ __m128i iv;
+
+ job = IMB_GET_NEXT_JOB (m);
+ if (last_key_index != op->key_index)
+ {
+ vnet_crypto_key_t *kd = vnet_crypto_get_key (op->key_index);
+
+ key = kd->data;
+ last_key_index = op->key_index;
+ }
+
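+      /* ChaCha20-Poly1305 uses the raw 256-bit key with no expansion,
+       * so encrypt and decrypt keys point at the same buffer. */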
+ job->cipher_direction = dir;
+ job->chain_order = IMB_ORDER_HASH_CIPHER;
+ job->cipher_mode = IMB_CIPHER_CHACHA20_POLY1305;
+ job->hash_alg = IMB_AUTH_CHACHA20_POLY1305;
+ job->enc_keys = job->dec_keys = key;
+ job->key_len_in_bytes = 32;
+
+ job->u.CHACHA20_POLY1305.aad = op->aad;
+ job->u.CHACHA20_POLY1305.aad_len_in_bytes = op->aad_len;
+ job->src = op->src;
+ job->dst = op->dst;
+
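+      /* For encrypt ops that request it, generate a fresh 96-bit IV
+       * from the per-thread IV state, then advance the state by one
+       * AES round. */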
+ if ((dir == IMB_DIR_ENCRYPT) &&
+ (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV))
+ {
+ iv = ptd->cbc_iv;
+ _mm_storeu_si128 ((__m128i *) iv_data, iv);
+ clib_memcpy_fast (op->iv, iv_data, 12);
+ ptd->cbc_iv = _mm_aesenc_si128 (iv, iv);
+ }
+
+ job->iv = op->iv;
+ job->iv_len_in_bytes = 12;
+ job->msg_len_to_cipher_in_bytes = job->msg_len_to_hash_in_bytes =
+ op->len;
+ job->cipher_start_src_offset_in_bytes =
+ job->hash_start_src_offset_in_bytes = 0;
+
+ job->auth_tag_output = scratch[i];
+ job->auth_tag_output_len_in_bytes = 16;
+
+ job->user_data = op;
+
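+      /* Submitting a job may return an earlier, now completed job (not
+       * necessarily this one), or NULL if nothing has completed yet. */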
+      job = IMB_SUBMIT_JOB_NOCHECK (m);
+ if (job)
+ ipsecmb_retire_aead_job (job, &n_fail);
+ }
+
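+  /* Drain any jobs the manager is still holding. */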
+  while ((job = IMB_FLUSH_JOB (m)))
+ ipsecmb_retire_aead_job (job, &n_fail);
+
+ return n_ops - n_fail;
+}
+
+static_always_inline u32
+ipsecmb_ops_chacha_poly_enc (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+ u32 n_ops)
+{
+ return ipsecmb_ops_chacha_poly (vm, ops, n_ops, IMB_DIR_ENCRYPT);
+}
+
+static_always_inline u32
+ipsecmb_ops_chacha_poly_dec (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+ u32 n_ops)
+{
+ return ipsecmb_ops_chacha_poly (vm, ops, n_ops, IMB_DIR_DECRYPT);
+}
+
+static_always_inline u32
+ipsecmb_ops_chacha_poly_chained (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+ vnet_crypto_op_chunk_t *chunks, u32 n_ops,
+ IMB_CIPHER_DIRECTION dir)
+{
+ ipsecmb_main_t *imbm = &ipsecmb_main;
+ ipsecmb_per_thread_data_t *ptd =
+ vec_elt_at_index (imbm->per_thread_data, vm->thread_index);
+ MB_MGR *m = ptd->mgr;
+ u32 i, n_fail = 0, last_key_index = ~0;
+ u8 iv_data[16];
+ u8 *key = 0;
+
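+  /* Chained ops span multiple buffer chunks, so use the synchronous
+   * init/update/finalize API instead of the job manager. */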
+ if (dir == IMB_DIR_ENCRYPT)
+ {
+ for (i = 0; i < n_ops; i++)
+ {
+ vnet_crypto_op_t *op = ops[i];
+ struct chacha20_poly1305_context_data ctx;
+ vnet_crypto_op_chunk_t *chp;
+ __m128i iv;
+ u32 j;
+
+ ASSERT (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS);
+
+ if (last_key_index != op->key_index)
+ {
+ vnet_crypto_key_t *kd = vnet_crypto_get_key (op->key_index);
+
+ key = kd->data;
+ last_key_index = op->key_index;
+ }
+
+ if (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV)
+ {
+ iv = ptd->cbc_iv;
+ _mm_storeu_si128 ((__m128i *) iv_data, iv);
+ clib_memcpy_fast (op->iv, iv_data, 12);
+ ptd->cbc_iv = _mm_aesenc_si128 (iv, iv);
+ }
+
+ IMB_CHACHA20_POLY1305_INIT (m, key, &ctx, op->iv, op->aad,
+ op->aad_len);
+
+ chp = chunks + op->chunk_index;
+ for (j = 0; j < op->n_chunks; j++)
+ {
+ IMB_CHACHA20_POLY1305_ENC_UPDATE (m, key, &ctx, chp->dst,
+ chp->src, chp->len);
+ chp += 1;
+ }
+
+ IMB_CHACHA20_POLY1305_ENC_FINALIZE (m, &ctx, op->tag, op->tag_len);
+
+ op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
+ }
+ }
+ else /* dir == IMB_DIR_DECRYPT */
+ {
+ for (i = 0; i < n_ops; i++)
+ {
+ vnet_crypto_op_t *op = ops[i];
+ struct chacha20_poly1305_context_data ctx;
+ vnet_crypto_op_chunk_t *chp;
+ u8 scratch[16];
+ u32 j;
+
+ ASSERT (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS);
+
+ if (last_key_index != op->key_index)
+ {
+ vnet_crypto_key_t *kd = vnet_crypto_get_key (op->key_index);
+
+ key = kd->data;
+ last_key_index = op->key_index;
+ }
+
+ IMB_CHACHA20_POLY1305_INIT (m, key, &ctx, op->iv, op->aad,
+ op->aad_len);
+
+ chp = chunks + op->chunk_index;
+ for (j = 0; j < op->n_chunks; j++)
+ {
+ IMB_CHACHA20_POLY1305_DEC_UPDATE (m, key, &ctx, chp->dst,
+ chp->src, chp->len);
+ chp += 1;
+ }
+
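+	  /* Compute the tag into a scratch buffer and compare it with
+	   * the tag received in the op. */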
+ IMB_CHACHA20_POLY1305_DEC_FINALIZE (m, &ctx, scratch, op->tag_len);
+
+ if (memcmp (op->tag, scratch, op->tag_len))
+ {
+ n_fail = n_fail + 1;
+ op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
+ }
+ else
+ op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
+ }
+ }
+
+ return n_ops - n_fail;
+}
+
+static_always_inline u32
+ipsecmb_ops_chacha_poly_enc_chained (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+				     vnet_crypto_op_chunk_t *chunks,
+				     u32 n_ops)
+{
+ return ipsecmb_ops_chacha_poly_chained (vm, ops, chunks, n_ops,
+ IMB_DIR_ENCRYPT);
+}
+
+static_always_inline u32
+ipsecmb_ops_chacha_poly_dec_chained (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+				     vnet_crypto_op_chunk_t *chunks,
+				     u32 n_ops)
+{
+ return ipsecmb_ops_chacha_poly_chained (vm, ops, chunks, n_ops,
+ IMB_DIR_DECRYPT);
+}
+
clib_error_t *
crypto_ipsecmb_iv_init (ipsecmb_main_t * imbm)
{
@@ -613,6 +850,21 @@ crypto_ipsecmb_init (vlib_main_t * vm)
foreach_ipsecmb_gcm_cipher_op;
#undef _
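+  /* Register single-buffer and chained handlers for ChaCha20-Poly1305;
+   * data_size 0 means no per-key expanded material is stored. */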
+ vnet_crypto_register_ops_handler (vm, eidx,
+ VNET_CRYPTO_OP_CHACHA20_POLY1305_ENC,
+ ipsecmb_ops_chacha_poly_enc);
+ vnet_crypto_register_ops_handler (vm, eidx,
+ VNET_CRYPTO_OP_CHACHA20_POLY1305_DEC,
+ ipsecmb_ops_chacha_poly_dec);
+  vnet_crypto_register_chained_ops_handler (
+    vm, eidx, VNET_CRYPTO_OP_CHACHA20_POLY1305_ENC,
+    ipsecmb_ops_chacha_poly_enc_chained);
+  vnet_crypto_register_chained_ops_handler (
+    vm, eidx, VNET_CRYPTO_OP_CHACHA20_POLY1305_DEC,
+    ipsecmb_ops_chacha_poly_dec_chained);
+ ad = imbm->alg_data + VNET_CRYPTO_ALG_CHACHA20_POLY1305;
+ ad->data_size = 0;
+
vnet_crypto_register_key_handler (vm, eidx, crypto_ipsecmb_key_handler);
return (NULL);
}