author     Kishor Dhanawade <kdhanawade@marvell.com>   2024-09-26 15:48:34 +0530
committer  Damjan Marion <dmarion@0xa5.net>            2024-11-11 12:42:36 +0000
commit     3ac40b94ce324df0db6c17add36fc4d82576193a (patch)
tree       785e53e290409a7caa736cbc8590f2c4522dd153
parent     3462950ae8949bbedfa66a1d46b6b6b777e86faf (diff)
octeon: add support for hmac_md5 and chachapoly
Added support for the following algorithms:
- aes-cbc hmac_md5
- chacha20_poly1305
Type: feature
Signed-off-by: Kishor Dhanawade <kdhanawade@marvell.com>
Signed-off-by: Nithinsen Kaithakadan <nkaithakadan@marvell.com>
Change-Id: I44702483dad8182d5f15aed39c6bb42f1ca15d3c
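
For context, below is a minimal sketch of how a VPP consumer (for example an IPsec SA) would hand the newly supported algorithm combinations to the generic vnet crypto layer, which this backend then maps onto CPT sessions. The demo_* helper names, key material and key lengths are placeholders, and the sketch assumes VPP's long-standing vnet_crypto_key_add / vnet_crypto_key_add_linked helpers; it is not part of this patch.

#include <vnet/crypto/crypto.h>

/* AEAD case: a single 32-byte key selects chacha20-poly1305. */
static u32
demo_add_chachapoly_key (vlib_main_t *vm, u8 *key32)
{
  return vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_CHACHA20_POLY1305, key32,
			      32);
}

/* Linked cipher + auth case: the cipher and auth keys are added separately
 * and then linked, which is what lets the backend build one combined
 * aes-cbc + hmac-md5 session. Key lengths here are illustrative. */
static u32
demo_add_cbc_md5_key (vlib_main_t *vm, u8 *cbc_key16, u8 *md5_key16)
{
  u32 crypto_idx = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_AES_128_CBC,
					cbc_key16, 16);
  u32 integ_idx = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_HMAC_MD5,
				       md5_key16, 16);
  return vnet_crypto_key_add_linked (vm, crypto_idx, integ_idx);
}

A linked key like the one above would typically resolve to the AES_128_CBC_MD5_TAG12 link algorithm that oct_crypto_link_session_update now handles in this patch.
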
-rw-r--r--  src/plugins/dev_octeon/crypto.c  28
-rw-r--r--  src/plugins/dev_octeon/crypto.h  12
2 files changed, 39 insertions(+), 1 deletion(-)
diff --git a/src/plugins/dev_octeon/crypto.c b/src/plugins/dev_octeon/crypto.c
index 8796704edf4..7d3790f3ec9 100644
--- a/src/plugins/dev_octeon/crypto.c
+++ b/src/plugins/dev_octeon/crypto.c
@@ -1219,6 +1219,13 @@ oct_crypto_link_session_update (vlib_main_t *vm, oct_crypto_sess_t *sess,
       auth_type = ROC_SE_SHA2_SHA512;
       digest_len = 32;
       break;
+    case VNET_CRYPTO_ALG_AES_128_CBC_MD5_TAG12:
+    case VNET_CRYPTO_ALG_AES_192_CBC_MD5_TAG12:
+    case VNET_CRYPTO_ALG_AES_256_CBC_MD5_TAG12:
+      enc_type = ROC_SE_AES_CBC;
+      auth_type = ROC_SE_MD5_TYPE;
+      digest_len = 12;
+      break;
     case VNET_CRYPTO_ALG_AES_128_CTR_SHA1_TAG12:
     case VNET_CRYPTO_ALG_AES_192_CTR_SHA1_TAG12:
     case VNET_CRYPTO_ALG_AES_256_CTR_SHA1_TAG12:
@@ -1318,6 +1325,10 @@ oct_crypto_aead_session_update (vlib_main_t *vm, oct_crypto_sess_t *sess,
       sess->cpt_op = type;
       digest_len = 16;
       break;
+    case VNET_CRYPTO_ALG_CHACHA20_POLY1305:
+      enc_type = ROC_SE_CHACHA20;
+      auth_type = ROC_SE_POLY1305;
+      break;
     default:
       clib_warning (
	"Cryptodev: Undefined cipher algo %u specified. Key index %u",
@@ -1342,6 +1353,9 @@ oct_crypto_aead_session_update (vlib_main_t *vm, oct_crypto_sess_t *sess,
       return -1;
     }
 
+  if (enc_type == ROC_SE_CHACHA20)
+    sess->cpt_ctx.template_w4.s.opcode_minor |= BIT (5);
+
   return 0;
 }
 
@@ -1567,6 +1581,13 @@ oct_crypto_enqueue_aead_aad_12_enc (vlib_main_t *vm,
 }
 
 int
+oct_crypto_enqueue_aead_aad_0_enc (vlib_main_t *vm,
+				   vnet_crypto_async_frame_t *frame)
+{
+  return oct_crypto_enqueue_aead_aad_enc (vm, frame, 0);
+}
+
+int
 oct_crypto_enqueue_aead_aad_8_dec (vlib_main_t *vm,
				   vnet_crypto_async_frame_t *frame)
 {
@@ -1580,6 +1601,13 @@ oct_crypto_enqueue_aead_aad_12_dec (vlib_main_t *vm,
   return oct_crypto_enqueue_aead_aad_dec (vm, frame, 12);
 }
 
+int
+oct_crypto_enqueue_aead_aad_0_dec (vlib_main_t *vm,
+				   vnet_crypto_async_frame_t *frame)
+{
+  return oct_crypto_enqueue_aead_aad_dec (vm, frame, 0);
+}
+
 vnet_crypto_async_frame_t *
 oct_crypto_frame_dequeue (vlib_main_t *vm, u32 *nb_elts_processed,
			   u32 *enqueue_thread_idx)
diff --git a/src/plugins/dev_octeon/crypto.h b/src/plugins/dev_octeon/crypto.h
index 8d17980a55f..27e1f600c68 100644
--- a/src/plugins/dev_octeon/crypto.h
+++ b/src/plugins/dev_octeon/crypto.h
@@ -20,7 +20,10 @@
   _ (AES_192_GCM, 24, 16, 8)                                                 \
   _ (AES_192_GCM, 24, 16, 12)                                                \
   _ (AES_256_GCM, 32, 16, 8)                                                 \
-  _ (AES_256_GCM, 32, 16, 12)
+  _ (AES_256_GCM, 32, 16, 12)                                                \
+  _ (CHACHA20_POLY1305, 32, 16, 8)                                           \
+  _ (CHACHA20_POLY1305, 32, 16, 12)                                          \
+  _ (CHACHA20_POLY1305, 32, 16, 0)
 
 /* CRYPTO_ID, INTEG_ID, KEY_LENGTH_IN_BYTES, DIGEST_LEN */
 #define foreach_oct_crypto_link_async_alg                                    \
@@ -36,6 +39,9 @@
   _ (AES_128_CBC, SHA512, 16, 32)                                            \
   _ (AES_192_CBC, SHA512, 24, 32)                                            \
   _ (AES_256_CBC, SHA512, 32, 32)                                            \
+  _ (AES_128_CBC, MD5, 16, 12)                                               \
+  _ (AES_192_CBC, MD5, 24, 12)                                               \
+  _ (AES_256_CBC, MD5, 32, 12)                                               \
   _ (3DES_CBC, MD5, 24, 12)                                                  \
   _ (3DES_CBC, SHA1, 24, 12)                                                 \
   _ (3DES_CBC, SHA256, 24, 16)                                               \
@@ -162,10 +168,14 @@ int oct_crypto_enqueue_aead_aad_8_enc (vlib_main_t *vm,
                                        vnet_crypto_async_frame_t *frame);
 int oct_crypto_enqueue_aead_aad_12_enc (vlib_main_t *vm,
                                         vnet_crypto_async_frame_t *frame);
+int oct_crypto_enqueue_aead_aad_0_enc (vlib_main_t *vm,
+                                       vnet_crypto_async_frame_t *frame);
 int oct_crypto_enqueue_aead_aad_8_dec (vlib_main_t *vm,
                                        vnet_crypto_async_frame_t *frame);
 int oct_crypto_enqueue_aead_aad_12_dec (vlib_main_t *vm,
                                         vnet_crypto_async_frame_t *frame);
+int oct_crypto_enqueue_aead_aad_0_dec (vlib_main_t *vm,
+                                       vnet_crypto_async_frame_t *frame);
 vnet_crypto_async_frame_t *oct_crypto_frame_dequeue (vlib_main_t *vm,
                                                      u32 *nb_elts_processed,
                                                      u32 *enqueue_thread_idx);
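
The crypto.h hunks extend two X-macro tables rather than open-coding each algorithm: each _() row of foreach_oct_crypto_aead_async_alg carries the algorithm name plus (going by the existing GCM rows) the key length, tag length and AAD length, and the table is expanded in several places to generate the per-variant enums and enqueue-handler wiring. Below is a stand-alone, simplified illustration of that pattern; the table name, row set and printf expansion are demo stand-ins, not the plugin's actual expansion, and the field labels are an inference from the GCM rows.

#include <stdio.h>

/* Hypothetical, cut-down analogue of foreach_oct_crypto_aead_async_alg:
 * one row per supported (algorithm, key-len, tag-len, aad-len) variant,
 * including an aad-len of 0 as added for chacha20-poly1305. */
#define foreach_demo_aead_alg                                                 \
  _ (AES_256_GCM, 32, 16, 12)                                                 \
  _ (CHACHA20_POLY1305, 32, 16, 12)                                           \
  _ (CHACHA20_POLY1305, 32, 16, 0)

int
main (void)
{
  /* One expansion of the table: print each row.  The real code would
   * instead expand the same table into enum cases and handler tables. */
#define _(n, k, tag, aad)                                                     \
  printf ("alg=%-20s key=%2d tag=%2d aad=%2d\n", #n, k, tag, aad);
  foreach_demo_aead_alg
#undef _
  return 0;
}

Adding a variant therefore only requires a new _() row, which is why this patch touches the tables and the session-update switch but none of the expansion sites.
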