summary refs log tree commit diff stats
path: root/src/plugins
diff options
context:
space:
mode:
authorDamjan Marion <damarion@cisco.com>2019-04-25 18:28:31 +0200
committerFlorin Coras <florin.coras@gmail.com>2019-04-26 14:13:00 +0000
commitd97918ec678c4086001840e7263ba9ac3504ce24 (patch)
treeb490f0318dc8bae6b7d8f90f789c416414bf0b17 /src/plugins
parentaaed170828dfdb3d62295b76fd617f794dc76b3f (diff)
crypto, ipsec: change GCM IV handling
- nonce construction out of salt and IV is IPsec-specific, so it should be handled in IPsec code
- fixes GCM unit tests
- GCM IV is constructed out of a simple counter, per RFC 4106 section 3.1

Change-Id: Ib7712cc9612830daa737f5171d8384f1d361bb61
Signed-off-by: Damjan Marion <damarion@cisco.com>
Diffstat (limited to 'src/plugins')
-rw-r--r--src/plugins/crypto_ipsecmb/ipsecmb.c65
-rw-r--r--src/plugins/crypto_openssl/main.c6
2 files changed, 17 insertions, 54 deletions
diff --git a/src/plugins/crypto_ipsecmb/ipsecmb.c b/src/plugins/crypto_ipsecmb/ipsecmb.c
index 60b5bb96704..b826700cf9e 100644
--- a/src/plugins/crypto_ipsecmb/ipsecmb.c
+++ b/src/plugins/crypto_ipsecmb/ipsecmb.c
@@ -81,9 +81,9 @@ static ipsecmb_main_t ipsecmb_main = { };
* (Alg, key-len-bytes, iv-len-bytes)
*/
#define foreach_ipsecmb_gcm_cipher_op \
- _(AES_128_GCM, 128, 12) \
- _(AES_192_GCM, 192, 12) \
- _(AES_256_GCM, 256, 12)
+ _(AES_128_GCM, 128) \
+ _(AES_192_GCM, 192) \
+ _(AES_256_GCM, 256)
always_inline void
ipsecmb_retire_hmac_job (JOB_AES_HMAC * job, u32 * n_fail, u32 digest_size)
@@ -271,6 +271,7 @@ ipsecmb_retire_gcm_cipher_job (JOB_AES_HMAC * job,
{
op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
*n_fail = *n_fail + 1;
+ return;
}
else
op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
@@ -286,9 +287,8 @@ ipsecmb_retire_gcm_cipher_job (JOB_AES_HMAC * job,
}
static_always_inline u32
-ipsecmb_ops_gcm_cipher_inline (vlib_main_t * vm,
- vnet_crypto_op_t * ops[],
- u32 n_ops, u32 key_len, u32 iv_len,
+ipsecmb_ops_gcm_cipher_inline (vlib_main_t * vm, vnet_crypto_op_t * ops[],
+ u32 n_ops, u32 key_len,
JOB_CIPHER_DIRECTION direction)
{
ipsecmb_main_t *imbm = &ipsecmb_main;
@@ -306,8 +306,6 @@ ipsecmb_ops_gcm_cipher_inline (vlib_main_t * vm,
struct gcm_key_data *kd;
vnet_crypto_op_t *op = ops[i];
kd = (struct gcm_key_data *) imbm->key_data[op->key_index];
- u32 nonce[3];
- __m128i iv;
job = IMB_GET_NEXT_JOB (ptd->mgr);
@@ -321,30 +319,11 @@ ipsecmb_ops_gcm_cipher_inline (vlib_main_t * vm,
job->cipher_direction = direction;
job->chain_order = (direction == ENCRYPT ? CIPHER_HASH : HASH_CIPHER);
- if (direction == ENCRYPT)
- {
- if (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV)
- {
- iv = ptd->cbc_iv;
- // only use 8 bytes of the IV
- clib_memcpy_fast (op->iv, &iv, 8);
- ptd->cbc_iv = _mm_aesenc_si128 (iv, iv);
- }
- nonce[0] = op->salt;
- clib_memcpy_fast (nonce + 1, op->iv, 8);
- job->iv = (u8 *) nonce;
- }
- else
- {
- nonce[0] = op->salt;
- clib_memcpy_fast (nonce + 1, op->iv, 8);
- job->iv = op->iv;
- }
-
+ job->iv = op->iv;
job->aes_key_len_in_bytes = key_len / 8;
job->aes_enc_key_expanded = kd;
job->aes_dec_key_expanded = kd;
- job->iv_len_in_bytes = iv_len;
+ job->iv_len_in_bytes = 12;
job->u.GCM.aad = op->aad;
job->u.GCM.aad_len_in_bytes = op->aad_len;
@@ -361,34 +340,22 @@ ipsecmb_ops_gcm_cipher_inline (vlib_main_t * vm,
ipsecmb_retire_gcm_cipher_job (job, &n_fail, direction);
}
- /*
- * .. then flush (i.e. complete) them
- * We will have queued enough to satisfy the 'multi' buffer
- */
while ((job = IMB_FLUSH_JOB (ptd->mgr)))
- {
- ipsecmb_retire_gcm_cipher_job (job, &n_fail, direction);
- }
+ ipsecmb_retire_gcm_cipher_job (job, &n_fail, direction);
return n_ops - n_fail;
}
-#define _(a, b, c) \
+#define _(a, b) \
static_always_inline u32 \
-ipsecmb_ops_gcm_cipher_enc_##a (vlib_main_t * vm, \
- vnet_crypto_op_t * ops[], \
+ipsecmb_ops_gcm_cipher_enc_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
u32 n_ops) \
-{ return ipsecmb_ops_gcm_cipher_inline (vm, ops, n_ops, b, c, ENCRYPT); } \
-
-foreach_ipsecmb_gcm_cipher_op;
-#undef _
-
-#define _(a, b, c) \
+{ return ipsecmb_ops_gcm_cipher_inline (vm, ops, n_ops, b, ENCRYPT); } \
+ \
static_always_inline u32 \
-ipsecmb_ops_gcm_cipher_dec_##a (vlib_main_t * vm, \
- vnet_crypto_op_t * ops[], \
+ipsecmb_ops_gcm_cipher_dec_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
u32 n_ops) \
-{ return ipsecmb_ops_gcm_cipher_inline (vm, ops, n_ops, b, c, DECRYPT); } \
+{ return ipsecmb_ops_gcm_cipher_inline (vm, ops, n_ops, b, DECRYPT); } \
foreach_ipsecmb_gcm_cipher_op;
#undef _
@@ -561,7 +528,7 @@ crypto_ipsecmb_init (vlib_main_t * vm)
foreach_ipsecmb_cbc_cipher_op;
#undef _
-#define _(a, b, c) \
+#define _(a, b) \
vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
ipsecmb_ops_gcm_cipher_enc_##a); \
vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
diff --git a/src/plugins/crypto_openssl/main.c b/src/plugins/crypto_openssl/main.c
index eaa16ceb2d8..0560f31a00a 100644
--- a/src/plugins/crypto_openssl/main.c
+++ b/src/plugins/crypto_openssl/main.c
@@ -118,18 +118,14 @@ openssl_ops_enc_gcm (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops,
{
vnet_crypto_op_t *op = ops[i];
vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
- u32 nonce[3];
int len;
if (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV)
RAND_bytes (op->iv, 8);
- nonce[0] = op->salt;
- clib_memcpy_fast (nonce + 1, op->iv, 8);
-
EVP_EncryptInit_ex (ctx, cipher, 0, 0, 0);
EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_IVLEN, 12, NULL);
- EVP_EncryptInit_ex (ctx, 0, 0, key->data, (u8 *) nonce);
+ EVP_EncryptInit_ex (ctx, 0, 0, key->data, op->iv);
if (op->aad_len)
EVP_EncryptUpdate (ctx, NULL, &len, op->aad, op->aad_len);
EVP_EncryptUpdate (ctx, op->dst, &len, op->src, op->len);