aboutsummaryrefslogtreecommitdiffstats
path: root/src/plugins
diff options
context:
space:
mode:
authorFilip Tehlar <ftehlar@cisco.com>2020-02-04 09:36:04 +0000
committerDamjan Marion <dmarion@me.com>2020-02-11 23:07:38 +0000
commitefcad1a9d22c4a664f3004cafe09d9c3a68e1620 (patch)
tree5d0668c307083f096f6034d5ae8a608078640d18 /src/plugins
parent16d974ec59776f0103ad62d0d04dc57989eef7ed (diff)
ipsec: add support for chained buffers
Type: feature

Change-Id: Ie072a7c2bbb1e4a77f7001754f01897efd30fc53
Signed-off-by: Filip Tehlar <ftehlar@cisco.com>
Diffstat (limited to 'src/plugins')
-rw-r--r--src/plugins/crypto_openssl/main.c191
-rw-r--r--src/plugins/unittest/crypto/aes_cbc.c15
-rw-r--r--src/plugins/unittest/crypto/aes_gcm.c20
-rw-r--r--src/plugins/unittest/crypto/crypto.h8
-rw-r--r--src/plugins/unittest/crypto/rfc2202_hmac_md5.c12
-rw-r--r--src/plugins/unittest/crypto/rfc2202_hmac_sha1.c15
-rw-r--r--src/plugins/unittest/crypto/rfc4231.c14
-rw-r--r--src/plugins/unittest/crypto_test.c427
8 files changed, 556 insertions, 146 deletions
diff --git a/src/plugins/crypto_openssl/main.c b/src/plugins/crypto_openssl/main.c
index 7362d6bd16e..7775958f9cd 100644
--- a/src/plugins/crypto_openssl/main.c
+++ b/src/plugins/crypto_openssl/main.c
@@ -58,13 +58,17 @@ static openssl_per_thread_data_t *per_thread_data = 0;
_(SHA512, EVP_sha512)
static_always_inline u32
-openssl_ops_enc_cbc (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops,
+openssl_ops_enc_cbc (vlib_main_t * vm, vnet_crypto_op_t * ops[],
+ vnet_crypto_op_chunk_t * chunks, u32 n_ops,
const EVP_CIPHER * cipher)
{
openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
vm->thread_index);
EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
- u32 i;
+ vnet_crypto_op_chunk_t *chp;
+ u32 i, j, curr_len = 0;
+ u8 out_buf[VLIB_BUFFER_DEFAULT_DATA_SIZE * 5];
+
for (i = 0; i < n_ops; i++)
{
vnet_crypto_op_t *op = ops[i];
@@ -81,22 +85,57 @@ openssl_ops_enc_cbc (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops,
RAND_bytes (op->iv, iv_len);
EVP_EncryptInit_ex (ctx, cipher, NULL, key->data, op->iv);
- EVP_EncryptUpdate (ctx, op->dst, &out_len, op->src, op->len);
- if (out_len < op->len)
- EVP_EncryptFinal_ex (ctx, op->dst + out_len, &out_len);
+
+ if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
+ EVP_CIPHER_CTX_set_padding (ctx, 0);
+
+ if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
+ {
+ chp = chunks + op->chunk_index;
+ u32 offset = 0;
+ for (j = 0; j < op->n_chunks; j++)
+ {
+ EVP_EncryptUpdate (ctx, out_buf + offset, &out_len, chp->src,
+ chp->len);
+ curr_len = chp->len;
+ offset += out_len;
+ chp += 1;
+ }
+ if (out_len < curr_len)
+ EVP_EncryptFinal_ex (ctx, out_buf + offset, &out_len);
+
+ offset = 0;
+ chp = chunks + op->chunk_index;
+ for (j = 0; j < op->n_chunks; j++)
+ {
+ clib_memcpy_fast (chp->dst, out_buf + offset, chp->len);
+ offset += chp->len;
+ chp += 1;
+ }
+ }
+ else
+ {
+ EVP_EncryptUpdate (ctx, op->dst, &out_len, op->src, op->len);
+ if (out_len < op->len)
+ EVP_EncryptFinal_ex (ctx, op->dst + out_len, &out_len);
+ }
op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
}
return n_ops;
}
static_always_inline u32
-openssl_ops_dec_cbc (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops,
+openssl_ops_dec_cbc (vlib_main_t * vm, vnet_crypto_op_t * ops[],
+ vnet_crypto_op_chunk_t * chunks, u32 n_ops,
const EVP_CIPHER * cipher)
{
openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
vm->thread_index);
EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
- u32 i;
+ vnet_crypto_op_chunk_t *chp;
+ u32 i, j, curr_len = 0;
+ u8 out_buf[VLIB_BUFFER_DEFAULT_DATA_SIZE * 5];
+
for (i = 0; i < n_ops; i++)
{
vnet_crypto_op_t *op = ops[i];
@@ -104,22 +143,55 @@ openssl_ops_dec_cbc (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops,
int out_len;
EVP_DecryptInit_ex (ctx, cipher, NULL, key->data, op->iv);
- EVP_DecryptUpdate (ctx, op->dst, &out_len, op->src, op->len);
- if (out_len < op->len)
- EVP_DecryptFinal_ex (ctx, op->dst + out_len, &out_len);
+
+ if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
+ EVP_CIPHER_CTX_set_padding (ctx, 0);
+
+ if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
+ {
+ chp = chunks + op->chunk_index;
+ u32 offset = 0;
+ for (j = 0; j < op->n_chunks; j++)
+ {
+ EVP_DecryptUpdate (ctx, out_buf + offset, &out_len, chp->src,
+ chp->len);
+ curr_len = chp->len;
+ offset += out_len;
+ chp += 1;
+ }
+ if (out_len < curr_len)
+ EVP_DecryptFinal_ex (ctx, out_buf + offset, &out_len);
+
+ offset = 0;
+ chp = chunks + op->chunk_index;
+ for (j = 0; j < op->n_chunks; j++)
+ {
+ clib_memcpy_fast (chp->dst, out_buf + offset, chp->len);
+ offset += chp->len;
+ chp += 1;
+ }
+ }
+ else
+ {
+ EVP_DecryptUpdate (ctx, op->dst, &out_len, op->src, op->len);
+ if (out_len < op->len)
+ EVP_DecryptFinal_ex (ctx, op->dst + out_len, &out_len);
+ }
op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
}
return n_ops;
}
static_always_inline u32
-openssl_ops_enc_gcm (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops,
+openssl_ops_enc_gcm (vlib_main_t * vm, vnet_crypto_op_t * ops[],
+ vnet_crypto_op_chunk_t * chunks, u32 n_ops,
const EVP_CIPHER * cipher)
{
openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
vm->thread_index);
EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
- u32 i;
+ vnet_crypto_op_chunk_t *chp;
+ u32 i, j;
for (i = 0; i < n_ops; i++)
{
vnet_crypto_op_t *op = ops[i];
@@ -134,7 +206,17 @@ openssl_ops_enc_gcm (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops,
EVP_EncryptInit_ex (ctx, 0, 0, key->data, op->iv);
if (op->aad_len)
EVP_EncryptUpdate (ctx, NULL, &len, op->aad, op->aad_len);
- EVP_EncryptUpdate (ctx, op->dst, &len, op->src, op->len);
+ if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
+ {
+ chp = chunks + op->chunk_index;
+ for (j = 0; j < op->n_chunks; j++)
+ {
+ EVP_EncryptUpdate (ctx, chp->dst, &len, chp->src, chp->len);
+ chp += 1;
+ }
+ }
+ else
+ EVP_EncryptUpdate (ctx, op->dst, &len, op->src, op->len);
EVP_EncryptFinal_ex (ctx, op->dst + len, &len);
EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_GET_TAG, op->tag_len, op->tag);
op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
@@ -143,13 +225,15 @@ openssl_ops_enc_gcm (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops,
}
static_always_inline u32
-openssl_ops_dec_gcm (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops,
+openssl_ops_dec_gcm (vlib_main_t * vm, vnet_crypto_op_t * ops[],
+ vnet_crypto_op_chunk_t * chunks, u32 n_ops,
const EVP_CIPHER * cipher)
{
openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
vm->thread_index);
EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
- u32 i, n_fail = 0;
+ vnet_crypto_op_chunk_t *chp;
+ u32 i, j, n_fail = 0;
for (i = 0; i < n_ops; i++)
{
vnet_crypto_op_t *op = ops[i];
@@ -161,7 +245,17 @@ openssl_ops_dec_gcm (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops,
EVP_DecryptInit_ex (ctx, 0, 0, key->data, op->iv);
if (op->aad_len)
EVP_DecryptUpdate (ctx, 0, &len, op->aad, op->aad_len);
- EVP_DecryptUpdate (ctx, op->dst, &len, op->src, op->len);
+ if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
+ {
+ chp = chunks + op->chunk_index;
+ for (j = 0; j < op->n_chunks; j++)
+ {
+ EVP_DecryptUpdate (ctx, chp->dst, &len, chp->src, chp->len);
+ chp += 1;
+ }
+ }
+ else
+ EVP_DecryptUpdate (ctx, op->dst, &len, op->src, op->len);
EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_TAG, op->tag_len, op->tag);
if (EVP_DecryptFinal_ex (ctx, op->dst + len, &len) > 0)
@@ -176,14 +270,16 @@ openssl_ops_dec_gcm (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops,
}
static_always_inline u32
-openssl_ops_hmac (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops,
+openssl_ops_hmac (vlib_main_t * vm, vnet_crypto_op_t * ops[],
+ vnet_crypto_op_chunk_t * chunks, u32 n_ops,
const EVP_MD * md)
{
u8 buffer[64];
openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
vm->thread_index);
HMAC_CTX *ctx = ptd->hmac_ctx;
- u32 i, n_fail = 0;
+ vnet_crypto_op_chunk_t *chp;
+ u32 i, j, n_fail = 0;
for (i = 0; i < n_ops; i++)
{
vnet_crypto_op_t *op = ops[i];
@@ -192,7 +288,17 @@ openssl_ops_hmac (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops,
size_t sz = op->digest_len ? op->digest_len : EVP_MD_size (md);
HMAC_Init_ex (ctx, key->data, vec_len (key->data), md, NULL);
- HMAC_Update (ctx, op->src, op->len);
+ if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
+ {
+ chp = chunks + op->chunk_index;
+ for (j = 0; j < op->n_chunks; j++)
+ {
+ HMAC_Update (ctx, chp->src, chp->len);
+ chp += 1;
+ }
+ }
+ else
+ HMAC_Update (ctx, op->src, op->len);
HMAC_Final (ctx, buffer, &out_len);
if (op->flags & VNET_CRYPTO_OP_FLAG_HMAC_CHECK)
@@ -211,14 +317,24 @@ openssl_ops_hmac (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops,
return n_ops - n_fail;
}
-#define _(m, a, b) \
-static u32 \
-openssl_ops_enc_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
-{ return openssl_ops_enc_##m (vm, ops, n_ops, b ()); } \
-\
-u32 \
-openssl_ops_dec_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
-{ return openssl_ops_dec_##m (vm, ops, n_ops, b ()); }
+#define _(m, a, b) \
+static u32 \
+openssl_ops_enc_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
+{ return openssl_ops_enc_##m (vm, ops, 0, n_ops, b ()); } \
+ \
+u32 \
+openssl_ops_dec_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
+{ return openssl_ops_dec_##m (vm, ops, 0, n_ops, b ()); } \
+ \
+static u32 \
+openssl_ops_enc_chained_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
+ vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
+{ return openssl_ops_enc_##m (vm, ops, chunks, n_ops, b ()); } \
+ \
+static u32 \
+openssl_ops_dec_chained_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
+ vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
+{ return openssl_ops_dec_##m (vm, ops, chunks, n_ops, b ()); }
foreach_openssl_evp_op;
#undef _
@@ -226,7 +342,11 @@ foreach_openssl_evp_op;
#define _(a, b) \
static u32 \
openssl_ops_hmac_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
-{ return openssl_ops_hmac (vm, ops, n_ops, b ()); } \
+{ return openssl_ops_hmac (vm, ops, 0, n_ops, b ()); } \
+static u32 \
+openssl_ops_hmac_chained_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
+ vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
+{ return openssl_ops_hmac (vm, ops, chunks, n_ops, b ()); } \
foreach_openssl_hmac_op;
#undef _
@@ -244,17 +364,20 @@ crypto_openssl_init (vlib_main_t * vm)
u32 eidx = vnet_crypto_register_engine (vm, "openssl", 50, "OpenSSL");
#define _(m, a, b) \
- vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
- openssl_ops_enc_##a); \
- vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
- openssl_ops_dec_##a);
+ vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
+ openssl_ops_enc_##a, \
+ openssl_ops_enc_chained_##a); \
+ vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
+ openssl_ops_dec_##a, \
+ openssl_ops_dec_chained_##a); \
foreach_openssl_evp_op;
#undef _
#define _(a, b) \
- vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_HMAC, \
- openssl_ops_hmac_##a); \
+ vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_HMAC, \
+ openssl_ops_hmac_##a, \
+ openssl_ops_hmac_chained_##a); \
foreach_openssl_hmac_op;
#undef _
diff --git a/src/plugins/unittest/crypto/aes_cbc.c b/src/plugins/unittest/crypto/aes_cbc.c
index b52f728a1cb..b3e95e4c093 100644
--- a/src/plugins/unittest/crypto/aes_cbc.c
+++ b/src/plugins/unittest/crypto/aes_cbc.c
@@ -122,6 +122,21 @@ UNITTEST_REGISTER_CRYPTO_TEST (nist_aes256_cbc) = {
.ciphertext = TEST_DATA (ciphertext256),
};
+UNITTEST_REGISTER_CRYPTO_TEST (nist_aes256_cbc_chained) = {
+ .name = "NIST SP 800-38A [chained]",
+ .alg = VNET_CRYPTO_ALG_AES_256_CBC,
+ .iv = TEST_DATA (iv),
+ .key = TEST_DATA (key256),
+ .is_chained = 1,
+ .pt_chunks = {
+ TEST_DATA_CHUNK (plaintext, 0, 32),
+ TEST_DATA_CHUNK (plaintext, 32, 32),
+ },
+ .ct_chunks = {
+ TEST_DATA_CHUNK (ciphertext256, 0, 32),
+ TEST_DATA_CHUNK (ciphertext256, 32, 32),
+ },
+};
/* *INDENT-ON* */
/*
diff --git a/src/plugins/unittest/crypto/aes_gcm.c b/src/plugins/unittest/crypto/aes_gcm.c
index 0a7aafca100..764ca9e97c5 100644
--- a/src/plugins/unittest/crypto/aes_gcm.c
+++ b/src/plugins/unittest/crypto/aes_gcm.c
@@ -244,6 +244,26 @@ UNITTEST_REGISTER_CRYPTO_TEST (aes_gcm256_tc4) = {
.aad = TEST_DATA(tc4_aad),
.tag = TEST_DATA (tc4_tag256),
};
+
+UNITTEST_REGISTER_CRYPTO_TEST (aes_gcm256_tc4_chain) = {
+ .name = "256-GCM Spec. TC4 [chained]",
+ .alg = VNET_CRYPTO_ALG_AES_256_GCM,
+ .iv = TEST_DATA (tc3_iv),
+ .key = TEST_DATA (tc3_key256),
+ .aad = TEST_DATA(tc4_aad),
+ .tag = TEST_DATA (tc4_tag256),
+ .is_chained = 1,
+ .pt_chunks = {
+ TEST_DATA_CHUNK (tc4_plaintext, 0, 20),
+ TEST_DATA_CHUNK (tc4_plaintext, 20, 20),
+ TEST_DATA_CHUNK (tc4_plaintext, 40, 20),
+ },
+ .ct_chunks = {
+ TEST_DATA_CHUNK (tc4_ciphertext256, 0, 20),
+ TEST_DATA_CHUNK (tc4_ciphertext256, 20, 20),
+ TEST_DATA_CHUNK (tc4_ciphertext256, 40, 20),
+ },
+};
/* *INDENT-ON* */
/*
diff --git a/src/plugins/unittest/crypto/crypto.h b/src/plugins/unittest/crypto/crypto.h
index f15e34b02f0..d95c994dfd2 100644
--- a/src/plugins/unittest/crypto/crypto.h
+++ b/src/plugins/unittest/crypto/crypto.h
@@ -17,6 +17,8 @@
#ifndef included_unittest_crypto_crypto_h
#define included_unittest_crypto_crypto_h
+#define CRYPTO_TEST_MAX_OP_CHUNKS 8
+
typedef struct
{
u32 length;
@@ -29,6 +31,11 @@ typedef struct unittest_crypto_test_registration
vnet_crypto_alg_t alg;
unittest_crypto_test_data_t iv, key, digest, plaintext, ciphertext, aad,
tag;
+ u8 is_chained;
+
+ /* plaintext and cipher text data used for testing chained buffers */
+ unittest_crypto_test_data_t pt_chunks[CRYPTO_TEST_MAX_OP_CHUNKS + 1];
+ unittest_crypto_test_data_t ct_chunks[CRYPTO_TEST_MAX_OP_CHUNKS + 1];
/* next */
struct unittest_crypto_test_registration *next;
@@ -52,6 +59,7 @@ typedef struct
extern crypto_test_main_t crypto_test_main;
#define TEST_DATA(n) { .data = (u8 *) n, .length = sizeof (n)}
+#define TEST_DATA_CHUNK(s,off,n) { .data = (u8 *) s + off, .length = n}
#define UNITTEST_REGISTER_CRYPTO_TEST(x) \
unittest_crypto_test_registration_t __unittest_crypto_test_##x; \
diff --git a/src/plugins/unittest/crypto/rfc2202_hmac_md5.c b/src/plugins/unittest/crypto/rfc2202_hmac_md5.c
index 76bd8a5f443..7a39aed3030 100644
--- a/src/plugins/unittest/crypto/rfc2202_hmac_md5.c
+++ b/src/plugins/unittest/crypto/rfc2202_hmac_md5.c
@@ -191,6 +191,18 @@ UNITTEST_REGISTER_CRYPTO_TEST (rfc_2202_md5_tc7) = {
.plaintext = TEST_DATA (md5_tc7_data),
.digest = TEST_DATA (md5_tc7_digest),
};
+
+UNITTEST_REGISTER_CRYPTO_TEST (rfc_2202_md5_tc7_chained) = {
+ .name = "RFC2202 HMAC-MD5 TC7 [chained]",
+ .alg = VNET_CRYPTO_ALG_HMAC_MD5,
+ .key = TEST_DATA (md5_tc6_key),
+ .digest = TEST_DATA (md5_tc7_digest),
+ .is_chained = 1,
+ .pt_chunks = {
+ TEST_DATA_CHUNK (md5_tc7_data, 0, 40),
+ TEST_DATA_CHUNK (md5_tc7_data, 40, 33)
+ },
+};
/* *INDENT-ON* */
/*
diff --git a/src/plugins/unittest/crypto/rfc2202_hmac_sha1.c b/src/plugins/unittest/crypto/rfc2202_hmac_sha1.c
index b3942aafa59..d009afe6af7 100644
--- a/src/plugins/unittest/crypto/rfc2202_hmac_sha1.c
+++ b/src/plugins/unittest/crypto/rfc2202_hmac_sha1.c
@@ -218,6 +218,21 @@ UNITTEST_REGISTER_CRYPTO_TEST (rfc_2202_sha1_tc7) = {
};
/* *INDENT-ON* */
+/* *INDENT-OFF* */
+UNITTEST_REGISTER_CRYPTO_TEST (rfc_2202_sha1_tc7_chained) = {
+ .name = "RFC2202 HMAC-SHA-1 TC7 [chained]",
+ .alg = VNET_CRYPTO_ALG_HMAC_SHA1,
+ .key = TEST_DATA (sha1_tc6_key),
+ .digest = TEST_DATA (sha1_tc7_digest),
+
+ .is_chained = 1,
+ .pt_chunks = {
+ TEST_DATA_CHUNK (sha1_tc7_data, 0, 40),
+ TEST_DATA_CHUNK (sha1_tc7_data, 40, 33)
+ },
+};
+/* *INDENT-ON* */
+
/*
* fd.io coding-style-patch-verification: ON
*
diff --git a/src/plugins/unittest/crypto/rfc4231.c b/src/plugins/unittest/crypto/rfc4231.c
index b247d62f2a9..127e1bfe521 100644
--- a/src/plugins/unittest/crypto/rfc4231.c
+++ b/src/plugins/unittest/crypto/rfc4231.c
@@ -584,6 +584,20 @@ UNITTEST_REGISTER_CRYPTO_TEST (rfc4231_tc7_sha512) = {
.plaintext = TEST_DATA (tc7_data),
.digest = TEST_DATA (tc7_digest_sha512),
};
+
+UNITTEST_REGISTER_CRYPTO_TEST (rfc4231_tc7_sha512_chain) = {
+ .name = "RFC4231 TC7 [chained]",
+ .alg = VNET_CRYPTO_ALG_HMAC_SHA512,
+ .key = TEST_DATA (tc7_key),
+ .digest = TEST_DATA (tc7_digest_sha512),
+ .is_chained = 1,
+ .pt_chunks = {
+ TEST_DATA_CHUNK (tc7_data, 0, 50),
+ TEST_DATA_CHUNK (tc7_data, 50, 50),
+ TEST_DATA_CHUNK (tc7_data, 100, 50),
+ TEST_DATA_CHUNK (tc7_data, 150, 2),
+ },
+};
/* *INDENT-ON* */
/*
diff --git a/src/plugins/unittest/crypto_test.c b/src/plugins/unittest/crypto_test.c
index 9030415aec8..3bc06593742 100644
--- a/src/plugins/unittest/crypto_test.c
+++ b/src/plugins/unittest/crypto_test.c
@@ -30,6 +30,137 @@ sort_registrations (void *a0, void *a1)
return (strncmp (r0[0]->name, r1[0]->name, 256));
}
+static void
+print_results (vlib_main_t * vm, unittest_crypto_test_registration_t ** rv,
+ vnet_crypto_op_t * ops, vnet_crypto_op_chunk_t * chunks,
+ u32 n_ops, int verbose)
+{
+ int i;
+ unittest_crypto_test_registration_t *r;
+ vnet_crypto_op_chunk_t *chp;
+ u8 *s = 0, *err = 0;
+ vnet_crypto_op_t *op;
+
+ vec_foreach (op, ops)
+ {
+ int fail = 0;
+ r = rv[op->user_data];
+ unittest_crypto_test_data_t *exp_pt = 0, *exp_ct = 0;
+ unittest_crypto_test_data_t *exp_digest = 0, *exp_tag = 0;
+ unittest_crypto_test_data_t *exp_pt_chunks = 0, *exp_ct_chunks = 0;
+
+ switch (vnet_crypto_get_op_type (op->op))
+ {
+ case VNET_CRYPTO_OP_TYPE_AEAD_ENCRYPT:
+ exp_tag = &r->tag;
+ /* fall through */
+ case VNET_CRYPTO_OP_TYPE_ENCRYPT:
+ exp_ct = &r->ciphertext;
+ exp_ct_chunks = r->ct_chunks;
+ break;
+ case VNET_CRYPTO_OP_TYPE_AEAD_DECRYPT:
+ case VNET_CRYPTO_OP_TYPE_DECRYPT:
+ exp_pt = &r->plaintext;
+ exp_pt_chunks = r->pt_chunks;
+ break;
+ case VNET_CRYPTO_OP_TYPE_HMAC:
+ exp_digest = &r->digest;
+ break;
+ default:
+ ASSERT (0);
+ }
+
+ vec_reset_length (err);
+
+ if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
+ err = format (err, "%sengine error: %U", vec_len (err) ? ", " : "",
+ format_vnet_crypto_op_status, op->status);
+
+ if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
+ {
+ if (exp_ct_chunks)
+ {
+ chp = vec_elt_at_index (chunks, op->chunk_index);
+ for (i = 0; i < op->n_chunks; i++)
+ {
+ if (memcmp (chp->dst, exp_ct_chunks[i].data, chp->len))
+ err = format (err, "%sciphertext mismatch [chunk %d]",
+ vec_len (err) ? ", " : "", i);
+ chp += 1;
+ }
+ }
+
+ if (exp_pt_chunks)
+ {
+ chp = vec_elt_at_index (chunks, op->chunk_index);
+ for (i = 0; i < op->n_chunks; i++)
+ {
+ if (memcmp (chp->dst, exp_pt_chunks[i].data, chp->len))
+ err = format (err, "%splaintext mismatch [chunk %d]",
+ vec_len (err) ? ", " : "", i);
+ chp += 1;
+ }
+ }
+ }
+ else
+ {
+ if (exp_ct && memcmp (op->dst, exp_ct->data, exp_ct->length) != 0)
+ err = format (err, "%sciphertext mismatch",
+ vec_len (err) ? ", " : "");
+
+ if (exp_pt && memcmp (op->dst, exp_pt->data, exp_pt->length) != 0)
+ err = format (err, "%splaintext mismatch",
+ vec_len (err) ? ", " : "");
+ }
+
+ if (exp_tag && memcmp (op->tag, exp_tag->data, exp_tag->length) != 0)
+ err = format (err, "%stag mismatch", vec_len (err) ? ", " : "");
+
+ if (exp_digest &&
+ memcmp (op->digest, exp_digest->data, exp_digest->length) != 0)
+ err = format (err, "%sdigest mismatch", vec_len (err) ? ", " : "");
+
+ vec_reset_length (s);
+ s = format (s, "%s (%U)", r->name, format_vnet_crypto_op, op->op,
+ r->is_chained);
+
+ if (vec_len (err))
+ fail = 1;
+
+ vlib_cli_output (vm, "%-60v%s%v", s, vec_len (err) ? "FAIL: " : "OK",
+ err);
+ if (verbose)
+ {
+ if (verbose == 2)
+ fail = 1;
+
+ if (exp_ct && fail)
+ vlib_cli_output (vm, "Expected ciphertext:\n%U"
+ "\nCalculated ciphertext:\n%U",
+ format_hexdump, exp_ct->data, exp_ct->length,
+ format_hexdump, op->dst, exp_ct->length);
+ if (exp_pt && fail)
+ vlib_cli_output (vm, "Expected plaintext:\n%U"
+ "\nCalculated plaintext:\n%U",
+ format_hexdump, exp_pt->data, exp_pt->length,
+ format_hexdump, op->dst, exp_pt->length);
+ if (r->tag.length && fail)
+ vlib_cli_output (vm, "Expected tag:\n%U"
+ "\nCalculated tag:\n%U",
+ format_hexdump, r->tag.data, r->tag.length,
+ format_hexdump, op->tag, op->tag_len);
+ if (exp_digest && fail)
+ vlib_cli_output (vm, "Expected digest:\n%U"
+ "\nCalculated Digest:\n%U",
+ format_hexdump, exp_digest->data,
+ exp_digest->length, format_hexdump, op->digest,
+ op->digest_len);
+ }
+ }
+ vec_free (err);
+ vec_free (s);
+}
+
static clib_error_t *
test_crypto (vlib_main_t * vm, crypto_test_main_t * tm)
{
@@ -37,11 +168,14 @@ test_crypto (vlib_main_t * vm, crypto_test_main_t * tm)
unittest_crypto_test_registration_t *r = tm->test_registrations;
unittest_crypto_test_registration_t **rv = 0;
vnet_crypto_alg_data_t *ad;
- vnet_crypto_op_t *ops = 0, *op;
+ vnet_crypto_op_t *ops = 0, *op, *chained_ops = 0;
+ vnet_crypto_op_t *current_chained_op = 0, *current_op = 0;
+ vnet_crypto_op_chunk_t *chunks = 0, ch;
vnet_crypto_key_index_t *key_indices = 0;
- u8 *computed_data = 0, *s = 0, *err = 0;
- u32 computed_data_total_len = 0, n_ops = 0;
- u32 i;
+ u8 *computed_data = 0;
+ u32 computed_data_total_len = 0, n_ops = 0, n_chained_ops = 0;
+ unittest_crypto_test_data_t *pt, *ct;
+ u32 i, j;
/* construct registration vector */
while (r)
@@ -61,17 +195,56 @@ test_crypto (vlib_main_t * vm, crypto_test_main_t * tm)
case VNET_CRYPTO_OP_TYPE_ENCRYPT:
case VNET_CRYPTO_OP_TYPE_DECRYPT:
case VNET_CRYPTO_OP_TYPE_AEAD_DECRYPT:
- computed_data_total_len += r->ciphertext.length;
- n_ops += 1;
+ if (r->is_chained)
+ {
+ ct = r->ct_chunks;
+ j = 0;
+ while (ct->data)
+ {
+ if (j > CRYPTO_TEST_MAX_OP_CHUNKS)
+ return clib_error_return (0,
+ "test case '%s' exceeds extra data!",
+ r->name);
+ computed_data_total_len += ct->length;
+ ct++;
+ j++;
+ }
+ n_chained_ops += 1;
+ }
+ else
+ {
+ computed_data_total_len += r->ciphertext.length;
+ n_ops += 1;
+ }
break;
case VNET_CRYPTO_OP_TYPE_AEAD_ENCRYPT:
computed_data_total_len += r->ciphertext.length;
computed_data_total_len += r->tag.length;
- n_ops += 1;
+ if (r->is_chained)
+ {
+ ct = r->ct_chunks;
+ j = 0;
+ while (ct->data)
+ {
+ if (j > CRYPTO_TEST_MAX_OP_CHUNKS)
+ return clib_error_return (0,
+ "test case '%s' exceeds extra data!",
+ r->name);
+ computed_data_total_len += ct->length;
+ ct++;
+ j++;
+ }
+ n_chained_ops += 1;
+ }
+ else
+ n_ops += 1;
break;
case VNET_CRYPTO_OP_TYPE_HMAC:
computed_data_total_len += r->digest.length;
- n_ops += 1;
+ if (r->is_chained)
+ n_chained_ops += 1;
+ else
+ n_ops += 1;
break;
default:
break;
@@ -91,9 +264,12 @@ test_crypto (vlib_main_t * vm, crypto_test_main_t * tm)
vec_validate_aligned (computed_data, computed_data_total_len - 1,
CLIB_CACHE_LINE_BYTES);
vec_validate_aligned (ops, n_ops - 1, CLIB_CACHE_LINE_BYTES);
+ vec_validate_aligned (chained_ops, n_chained_ops - 1,
+ CLIB_CACHE_LINE_BYTES);
computed_data_total_len = 0;
- op = ops;
+ current_op = ops;
+ current_chained_op = chained_ops;
/* *INDENT-OFF* */
vec_foreach_index (i, rv)
{
@@ -107,7 +283,18 @@ test_crypto (vlib_main_t * vm, crypto_test_main_t * tm)
if (id == 0)
continue;
- vnet_crypto_op_init (op, id);
+ if (r->is_chained)
+ {
+ op = current_chained_op;
+ current_chained_op += 1;
+ }
+ else
+ {
+ op = current_op;
+ current_op += 1;
+ }
+
+ vnet_crypto_op_init (op, id);
switch (t)
{
@@ -118,14 +305,85 @@ test_crypto (vlib_main_t * vm, crypto_test_main_t * tm)
r->key.data,
r->key.length);
vec_add1 (key_indices, op->key_index);
- op->len = r->plaintext.length;
- op->src = t == VNET_CRYPTO_OP_TYPE_ENCRYPT ?
- r->plaintext.data : r->ciphertext.data;
- op->dst = computed_data + computed_data_total_len;
- computed_data_total_len += r->ciphertext.length;
+
+ if (r->is_chained)
+ {
+ pt = r->pt_chunks;
+ ct = r->ct_chunks;
+ op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
+ op->chunk_index = vec_len (chunks);
+ while (pt->data)
+ {
+ ch.src = t == VNET_CRYPTO_OP_TYPE_ENCRYPT ?
+ pt->data : ct->data;
+ ch.len = pt->length;
+ ch.dst = computed_data + computed_data_total_len;
+ computed_data_total_len += pt->length;
+ vec_add1 (chunks, ch);
+ op->n_chunks++;
+ pt++;
+ ct++;
+ }
+ }
+ else
+ {
+ op->len = r->plaintext.length;
+ op->src = t == VNET_CRYPTO_OP_TYPE_ENCRYPT ?
+ r->plaintext.data : r->ciphertext.data;
+ op->dst = computed_data + computed_data_total_len;
+ computed_data_total_len += r->ciphertext.length;
+ }
break;
case VNET_CRYPTO_OP_TYPE_AEAD_ENCRYPT:
case VNET_CRYPTO_OP_TYPE_AEAD_DECRYPT:
+ if (r->is_chained)
+ {
+ op->iv = r->iv.data;
+ op->key_index = vnet_crypto_key_add (vm, r->alg,
+ r->key.data,
+ r->key.length);
+ vec_add1 (key_indices, op->key_index);
+ op->aad = r->aad.data;
+ op->aad_len = r->aad.length;
+ if (t == VNET_CRYPTO_OP_TYPE_AEAD_ENCRYPT)
+ {
+ pt = r->pt_chunks;
+ op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
+ op->chunk_index = vec_len (chunks);
+ while (pt->data)
+ {
+ ch.src = pt->data;
+ ch.len = pt->length;
+ ch.dst = computed_data + computed_data_total_len;
+ computed_data_total_len += pt->length;
+ vec_add1 (chunks, ch);
+ op->n_chunks++;
+ pt++;
+ }
+ op->tag = computed_data + computed_data_total_len;
+ computed_data_total_len += r->tag.length;
+ }
+ else
+ {
+ ct = r->ct_chunks;
+ op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
+ op->chunk_index = vec_len (chunks);
+ while (ct->data)
+ {
+ ch.src = ct->data;
+ ch.len = ct->length;
+ ch.dst = computed_data + computed_data_total_len;
+ computed_data_total_len += ct->length;
+ vec_add1 (chunks, ch);
+ op->n_chunks++;
+ ct++;
+ }
+ op->tag = r->tag.data;
+ }
+ op->tag_len = r->tag.length;
+ }
+ else
+ {
op->iv = r->iv.data;
op->key_index = vnet_crypto_key_add (vm, r->alg,
r->key.data,
@@ -136,135 +394,80 @@ test_crypto (vlib_main_t * vm, crypto_test_main_t * tm)
op->len = r->plaintext.length;
op->dst = computed_data + computed_data_total_len;
computed_data_total_len += r->ciphertext.length;
+
if (t == VNET_CRYPTO_OP_TYPE_AEAD_ENCRYPT)
{
- op->src = r->plaintext.data;
+ op->src = r->plaintext.data;
op->tag = computed_data + computed_data_total_len;
computed_data_total_len += r->tag.length;
}
else
{
- op->src = r->ciphertext.data;
- op->tag = r->tag.data;
+ op->tag = r->tag.data;
+ op->src = r->ciphertext.data;
}
op->tag_len = r->tag.length;
+ }
break;
case VNET_CRYPTO_OP_TYPE_HMAC:
+ if (r->is_chained)
+ {
op->key_index = vnet_crypto_key_add (vm, r->alg,
r->key.data,
r->key.length);
vec_add1 (key_indices, op->key_index);
- op->src = r->plaintext.data;
- op->len = r->plaintext.length;
- op->digest_len = r->digest.length;
- op->digest = computed_data + computed_data_total_len;
- computed_data_total_len += r->digest.length;
+ op->digest_len = r->digest.length;
+ op->digest = computed_data + computed_data_total_len;
+ computed_data_total_len += r->digest.length;
+ pt = r->pt_chunks;
+ op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
+ op->chunk_index = vec_len (chunks);
+ while (pt->data)
+ {
+ ch.src = pt->data;
+ ch.len = pt->length;
+ vec_add1 (chunks, ch);
+ op->n_chunks++;
+ pt++;
+ }
+ }
+ else
+ {
+ op->key_index = vnet_crypto_key_add (vm, r->alg,
+ r->key.data,
+ r->key.length);
+ vec_add1 (key_indices, op->key_index);
+ op->digest_len = r->digest.length;
+ op->digest = computed_data + computed_data_total_len;
+ computed_data_total_len += r->digest.length;
+ op->src = r->plaintext.data;
+ op->len = r->plaintext.length;
+ }
break;
default:
break;
};
op->user_data = i;
- op++;
}
}
/* *INDENT-ON* */
vnet_crypto_process_ops (vm, ops, vec_len (ops));
+ vnet_crypto_process_chained_ops (vm, chained_ops, chunks,
+ vec_len (chained_ops));
- /* *INDENT-OFF* */
- vec_foreach (op, ops)
- {
- int fail = 0;
- r = rv[op->user_data];
- unittest_crypto_test_data_t *exp_pt = 0, *exp_ct = 0;
- unittest_crypto_test_data_t *exp_digest = 0, *exp_tag = 0;
+ print_results (vm, rv, ops, chunks, vec_len (ops), tm->verbose);
+ print_results (vm, rv, chained_ops, chunks, vec_len (chained_ops),
+ tm->verbose);
- switch (vnet_crypto_get_op_type (op->op))
- {
- case VNET_CRYPTO_OP_TYPE_AEAD_ENCRYPT:
- exp_tag = &r->tag;
- /* fall through */
- case VNET_CRYPTO_OP_TYPE_ENCRYPT:
- exp_ct = &r->ciphertext;
- break;
- case VNET_CRYPTO_OP_TYPE_AEAD_DECRYPT:
- case VNET_CRYPTO_OP_TYPE_DECRYPT:
- exp_pt = &r->plaintext;
- break;
- case VNET_CRYPTO_OP_TYPE_HMAC:
- exp_digest = &r->digest;
- break;
- default:
- break;
- }
-
- vec_reset_length (err);
-
- if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
- err = format (err, "%sengine error: %U", vec_len (err) ? ", " : "",
- format_vnet_crypto_op_status, op->status);
-
- if (exp_ct && memcmp (op->dst, exp_ct->data, exp_ct->length) != 0)
- err = format (err, "%sciphertext mismatch",
- vec_len (err) ? ", " : "");
-
- if (exp_pt && memcmp (op->dst, exp_pt->data, exp_pt->length) != 0)
- err = format (err, "%splaintext mismatch", vec_len (err) ? ", " : "");
-
- if (exp_tag && memcmp (op->tag, exp_tag->data, exp_tag->length) != 0)
- err = format (err, "%stag mismatch", vec_len (err) ? ", " : "");
-
- if (exp_digest &&
- memcmp (op->digest, exp_digest->data, exp_digest->length) != 0)
- err = format (err, "%sdigest mismatch", vec_len (err) ? ", " : "");
-
- vec_reset_length (s);
- s = format (s, "%s (%U)", r->name, format_vnet_crypto_op, op->op);
-
- if (vec_len (err))
- fail = 1;
-
- vlib_cli_output (vm, "%-60v%s%v", s, vec_len (err) ? "FAIL: " : "OK",
- err);
- if (tm->verbose)
- {
- if (tm->verbose == 2)
- fail = 1;
-
- if (exp_ct && fail)
- vlib_cli_output (vm, "Expected ciphertext:\n%U"
- "\nCalculated ciphertext:\n%U",
- format_hexdump, exp_ct->data, exp_ct->length,
- format_hexdump, op->dst, exp_ct->length);
- if (exp_pt && fail)
- vlib_cli_output (vm, "Expected plaintext:\n%U"
- "\nCalculated plaintext:\n%U",
- format_hexdump, exp_pt->data, exp_pt->length,
- format_hexdump, op->dst, exp_pt->length);
- if (r->tag.length && fail)
- vlib_cli_output (vm, "Expected tag:\n%U"
- "\nCalculated tag:\n%U",
- format_hexdump, r->tag.data, r->tag.length,
- format_hexdump, op->tag, op->tag_len);
- if (exp_digest && fail)
- vlib_cli_output (vm, "Expected digest:\n%U"
- "\nCalculated Digest:\n%U",
- format_hexdump, exp_digest->data,
- exp_digest->length, format_hexdump, op->digest,
- op->digest_len);
- }
- }
-
- vec_foreach_index (i, key_indices)
- vnet_crypto_key_del (vm, key_indices[i]);
- /* *INDENT-ON* */
+ vec_foreach_index (i, key_indices) vnet_crypto_key_del (vm, key_indices[i]);
vec_free (computed_data);
vec_free (ops);
- vec_free (err);
+ vec_free (chained_ops);
+ vec_free (chunks);
vec_free (rv);
- vec_free (s);
return 0;
}