author     Benoît Ganne <bganne@cisco.com>      2021-01-22 18:03:09 +0100
committer  Neale Ranns <neale@graphiant.com>    2021-02-05 12:52:07 +0000
commit     490b92738f3cc1c8d534abd6dee8dba942cb652d (patch)
tree       84c72b4573e73ed63b31f80d0289c8efa1a5e0d5
parent     b8ce5b784c8852f274ac25a22ee6d8806c2fab2e (diff)
ipsec: add support for AES CTR
Type: feature

Change-Id: I9f7742cb12ce30592b0b022c314b71c81fa7223a
Signed-off-by: Benoît Ganne <bganne@cisco.com>
-rw-r--r--   src/vnet/ipsec/esp.h           13
-rw-r--r--   src/vnet/ipsec/esp_decrypt.c   94
-rw-r--r--   src/vnet/ipsec/esp_encrypt.c   147
-rw-r--r--   src/vnet/ipsec/ipsec.c         21
-rw-r--r--   src/vnet/ipsec/ipsec_sa.c      5
-rw-r--r--   src/vnet/ipsec/ipsec_sa.h      41
-rw-r--r--   test/template_ipsec.py         2
-rw-r--r--   test/test_ipsec_esp.py         140
8 files changed, 321 insertions, 142 deletions
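
Note on the nonce layout this patch introduces: a 16-byte nonce is built in scratch space in front of the IP header as salt (4 bytes, from the SA) || per-packet IV (8 bytes) || block counter (4 bytes). AES-GCM consumes only the first 12 bytes (salt || IV); plain AES-CTR per RFC 3686 also needs the counter, initialised to 1 in network byte order. A minimal, self-contained C sketch of that construction (ctr_nonce_t and fill_ctr_nonce are illustrative stand-ins, not the patch's esp_ctr_nonce_t code itself):

#include <assert.h>
#include <stdint.h>
#include <stdio.h>
#include <arpa/inet.h>	/* htonl, ntohl */

/* Illustrative stand-in for the patch's esp_ctr_nonce_t:
 * 4-byte salt || 8-byte per-packet IV || 4-byte block counter. */
typedef struct __attribute__ ((packed))
{
  uint32_t salt;	/* from the SA, already in network byte order */
  uint64_t iv;		/* per-packet IV, also carried in the ESP payload */
  uint32_t ctr;		/* 1 in big-endian for plain CTR, unused for GCM */
} ctr_nonce_t;

static void
fill_ctr_nonce (ctr_nonce_t * nonce, uint32_t salt_net, uint64_t pkt_iv,
		int is_aead)
{
  nonce->salt = salt_net;
  nonce->iv = pkt_iv;
  if (!is_aead)		/* GCM only uses the 12-byte salt||IV prefix */
    nonce->ctr = htonl (1);
}

int
main (void)
{
  ctr_nonce_t n = { 0 };
  assert (sizeof (n) == 16);
  fill_ctr_nonce (&n, htonl (0x12345678), 42, 0 /* plain CTR */ );
  printf ("counter word: 0x%08x\n", ntohl (n.ctr));
  return 0;
}
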
diff --git a/src/vnet/ipsec/esp.h b/src/vnet/ipsec/esp.h
index d24b5ea4102..51386e68844 100644
--- a/src/vnet/ipsec/esp.h
+++ b/src/vnet/ipsec/esp.h
@@ -59,6 +59,18 @@ typedef CLIB_PACKED (struct {
/* *INDENT-ON* */
/**
+ * AES counter mode nonce
+ */
+typedef struct
+{
+ u32 salt;
+ u64 iv;
+ u32 ctr; /* counter: 1 in big-endian for ctr, unused for gcm */
+} __clib_packed esp_ctr_nonce_t;
+
+STATIC_ASSERT_SIZEOF (esp_ctr_nonce_t, 16);
+
+/**
* AES GCM Additional Authentication data
*/
typedef struct esp_aead_t_
@@ -196,6 +208,7 @@ typedef struct
} esp_decrypt_packet_data_t;
STATIC_ASSERT_SIZEOF (esp_decrypt_packet_data_t, 3 * sizeof (u64));
+STATIC_ASSERT_OFFSET_OF (esp_decrypt_packet_data_t, seq, sizeof (u64));
/* we are forced to store the decrypt post data into 2 separate places -
vlib_opaque and opaque2. */
diff --git a/src/vnet/ipsec/esp_decrypt.c b/src/vnet/ipsec/esp_decrypt.c
index a0ae612a683..e5277b1e1c2 100644
--- a/src/vnet/ipsec/esp_decrypt.c
+++ b/src/vnet/ipsec/esp_decrypt.c
@@ -565,34 +565,29 @@ esp_decrypt_prepare_sync_op (vlib_main_t * vm, vlib_node_runtime_t * node,
op->key_index = sa0->crypto_key_index;
op->iv = payload;
- if (ipsec_sa_is_set_IS_AEAD (sa0))
+ if (ipsec_sa_is_set_IS_CTR (sa0))
{
- esp_header_t *esp0;
- esp_aead_t *aad;
- u8 *scratch;
-
- /*
- * construct the AAD and the nonce (Salt || IV) in a scratch
- * space in front of the IP header.
- */
- scratch = payload - esp_sz;
- esp0 = (esp_header_t *) (scratch);
-
- scratch -= (sizeof (*aad) + pd->hdr_sz);
- op->aad = scratch;
-
- op->aad_len = esp_aad_fill (op->aad, esp0, sa0);
-
- /*
- * we don't need to refer to the ESP header anymore so we
- * can overwrite it with the salt and use the IV where it is
- * to form the nonce = (Salt + IV)
- */
- op->iv -= sizeof (sa0->salt);
- clib_memcpy_fast (op->iv, &sa0->salt, sizeof (sa0->salt));
-
- op->tag = payload + len;
- op->tag_len = 16;
+ /* construct nonce in a scratch space in front of the IP header */
+ esp_ctr_nonce_t *nonce =
+ (esp_ctr_nonce_t *) (payload - esp_sz - pd->hdr_sz -
+ sizeof (*nonce));
+ if (ipsec_sa_is_set_IS_AEAD (sa0))
+ {
+ /* construct aad in a scratch space in front of the nonce */
+ esp_header_t *esp0 = (esp_header_t *) (payload - esp_sz);
+ op->aad = (u8 *) nonce - sizeof (esp_aead_t);
+ op->aad_len = esp_aad_fill (op->aad, esp0, sa0);
+ op->tag = payload + len;
+ op->tag_len = 16;
+ }
+ else
+ {
+ nonce->ctr = clib_host_to_net_u32 (1);
+ }
+ nonce->salt = sa0->salt;
+ ASSERT (sizeof (u64) == iv_sz);
+ nonce->iv = *(u64 *) op->iv;
+ op->iv = (u8 *) nonce;
}
op->src = op->dst = payload += iv_sz;
op->len = len - iv_sz;
@@ -699,32 +694,27 @@ out:
len -= esp_sz;
iv = payload;
- if (ipsec_sa_is_set_IS_AEAD (sa0))
+ if (ipsec_sa_is_set_IS_CTR (sa0))
{
- esp_header_t *esp0;
- u8 *scratch;
-
- /*
- * construct the AAD and the nonce (Salt || IV) in a scratch
- * space in front of the IP header.
- */
- scratch = payload - esp_sz;
- esp0 = (esp_header_t *) (scratch);
-
- scratch -= (sizeof (esp_aead_t) + pd->hdr_sz);
- aad = scratch;
-
- esp_aad_fill (aad, esp0, sa0);
-
- /*
- * we don't need to refer to the ESP header anymore so we
- * can overwrite it with the salt and use the IV where it is
- * to form the nonce = (Salt + IV)
- */
- iv -= sizeof (sa0->salt);
- clib_memcpy_fast (iv, &sa0->salt, sizeof (sa0->salt));
-
- tag = payload + len;
+ /* construct nonce in a scratch space in front of the IP header */
+ esp_ctr_nonce_t *nonce =
+ (esp_ctr_nonce_t *) (payload - esp_sz - pd->hdr_sz - sizeof (*nonce));
+ if (ipsec_sa_is_set_IS_AEAD (sa0))
+ {
+ /* construct aad in a scratch space in front of the nonce */
+ esp_header_t *esp0 = (esp_header_t *) (payload - esp_sz);
+ aad = (u8 *) nonce - sizeof (esp_aead_t);
+ esp_aad_fill (aad, esp0, sa0);
+ tag = payload + len;
+ }
+ else
+ {
+ nonce->ctr = clib_host_to_net_u32 (1);
+ }
+ nonce->salt = sa0->salt;
+ ASSERT (sizeof (u64) == iv_sz);
+ nonce->iv = *(u64 *) iv;
+ iv = (u8 *) nonce;
}
crypto_start_offset = (payload += iv_sz) - b->data;
diff --git a/src/vnet/ipsec/esp_encrypt.c b/src/vnet/ipsec/esp_encrypt.c
index e5cf1581a69..f291c08247a 100644
--- a/src/vnet/ipsec/esp_encrypt.c
+++ b/src/vnet/ipsec/esp_encrypt.c
@@ -292,14 +292,6 @@ esp_process_ops (vlib_main_t * vm, vlib_node_runtime_t * node,
}
}
-typedef struct
-{
- u32 salt;
- u64 iv;
-} __clib_packed esp_gcm_nonce_t;
-
-STATIC_ASSERT_SIZEOF (esp_gcm_nonce_t, 12);
-
static_always_inline u32
esp_encrypt_chain_crypto (vlib_main_t * vm, ipsec_per_thread_data_t * ptd,
ipsec_sa_t * sa0, vlib_buffer_t * b,
@@ -384,13 +376,12 @@ esp_encrypt_chain_integ (vlib_main_t * vm, ipsec_per_thread_data_t * ptd,
}
always_inline void
-esp_prepare_sync_op (vlib_main_t * vm, ipsec_per_thread_data_t * ptd,
- vnet_crypto_op_t ** crypto_ops,
- vnet_crypto_op_t ** integ_ops, ipsec_sa_t * sa0,
- u8 * payload, u16 payload_len, u8 iv_sz, u8 icv_sz,
- vlib_buffer_t ** bufs, vlib_buffer_t ** b,
- vlib_buffer_t * lb, u32 hdr_len, esp_header_t * esp,
- esp_gcm_nonce_t * nonce)
+esp_prepare_sync_op (vlib_main_t *vm, ipsec_per_thread_data_t *ptd,
+ vnet_crypto_op_t **crypto_ops,
+ vnet_crypto_op_t **integ_ops, ipsec_sa_t *sa0,
+ u8 *payload, u16 payload_len, u8 iv_sz, u8 icv_sz,
+ vlib_buffer_t **bufs, vlib_buffer_t **b,
+ vlib_buffer_t *lb, u32 hdr_len, esp_header_t *esp)
{
if (sa0->crypto_enc_op_id)
{
@@ -403,21 +394,30 @@ esp_prepare_sync_op (vlib_main_t * vm, ipsec_per_thread_data_t * ptd,
op->len = payload_len - icv_sz;
op->user_data = b - bufs;
- if (ipsec_sa_is_set_IS_AEAD (sa0))
+ if (ipsec_sa_is_set_IS_CTR (sa0))
{
- /*
- * construct the AAD in a scratch space in front
- * of the IP header.
- */
- op->aad = payload - hdr_len - sizeof (esp_aead_t);
- op->aad_len = esp_aad_fill (op->aad, esp, sa0);
-
- op->tag = payload + op->len;
- op->tag_len = 16;
+ ASSERT (sizeof (u64) == iv_sz);
+ /* construct nonce in a scratch space in front of the IP header */
+ esp_ctr_nonce_t *nonce =
+ (esp_ctr_nonce_t *) (payload - sizeof (u64) - hdr_len -
+ sizeof (*nonce));
+ u64 *pkt_iv = (u64 *) (payload - sizeof (u64));
+
+ if (ipsec_sa_is_set_IS_AEAD (sa0))
+ {
+ /* construct aad in a scratch space in front of the nonce */
+ op->aad = (u8 *) nonce - sizeof (esp_aead_t);
+ op->aad_len = esp_aad_fill (op->aad, esp, sa0);
+ op->tag = payload + op->len;
+ op->tag_len = 16;
+ }
+ else
+ {
+ nonce->ctr = clib_host_to_net_u32 (1);
+ }
- u64 *iv = (u64 *) (payload - iv_sz);
nonce->salt = sa0->salt;
- nonce->iv = *iv = clib_host_to_net_u64 (sa0->gcm_iv_counter++);
+ nonce->iv = *pkt_iv = clib_host_to_net_u64 (sa0->ctr_iv_counter++);
op->iv = (u8 *) nonce;
}
else
@@ -493,61 +493,67 @@ esp_prepare_async_frame (vlib_main_t * vm, ipsec_per_thread_data_t * ptd,
crypto_total_len = integ_total_len = payload_len - icv_sz;
tag = payload + crypto_total_len;
- /* aead */
- if (ipsec_sa_is_set_IS_AEAD (sa))
- {
- esp_gcm_nonce_t *nonce;
- u64 *pkt_iv = (u64 *) (payload - iv_sz);
+ key_index = sa->linked_key_index;
- aad = payload - hdr_len - sizeof (esp_aead_t);
- esp_aad_fill (aad, esp, sa);
- nonce = (esp_gcm_nonce_t *) (aad - sizeof (*nonce));
- nonce->salt = sa->salt;
- nonce->iv = *pkt_iv = clib_host_to_net_u64 (sa->gcm_iv_counter++);
- iv = (u8 *) nonce;
- key_index = sa->crypto_key_index;
+ if (ipsec_sa_is_set_IS_CTR (sa))
+ {
+ ASSERT (sizeof (u64) == iv_sz);
+ /* construct nonce in a scratch space in front of the IP header */
+ esp_ctr_nonce_t *nonce = (esp_ctr_nonce_t *) (payload - sizeof (u64) -
+ hdr_len - sizeof (*nonce));
+ u64 *pkt_iv = (u64 *) (payload - sizeof (u64));
- if (lb != b)
+ if (ipsec_sa_is_set_IS_AEAD (sa))
{
- /* chain */
- flag |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
- tag = vlib_buffer_get_tail (lb) - icv_sz;
- crypto_total_len = esp_encrypt_chain_crypto (vm, ptd, sa, b, lb,
- icv_sz, payload,
- payload_len, 0);
+ /* construct aad in a scratch space in front of the nonce */
+ aad = (u8 *) nonce - sizeof (esp_aead_t);
+ esp_aad_fill (aad, esp, sa);
+ key_index = sa->crypto_key_index;
+ }
+ else
+ {
+ nonce->ctr = clib_host_to_net_u32 (1);
}
- goto out;
- }
- /* cipher then hash */
- iv = payload - iv_sz;
- integ_start_offset = crypto_start_offset - iv_sz - sizeof (esp_header_t);
- integ_total_len += iv_sz + sizeof (esp_header_t);
- flag |= VNET_CRYPTO_OP_FLAG_INIT_IV;
- key_index = sa->linked_key_index;
+ nonce->salt = sa->salt;
+ nonce->iv = *pkt_iv = clib_host_to_net_u64 (sa->ctr_iv_counter++);
+ iv = (u8 *) nonce;
+ }
+ else
+ {
+ iv = payload - iv_sz;
+ flag |= VNET_CRYPTO_OP_FLAG_INIT_IV;
+ }
- if (b != lb)
+ if (lb != b)
{
+ /* chain */
flag |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
- crypto_total_len = esp_encrypt_chain_crypto (vm, ptd, sa, b, lb,
- icv_sz, payload,
- payload_len, 0);
tag = vlib_buffer_get_tail (lb) - icv_sz;
- integ_total_len = esp_encrypt_chain_integ (vm, ptd, sa, b, lb, icv_sz,
- payload - iv_sz -
- sizeof (esp_header_t),
- payload_len + iv_sz +
- sizeof (esp_header_t),
- tag, 0);
+ crypto_total_len = esp_encrypt_chain_crypto (vm, ptd, sa, b, lb, icv_sz,
+ payload, payload_len, 0);
}
- else if (ipsec_sa_is_set_USE_ESN (sa) && !ipsec_sa_is_set_IS_AEAD (sa))
+
+ if (sa->integ_op_id)
{
- u32 seq_hi = clib_net_to_host_u32 (sa->seq_hi);
- clib_memcpy_fast (tag, &seq_hi, sizeof (seq_hi));
- integ_total_len += sizeof (seq_hi);
+ integ_start_offset = crypto_start_offset - iv_sz - sizeof (esp_header_t);
+ integ_total_len += iv_sz + sizeof (esp_header_t);
+
+ if (b != lb)
+ {
+ integ_total_len = esp_encrypt_chain_integ (
+ vm, ptd, sa, b, lb, icv_sz,
+ payload - iv_sz - sizeof (esp_header_t),
+ payload_len + iv_sz + sizeof (esp_header_t), tag, 0);
+ }
+ else if (ipsec_sa_is_set_USE_ESN (sa))
+ {
+ u32 seq_hi = clib_net_to_host_u32 (sa->seq_hi);
+ clib_memcpy_fast (tag, &seq_hi, sizeof (seq_hi));
+ integ_total_len += sizeof (seq_hi);
+ }
}
-out:
return vnet_crypto_async_add_to_frame (vm, async_frame, key_index,
crypto_total_len,
integ_total_len - crypto_total_len,
@@ -567,7 +573,6 @@ esp_encrypt_inline (vlib_main_t *vm, vlib_node_runtime_t *node,
u32 n_left = frame->n_vectors;
vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;
u16 nexts[VLIB_FRAME_SIZE], *next = nexts;
- esp_gcm_nonce_t nonces[VLIB_FRAME_SIZE], *nonce = nonces;
u32 thread_index = vm->thread_index;
u16 buffer_data_size = vlib_buffer_get_default_data_size (vm);
u32 current_sa_index = ~0, current_sa_packets = 0;
@@ -970,7 +975,7 @@ esp_encrypt_inline (vlib_main_t *vm, vlib_node_runtime_t *node,
{
esp_prepare_sync_op (vm, ptd, crypto_ops, integ_ops, sa0, payload,
payload_len, iv_sz, icv_sz, bufs, b, lb,
- hdr_len, esp, nonce++);
+ hdr_len, esp);
}
vlib_buffer_advance (b[0], 0LL - hdr_len);
diff --git a/src/vnet/ipsec/ipsec.c b/src/vnet/ipsec/ipsec.c
index b63b2a71160..39f272ec30c 100644
--- a/src/vnet/ipsec/ipsec.c
+++ b/src/vnet/ipsec/ipsec.c
@@ -478,6 +478,27 @@ ipsec_init (vlib_main_t * vm)
a->alg = VNET_CRYPTO_ALG_AES_256_CBC;
a->iv_size = a->block_align = 16;
+ a = im->crypto_algs + IPSEC_CRYPTO_ALG_AES_CTR_128;
+ a->enc_op_id = VNET_CRYPTO_OP_AES_128_CTR_ENC;
+ a->dec_op_id = VNET_CRYPTO_OP_AES_128_CTR_DEC;
+ a->alg = VNET_CRYPTO_ALG_AES_128_CTR;
+ a->iv_size = 8;
+ a->block_align = 1;
+
+ a = im->crypto_algs + IPSEC_CRYPTO_ALG_AES_CTR_192;
+ a->enc_op_id = VNET_CRYPTO_OP_AES_192_CTR_ENC;
+ a->dec_op_id = VNET_CRYPTO_OP_AES_192_CTR_DEC;
+ a->alg = VNET_CRYPTO_ALG_AES_192_CTR;
+ a->iv_size = 8;
+ a->block_align = 1;
+
+ a = im->crypto_algs + IPSEC_CRYPTO_ALG_AES_CTR_256;
+ a->enc_op_id = VNET_CRYPTO_OP_AES_256_CTR_ENC;
+ a->dec_op_id = VNET_CRYPTO_OP_AES_256_CTR_DEC;
+ a->alg = VNET_CRYPTO_ALG_AES_256_CTR;
+ a->iv_size = 8;
+ a->block_align = 1;
+
a = im->crypto_algs + IPSEC_CRYPTO_ALG_AES_GCM_128;
a->enc_op_id = VNET_CRYPTO_OP_AES_128_GCM_ENC;
a->dec_op_id = VNET_CRYPTO_OP_AES_128_GCM_DEC;
diff --git a/src/vnet/ipsec/ipsec_sa.c b/src/vnet/ipsec/ipsec_sa.c
index d950af64df3..515eb25d323 100644
--- a/src/vnet/ipsec/ipsec_sa.c
+++ b/src/vnet/ipsec/ipsec_sa.c
@@ -108,8 +108,13 @@ ipsec_sa_set_crypto_alg (ipsec_sa_t * sa, ipsec_crypto_alg_t crypto_alg)
if (IPSEC_CRYPTO_ALG_IS_GCM (crypto_alg))
{
sa->integ_icv_size = im->crypto_algs[crypto_alg].icv_size;
+ ipsec_sa_set_IS_CTR (sa);
ipsec_sa_set_IS_AEAD (sa);
}
+ else if (IPSEC_CRYPTO_ALG_IS_CTR (crypto_alg))
+ {
+ ipsec_sa_set_IS_CTR (sa);
+ }
}
void
diff --git a/src/vnet/ipsec/ipsec_sa.h b/src/vnet/ipsec/ipsec_sa.h
index 28ac93185ec..7a52e831c77 100644
--- a/src/vnet/ipsec/ipsec_sa.h
+++ b/src/vnet/ipsec/ipsec_sa.h
@@ -48,6 +48,11 @@ typedef enum
(_alg == IPSEC_CRYPTO_ALG_AES_GCM_192) || \
(_alg == IPSEC_CRYPTO_ALG_AES_GCM_256)))
+#define IPSEC_CRYPTO_ALG_IS_CTR(_alg) \
+ (((_alg == IPSEC_CRYPTO_ALG_AES_CTR_128) || \
+ (_alg == IPSEC_CRYPTO_ALG_AES_CTR_192) || \
+ (_alg == IPSEC_CRYPTO_ALG_AES_CTR_256)))
+
#define foreach_ipsec_integ_alg \
_ (0, NONE, "none") \
_ (1, MD5_96, "md5-96") /* RFC2403 */ \
@@ -86,16 +91,17 @@ typedef struct ipsec_key_t_
* else IPv4 tunnel only valid if is_tunnel is non-zero
* enable UDP encapsulation for NAT traversal
*/
-#define foreach_ipsec_sa_flags \
- _ (0, NONE, "none") \
- _ (1, USE_ESN, "esn") \
- _ (2, USE_ANTI_REPLAY, "anti-replay") \
- _ (4, IS_TUNNEL, "tunnel") \
- _ (8, IS_TUNNEL_V6, "tunnel-v6") \
- _ (16, UDP_ENCAP, "udp-encap") \
- _ (32, IS_PROTECT, "Protect") \
- _ (64, IS_INBOUND, "inbound") \
- _ (128, IS_AEAD, "aead") \
+#define foreach_ipsec_sa_flags \
+ _ (0, NONE, "none") \
+ _ (1, USE_ESN, "esn") \
+ _ (2, USE_ANTI_REPLAY, "anti-replay") \
+ _ (4, IS_TUNNEL, "tunnel") \
+ _ (8, IS_TUNNEL_V6, "tunnel-v6") \
+ _ (16, UDP_ENCAP, "udp-encap") \
+ _ (32, IS_PROTECT, "Protect") \
+ _ (64, IS_INBOUND, "inbound") \
+ _ (128, IS_AEAD, "aead") \
+ _ (256, IS_CTR, "ctr")
typedef enum ipsec_sad_flags_t_
{
@@ -104,7 +110,7 @@ typedef enum ipsec_sad_flags_t_
#undef _
} __clib_packed ipsec_sa_flags_t;
-STATIC_ASSERT (sizeof (ipsec_sa_flags_t) == 1, "IPSEC SA flags > 1 byte");
+STATIC_ASSERT (sizeof (ipsec_sa_flags_t) == 2, "IPSEC SA flags != 2 byte");
typedef struct
{
@@ -116,8 +122,11 @@ typedef struct
u8 crypto_iv_size;
u8 esp_block_align;
u8 integ_icv_size;
+
+ u8 __pad1[3];
+
u32 thread_index;
- u32 __pad_u32;
+
u32 spi;
u32 seq;
u32 seq_hi;
@@ -150,9 +159,9 @@ typedef struct
u64 crypto_op_data;
};
- CLIB_CACHE_LINE_ALIGN_MARK (cacheline1);
+ CLIB_CACHE_LINE_ALIGN_MARK (cacheline1);
- u64 gcm_iv_counter;
+ u64 ctr_iv_counter;
union
{
ip4_header_t ip4_hdr;
@@ -160,13 +169,13 @@ typedef struct
};
udp_header_t udp_hdr;
- /* Salt used in GCM modes - stored in network byte order */
+ /* Salt used in CTR modes (incl. GCM) - stored in network byte order */
u32 salt;
ipsec_protocol_t protocol;
tunnel_encap_decap_flags_t tunnel_flags;
ip_dscp_t dscp;
- u8 __pad[1];
+ u8 __pad2[1];
/* data accessed by dataplane code should be above this comment */
CLIB_CACHE_LINE_ALIGN_MARK (cacheline2);
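
One consequence of the flag change above: AES-GCM SAs now carry both IS_CTR and IS_AEAD, while plain AES-CTR SAs carry only IS_CTR, so the data path can branch first on CTR (nonce construction) and then on AEAD (AAD and tag handling). A small stand-alone C sketch of that classification, reusing the flag values from the table above (the enum and describe_sa are hypothetical helpers, not the VPP macros):

#include <stdint.h>
#include <stdio.h>

/* Flag values mirroring the patch's foreach_ipsec_sa_flags table
 * (illustrative stand-in, not the VPP ipsec_sa_flags_t enum). */
enum
{
  SA_FLAG_IS_AEAD = 128,
  SA_FLAG_IS_CTR = 256,
};

static const char *
describe_sa (uint16_t flags)	/* flags now occupy 2 bytes */
{
  if (!(flags & SA_FLAG_IS_CTR))
    return "non-CTR cipher: IV used in place, no nonce assembly";
  if (flags & SA_FLAG_IS_AEAD)
    return "AES-GCM: build salt||IV nonce, fill AAD and 16-byte tag";
  return "AES-CTR: build salt||IV||counter(1) nonce, separate integrity op";
}

int
main (void)
{
  printf ("%s\n", describe_sa (SA_FLAG_IS_CTR | SA_FLAG_IS_AEAD));
  printf ("%s\n", describe_sa (SA_FLAG_IS_CTR));
  printf ("%s\n", describe_sa (0));
  return 0;
}
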
diff --git a/test/template_ipsec.py b/test/template_ipsec.py
index 9a9fbd070a6..918c99383af 100644
--- a/test/template_ipsec.py
+++ b/test/template_ipsec.py
@@ -94,7 +94,7 @@ class IPsecIPv6Params:
def mk_scapy_crypt_key(p):
- if p.crypt_algo == "AES-GCM":
+ if p.crypt_algo in ("AES-GCM", "AES-CTR"):
return p.crypt_key + struct.pack("!I", p.salt)
else:
return p.crypt_key
diff --git a/test/test_ipsec_esp.py b/test/test_ipsec_esp.py
index 78da401ea8a..178b1d248bf 100644
--- a/test/test_ipsec_esp.py
+++ b/test/test_ipsec_esp.py
@@ -610,7 +610,34 @@ class MyParameters():
'scapy-crypto': "NULL",
'scapy-integ': "HMAC-SHA1-96",
'salt': 0,
- 'key': b"JPjyOWBeVEQiMe7h00112233"}}
+ 'key': b"JPjyOWBeVEQiMe7h00112233"},
+ 'AES-CTR-128/SHA1-96': {
+ 'vpp-crypto': (VppEnum.vl_api_ipsec_crypto_alg_t.
+ IPSEC_API_CRYPTO_ALG_AES_CTR_128),
+ 'vpp-integ': (VppEnum.vl_api_ipsec_integ_alg_t.
+ IPSEC_API_INTEG_ALG_SHA1_96),
+ 'scapy-crypto': "AES-CTR",
+ 'scapy-integ': "HMAC-SHA1-96",
+ 'salt': 0,
+ 'key': b"JPjyOWBeVEQiMe7h"},
+ 'AES-CTR-192/SHA1-96': {
+ 'vpp-crypto': (VppEnum.vl_api_ipsec_crypto_alg_t.
+ IPSEC_API_CRYPTO_ALG_AES_CTR_192),
+ 'vpp-integ': (VppEnum.vl_api_ipsec_integ_alg_t.
+ IPSEC_API_INTEG_ALG_SHA1_96),
+ 'scapy-crypto': "AES-CTR",
+ 'scapy-integ': "HMAC-SHA1-96",
+ 'salt': 1010,
+ 'key': b"JPjyOWBeVEQiMe7hJPjyOWBe"},
+ 'AES-CTR-256/SHA1-96': {
+ 'vpp-crypto': (VppEnum.vl_api_ipsec_crypto_alg_t.
+ IPSEC_API_CRYPTO_ALG_AES_CTR_256),
+ 'vpp-integ': (VppEnum.vl_api_ipsec_integ_alg_t.
+ IPSEC_API_INTEG_ALG_SHA1_96),
+ 'scapy-crypto': "AES-CTR",
+ 'scapy-integ': "HMAC-SHA1-96",
+ 'salt': 2020,
+ 'key': b"JPjyOWBeVEQiMe7hJPjyOWBeVEQiMe7h"}}
class RunTestIpsecEspAll(ConfigIpsecESP,
@@ -723,7 +750,8 @@ class RunTestIpsecEspAll(ConfigIpsecESP,
# GEN for FLG in noESN ESN; do for ALG in AES-GCM-128/NONE \
# GEN AES-GCM-192/NONE AES-GCM-256/NONE AES-CBC-128/MD5-96 \
# GEN AES-CBC-192/SHA1-96 AES-CBC-256/SHA1-96 \
-# GEN 3DES-CBC/SHA1-96 NONE/SHA1-96; do \
+# GEN 3DES-CBC/SHA1-96 NONE/SHA1-96 \
+# GEN AES-CTR-128/SHA1-96 AES-CTR-192/SHA1-96 AES-CTR-256/SHA1-96; do \
# GEN [[ ${FLG} == "ESN" && ${ALG} == *"NONE" ]] && continue
# GEN echo -e "\n\nclass Test_${ENG}_${FLG}_${ALG}(RunTestIpsecEspAll):" |
# GEN sed -e 's/-/_/g' -e 's#/#_#g' ; \
@@ -781,6 +809,24 @@ class Test_ia32_noESN_NONE_SHA1_96(RunTestIpsecEspAll):
self.run_test()
+class Test_ia32_noESN_AES_CTR_128_SHA1_96(RunTestIpsecEspAll):
+ """ia32 noESN AES-CTR-128/SHA1-96 IPSec test"""
+ def test_ipsec(self):
+ self.run_test()
+
+
+class Test_ia32_noESN_AES_CTR_192_SHA1_96(RunTestIpsecEspAll):
+ """ia32 noESN AES-CTR-192/SHA1-96 IPSec test"""
+ def test_ipsec(self):
+ self.run_test()
+
+
+class Test_ia32_noESN_AES_CTR_256_SHA1_96(RunTestIpsecEspAll):
+ """ia32 noESN AES-CTR-256/SHA1-96 IPSec test"""
+ def test_ipsec(self):
+ self.run_test()
+
+
class Test_ia32_ESN_AES_CBC_128_MD5_96(RunTestIpsecEspAll):
"""ia32 ESN AES-CBC-128/MD5-96 IPSec test"""
def test_ipsec(self):
@@ -811,6 +857,24 @@ class Test_ia32_ESN_NONE_SHA1_96(RunTestIpsecEspAll):
self.run_test()
+class Test_ia32_ESN_AES_CTR_128_SHA1_96(RunTestIpsecEspAll):
+ """ia32 ESN AES-CTR-128/SHA1-96 IPSec test"""
+ def test_ipsec(self):
+ self.run_test()
+
+
+class Test_ia32_ESN_AES_CTR_192_SHA1_96(RunTestIpsecEspAll):
+ """ia32 ESN AES-CTR-192/SHA1-96 IPSec test"""
+ def test_ipsec(self):
+ self.run_test()
+
+
+class Test_ia32_ESN_AES_CTR_256_SHA1_96(RunTestIpsecEspAll):
+ """ia32 ESN AES-CTR-256/SHA1-96 IPSec test"""
+ def test_ipsec(self):
+ self.run_test()
+
+
class Test_ipsecmb_noESN_AES_GCM_128_NONE(RunTestIpsecEspAll):
"""ipsecmb noESN AES-GCM-128/NONE IPSec test"""
def test_ipsec(self):
@@ -859,6 +923,24 @@ class Test_ipsecmb_noESN_NONE_SHA1_96(RunTestIpsecEspAll):
self.run_test()
+class Test_ipsecmb_noESN_AES_CTR_128_SHA1_96(RunTestIpsecEspAll):
+ """ipsecmb noESN AES-CTR-128/SHA1-96 IPSec test"""
+ def test_ipsec(self):
+ self.run_test()
+
+
+class Test_ipsecmb_noESN_AES_CTR_192_SHA1_96(RunTestIpsecEspAll):
+ """ipsecmb noESN AES-CTR-192/SHA1-96 IPSec test"""
+ def test_ipsec(self):
+ self.run_test()
+
+
+class Test_ipsecmb_noESN_AES_CTR_256_SHA1_96(RunTestIpsecEspAll):
+ """ipsecmb noESN AES-CTR-256/SHA1-96 IPSec test"""
+ def test_ipsec(self):
+ self.run_test()
+
+
class Test_ipsecmb_ESN_AES_CBC_128_MD5_96(RunTestIpsecEspAll):
"""ipsecmb ESN AES-CBC-128/MD5-96 IPSec test"""
def test_ipsec(self):
@@ -889,6 +971,24 @@ class Test_ipsecmb_ESN_NONE_SHA1_96(RunTestIpsecEspAll):
self.run_test()
+class Test_ipsecmb_ESN_AES_CTR_128_SHA1_96(RunTestIpsecEspAll):
+ """ipsecmb ESN AES-CTR-128/SHA1-96 IPSec test"""
+ def test_ipsec(self):
+ self.run_test()
+
+
+class Test_ipsecmb_ESN_AES_CTR_192_SHA1_96(RunTestIpsecEspAll):
+ """ipsecmb ESN AES-CTR-192/SHA1-96 IPSec test"""
+ def test_ipsec(self):
+ self.run_test()
+
+
+class Test_ipsecmb_ESN_AES_CTR_256_SHA1_96(RunTestIpsecEspAll):
+ """ipsecmb ESN AES-CTR-256/SHA1-96 IPSec test"""
+ def test_ipsec(self):
+ self.run_test()
+
+
class Test_openssl_noESN_AES_GCM_128_NONE(RunTestIpsecEspAll):
"""openssl noESN AES-GCM-128/NONE IPSec test"""
def test_ipsec(self):
@@ -937,6 +1037,24 @@ class Test_openssl_noESN_NONE_SHA1_96(RunTestIpsecEspAll):
self.run_test()
+class Test_openssl_noESN_AES_CTR_128_SHA1_96(RunTestIpsecEspAll):
+ """openssl noESN AES-CTR-128/SHA1-96 IPSec test"""
+ def test_ipsec(self):
+ self.run_test()
+
+
+class Test_openssl_noESN_AES_CTR_192_SHA1_96(RunTestIpsecEspAll):
+ """openssl noESN AES-CTR-192/SHA1-96 IPSec test"""
+ def test_ipsec(self):
+ self.run_test()
+
+
+class Test_openssl_noESN_AES_CTR_256_SHA1_96(RunTestIpsecEspAll):
+ """openssl noESN AES-CTR-256/SHA1-96 IPSec test"""
+ def test_ipsec(self):
+ self.run_test()
+
+
class Test_openssl_ESN_AES_CBC_128_MD5_96(RunTestIpsecEspAll):
"""openssl ESN AES-CBC-128/MD5-96 IPSec test"""
def test_ipsec(self):
@@ -967,5 +1085,23 @@ class Test_openssl_ESN_NONE_SHA1_96(RunTestIpsecEspAll):
self.run_test()
+class Test_openssl_ESN_AES_CTR_128_SHA1_96(RunTestIpsecEspAll):
+ """openssl ESN AES-CTR-128/SHA1-96 IPSec test"""
+ def test_ipsec(self):
+ self.run_test()
+
+
+class Test_openssl_ESN_AES_CTR_192_SHA1_96(RunTestIpsecEspAll):
+ """openssl ESN AES-CTR-192/SHA1-96 IPSec test"""
+ def test_ipsec(self):
+ self.run_test()
+
+
+class Test_openssl_ESN_AES_CTR_256_SHA1_96(RunTestIpsecEspAll):
+ """openssl ESN AES-CTR-256/SHA1-96 IPSec test"""
+ def test_ipsec(self):
+ self.run_test()
+
+
if __name__ == '__main__':
unittest.main(testRunner=VppTestRunner)