 src/vnet/crypto/crypto.h | 36 ++++++++++++++++++++++--------------
 src/vnet/crypto/node.c   |  8 ++++----
 2 files changed, 26 insertions(+), 18 deletions(-)
diff --git a/src/vnet/crypto/crypto.h b/src/vnet/crypto/crypto.h
index 529c70ac088..eeb120400a8 100644
--- a/src/vnet/crypto/crypto.h
+++ b/src/vnet/crypto/crypto.h
@@ -300,13 +300,6 @@ typedef struct
typedef struct
{
- vnet_crypto_op_status_t status:8;
- u32 key_index;
- i16 crypto_start_offset; /* first buffer offset */
- i16 integ_start_offset;
- u32 crypto_total_length;
- /* adj total_length for integ, e.g.4 bytes for IPSec ESN */
- u16 integ_length_adj;
u8 *iv;
union
{
@@ -314,18 +307,33 @@ typedef struct
u8 *tag;
};
u8 *aad;
+ u32 key_index;
+ u32 crypto_total_length;
+ i16 crypto_start_offset; /* first buffer offset */
+ i16 integ_start_offset;
+ /* adj total_length for integ, e.g.4 bytes for IPSec ESN */
+ u16 integ_length_adj;
+ vnet_crypto_op_status_t status : 8;
u8 flags; /**< share same VNET_CRYPTO_OP_FLAG_* values */
} vnet_crypto_async_frame_elt_t;
+/* Assert the size so the compiler will warn us when it changes */
+STATIC_ASSERT_SIZEOF (vnet_crypto_async_frame_elt_t, 5 * sizeof (u64));
+
+typedef enum vnet_crypto_async_frame_state_t_
+{
+ VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED,
+ /* frame waiting to be processed */
+ VNET_CRYPTO_FRAME_STATE_PENDING,
+ VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS,
+ VNET_CRYPTO_FRAME_STATE_SUCCESS,
+ VNET_CRYPTO_FRAME_STATE_ELT_ERROR
+} __clib_packed vnet_crypto_async_frame_state_t;
+
typedef struct
{
CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
-#define VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED 0
-#define VNET_CRYPTO_FRAME_STATE_PENDING 1 /* frame waiting to be processed */
-#define VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS 2
-#define VNET_CRYPTO_FRAME_STATE_SUCCESS 3
-#define VNET_CRYPTO_FRAME_STATE_ELT_ERROR 4
- u8 state;
+ vnet_crypto_async_frame_state_t state;
vnet_crypto_async_op_id_t op:8;
u16 n_elts;
vnet_crypto_async_frame_elt_t elts[VNET_CRYPTO_FRAME_SIZE];
@@ -339,7 +347,7 @@ typedef struct
CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
vnet_crypto_async_frame_t *frames[VNET_CRYPTO_ASYNC_OP_N_IDS];
vnet_crypto_async_frame_t *frame_pool;
- u32 *buffer_indice;
+ u32 *buffer_indices;
u16 *nexts;
} vnet_crypto_thread_t;
diff --git a/src/vnet/crypto/node.c b/src/vnet/crypto/node.c
index 63ed95e7d93..e1186f49c0a 100644
--- a/src/vnet/crypto/node.c
+++ b/src/vnet/crypto/node.c
@@ -87,9 +87,9 @@ crypto_dequeue_frame (vlib_main_t * vm, vlib_node_runtime_t * node,
{
if (cf)
{
- vec_validate (ct->buffer_indice, n_cache + cf->n_elts);
+ vec_validate (ct->buffer_indices, n_cache + cf->n_elts);
vec_validate (ct->nexts, n_cache + cf->n_elts);
- clib_memcpy_fast (ct->buffer_indice + n_cache, cf->buffer_indices,
+ clib_memcpy_fast (ct->buffer_indices + n_cache, cf->buffer_indices,
sizeof (u32) * cf->n_elts);
if (cf->state == VNET_CRYPTO_FRAME_STATE_SUCCESS)
{
@@ -114,7 +114,7 @@ crypto_dequeue_frame (vlib_main_t * vm, vlib_node_runtime_t * node,
n_cache += cf->n_elts;
if (n_cache >= VLIB_FRAME_SIZE)
{
- vlib_buffer_enqueue_to_next (vm, node, ct->buffer_indice,
+ vlib_buffer_enqueue_to_next (vm, node, ct->buffer_indices,
ct->nexts, n_cache);
n_cache = 0;
}
@@ -167,7 +167,7 @@ VLIB_NODE_FN (crypto_dispatch_node) (vlib_main_t * vm,
}
/* *INDENT-ON* */
if (n_cache)
- vlib_buffer_enqueue_to_next (vm, node, ct->buffer_indice, ct->nexts,
+ vlib_buffer_enqueue_to_next (vm, node, ct->buffer_indices, ct->nexts,
n_cache);
return n_dispatched;
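
A minimal standalone sketch of the layout this patch relies on (not the VPP code itself): the element's three pointers are grouped first, the 32-bit and 16-bit fields follow, and the two single-byte fields close the struct, so on an LP64 target the whole element packs into 40 bytes, which is the 5 * sizeof (u64) checked by the new STATIC_ASSERT_SIZEOF. In the sketch the VPP types (u8/u32/i16) and the 8-bit status bitfield are replaced with <stdint.h> equivalents, __clib_packed is written out as the attribute it expands to, and the size checks use C11 _Static_assert instead of the VPP macro.

/* Approximation of the reordered vnet_crypto_async_frame_elt_t; field
 * names follow the diff, types are stand-ins for the VPP typedefs. */
#include <stdint.h>

typedef struct
{
  uint8_t *iv;                  /* 3 pointers first: offsets 0, 8, 16 */
  union
  {
    uint8_t *digest;
    uint8_t *tag;
  };
  uint8_t *aad;
  uint32_t key_index;           /* 32-bit fields: offsets 24, 28 */
  uint32_t crypto_total_length;
  int16_t crypto_start_offset;  /* 16-bit fields: offsets 32, 34, 36 */
  int16_t integ_start_offset;
  uint16_t integ_length_adj;
  uint8_t status;               /* stands in for the status : 8 bitfield */
  uint8_t flags;                /* offsets 38, 39 -> 40 bytes total */
} frame_elt_layout_t;

/* Same invariant the patch enforces with STATIC_ASSERT_SIZEOF;
 * assumes 8-byte pointers (LP64). */
_Static_assert (sizeof (frame_elt_layout_t) == 5 * sizeof (uint64_t),
                "frame element no longer fits in 40 bytes");

/* The frame-state #defines become a packed enum so the frame's 'state'
 * member stays a single byte; __clib_packed expands to this attribute. */
typedef enum
{
  FRAME_STATE_NOT_PROCESSED,
  FRAME_STATE_PENDING,
  FRAME_STATE_WORK_IN_PROGRESS,
  FRAME_STATE_SUCCESS,
  FRAME_STATE_ELT_ERROR
} __attribute__ ((packed)) frame_state_layout_t;

_Static_assert (sizeof (frame_state_layout_t) == 1,
                "packed enum state should stay one byte");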